katuni4ka committed
Commit 929c01f · verified · 1 Parent(s): ded7789

Upload 25 files
model_index.json ADDED
@@ -0,0 +1,41 @@
{
  "_class_name": "FluxPipeline",
  "_diffusers_version": "0.32.1",
  "_name_or_path": "katuni4ka/tiny-random-flux",
  "feature_extractor": [
    null,
    null
  ],
  "image_encoder": [
    null,
    null
  ],
  "scheduler": [
    "diffusers",
    "FlowMatchEulerDiscreteScheduler"
  ],
  "text_encoder": [
    "transformers",
    "CLIPTextModel"
  ],
  "text_encoder_2": [
    "transformers",
    "CLIPTextModel"
  ],
  "tokenizer": [
    "transformers",
    "CLIPTokenizer"
  ],
  "tokenizer_2": [
    "transformers",
    "CLIPTokenizer"
  ],
  "transformer": [
    "diffusers",
    "FluxTransformer2DModel"
  ],
  "vae": [
    "diffusers",
    "AutoencoderKL"
  ]
}
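
model_index.json is the manifest that tells diffusers how to assemble the FluxPipeline: each component name maps to a [library, class] pair, and null entries mean the component is absent. A minimal sketch of how such a manifest resolves to classes — illustrative only, not diffusers' internal loader — assuming the file sits in the current directory:

import importlib
import json

with open("model_index.json") as f:
    index = json.load(f)

for name, spec in index.items():
    # keys starting with "_" are metadata, not components
    if name.startswith("_"):
        continue
    library, class_name = spec
    if library is None:
        print(f"{name}: not part of this pipeline")
        continue
    cls = getattr(importlib.import_module(library), class_name)
    print(f"{name} -> {cls.__module__}.{cls.__name__}")
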
scheduler/scheduler_config.json ADDED
@@ -0,0 +1,16 @@
{
  "_class_name": "FlowMatchEulerDiscreteScheduler",
  "_diffusers_version": "0.32.1",
  "base_image_seq_len": 256,
  "base_shift": 0.5,
  "invert_sigmas": false,
  "max_image_seq_len": 4096,
  "max_shift": 1.15,
  "num_train_timesteps": 1000,
  "shift": 1.0,
  "shift_terminal": null,
  "use_beta_sigmas": false,
  "use_dynamic_shifting": false,
  "use_exponential_sigmas": false,
  "use_karras_sigmas": false
}
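
With "use_dynamic_shifting" false and "shift" 1.0, this config gives a plain linear flow-matching noise schedule; "base_shift"/"max_shift" only matter when dynamic shifting is enabled. A sketch of the static-shift math these flow-match schedulers use (sigma' = s·sigma / (1 + (s−1)·sigma)) — an illustration of the formula, not diffusers' exact code:

import numpy as np

def flow_match_sigmas(num_steps, shift=1.0):
    # sigma falls linearly from 1 toward 0; "shift" warps the curve:
    #   sigma' = shift * sigma / (1 + (shift - 1) * sigma)
    # With shift = 1.0, as in this config, the warp is the identity.
    sigmas = np.linspace(1.0, 1.0 / num_steps, num_steps)
    return shift * sigmas / (1 + (shift - 1) * sigmas)

print(flow_match_sigmas(5))             # [1.  0.8 0.6 0.4 0.2]
print(flow_match_sigmas(5, shift=3.0))  # shift > 1 keeps more steps at high noise
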
text_encoder/config.json ADDED
@@ -0,0 +1,25 @@
{
  "_attn_implementation_autoset": true,
  "_name_or_path": "/home/ea/.cache/huggingface/hub/models--katuni4ka--tiny-random-flux/snapshots/36abdcc25faf1a91425f0e38ffa8b5d427534cef/text_encoder",
  "architectures": [
    "CLIPTextModel"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_size": 32,
  "initializer_factor": 1.0,
  "initializer_range": 0.02,
  "intermediate_size": 37,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 77,
  "model_type": "clip_text_model",
  "num_attention_heads": 4,
  "num_hidden_layers": 5,
  "pad_token_id": 1,
  "projection_dim": 32,
  "torch_dtype": "float32",
  "transformers_version": "4.46.3",
  "vocab_size": 1000
}
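
This describes a deliberately tiny CLIP text encoder (hidden size 32, 5 layers, 1000-token vocab), sized for tests rather than quality. A quick sketch rebuilding the same shape with random weights — transformers' real classes, but not this repo's checkpoint, since the weights here are OpenVINO IR rather than PyTorch:

from transformers import CLIPTextConfig, CLIPTextModel

config = CLIPTextConfig(
    vocab_size=1000,
    hidden_size=32,
    intermediate_size=37,
    num_hidden_layers=5,
    num_attention_heads=4,
    max_position_embeddings=77,  # text_encoder_2 below is identical except 512 here
    hidden_act="gelu",
    projection_dim=32,
)
model = CLIPTextModel(config)
print(sum(p.numel() for p in model.parameters()))  # a few tens of thousands of params

For scale: a full-size Flux pipeline uses a large CLIP plus a T5 encoder as text_encoder_2; this test double swaps in a second tiny CLIP with a longer position range instead.
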
text_encoder/openvino_model.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:421374f6f38b7e28cba8101e65e7cd28260c3477a0315caea6ea129c01f1146a
size 267928
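
The .bin entries are Git LFS pointer files: the three key/value lines above stand in for the real weights, which live in LFS storage addressed by the sha256 oid. Parsing one is trivial — a sketch using the pointer shown above:

def parse_lfs_pointer(text):
    """Split a Git LFS pointer file into its key/value fields."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return {"version": fields["version"], "algo": algo,
            "oid": digest, "size": int(fields["size"])}

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:421374f6f38b7e28cba8101e65e7cd28260c3477a0315caea6ea129c01f1146a
size 267928"""
info = parse_lfs_pointer(pointer)
print(info["size"])  # 267928 bytes -- tiny, consistent with a random test model
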
text_encoder/openvino_model.xml ADDED
The diff for this file is too large to render. See raw diff
 
text_encoder_2/config.json ADDED
@@ -0,0 +1,25 @@
{
  "_attn_implementation_autoset": true,
  "_name_or_path": "/home/ea/.cache/huggingface/hub/models--katuni4ka--tiny-random-flux/snapshots/36abdcc25faf1a91425f0e38ffa8b5d427534cef/text_encoder_2",
  "architectures": [
    "CLIPTextModel"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_size": 32,
  "initializer_factor": 1.0,
  "initializer_range": 0.02,
  "intermediate_size": 37,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 512,
  "model_type": "clip_text_model",
  "num_attention_heads": 4,
  "num_hidden_layers": 5,
  "pad_token_id": 1,
  "projection_dim": 32,
  "torch_dtype": "float32",
  "transformers_version": "4.46.3",
  "vocab_size": 1000
}
text_encoder_2/openvino_model.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4c2d1cb42cb0cdfd843e8cabb672e36cc7232a2914c43ec7ebee5791752c7c53
size 327088
text_encoder_2/openvino_model.xml ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer/merges.txt ADDED
@@ -0,0 +1,647 @@
#version: 0.2
Ġ t
Ġt h
Ġ a
Ġth e</w>
i n
Ġ o
Ġ ,</w>
Ġ s
e d</w>
Ġ w
e r
Ġ .</w>
Ġ i
r e
Ġ c
n d</w>
Ġ f
Ġ b
a t
Ġo f</w>
e r</w>
e n
a r
o r
i t
Ġ p
Ġ h
Ġa nd</w>
o n
in g</w>
a n
r o
Ġ m
Ġ d
e s</w>
Ġi n</w>
o n</w>
Ġt o</w>
o u
i s
Ġ a</w>
i c
Ġ T
a l
Ġ l
Ġ =</w>
Ġ re
Ġ "</w>
e s
Ġ S
a s</w>
a l</w>
i l
e l
i on</w>
Ġ A
Ġ C
Ġ 1
Ġ Ċ</w>
u r
ĠT h
Ġ n
a s
Ġ @
e c
o m
a c
Ġ e
Ġw as</w>
Ġ M
o r</w>
a n</w>
a m
e n</w>
o l
Ġ in
Ġ g
Ġ '</w>
Ġ B
l y</w>
a t</w>
i v
t s</w>
ĠTh e</w>
u s
- @</w>
Ġ@ -@</w>
i s</w>
Ġ I
Ġw h
i g
Ġ H
Ġs t
o s
u n
t h
Ġ P
Ġw it
Ġth at</w>
i r
Ġa s</w>
e m
Ġo n</w>
r a
Ġf or</w>
Ġ R
e t
o w
Ġ 2
i d
Ġ D
l e</w>
Ġwit h</w>
l a
en t</w>
i m
Ġ F
e a
i on
Ġb y</w>
Ġ )</w>
Ġ (</w>
Ġa l
Ġc on
en t
Ġ W
Ġi s</w>
er e</w>
Ġ G
Ġ N
Ġ L
Ġh a
er s</w>
r i
t h</w>
t ed</w>
u c
Ġ J
Ġ1 9
e v
u l
Ġ v
c e</w>
at ion</w>
ro m</w>
Ġb e
Ġ E
i n</w>
Ġth e
Ġf rom</w>
Ġ O
t er</w>
Ġp ro
Ġa r
a d
Ġc om
i c</w>
a g
Ġh is</w>
Ġs h
Ġa t</w>
o v
i es</w>
o o
p p
s t
c h
Ġ r
Ġ2 0
a y</w>
i f
Ġw ere</w>
Ġc h
u t</w>
s t</w>
u t
d s</w>
o p
u m
Ġi t</w>
o c
t er
l e
ig h
u d
Ġe x
ion s</w>
at e</w>
it y</w>
at ed</w>
Ġ un
e p
q u
Ġn o
Ġ K
iv e</w>
is t
Ġo n
am e</w>
ou n
i r</w>
a b
Ġ â
in g
Ġh e</w>
l d</w>
u g
ic h</w>
Ġa n</w>
e d
Ġ k
Ġâ Ģ
Ġha d</w>
v e</w>
a in
Ġs e
t ion</w>
or e</w>
re s
Ġwh ich</w>
ĠI n</w>
o d
th er</w>
a k
Ġs p
a r</w>
Ġ y
ĠC h
on g</w>
Ġa c
es t</w>
Ġ U
a p
f f
al ly</w>
r it
ĠS t
u b
g e</w>
b er</w>
e t</w>
Ġb e</w>
e ar
Ġre c
er s
Ġf ir
o t
Ġar e</w>
Ġa n
c h</w>
o g
i a</w>
es t
in e</w>
il l
an d
e l</w>
ar y</w>
e w</w>
i d</w>
Ġf or
Ġ ;</w>
Ġcom p
Ġ V
Ġin c
t r
Ġ20 0
Ġthe ir</w>
u s</w>
Ġb ut</w>
r an
ic al</w>
Ġfir st</w>
Ġd e
Ġin t
Ġ ro
s o</w>
ĠâĢ ĵ</w>
Ġno t</w>
d ing</w>
f ter</w>
ur e</w>
Ġp ar
Ġ :</w>
i an</w>
Ġt w
ou ld</w>
Ġal so</w>
Ġi ts</w>
Ġw or
u m</w>
Ġo r</w>
os t</w>
0 0</w>
ou r
ar d</w>
Ġre s
m p
u e</w>
Ġa b
is h</w>
Ġcon t
Ġa d
ow n</w>
al l</w>
ou g
Ġh er</w>
as t</w>
Ġ en
om e</w>
al l
d ed</w>
o w</w>
Ġha ve</w>
Ġ us
ea r</w>
ac k</w>
d uc
i al</w>
s s
en ts</w>
a in</w>
t ing</w>
Ġon e</w>
es s
Ġh as</w>
igh t</w>
a v
Ġe v
ou t</w>
a y
en ce</w>
Ġbe en</w>
e w
Ġtw o</w>
Ġc l
d er</w>
im e</w>
k s</w>
es s</w>
is h
. @</w>
Ġ@ .@</w>
Ġp la
Ġp l
Ġo r
u p</w>
m ent</w>
ur ing</w>
ol l
ĠI n
Ġth is</w>
Ġb ec
Ġcom m
Ġd is
at er</w>
ag e</w>
Ġa pp
ou s</w>
e y</w>
i l</w>
p er
ĠA l
ion al</w>
l ud
el y</w>
t t
il e</w>
i z
Ġ j
Ġwh o</w>
Ġa g
i b
Ġthe y</w>
f or
Ġo v
at h
e g
Ġs c
i p
Ġ20 1
Ġ 3
Ġp er
or y</w>
Ġd es
id e</w>
Ġs er
s e</w>
ĠH e</w>
la nd</w>
at ions</w>
r ic
i t</w>
re s</w>
er ed</w>
Ġp re
ĠS h
an ce</w>
or t</w>
an t</w>
, @</w>
Ġ@ ,@</w>
el l</w>
Ġ Y
n ed</w>
el l
it e</w>
Ġinc lud
Ġre p
Ġa fter</w>
Ġs uc
re e</w>
an y</w>
i m</w>
or t
Ġ1 8
Ġs u
ad e</w>
ou r</w>
ĠU n
ĠI t</w>
i k
ĠM ar
em ber</w>
Ġ 1</w>
e en</w>
a nd</w>
Ġs ec
ic e</w>
Ġt ime</w>
ĠA n
Ġint o</w>
Ġf in
Ġo ther</w>
Ġa tt
il l</w>
re n
ac h
as s
er al</w>
es e</w>
s h
al s</w>
it ion</w>
oug h</w>
l es</w>
am p
Ġw ould</w>
Ġm ore</w>
ro ug
ri b
er y</w>
ac e</w>
Ġ A</w>
Ġpla y
it ed</w>
k ed</w>
is t</w>
i ed</w>
Ġ 2</w>
as ed</w>
ing s</w>
an g
a m</w>
i p</w>
Ġb o
ab le</w>
t y</w>
Ġch ar
Ġc ent
et w
at es</w>
ro p
Ġ I</w>
u nd</w>
ĠA m
c es</w>
o in
Ġin ter
u p
c t
on e</w>
Ġt ra
an t
ec t
Ġal l</w>
e f
Ġcon s
ub l
n ing</w>
an s</w>
Ġf e
us t</w>
Ġ 0
Ġre m
as e</w>
on g
Ġwh en</w>
e b
ĠW h
Ġe ar
ev er</w>
Ġov er</w>
Ġk n
a us
Ġp os
a d</w>
er m
Ġsh e</w>
Ġ ra
Ġd uring</w>
as on</w>
v i
Ġex p
Ġl ea
Ġ el
Ġ 4
Ġon ly</w>
o nd</w>
Ġd ec
Ġac c
Ġo ff
is s
Ġf l
ĠE n
o t</w>
en s
os e</w>
ak e</w>
o m</w>
Ġs ev
ac h</w>
etw een</w>
er n
Ġ 3</w>
Ġp r
Ġg ro
r uc
Ġd i
Ġ19 9
ĠA r
Ġg ame</w>
Ġh im</w>
oo k</w>
Ġ up</w>
Ġab out</w>
Ġre l
for m
Ġth ree</w>
at t
ĠC om
Ġs a
ear s</w>
Ġ 5
r y</w>
Ġi mp
Ġm ost</w>
f er
Ġp res
Ġf il
Ġb etween</w>
Ġbe g
p h
or s</w>
Ġth an</w>
Ġrec or
o b
er ic
at ing</w>
Ġth roug
k ing</w>
Ġo ut</w>
Ġn um
oo d</w>
oll ow
ac t
u il
Ġc re
ol og
at ional</w>
Ġpro duc
Ġwh ile</w>
Ġl ater</w>
Ġw rit
e x
Ġst ar
Ġsp ec
e e
ish ed</w>
Ġre g
is ion</w>
ou th</w>
Ġre le
Ġa ss
Ġse ason</w>
Ġm ade</w>
il y</w>
r u
o y
t ur
t e</w>
Ġ qu
Ġm ov
ur y</w>
ĠAm eric
em ent</w>
c c
ou nd</w>
Ġl ar
Ġfor m
ec t</w>
Ġde f
Ġm us
ĠP ar
Ġm e
Ġs ub
w ay</w>
o p</w>
o h
el d</w>
i e</w>
em p
am es</w>
er n</w>
Ġn or
iv ed</w>
ev el
Ġsuc h</w>
ar ds</w>
Ġin d
ik e</w>
Ġg en
er t
Ġy ear</w>
Ġus ed</w>
Ġn ew</w>
Ġ 5</w>
Ġal b
s p
y p
Ġwit h
Ġwh ere</w>
ic s</w>
ĠTh is</w>
Ġthe m</w>
w n</w>
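
merges.txt drives byte-level BPE: a word is first split into byte-level symbols, then the highest-priority adjacent pair (lowest line number above) is merged repeatedly until no listed pair remains. An illustrative re-implementation of that loop — not the tokenizers-library code — using the first few merges above ("Ġ" encodes a leading space, "</w>" marks end of word):

def bpe(word, merge_ranks):
    """Greedily apply byte-pair merges to a symbol sequence.

    merge_ranks maps a pair like ("Ġ", "t") to its priority
    (its position in merges.txt); lower rank = merged earlier.
    """
    word = tuple(word)
    while len(word) > 1:
        pairs = [(word[i], word[i + 1]) for i in range(len(word) - 1)]
        best = min(pairs, key=lambda p: merge_ranks.get(p, float("inf")))
        if best not in merge_ranks:
            break
        merged, i = [], 0
        while i < len(word):
            if i < len(word) - 1 and (word[i], word[i + 1]) == best:
                merged.append(word[i] + word[i + 1])
                i += 2
            else:
                merged.append(word[i])
                i += 1
        word = tuple(merged)
    return word

# Merges above: "Ġ t" (rank 0), "Ġt h" (rank 1), "Ġth e</w>" (rank 3)
ranks = {("Ġ", "t"): 0, ("Ġt", "h"): 1, ("Ġth", "e</w>"): 3}
print(bpe(("Ġ", "t", "h", "e</w>"), ranks))  # ('Ġthe</w>',)
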
tokenizer/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer/tokenizer_config.json ADDED
@@ -0,0 +1,30 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<|startoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|startoftext|>",
  "clean_up_tokenization_spaces": true,
  "do_lower_case": true,
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "model_max_length": 77,
  "pad_token": "<|endoftext|>",
  "tokenizer_class": "CLIPTokenizer",
  "unk_token": "<|endoftext|>"
}
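
Together, vocab.json, merges.txt, special_tokens_map.json, and this config are everything CLIPTokenizer needs. A usage sketch, assuming the tokenizer/ folder has been downloaded locally:

from transformers import CLIPTokenizer

tok = CLIPTokenizer.from_pretrained("tokenizer")
batch = tok("the cat", padding="max_length", truncation=True, return_tensors="pt")
print(batch.input_ids.shape)  # (1, 77): model_max_length above caps sequences at 77
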
tokenizer/vocab.json ADDED
@@ -0,0 +1,1002 @@
{
  "!": 2,
  "!</w>": 345,
  "\"": 3,
  "\"</w>": 344,
  "#": 4,
  "#</w>": 325,
  "$": 5,
  "$</w>": 348,
  "%": 6,
  "%</w>": 351,
  "&": 7,
  "&</w>": 352,
  "'": 8,
  "'</w>": 296,
  "(": 9,
  "(</w>": 318,
  ")": 10,
  ")</w>": 330,
  "*": 11,
  "*</w>": 327,
  "+": 12,
  "+</w>": 341,
  ",": 13,
  ",</w>": 279,
  ",@</w>": 754,
  "-": 14,
  "-</w>": 276,
  "-@</w>": 439,
  ".": 15,
  ".</w>": 253,
  ".@</w>": 695,
  "/": 16,
  "/</w>": 350,
  "0": 17,
  "00</w>": 647,
  "0</w>": 216,
  "1": 18,
  "1</w>": 222,
  "2": 19,
  "2</w>": 231,
  "3": 20,
  "3</w>": 243,
  "4": 21,
  "4</w>": 233,
  "5": 22,
  "5</w>": 240,
  "6": 23,
  "6</w>": 226,
  "7": 24,
  "7</w>": 215,
  "8": 25,
  "8</w>": 236,
  "9": 26,
  "9</w>": 242,
  ":": 27,
  ":</w>": 353,
  ";": 28,
  ";</w>": 317,
  "<": 29,
  "<</w>": 340,
  "<|endoftext|>": 1,
  "<|startoftext|>": 0,
  "=": 30,
  "=</w>": 342,
  ">": 31,
  "></w>": 300,
  "?": 32,
  "?</w>": 346,
  "@": 33,
  "@</w>": 320,
  "A": 34,
  "A</w>": 227,
  "B": 35,
  "B</w>": 258,
  "C": 36,
  "C</w>": 239,
  "D": 37,
  "D</w>": 255,
  "E": 38,
  "E</w>": 246,
  "F": 39,
  "F</w>": 213,
  "G": 40,
  "G</w>": 283,
  "H": 41,
  "H</w>": 219,
  "I": 42,
  "I</w>": 237,
  "J": 43,
  "J</w>": 251,
  "K": 44,
  "K</w>": 254,
  "L": 45,
  "L</w>": 218,
  "M": 46,
  "M</w>": 234,
  "N": 47,
  "N</w>": 238,
  "O": 48,
  "O</w>": 265,
  "P": 49,
  "P</w>": 245,
  "Q": 50,
  "Q</w>": 309,
  "R": 51,
  "R</w>": 264,
  "S": 52,
  "S</w>": 230,
  "T": 53,
  "T</w>": 235,
  "U": 54,
  "U</w>": 268,
  "V": 55,
  "V</w>": 248,
  "W": 56,
  "W</w>": 274,
  "X": 57,
  "X</w>": 263,
  "Y": 58,
  "Y</w>": 310,
  "Z": 59,
  "Z</w>": 207,
  "[": 60,
  "[</w>": 270,
  "\\": 61,
  "\\</w>": 338,
  "]": 62,
  "]</w>": 289,
  "^": 63,
  "^</w>": 331,
  "_": 64,
  "_</w>": 334,
  "`": 65,
  "`</w>": 347,
  "a": 66,
  "a</w>": 197,
  "ab": 555,
  "able</w>": 820,
  "ac": 420,
  "ace</w>": 806,
  "ach": 791,
  "ach</w>": 885,
  "ack</w>": 670,
  "act": 929,
  "ad": 508,
  "ad</w>": 860,
  "ade</w>": 771,
  "ag": 511,
  "age</w>": 710,
  "ain": 568,
  "ain</w>": 675,
  "ak": 577,
  "ake</w>": 882,
  "al": 397,
  "al</w>": 405,
  "all": 664,
  "all</w>": 658,
  "ally</w>": 588,
  "als</w>": 796,
  "am": 426,
  "am</w>": 817,
  "ame</w>": 552,
  "ames</w>": 976,
  "amp": 800,
  "an": 384,
  "an</w>": 425,
  "ance</w>": 751,
  "and": 609,
  "and</w>": 780,
  "ang": 816,
  "ans</w>": 844,
  "ant": 837,
  "ant</w>": 753,
  "any</w>": 766,
  "ap": 586,
  "ar": 376,
  "ar</w>": 579,
  "ard</w>": 649,
  "ards</w>": 982,
  "ary</w>": 611,
  "as": 416,
  "as</w>": 404,
  "ase</w>": 849,
  "ased</w>": 814,
  "ason</w>": 865,
  "ass": 792,
  "ast</w>": 661,
  "at": 372,
  "at</w>": 434,
  "ate</w>": 541,
  "ated</w>": 543,
  "ater</w>": 709,
  "ates</w>": 825,
  "ath": 730,
  "ating</w>": 922,
  "ation</w>": 497,
  "ational</w>": 933,
  "ations</w>": 744,
  "att": 903,
  "aus": 858,
  "av": 681,
  "ay": 684,
  "ay</w>": 523,
  "b": 67,
  "b</w>": 212,
  "ber</w>": 593,
  "c": 68,
  "c</w>": 224,
  "cc": 960,
  "ce</w>": 496,
  "ces</w>": 830,
  "ch": 520,
  "ch</w>": 603,
  "ct": 834,
  "d": 69,
  "d</w>": 196,
  "ded</w>": 665,
  "der</w>": 690,
  "ding</w>": 633,
  "ds</w>": 530,
  "duc": 671,
  "e": 70,
  "e</w>": 195,
  "ea": 471,
  "ear": 596,
  "ear</w>": 669,
  "ears</w>": 906,
  "eb": 852,
  "ec": 418,
  "ect": 838,
  "ect</w>": 964,
  "ed": 563,
  "ed</w>": 362,
  "ee": 941,
  "een</w>": 779,
  "ef": 840,
  "eg": 731,
  "el": 407,
  "el</w>": 610,
  "eld</w>": 973,
  "ell": 759,
  "ell</w>": 756,
  "ely</w>": 719,
  "em": 455,
  "ember</w>": 777,
  "ement</w>": 959,
  "emp": 975,
  "en": 375,
  "en</w>": 427,
  "ence</w>": 685,
  "ens": 880,
  "ent": 478,
  "ent</w>": 468,
  "ents</w>": 674,
  "ep": 545,
  "er": 364,
  "er</w>": 374,
  "eral</w>": 793,
  "ere</w>": 481,
  "ered</w>": 748,
  "eric": 921,
  "erm": 861,
  "ern": 887,
  "ern</w>": 977,
  "ers": 598,
  "ers</w>": 486,
  "ert": 986,
  "ery</w>": 805,
  "es": 402,
  "es</w>": 388,
  "ese</w>": 794,
  "ess": 678,
  "ess</w>": 693,
  "est": 606,
  "est</w>": 584,
  "et": 460,
  "et</w>": 594,
  "etw": 824,
  "etween</w>": 886,
  "ev": 493,
  "evel": 980,
  "ever</w>": 855,
  "ew": 687,
  "ew</w>": 612,
  "ex": 938,
  "ey</w>": 713,
  "f": 71,
  "f</w>": 209,
  "fer": 911,
  "ff": 587,
  "for": 728,
  "form": 901,
  "fter</w>": 634,
  "g": 72,
  "g</w>": 214,
  "ge</w>": 592,
  "h": 73,
  "h</w>": 203,
  "i": 74,
  "i</w>": 205,
  "ia</w>": 605,
  "ial</w>": 672,
  "ian</w>": 638,
  "ib": 726,
  "ic": 395,
  "ic</w>": 510,
  "ical</w>": 625,
  "ice</w>": 782,
  "ich</w>": 561,
  "ics</w>": 996,
  "id": 463,
  "id</w>": 613,
  "ide</w>": 739,
  "ie</w>": 974,
  "ied</w>": 812,
  "ies</w>": 516,
  "if": 524,
  "ig": 444,
  "igh": 537,
  "ight</w>": 680,
  "ik": 775,
  "ike</w>": 984,
  "il": 406,
  "il</w>": 714,
  "ile</w>": 721,
  "ill": 608,
  "ill</w>": 789,
  "ily</w>": 950,
  "im": 469,
  "im</w>": 767,
  "ime</w>": 691,
  "in": 358,
  "in</w>": 501,
  "ine</w>": 607,
  "ing": 557,
  "ing</w>": 383,
  "ings</w>": 815,
  "ion": 472,
  "ion</w>": 408,
  "ional</w>": 717,
  "ions</w>": 540,
  "ip": 733,
  "ip</w>": 818,
  "ir": 453,
  "ir</w>": 554,
  "is": 393,
  "is</w>": 441,
  "ish": 694,
  "ish</w>": 654,
  "ished</w>": 942,
  "ision</w>": 944,
  "iss": 876,
  "ist": 550,
  "ist</w>": 811,
  "it": 378,
  "it</w>": 746,
  "ite</w>": 760,
  "ited</w>": 809,
  "ition</w>": 797,
  "ity</w>": 542,
  "iv": 435,
  "ive</w>": 549,
  "ived</w>": 979,
  "iz": 722,
  "j": 75,
  "j</w>": 288,
  "k": 76,
  "k</w>": 210,
  "ked</w>": 810,
  "king</w>": 924,
  "ks</w>": 692,
  "l": 77,
  "l</w>": 201,
  "la": 467,
  "land</w>": 743,
  "ld</w>": 559,
  "le": 536,
  "le</w>": 465,
  "les</w>": 799,
  "lud": 718,
  "ly</w>": 433,
  "m": 78,
  "m</w>": 202,
  "ment</w>": 701,
  "mp": 651,
  "n": 79,
  "n</w>": 199,
  "nd</w>": 369,
  "ned</w>": 758,
  "ning</w>": 843,
  "o": 80,
  "o</w>": 198,
  "ob": 920,
  "oc": 534,
  "od": 575,
  "og": 604,
  "oh": 972,
  "oin": 831,
  "ol": 428,
  "oll": 703,
  "ollow": 928,
  "olog": 932,
  "om": 419,
  "om</w>": 883,
  "ome</w>": 663,
  "on": 382,
  "on</w>": 390,
  "ond</w>": 872,
  "one</w>": 835,
  "ong": 850,
  "ong</w>": 582,
  "oo": 517,
  "ood</w>": 927,
  "ook</w>": 897,
  "op": 531,
  "op</w>": 971,
  "or": 377,
  "or</w>": 424,
  "ore</w>": 571,
  "ors</w>": 917,
  "ort": 768,
  "ort</w>": 752,
  "ory</w>": 737,
  "os": 447,
  "ose</w>": 881,
  "ost</w>": 646,
  "ot": 600,
  "ot</w>": 879,
  "ou": 392,
  "oug": 659,
  "ough</w>": 798,
  "ould</w>": 640,
  "oun": 553,
  "ound</w>": 961,
  "our": 648,
  "our</w>": 772,
  "ous</w>": 712,
  "out</w>": 683,
  "outh</w>": 945,
  "ov": 515,
  "ow": 461,
  "ow</w>": 666,
  "own</w>": 657,
  "oy": 952,
  "p": 81,
  "p</w>": 217,
  "per": 715,
  "ph": 916,
  "pp": 518,
  "q": 82,
  "q</w>": 280,
  "qu": 546,
  "r": 83,
  "r</w>": 204,
  "ra": 457,
  "ran": 624,
  "re": 367,
  "ree</w>": 765,
  "ren": 790,
  "res": 572,
  "res</w>": 747,
  "ri": 487,
  "rib": 804,
  "ric": 745,
  "rit": 589,
  "ro": 385,
  "rom</w>": 498,
  "rop": 826,
  "roug": 803,
  "ru": 951,
  "ruc": 891,
  "ry</w>": 908,
  "s": 84,
  "s</w>": 206,
  "se</w>": 741,
  "sh": 795,
  "so</w>": 630,
  "sp": 992,
  "ss": 673,
  "st": 519,
  "st</w>": 528,
  "t": 85,
  "t</w>": 208,
  "te</w>": 954,
  "ted</w>": 489,
  "ter": 535,
  "ter</w>": 505,
  "th": 449,
  "th</w>": 488,
  "ther</w>": 576,
  "ting</w>": 676,
  "tion</w>": 570,
  "tr": 619,
  "ts</w>": 436,
  "tt": 720,
  "tur": 953,
  "ty</w>": 821,
  "u": 86,
  "u</w>": 229,
  "ub": 591,
  "ubl": 842,
  "uc": 490,
  "ud": 538,
  "ue</w>": 652,
  "ug": 560,
  "uil": 930,
  "ul": 494,
  "um": 532,
  "um</w>": 644,
  "un": 448,
  "und</w>": 828,
  "up": 833,
  "up</w>": 700,
  "ur": 413,
  "ure</w>": 635,
  "uring</w>": 702,
  "ury</w>": 957,
  "us": 438,
  "us</w>": 622,
  "ust</w>": 846,
  "ut": 529,
  "ut</w>": 527,
  "v": 87,
  "v</w>": 232,
  "ve</w>": 567,
  "vi": 866,
  "w": 88,
  "w</w>": 250,
  "way</w>": 970,
  "wn</w>": 999,
  "x": 89,
  "x</w>": 269,
  "y": 90,
  "y</w>": 211,
  "yp": 993,
  "z": 91,
  "z</w>": 228,
  "|": 92,
  "|</w>": 304,
  "}": 93,
  "}</w>": 336,
  "~": 94,
  "~</w>": 343,
  "¡": 95,
  "¡</w>": 220,
  "¢": 96,
  "¢</w>": 306,
  "£": 97,
  "£</w>": 323,
  "¤": 98,
  "¤</w>": 292,
  "¥": 99,
  "¥</w>": 339,
  "¦": 100,
  "¦</w>": 303,
  "§": 101,
  "§</w>": 275,
  "¨": 102,
  "¨</w>": 282,
  "©": 103,
  "©</w>": 259,
  "ª": 104,
  "ª</w>": 286,
  "«": 105,
  "«</w>": 266,
  "¬": 106,
  "¬</w>": 319,
  "®": 107,
  "®</w>": 329,
  "¯": 108,
  "¯</w>": 287,
  "°": 109,
  "°</w>": 298,
  "±": 110,
  "±</w>": 200,
  "²": 111,
  "²</w>": 284,
  "³": 112,
  "³</w>": 272,
  "´": 113,
  "´</w>": 307,
  "µ": 114,
  "µ</w>": 261,
  "¶": 115,
  "¶</w>": 301,
  "·": 116,
  "·</w>": 326,
  "¸": 117,
  "¸</w>": 257,
  "¹": 118,
  "¹</w>": 241,
  "º": 119,
  "º</w>": 260,
  "»": 120,
  "»</w>": 247,
  "¼": 121,
  "¼</w>": 305,
  "½": 122,
  "½</w>": 294,
  "¾": 123,
  "¾</w>": 316,
  "¿": 124,
  "¿</w>": 271,
  "Â": 125,
  "Ã": 126,
  "Ä": 127,
  "Å": 128,
  "Æ": 129,
  "Ç": 130,
  "È": 131,
  "É": 132,
  "Ê": 133,
  "Ë": 134,
  "Ì": 135,
  "Í": 136,
  "Î": 137,
  "Ï": 138,
  "Ð": 139,
  "Ñ": 140,
  "Ö": 141,
  "×": 142,
  "Ø": 143,
  "Ù": 144,
  "Ü": 145,
  "à": 146,
  "á": 147,
  "â": 148,
  "ã": 149,
  "ä": 150,
  "å": 151,
  "æ": 152,
  "ç": 153,
  "è": 154,
  "é": 155,
  "ë": 156,
  "ì": 157,
  "ï": 158,
  "Ċ": 159,
  "Ċ</w>": 349,
  "Ġ": 160,
  "Ġ\"</w>": 401,
  "Ġ'</w>": 431,
  "Ġ(</w>": 475,
  "Ġ)</w>": 474,
  "Ġ,</w>": 360,
  "Ġ.</w>": 365,
  "Ġ0": 847,
  "Ġ1": 411,
  "Ġ18": 769,
  "Ġ19": 492,
  "Ġ199": 893,
  "Ġ1</w>": 778,
  "Ġ2": 462,
  "Ġ20": 522,
  "Ġ200": 620,
  "Ġ201": 734,
  "Ġ2</w>": 813,
  "Ġ3": 735,
  "Ġ3</w>": 888,
  "Ġ4": 870,
  "Ġ5": 907,
  "Ġ5</w>": 990,
  "Ġ:</w>": 637,
  "Ġ;</w>": 615,
  "Ġ</w>": 333,
  "Ġ=</w>": 399,
  "Ġ@": 417,
  "Ġ@,@</w>": 755,
  "Ġ@-@</w>": 440,
  "Ġ@.@</w>": 696,
  "ĠA": 409,
  "ĠA</w>": 807,
  "ĠAl": 716,
  "ĠAm": 829,
  "ĠAmeric": 958,
  "ĠAn": 784,
  "ĠAr": 894,
  "ĠB": 432,
  "ĠC": 410,
  "ĠCh": 581,
  "ĠCom": 904,
  "ĠD": 464,
  "ĠE": 500,
  "ĠEn": 878,
  "ĠF": 470,
  "ĠG": 482,
  "ĠH": 445,
  "ĠHe</w>": 742,
  "ĠI": 442,
  "ĠI</w>": 827,
  "ĠIn": 704,
  "ĠIn</w>": 574,
  "ĠIt</w>": 774,
  "ĠJ": 491,
  "ĠK": 548,
  "ĠL": 484,
  "ĠM": 423,
  "ĠMar": 776,
  "ĠN": 483,
  "ĠO": 504,
  "ĠP": 450,
  "ĠPar": 967,
  "ĠR": 459,
  "ĠS": 403,
  "ĠSh": 750,
  "ĠSt": 590,
  "ĠT": 396,
  "ĠTh": 414,
  "ĠThe</w>": 437,
  "ĠThis</w>": 997,
  "ĠU": 585,
  "ĠUn": 773,
  "ĠV": 617,
  "ĠW": 479,
  "ĠWh": 853,
  "ĠY": 757,
  "Ġa": 356,
  "Ġa</w>": 394,
  "Ġab": 653,
  "Ġabout</w>": 899,
  "Ġac": 583,
  "Ġacc": 874,
  "Ġad": 656,
  "Ġafter</w>": 763,
  "Ġag": 725,
  "Ġal": 476,
  "Ġalb": 991,
  "Ġall</w>": 839,
  "Ġalso</w>": 641,
  "Ġan": 602,
  "Ġan</w>": 562,
  "Ġand</w>": 381,
  "Ġapp": 711,
  "Ġar": 507,
  "Ġare</w>": 601,
  "Ġas</w>": 454,
  "Ġass": 947,
  "Ġat</w>": 514,
  "Ġatt": 788,
  "Ġb": 371,
  "Ġbe": 499,
  "Ġbe</w>": 595,
  "Ġbec": 706,
  "Ġbeen</w>": 686,
  "Ġbeg": 915,
  "Ġbetween</w>": 914,
  "Ġbo": 819,
  "Ġbut</w>": 623,
  "Ġby</w>": 473,
  "Ġc": 368,
  "Ġcent": 823,
  "Ġch": 526,
  "Ġchar": 822,
  "Ġcl": 689,
  "Ġcom": 509,
  "Ġcomm": 707,
  "Ġcomp": 616,
  "Ġcon": 477,
  "Ġcons": 841,
  "Ġcont": 655,
  "Ġcre": 931,
  "Ġd": 387,
  "Ġde": 627,
  "Ġdec": 873,
  "Ġdef": 965,
  "Ġdes": 738,
  "Ġdi": 892,
  "Ġdis": 708,
  "Ġduring</w>": 864,
  "Ġe": 421,
  "Ġear": 854,
  "Ġel": 869,
  "Ġen": 662,
  "Ġev": 682,
  "Ġex": 539,
  "Ġexp": 867,
  "Ġf": 370,
  "Ġfe": 845,
  "Ġfil": 913,
  "Ġfin": 786,
  "Ġfir": 599,
  "Ġfirst</w>": 626,
  "Ġfl": 877,
  "Ġfor": 614,
  "Ġfor</w>": 458,
  "Ġform": 963,
  "Ġfrom</w>": 503,
  "Ġg": 430,
  "Ġgame</w>": 895,
  "Ġgen": 985,
  "Ġgro": 890,
  "Ġh": 380,
  "Ġha": 485,
  "Ġhad</w>": 566,
  "Ġhas</w>": 679,
  "Ġhave</w>": 667,
  "Ġhe</w>": 558,
  "Ġher</w>": 660,
  "Ġhim</w>": 896,
  "Ġhis</w>": 512,
  "Ġi": 366,
  "Ġimp": 909,
  "Ġin": 429,
  "Ġin</w>": 389,
  "Ġinc": 618,
  "Ġinclud": 761,
  "Ġind": 983,
  "Ġint": 628,
  "Ġinter": 832,
  "Ġinto</w>": 785,
  "Ġis</w>": 480,
  "Ġit</w>": 533,
  "Ġits</w>": 642,
  "Ġj": 723,
  "Ġk": 564,
  "Ġkn": 857,
  "Ġl": 398,
  "Ġlar": 962,
  "Ġlater</w>": 936,
  "Ġlea": 868,
  "Ġm": 386,
  "Ġmade</w>": 949,
  "Ġme": 968,
  "Ġmore</w>": 802,
  "Ġmost</w>": 910,
  "Ġmov": 956,
  "Ġmus": 966,
  "Ġn": 415,
  "Ġnew</w>": 989,
  "Ġno": 547,
  "Ġnor": 978,
  "Ġnot</w>": 632,
  "Ġnum": 926,
  "Ġo": 359,
  "Ġof</w>": 373,
  "Ġoff": 875,
  "Ġon": 551,
  "Ġon</w>": 456,
  "Ġone</w>": 677,
  "Ġonly</w>": 871,
  "Ġor": 699,
  "Ġor</w>": 645,
  "Ġother</w>": 787,
  "Ġout</w>": 925,
  "Ġov": 729,
  "Ġover</w>": 856,
  "Ġp": 379,
  "Ġpar": 636,
  "Ġper": 736,
  "Ġpl": 698,
  "Ġpla": 697,
  "Ġplay": 808,
  "Ġpos": 859,
  "Ġpr": 889,
  "Ġpre": 749,
  "Ġpres": 912,
  "Ġpro": 506,
  "Ġproduc": 934,
  "Ġqu": 955,
  "Ġr": 521,
  "Ġra": 863,
  "Ġre": 400,
  "Ġrec": 597,
  "Ġrecor": 919,
  "Ġreg": 943,
  "Ġrel": 900,
  "Ġrele": 946,
  "Ġrem": 848,
  "Ġrep": 762,
  "Ġres": 650,
  "Ġro": 629,
  "Ġs": 361,
  "Ġsa": 905,
  "Ġsc": 732,
  "Ġse": 569,
  "Ġseason</w>": 948,
  "Ġsec": 781,
  "Ġser": 740,
  "Ġsev": 884,
  "Ġsh": 513,
  "Ġshe</w>": 862,
  "Ġsp": 578,
  "Ġspec": 940,
  "Ġst": 446,
  "Ġstar": 939,
  "Ġsu": 770,
  "Ġsub": 969,
  "Ġsuc": 764,
  "Ġsuch</w>": 981,
  "Ġt": 354,
  "Ġth": 355,
  "Ġthan</w>": 918,
  "Ġthat</w>": 452,
  "Ġthe": 502,
  "Ġthe</w>": 357,
  "Ġtheir</w>": 621,
  "Ġthem</w>": 998,
  "Ġthey</w>": 727,
  "Ġthis</w>": 705,
  "Ġthree</w>": 902,
  "Ġthroug": 923,
  "Ġtime</w>": 783,
  "Ġto</w>": 391,
  "Ġtra": 836,
  "Ġtw": 639,
  "Ġtwo</w>": 688,
  "Ġun": 544,
  "Ġup</w>": 898,
  "Ġus": 668,
  "Ġused</w>": 988,
  "Ġv": 495,
  "Ġw": 363,
  "Ġwas</w>": 422,
  "Ġwere</w>": 525,
  "Ġwh": 443,
  "Ġwhen</w>": 851,
  "Ġwhere</w>": 995,
  "Ġwhich</w>": 573,
  "Ġwhile</w>": 935,
  "Ġwho</w>": 724,
  "Ġwit": 451,
  "Ġwith": 994,
  "Ġwith</w>": 466,
  "Ġwor": 643,
  "Ġwould</w>": 801,
  "Ġwrit": 937,
  "Ġy": 580,
  "Ġyear</w>": 987,
  "Ġâ": 556,
  "ĠâĢ": 565,
  "ĠâĢĵ</w>": 631,
  "ĠĊ</w>": 412,
  "Ģ": 161,
  "Ģ</w>": 223,
  "ģ": 162,
  "ģ</w>": 273,
  "Ĥ": 163,
  "Ĥ</w>": 262,
  "ĥ": 164,
  "ĥ</w>": 337,
  "Ħ": 165,
  "Ħ</w>": 278,
  "ħ": 166,
  "ħ</w>": 281,
  "Ĩ": 167,
  "Ĩ</w>": 308,
  "ĩ": 168,
  "ĩ</w>": 225,
  "Ī": 169,
  "Ī</w>": 221,
  "ī": 170,
  "ī</w>": 244,
  "Ĭ": 171,
  "Ĭ</w>": 315,
  "ĭ": 172,
  "ĭ</w>": 321,
  "Į": 173,
  "Į</w>": 324,
  "į": 174,
  "į</w>": 302,
  "İ": 175,
  "İ</w>": 249,
  "ı": 176,
  "ı</w>": 332,
  "IJ": 177,
  "IJ</w>": 295,
  "ij": 178,
  "ij</w>": 313,
  "Ĵ": 179,
  "Ĵ</w>": 328,
  "ĵ": 180,
  "ĵ</w>": 312,
  "Ķ": 181,
  "Ķ</w>": 256,
  "ķ": 182,
  "ķ</w>": 314,
  "ĸ": 183,
  "ĸ</w>": 277,
  "Ĺ": 184,
  "Ĺ</w>": 322,
  "ĺ": 185,
  "ĺ</w>": 285,
  "Ļ": 186,
  "Ļ</w>": 267,
  "ļ": 187,
  "ļ</w>": 290,
  "Ľ": 188,
  "Ľ</w>": 311,
  "ľ": 189,
  "ľ</w>": 299,
  "Ŀ": 190,
  "Ŀ</w>": 291,
  "ŀ": 191,
  "ŀ</w>": 293,
  "Ł": 192,
  "Ł</w>": 335,
  "ł": 193,
  "ł</w>": 252,
  "Ń": 194,
  "Ń</w>": 297
}
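
vocab.json is the flat token-to-id table the merges feed into: ids 0 and 1 are the special tokens, and the rest are byte-level symbols and merged units. A few sanity checks, with the path assumed as in this repo's layout:

import json

with open("tokenizer/vocab.json") as f:
    vocab = json.load(f)

print(len(vocab))                # 1000 -- matches vocab_size in the text encoder configs
print(vocab["<|startoftext|>"])  # 0
print(vocab["<|endoftext|>"])    # 1 (also used as pad/unk per special_tokens_map.json)
id_to_token = {i: t for t, i in vocab.items()}
print(id_to_token[357])          # 'Ġthe</w>' -- 'Ġ' marks a leading space
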
tokenizer_2/merges.txt ADDED
@@ -0,0 +1,647 @@
[file content identical to tokenizer/merges.txt above]
tokenizer_2/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer_2/tokenizer_config.json ADDED
@@ -0,0 +1,30 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<|startoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|startoftext|>",
  "clean_up_tokenization_spaces": true,
  "do_lower_case": true,
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "model_max_length": 512,
  "pad_token": "<|endoftext|>",
  "tokenizer_class": "CLIPTokenizer",
  "unk_token": "<|endoftext|>"
}
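
This second tokenizer is identical to tokenizer/ except for model_max_length (512 instead of 77), mirroring the max_position_embeddings of the two text encoders. A quick check, with both folders assumed local:

from transformers import CLIPTokenizer

tok1 = CLIPTokenizer.from_pretrained("tokenizer")
tok2 = CLIPTokenizer.from_pretrained("tokenizer_2")
print(tok1.model_max_length, tok2.model_max_length)  # 77 512
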
tokenizer_2/vocab.json ADDED
@@ -0,0 +1,1002 @@
[file content identical to tokenizer/vocab.json above]
+ "¶</w>": 301,
587
+ "·": 116,
588
+ "·</w>": 326,
589
+ "¸": 117,
590
+ "¸</w>": 257,
591
+ "¹": 118,
592
+ "¹</w>": 241,
593
+ "º": 119,
594
+ "º</w>": 260,
595
+ "»": 120,
596
+ "»</w>": 247,
597
+ "¼": 121,
598
+ "¼</w>": 305,
599
+ "½": 122,
600
+ "½</w>": 294,
601
+ "¾": 123,
602
+ "¾</w>": 316,
603
+ "¿": 124,
604
+ "¿</w>": 271,
605
+ "Â": 125,
606
+ "Ã": 126,
607
+ "Ä": 127,
608
+ "Å": 128,
609
+ "Æ": 129,
610
+ "Ç": 130,
611
+ "È": 131,
612
+ "É": 132,
613
+ "Ê": 133,
614
+ "Ë": 134,
615
+ "Ì": 135,
616
+ "Í": 136,
617
+ "Î": 137,
618
+ "Ï": 138,
619
+ "Ð": 139,
620
+ "Ñ": 140,
621
+ "Ö": 141,
622
+ "×": 142,
623
+ "Ø": 143,
624
+ "Ù": 144,
625
+ "Ü": 145,
626
+ "à": 146,
627
+ "á": 147,
628
+ "â": 148,
629
+ "ã": 149,
630
+ "ä": 150,
631
+ "å": 151,
632
+ "æ": 152,
633
+ "ç": 153,
634
+ "è": 154,
635
+ "é": 155,
636
+ "ë": 156,
637
+ "ì": 157,
638
+ "ï": 158,
639
+ "Ċ": 159,
640
+ "Ċ</w>": 349,
641
+ "Ġ": 160,
642
+ "Ġ\"</w>": 401,
643
+ "Ġ'</w>": 431,
644
+ "Ġ(</w>": 475,
645
+ "Ġ)</w>": 474,
646
+ "Ġ,</w>": 360,
647
+ "Ġ.</w>": 365,
648
+ "Ġ0": 847,
649
+ "Ġ1": 411,
650
+ "Ġ18": 769,
651
+ "Ġ19": 492,
652
+ "Ġ199": 893,
653
+ "Ġ1</w>": 778,
654
+ "Ġ2": 462,
655
+ "Ġ20": 522,
656
+ "Ġ200": 620,
657
+ "Ġ201": 734,
658
+ "Ġ2</w>": 813,
659
+ "Ġ3": 735,
660
+ "Ġ3</w>": 888,
661
+ "Ġ4": 870,
662
+ "Ġ5": 907,
663
+ "Ġ5</w>": 990,
664
+ "Ġ:</w>": 637,
665
+ "Ġ;</w>": 615,
666
+ "Ġ</w>": 333,
667
+ "Ġ=</w>": 399,
668
+ "Ġ@": 417,
669
+ "Ġ@,@</w>": 755,
670
+ "Ġ@-@</w>": 440,
671
+ "Ġ@.@</w>": 696,
672
+ "ĠA": 409,
673
+ "ĠA</w>": 807,
674
+ "ĠAl": 716,
675
+ "ĠAm": 829,
676
+ "ĠAmeric": 958,
677
+ "ĠAn": 784,
678
+ "ĠAr": 894,
679
+ "ĠB": 432,
680
+ "ĠC": 410,
681
+ "ĠCh": 581,
682
+ "ĠCom": 904,
683
+ "ĠD": 464,
684
+ "ĠE": 500,
685
+ "ĠEn": 878,
686
+ "ĠF": 470,
687
+ "ĠG": 482,
688
+ "ĠH": 445,
689
+ "ĠHe</w>": 742,
690
+ "ĠI": 442,
691
+ "ĠI</w>": 827,
692
+ "ĠIn": 704,
693
+ "ĠIn</w>": 574,
694
+ "ĠIt</w>": 774,
695
+ "ĠJ": 491,
696
+ "ĠK": 548,
697
+ "ĠL": 484,
698
+ "ĠM": 423,
699
+ "ĠMar": 776,
700
+ "ĠN": 483,
701
+ "ĠO": 504,
702
+ "ĠP": 450,
703
+ "ĠPar": 967,
704
+ "ĠR": 459,
705
+ "ĠS": 403,
706
+ "ĠSh": 750,
707
+ "ĠSt": 590,
708
+ "ĠT": 396,
709
+ "ĠTh": 414,
710
+ "ĠThe</w>": 437,
711
+ "ĠThis</w>": 997,
712
+ "ĠU": 585,
713
+ "ĠUn": 773,
714
+ "ĠV": 617,
715
+ "ĠW": 479,
716
+ "ĠWh": 853,
717
+ "ĠY": 757,
718
+ "Ġa": 356,
719
+ "Ġa</w>": 394,
720
+ "Ġab": 653,
721
+ "Ġabout</w>": 899,
722
+ "Ġac": 583,
723
+ "Ġacc": 874,
724
+ "Ġad": 656,
725
+ "Ġafter</w>": 763,
726
+ "Ġag": 725,
727
+ "Ġal": 476,
728
+ "Ġalb": 991,
729
+ "Ġall</w>": 839,
730
+ "Ġalso</w>": 641,
731
+ "Ġan": 602,
732
+ "Ġan</w>": 562,
733
+ "Ġand</w>": 381,
734
+ "Ġapp": 711,
735
+ "Ġar": 507,
736
+ "Ġare</w>": 601,
737
+ "Ġas</w>": 454,
738
+ "Ġass": 947,
739
+ "Ġat</w>": 514,
740
+ "Ġatt": 788,
741
+ "Ġb": 371,
742
+ "Ġbe": 499,
743
+ "Ġbe</w>": 595,
744
+ "Ġbec": 706,
745
+ "Ġbeen</w>": 686,
746
+ "Ġbeg": 915,
747
+ "Ġbetween</w>": 914,
748
+ "Ġbo": 819,
749
+ "Ġbut</w>": 623,
750
+ "Ġby</w>": 473,
751
+ "Ġc": 368,
752
+ "Ġcent": 823,
753
+ "Ġch": 526,
754
+ "Ġchar": 822,
755
+ "Ġcl": 689,
756
+ "Ġcom": 509,
757
+ "Ġcomm": 707,
758
+ "Ġcomp": 616,
759
+ "Ġcon": 477,
760
+ "Ġcons": 841,
761
+ "Ġcont": 655,
762
+ "Ġcre": 931,
763
+ "Ġd": 387,
764
+ "Ġde": 627,
765
+ "Ġdec": 873,
766
+ "Ġdef": 965,
767
+ "Ġdes": 738,
768
+ "Ġdi": 892,
769
+ "Ġdis": 708,
770
+ "Ġduring</w>": 864,
771
+ "Ġe": 421,
772
+ "Ġear": 854,
773
+ "Ġel": 869,
774
+ "Ġen": 662,
775
+ "Ġev": 682,
776
+ "Ġex": 539,
777
+ "Ġexp": 867,
778
+ "Ġf": 370,
779
+ "Ġfe": 845,
780
+ "Ġfil": 913,
781
+ "Ġfin": 786,
782
+ "Ġfir": 599,
783
+ "Ġfirst</w>": 626,
784
+ "Ġfl": 877,
785
+ "Ġfor": 614,
786
+ "Ġfor</w>": 458,
787
+ "Ġform": 963,
788
+ "Ġfrom</w>": 503,
789
+ "Ġg": 430,
790
+ "Ġgame</w>": 895,
791
+ "Ġgen": 985,
792
+ "Ġgro": 890,
793
+ "Ġh": 380,
794
+ "Ġha": 485,
795
+ "Ġhad</w>": 566,
796
+ "Ġhas</w>": 679,
797
+ "Ġhave</w>": 667,
798
+ "Ġhe</w>": 558,
799
+ "Ġher</w>": 660,
800
+ "Ġhim</w>": 896,
801
+ "Ġhis</w>": 512,
802
+ "Ġi": 366,
803
+ "Ġimp": 909,
804
+ "Ġin": 429,
805
+ "Ġin</w>": 389,
806
+ "Ġinc": 618,
807
+ "Ġinclud": 761,
808
+ "Ġind": 983,
809
+ "Ġint": 628,
810
+ "Ġinter": 832,
811
+ "Ġinto</w>": 785,
812
+ "Ġis</w>": 480,
813
+ "Ġit</w>": 533,
814
+ "Ġits</w>": 642,
815
+ "Ġj": 723,
816
+ "Ġk": 564,
817
+ "Ġkn": 857,
818
+ "Ġl": 398,
819
+ "Ġlar": 962,
820
+ "Ġlater</w>": 936,
821
+ "Ġlea": 868,
822
+ "Ġm": 386,
823
+ "Ġmade</w>": 949,
824
+ "Ġme": 968,
825
+ "Ġmore</w>": 802,
826
+ "Ġmost</w>": 910,
827
+ "Ġmov": 956,
828
+ "Ġmus": 966,
829
+ "Ġn": 415,
830
+ "Ġnew</w>": 989,
831
+ "Ġno": 547,
832
+ "Ġnor": 978,
833
+ "Ġnot</w>": 632,
834
+ "Ġnum": 926,
835
+ "Ġo": 359,
836
+ "Ġof</w>": 373,
837
+ "Ġoff": 875,
838
+ "Ġon": 551,
839
+ "Ġon</w>": 456,
840
+ "Ġone</w>": 677,
841
+ "Ġonly</w>": 871,
842
+ "Ġor": 699,
843
+ "Ġor</w>": 645,
844
+ "Ġother</w>": 787,
845
+ "Ġout</w>": 925,
846
+ "Ġov": 729,
847
+ "Ġover</w>": 856,
848
+ "Ġp": 379,
849
+ "Ġpar": 636,
850
+ "Ġper": 736,
851
+ "Ġpl": 698,
852
+ "Ġpla": 697,
853
+ "Ġplay": 808,
854
+ "Ġpos": 859,
855
+ "Ġpr": 889,
856
+ "Ġpre": 749,
857
+ "Ġpres": 912,
858
+ "Ġpro": 506,
859
+ "Ġproduc": 934,
860
+ "Ġqu": 955,
861
+ "Ġr": 521,
862
+ "Ġra": 863,
863
+ "Ġre": 400,
864
+ "Ġrec": 597,
865
+ "Ġrecor": 919,
866
+ "Ġreg": 943,
867
+ "Ġrel": 900,
868
+ "Ġrele": 946,
869
+ "Ġrem": 848,
870
+ "Ġrep": 762,
871
+ "Ġres": 650,
872
+ "Ġro": 629,
873
+ "Ġs": 361,
874
+ "Ġsa": 905,
875
+ "Ġsc": 732,
876
+ "Ġse": 569,
877
+ "Ġseason</w>": 948,
878
+ "Ġsec": 781,
879
+ "Ġser": 740,
880
+ "Ġsev": 884,
881
+ "Ġsh": 513,
882
+ "Ġshe</w>": 862,
883
+ "Ġsp": 578,
884
+ "Ġspec": 940,
885
+ "Ġst": 446,
886
+ "Ġstar": 939,
887
+ "Ġsu": 770,
888
+ "Ġsub": 969,
889
+ "Ġsuc": 764,
890
+ "Ġsuch</w>": 981,
891
+ "Ġt": 354,
892
+ "Ġth": 355,
893
+ "Ġthan</w>": 918,
894
+ "Ġthat</w>": 452,
895
+ "Ġthe": 502,
896
+ "Ġthe</w>": 357,
897
+ "Ġtheir</w>": 621,
898
+ "Ġthem</w>": 998,
899
+ "Ġthey</w>": 727,
900
+ "Ġthis</w>": 705,
901
+ "Ġthree</w>": 902,
902
+ "Ġthroug": 923,
903
+ "Ġtime</w>": 783,
904
+ "Ġto</w>": 391,
905
+ "Ġtra": 836,
906
+ "Ġtw": 639,
907
+ "Ġtwo</w>": 688,
908
+ "Ġun": 544,
909
+ "Ġup</w>": 898,
910
+ "Ġus": 668,
911
+ "Ġused</w>": 988,
912
+ "Ġv": 495,
913
+ "Ġw": 363,
914
+ "Ġwas</w>": 422,
915
+ "Ġwere</w>": 525,
916
+ "Ġwh": 443,
917
+ "Ġwhen</w>": 851,
918
+ "Ġwhere</w>": 995,
919
+ "Ġwhich</w>": 573,
920
+ "Ġwhile</w>": 935,
921
+ "Ġwho</w>": 724,
922
+ "Ġwit": 451,
923
+ "Ġwith": 994,
924
+ "Ġwith</w>": 466,
925
+ "Ġwor": 643,
926
+ "Ġwould</w>": 801,
927
+ "Ġwrit": 937,
928
+ "Ġy": 580,
929
+ "Ġyear</w>": 987,
930
+ "Ġâ": 556,
931
+ "ĠâĢ": 565,
932
+ "ĠâĢĵ</w>": 631,
933
+ "ĠĊ</w>": 412,
934
+ "Ģ": 161,
935
+ "Ģ</w>": 223,
936
+ "ģ": 162,
937
+ "ģ</w>": 273,
938
+ "Ĥ": 163,
939
+ "Ĥ</w>": 262,
940
+ "ĥ": 164,
941
+ "ĥ</w>": 337,
942
+ "Ħ": 165,
943
+ "Ħ</w>": 278,
944
+ "ħ": 166,
945
+ "ħ</w>": 281,
946
+ "Ĩ": 167,
947
+ "Ĩ</w>": 308,
948
+ "ĩ": 168,
949
+ "ĩ</w>": 225,
950
+ "Ī": 169,
951
+ "Ī</w>": 221,
952
+ "ī": 170,
953
+ "ī</w>": 244,
954
+ "Ĭ": 171,
955
+ "Ĭ</w>": 315,
956
+ "ĭ": 172,
957
+ "ĭ</w>": 321,
958
+ "Į": 173,
959
+ "Į</w>": 324,
960
+ "į": 174,
961
+ "į</w>": 302,
962
+ "İ": 175,
963
+ "İ</w>": 249,
964
+ "ı": 176,
965
+ "ı</w>": 332,
966
+ "IJ": 177,
967
+ "IJ</w>": 295,
968
+ "ij": 178,
969
+ "ij</w>": 313,
970
+ "Ĵ": 179,
971
+ "Ĵ</w>": 328,
972
+ "ĵ": 180,
973
+ "ĵ</w>": 312,
974
+ "Ķ": 181,
975
+ "Ķ</w>": 256,
976
+ "ķ": 182,
977
+ "ķ</w>": 314,
978
+ "ĸ": 183,
979
+ "ĸ</w>": 277,
980
+ "Ĺ": 184,
981
+ "Ĺ</w>": 322,
982
+ "ĺ": 185,
983
+ "ĺ</w>": 285,
984
+ "Ļ": 186,
985
+ "Ļ</w>": 267,
986
+ "ļ": 187,
987
+ "ļ</w>": 290,
988
+ "Ľ": 188,
989
+ "Ľ</w>": 311,
990
+ "ľ": 189,
991
+ "ľ</w>": 299,
992
+ "Ŀ": 190,
993
+ "Ŀ</w>": 291,
994
+ "ŀ": 191,
995
+ "ŀ</w>": 293,
996
+ "Ł": 192,
997
+ "Ł</w>": 335,
998
+ "ł": 193,
999
+ "ł</w>": 252,
1000
+ "Ń": 194,
1001
+ "Ń</w>": 297
1002
+ }
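The block above is the tail of the tokenizer's BPE vocabulary: each entry maps a merge token to its integer id (ids here run up to 999), with </w> marking word-final variants and Ġ encoding a leading space in byte-level BPE. A minimal usage sketch, assuming transformers is installed, the repo id katuni4ka/tiny-random-flux from the configs in this commit is reachable, and the tokenizer lives in a tokenizer subfolder as model_index.json declares:

from transformers import CLIPTokenizer

# Load the CLIP-style tokenizer this vocab belongs to and round-trip a prompt (sketch).
tok = CLIPTokenizer.from_pretrained("katuni4ka/tiny-random-flux", subfolder="tokenizer")
ids = tok("a photo of a cat").input_ids   # ids index into the vocabulary above
print(tok.convert_ids_to_tokens(ids))     # word-final pieces carry the </w> suffix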
transformer/config.json ADDED
@@ -0,0 +1,20 @@
+ {
+ "_class_name": "FluxTransformer2DModel",
+ "_diffusers_version": "0.32.1",
+ "_name_or_path": "/home/ea/.cache/huggingface/hub/models--katuni4ka--tiny-random-flux/snapshots/36abdcc25faf1a91425f0e38ffa8b5d427534cef/transformer",
+ "attention_head_dim": 16,
+ "axes_dims_rope": [
+ 4,
+ 4,
+ 8
+ ],
+ "guidance_embeds": false,
+ "in_channels": 4,
+ "joint_attention_dim": 32,
+ "num_attention_heads": 2,
+ "num_layers": 1,
+ "num_single_layers": 1,
+ "out_channels": null,
+ "patch_size": 1,
+ "pooled_projection_dim": 32
+ }
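As a sanity check, the sizes in this tiny config are mutually consistent: num_attention_heads * attention_head_dim gives the transformer's inner width (2 * 16 = 32), which matches joint_attention_dim and pooled_projection_dim, and axes_dims_rope sums to attention_head_dim (4 + 4 + 8 = 16). A short sketch that verifies these invariants from the file itself; the invariants reflect how diffusers' FluxTransformer2DModel is typically parameterized and should be read as assumptions, not an official contract:

import json

with open("transformer/config.json") as f:
    cfg = json.load(f)

inner_dim = cfg["num_attention_heads"] * cfg["attention_head_dim"]              # 2 * 16 = 32
assert sum(cfg["axes_dims_rope"]) == cfg["attention_head_dim"]                  # 4 + 4 + 8 = 16
assert inner_dim == cfg["joint_attention_dim"] == cfg["pooled_projection_dim"]  # all 32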
transformer/openvino_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f2599fc92714e674334db1396c46fa35f1fc7808cc9588578c4b85f1b83d075d
+ size 274392
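The .bin entries in this commit are git-lfs pointer files rather than the weights themselves: the pointer records the blob's sha256 oid and byte size. A quick integrity check against this pointer, assuming the real blob has been downloaded to the same path:

import hashlib

# Hash the downloaded blob and compare it with the oid recorded in the pointer above (sketch).
digest = hashlib.sha256(open("transformer/openvino_model.bin", "rb").read()).hexdigest()
assert digest == "f2599fc92714e674334db1396c46fa35f1fc7808cc9588578c4b85f1b83d075d"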
transformer/openvino_model.xml ADDED
The diff for this file is too large to render. See raw diff
 
vae_decoder/config.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "_class_name": "AutoencoderKL",
+ "_diffusers_version": "0.32.1",
+ "_name_or_path": "/home/ea/.cache/huggingface/hub/models--katuni4ka--tiny-random-flux/snapshots/36abdcc25faf1a91425f0e38ffa8b5d427534cef/vae",
+ "act_fn": "silu",
+ "block_out_channels": [
+ 4
+ ],
+ "down_block_types": [
+ "DownEncoderBlock2D"
+ ],
+ "force_upcast": true,
+ "in_channels": 3,
+ "latent_channels": 1,
+ "latents_mean": null,
+ "latents_std": null,
+ "layers_per_block": 1,
+ "mid_block_add_attention": true,
+ "norm_num_groups": 1,
+ "out_channels": 3,
+ "sample_size": 32,
+ "scaling_factor": 1.5035,
+ "shift_factor": 0.0609,
+ "up_block_types": [
+ "UpDecoderBlock2D"
+ ],
+ "use_post_quant_conv": false,
+ "use_quant_conv": false
+ }
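With latent_channels set to 1 and a single up block, this decoder maps a (N, 1, H, W) latent to a (N, 3, H, W) image at the same spatial resolution; diffusers-style Flux pipelines typically un-normalize latents as latents / scaling_factor + shift_factor before decoding. A minimal sketch of driving the exported IR directly, assuming the openvino Python package and this commit's file layout (the input name latent_sample comes from the IR below):

import numpy as np
import openvino as ov

scaling_factor, shift_factor = 1.5035, 0.0609               # values from the config above
latents = np.random.randn(1, 1, 32, 32).astype(np.float32)  # (N, latent_channels, H, W)
latents = latents / scaling_factor + shift_factor           # diffusers-style un-scaling (assumption)

core = ov.Core()
compiled = core.compile_model(core.read_model("vae_decoder/openvino_model.xml"), "CPU")
image = compiled({"latent_sample": latents})[compiled.output(0)]
print(image.shape)                                          # expected: (1, 3, 32, 32)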
vae_decoder/openvino_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f74425394c7084d8f86d01d20d30b4a927437a6e8a1edaf49a41aa02d7373ffe
+ size 5848
vae_decoder/openvino_model.xml ADDED
@@ -0,0 +1,2258 @@
+ <?xml version="1.0"?>
+ <net name="Model9" version="11">
+ <layers>
+ <layer id="0" name="latent_sample" type="Parameter" version="opset1">
+ <data shape="?,1,?,?" element_type="f32" />
+ <output>
+ <port id="0" precision="FP32" names="latent_sample">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="1" name="self.decoder.conv_in.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4, 1, 3, 3" offset="0" size="144" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.conv_in.weight">
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="2" name="__module.decoder.conv_in/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="3" name="__module.decoder.conv_in/aten::_convolution/Reshape" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 4, 1, 1" offset="144" size="16" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="4" name="__module.decoder.conv_in/aten::_convolution/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="36,input.1">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="5" name="self.decoder.mid_block.resnets.0.norm1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="160" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.0.norm1.weight">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="6" name="self.decoder.mid_block.resnets.0.norm1.bias" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="176" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.0.norm1.bias">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="7" name="__module.decoder.mid_block.resnets.0.norm1/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ </port>
+ <port id="2" precision="FP32">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="3" precision="FP32" names="53,input.3">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="8" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish" type="Swish" version="opset4">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="54">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="9" name="self.decoder.mid_block.resnets.0.conv1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4, 4, 3, 3" offset="192" size="576" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.0.conv1.weight">
+ <dim>4</dim>
+ <dim>4</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="10" name="__module.decoder.mid_block.resnets.0.conv1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ <dim>4</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="11" name="__module.decoder.mid_block.resnets.0.conv1/aten::_convolution/Reshape" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 4, 1, 1" offset="768" size="16" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="12" name="__module.decoder.mid_block.resnets.0.conv1/aten::_convolution/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="61,input.5">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="13" name="self.decoder.mid_block.resnets.0.norm2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="160" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.0.norm2.weight">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="14" name="self.decoder.mid_block.resnets.0.norm2.bias" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="176" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.0.norm2.bias">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="15" name="__module.decoder.mid_block.resnets.0.norm2/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ </port>
+ <port id="2" precision="FP32">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="3" precision="FP32" names="64,input.7">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="16" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_1" type="Swish" version="opset4">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="65,input.9">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="17" name="self.decoder.mid_block.resnets.0.conv2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4, 4, 3, 3" offset="784" size="576" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.0.conv2.weight">
+ <dim>4</dim>
+ <dim>4</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="18" name="__module.decoder.mid_block.resnets.0.conv2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ <dim>4</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="19" name="__module.decoder.mid_block.resnets.0.conv2/aten::_convolution/Reshape" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 4, 1, 1" offset="1360" size="16" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="20" name="__module.decoder.mid_block.resnets.0.conv2/aten::_convolution/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="73,hidden_states.1">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="21" name="__module.decoder.mid_block.resnets.0/aten::add/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="74,75,hidden_states.3">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="22" name="Constant_29629" type="Const" version="opset1">
+ <data element_type="i64" shape="3" offset="1376" size="24" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="23" name="__module.decoder.mid_block.attentions.0/aten::view/Reshape" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="91">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="24" name="__module.decoder.mid_block.attentions.0/aten::transpose/Constant" type="Const" version="opset1">
+ <data element_type="i32" shape="3" offset="1400" size="12" />
+ <output>
+ <port id="0" precision="I32">
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="25" name="__module.decoder.mid_block.attentions.0/aten::transpose/Transpose" type="Transpose" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="I32">
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="92,hidden_states.5">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="26" name="__module.decoder.mid_block.attentions.0/aten::transpose/Constant_1" type="Const" version="opset1">
+ <data element_type="i32" shape="3" offset="1400" size="12" />
+ <output>
+ <port id="0" precision="I32">
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="27" name="__module.decoder.mid_block.attentions.0/aten::transpose/Transpose_1" type="Transpose" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I32">
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="94,input.11">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="28" name="self.decoder.mid_block.attentions.0.group_norm.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="160" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.attentions.0.group_norm.weight">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="29" name="self.decoder.mid_block.attentions.0.group_norm.bias" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="176" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.attentions.0.group_norm.bias">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="30" name="__module.decoder.mid_block.attentions.0.group_norm/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ </port>
+ <port id="2" precision="FP32">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="3" precision="FP32" names="97">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="31" name="self.decoder.mid_block.attentions.0.to_q.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4, 4" offset="1412" size="64" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.attentions.0.to_q.weight">
+ <dim>4</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="32" name="__module.decoder.mid_block.attentions.0.to_q/aten::linear/MatMul" type="MatMul" version="opset1">
+ <data transpose_a="true" transpose_b="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="33" name="Constant_29514" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 1, 4" offset="1476" size="16" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="34" name="__module.decoder.mid_block.attentions.0.to_q/aten::linear/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="101,query">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="35" name="Constant_29630" type="Const" version="opset1">
+ <data element_type="i64" shape="4" offset="1492" size="32" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="36" name="__module.decoder.mid_block.attentions.0/aten::view/Reshape_1" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="115">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="37" name="Constant_29449" type="Const" version="opset1">
+ <data element_type="i64" shape="4" offset="1524" size="32" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="38" name="__module.decoder.mid_block.attentions.0/aten::transpose/Transpose_3" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="116">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="39" name="self.decoder.mid_block.attentions.0.to_k.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4, 4" offset="1556" size="64" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.attentions.0.to_k.weight">
+ <dim>4</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="40" name="__module.decoder.mid_block.attentions.0.to_k/aten::linear/MatMul" type="MatMul" version="opset1">
+ <data transpose_a="true" transpose_b="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="41" name="Constant_29515" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 1, 4" offset="1620" size="16" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="42" name="__module.decoder.mid_block.attentions.0.to_k/aten::linear/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="104,key">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="43" name="Constant_29631" type="Const" version="opset1">
+ <data element_type="i64" shape="4" offset="1492" size="32" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="44" name="__module.decoder.mid_block.attentions.0/aten::view/Reshape_2" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="118">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="45" name="Constant_29453" type="Const" version="opset1">
+ <data element_type="i64" shape="4" offset="1524" size="32" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="46" name="__module.decoder.mid_block.attentions.0/aten::transpose/Transpose_4" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="119">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="47" name="self.decoder.mid_block.attentions.0.to_v.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4, 4" offset="1636" size="64" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.attentions.0.to_v.weight">
+ <dim>4</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="48" name="__module.decoder.mid_block.attentions.0.to_v/aten::linear/MatMul" type="MatMul" version="opset1">
+ <data transpose_a="true" transpose_b="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="49" name="Constant_29516" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 1, 4" offset="1700" size="16" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="50" name="__module.decoder.mid_block.attentions.0.to_v/aten::linear/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="107,value">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="51" name="Constant_29632" type="Const" version="opset1">
+ <data element_type="i64" shape="4" offset="1492" size="32" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="52" name="__module.decoder.mid_block.attentions.0/aten::view/Reshape_3" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="121">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="53" name="Constant_29457" type="Const" version="opset1">
+ <data element_type="i64" shape="4" offset="1524" size="32" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="54" name="__module.decoder.mid_block.attentions.0/aten::transpose/Transpose_5" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="122">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="55" name="__module.decoder.mid_block.attentions.0/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
+ <data causal="false" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="3" precision="FP32" names="123,hidden_states.7">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="56" name="Constant_29459" type="Const" version="opset1">
+ <data element_type="i64" shape="4" offset="1716" size="32" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="57" name="__module.decoder.mid_block.attentions.0/aten::transpose/Transpose_6" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="124">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="58" name="Constant_29633" type="Const" version="opset1">
+ <data element_type="i64" shape="3" offset="1748" size="24" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="59" name="__module.decoder.mid_block.attentions.0/aten::reshape/Reshape" type="Reshape" version="opset1">
+ <data special_zero="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="128,129,hidden_states.9">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="60" name="self.decoder.mid_block.attentions.0.to_out.0.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4, 4" offset="1772" size="64" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.attentions.0.to_out.0.weight">
+ <dim>4</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="61" name="__module.decoder.mid_block.attentions.0.to_out.0/aten::linear/MatMul" type="MatMul" version="opset1">
+ <data transpose_a="false" transpose_b="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="62" name="Constant_29517" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 1, 4" offset="1836" size="16" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="63" name="__module.decoder.mid_block.attentions.0.to_out.0/aten::linear/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="132,input.13">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="64" name="__module.decoder.mid_block.attentions.0/aten::transpose/Constant_7" type="Const" version="opset1">
+ <data element_type="i32" shape="3" offset="1400" size="12" />
+ <output>
+ <port id="0" precision="I32">
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="65" name="__module.decoder.mid_block.attentions.0/aten::transpose/Transpose_7" type="Transpose" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>-1</dim>
+ <dim>4</dim>
+ </port>
+ <port id="1" precision="I32">
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="134">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="66" name="__module.decoder.mid_block.attentions.0/aten::size/ShapeOf" type="ShapeOf" version="opset3">
+ <data output_type="i64" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="I64">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="67" name="__module.decoder.mid_block.attentions.0/aten::reshape/Reshape_1" type="Reshape" version="opset1">
+ <data special_zero="false" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="136,hidden_states.13">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="68" name="__module.decoder.mid_block.attentions.0/aten::add/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="137,138,hidden_states.15,input.15">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="69" name="self.decoder.mid_block.resnets.1.norm1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="160" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.1.norm1.weight">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="70" name="self.decoder.mid_block.resnets.1.norm1.bias" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="176" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.1.norm1.bias">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="71" name="__module.decoder.mid_block.resnets.1.norm1/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ </port>
+ <port id="2" precision="FP32">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="3" precision="FP32" names="147,input.17">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="72" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_2" type="Swish" version="opset4">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="148">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="73" name="self.decoder.mid_block.resnets.1.conv1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4, 4, 3, 3" offset="1852" size="576" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.1.conv1.weight">
+ <dim>4</dim>
+ <dim>4</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="74" name="__module.decoder.mid_block.resnets.1.conv1/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ <dim>4</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="75" name="__module.decoder.mid_block.resnets.1.conv1/aten::_convolution/Reshape" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 4, 1, 1" offset="2428" size="16" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="76" name="__module.decoder.mid_block.resnets.1.conv1/aten::_convolution/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="155,input.19">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="77" name="self.decoder.mid_block.resnets.1.norm2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="160" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.1.norm2.weight">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="78" name="self.decoder.mid_block.resnets.1.norm2.bias" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="176" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.1.norm2.bias">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="79" name="__module.decoder.mid_block.resnets.1.norm2/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ </port>
+ <port id="2" precision="FP32">
+ <dim>4</dim>
+ </port>
+ </input>
+ <output>
+ <port id="3" precision="FP32" names="158,input.21">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="80" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_3" type="Swish" version="opset4">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="159,input.23">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="81" name="self.decoder.mid_block.resnets.1.conv2.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4, 4, 3, 3" offset="2444" size="576" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.mid_block.resnets.1.conv2.weight">
+ <dim>4</dim>
+ <dim>4</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="82" name="__module.decoder.mid_block.resnets.1.conv2/aten::_convolution/Convolution" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ <dim>4</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="83" name="__module.decoder.mid_block.resnets.1.conv2/aten::_convolution/Reshape" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 4, 1, 1" offset="3020" size="16" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="84" name="__module.decoder.mid_block.resnets.1.conv2/aten::_convolution/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>4</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="167,hidden_states.17">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="85" name="__module.decoder.mid_block.resnets.1/aten::add/Add" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="168,169,170,input.25,sample">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="86" name="self.decoder.up_blocks.0.resnets.0.norm1.weight" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="160" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.0.norm1.weight">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="87" name="self.decoder.up_blocks.0.resnets.0.norm1.bias" type="Const" version="opset1">
+ <data element_type="f32" shape="4" offset="176" size="16" />
+ <output>
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.0.norm1.bias">
+ <dim>4</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="88" name="__module.decoder.up_blocks.0.resnets.0.norm1/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>-1</dim>
+ <dim>4</dim>
+ <dim>-1</dim>
+ <dim>-1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>4</dim>
+ </port>
+ <port id="2" precision="FP32">
+ <dim>4</dim>
+ </port>
+ </input>
1419
+ <output>
1420
+ <port id="3" precision="FP32" names="182,input.27">
1421
+ <dim>-1</dim>
1422
+ <dim>4</dim>
1423
+ <dim>-1</dim>
1424
+ <dim>-1</dim>
1425
+ </port>
1426
+ </output>
1427
+ </layer>
1428
+ <layer id="89" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_4" type="Swish" version="opset4">
1429
+ <input>
1430
+ <port id="0" precision="FP32">
1431
+ <dim>-1</dim>
1432
+ <dim>4</dim>
1433
+ <dim>-1</dim>
1434
+ <dim>-1</dim>
1435
+ </port>
1436
+ </input>
1437
+ <output>
1438
+ <port id="1" precision="FP32" names="183">
1439
+ <dim>-1</dim>
1440
+ <dim>4</dim>
1441
+ <dim>-1</dim>
1442
+ <dim>-1</dim>
1443
+ </port>
1444
+ </output>
1445
+ </layer>
1446
+ <layer id="90" name="self.decoder.up_blocks.0.resnets.0.conv1.weight" type="Const" version="opset1">
1447
+ <data element_type="f32" shape="4, 4, 3, 3" offset="3036" size="576" />
1448
+ <output>
1449
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.0.conv1.weight">
1450
+ <dim>4</dim>
1451
+ <dim>4</dim>
1452
+ <dim>3</dim>
1453
+ <dim>3</dim>
1454
+ </port>
1455
+ </output>
1456
+ </layer>
1457
+ <layer id="91" name="__module.decoder.up_blocks.0.resnets.0.conv1/aten::_convolution/Convolution" type="Convolution" version="opset1">
1458
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1459
+ <input>
1460
+ <port id="0" precision="FP32">
1461
+ <dim>-1</dim>
1462
+ <dim>4</dim>
1463
+ <dim>-1</dim>
1464
+ <dim>-1</dim>
1465
+ </port>
1466
+ <port id="1" precision="FP32">
1467
+ <dim>4</dim>
1468
+ <dim>4</dim>
1469
+ <dim>3</dim>
1470
+ <dim>3</dim>
1471
+ </port>
1472
+ </input>
1473
+ <output>
1474
+ <port id="2" precision="FP32">
1475
+ <dim>-1</dim>
1476
+ <dim>4</dim>
1477
+ <dim>-1</dim>
1478
+ <dim>-1</dim>
1479
+ </port>
1480
+ </output>
1481
+ </layer>
1482
+ <layer id="92" name="__module.decoder.up_blocks.0.resnets.0.conv1/aten::_convolution/Reshape" type="Const" version="opset1">
1483
+ <data element_type="f32" shape="1, 4, 1, 1" offset="3612" size="16" />
1484
+ <output>
1485
+ <port id="0" precision="FP32">
1486
+ <dim>1</dim>
1487
+ <dim>4</dim>
1488
+ <dim>1</dim>
1489
+ <dim>1</dim>
1490
+ </port>
1491
+ </output>
1492
+ </layer>
1493
+ <layer id="93" name="__module.decoder.up_blocks.0.resnets.0.conv1/aten::_convolution/Add" type="Add" version="opset1">
1494
+ <data auto_broadcast="numpy" />
1495
+ <input>
1496
+ <port id="0" precision="FP32">
1497
+ <dim>-1</dim>
1498
+ <dim>4</dim>
1499
+ <dim>-1</dim>
1500
+ <dim>-1</dim>
1501
+ </port>
1502
+ <port id="1" precision="FP32">
1503
+ <dim>1</dim>
1504
+ <dim>4</dim>
1505
+ <dim>1</dim>
1506
+ <dim>1</dim>
1507
+ </port>
1508
+ </input>
1509
+ <output>
1510
+ <port id="2" precision="FP32" names="190,input.29">
1511
+ <dim>-1</dim>
1512
+ <dim>4</dim>
1513
+ <dim>-1</dim>
1514
+ <dim>-1</dim>
1515
+ </port>
1516
+ </output>
1517
+ </layer>
1518
+ <layer id="94" name="self.decoder.up_blocks.0.resnets.0.norm2.weight" type="Const" version="opset1">
1519
+ <data element_type="f32" shape="4" offset="160" size="16" />
1520
+ <output>
1521
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.0.norm2.weight">
1522
+ <dim>4</dim>
1523
+ </port>
1524
+ </output>
1525
+ </layer>
1526
+ <layer id="95" name="self.decoder.up_blocks.0.resnets.0.norm2.bias" type="Const" version="opset1">
1527
+ <data element_type="f32" shape="4" offset="176" size="16" />
1528
+ <output>
1529
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.0.norm2.bias">
1530
+ <dim>4</dim>
1531
+ </port>
1532
+ </output>
1533
+ </layer>
1534
+ <layer id="96" name="__module.decoder.up_blocks.0.resnets.0.norm2/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
1535
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
1536
+ <input>
1537
+ <port id="0" precision="FP32">
1538
+ <dim>-1</dim>
1539
+ <dim>4</dim>
1540
+ <dim>-1</dim>
1541
+ <dim>-1</dim>
1542
+ </port>
1543
+ <port id="1" precision="FP32">
1544
+ <dim>4</dim>
1545
+ </port>
1546
+ <port id="2" precision="FP32">
1547
+ <dim>4</dim>
1548
+ </port>
1549
+ </input>
1550
+ <output>
1551
+ <port id="3" precision="FP32" names="193,input.31">
1552
+ <dim>-1</dim>
1553
+ <dim>4</dim>
1554
+ <dim>-1</dim>
1555
+ <dim>-1</dim>
1556
+ </port>
1557
+ </output>
1558
+ </layer>
1559
+ <layer id="97" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_5" type="Swish" version="opset4">
1560
+ <input>
1561
+ <port id="0" precision="FP32">
1562
+ <dim>-1</dim>
1563
+ <dim>4</dim>
1564
+ <dim>-1</dim>
1565
+ <dim>-1</dim>
1566
+ </port>
1567
+ </input>
1568
+ <output>
1569
+ <port id="1" precision="FP32" names="194,input.33">
1570
+ <dim>-1</dim>
1571
+ <dim>4</dim>
1572
+ <dim>-1</dim>
1573
+ <dim>-1</dim>
1574
+ </port>
1575
+ </output>
1576
+ </layer>
1577
+ <layer id="98" name="self.decoder.up_blocks.0.resnets.0.conv2.weight" type="Const" version="opset1">
1578
+ <data element_type="f32" shape="4, 4, 3, 3" offset="3628" size="576" />
1579
+ <output>
1580
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.0.conv2.weight">
1581
+ <dim>4</dim>
1582
+ <dim>4</dim>
1583
+ <dim>3</dim>
1584
+ <dim>3</dim>
1585
+ </port>
1586
+ </output>
1587
+ </layer>
1588
+ <layer id="99" name="__module.decoder.up_blocks.0.resnets.0.conv2/aten::_convolution/Convolution" type="Convolution" version="opset1">
1589
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1590
+ <input>
1591
+ <port id="0" precision="FP32">
1592
+ <dim>-1</dim>
1593
+ <dim>4</dim>
1594
+ <dim>-1</dim>
1595
+ <dim>-1</dim>
1596
+ </port>
1597
+ <port id="1" precision="FP32">
1598
+ <dim>4</dim>
1599
+ <dim>4</dim>
1600
+ <dim>3</dim>
1601
+ <dim>3</dim>
1602
+ </port>
1603
+ </input>
1604
+ <output>
1605
+ <port id="2" precision="FP32">
1606
+ <dim>-1</dim>
1607
+ <dim>4</dim>
1608
+ <dim>-1</dim>
1609
+ <dim>-1</dim>
1610
+ </port>
1611
+ </output>
1612
+ </layer>
1613
+ <layer id="100" name="__module.decoder.up_blocks.0.resnets.0.conv2/aten::_convolution/Reshape" type="Const" version="opset1">
1614
+ <data element_type="f32" shape="1, 4, 1, 1" offset="4204" size="16" />
1615
+ <output>
1616
+ <port id="0" precision="FP32">
1617
+ <dim>1</dim>
1618
+ <dim>4</dim>
1619
+ <dim>1</dim>
1620
+ <dim>1</dim>
1621
+ </port>
1622
+ </output>
1623
+ </layer>
1624
+ <layer id="101" name="__module.decoder.up_blocks.0.resnets.0.conv2/aten::_convolution/Add" type="Add" version="opset1">
1625
+ <data auto_broadcast="numpy" />
1626
+ <input>
1627
+ <port id="0" precision="FP32">
1628
+ <dim>-1</dim>
1629
+ <dim>4</dim>
1630
+ <dim>-1</dim>
1631
+ <dim>-1</dim>
1632
+ </port>
1633
+ <port id="1" precision="FP32">
1634
+ <dim>1</dim>
1635
+ <dim>4</dim>
1636
+ <dim>1</dim>
1637
+ <dim>1</dim>
1638
+ </port>
1639
+ </input>
1640
+ <output>
1641
+ <port id="2" precision="FP32" names="202,hidden_states.19">
1642
+ <dim>-1</dim>
1643
+ <dim>4</dim>
1644
+ <dim>-1</dim>
1645
+ <dim>-1</dim>
1646
+ </port>
1647
+ </output>
1648
+ </layer>
1649
+ <layer id="102" name="__module.decoder.up_blocks.0.resnets.0/aten::add/Add" type="Add" version="opset1">
1650
+ <data auto_broadcast="numpy" />
1651
+ <input>
1652
+ <port id="0" precision="FP32">
1653
+ <dim>-1</dim>
1654
+ <dim>4</dim>
1655
+ <dim>-1</dim>
1656
+ <dim>-1</dim>
1657
+ </port>
1658
+ <port id="1" precision="FP32">
1659
+ <dim>-1</dim>
1660
+ <dim>4</dim>
1661
+ <dim>-1</dim>
1662
+ <dim>-1</dim>
1663
+ </port>
1664
+ </input>
1665
+ <output>
1666
+ <port id="2" precision="FP32" names="203,204,input.35">
1667
+ <dim>-1</dim>
1668
+ <dim>4</dim>
1669
+ <dim>-1</dim>
1670
+ <dim>-1</dim>
1671
+ </port>
1672
+ </output>
1673
+ </layer>
1674
+ <layer id="103" name="self.decoder.up_blocks.0.resnets.1.norm1.weight" type="Const" version="opset1">
1675
+ <data element_type="f32" shape="4" offset="160" size="16" />
1676
+ <output>
1677
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.1.norm1.weight">
1678
+ <dim>4</dim>
1679
+ </port>
1680
+ </output>
1681
+ </layer>
1682
+ <layer id="104" name="self.decoder.up_blocks.0.resnets.1.norm1.bias" type="Const" version="opset1">
1683
+ <data element_type="f32" shape="4" offset="176" size="16" />
1684
+ <output>
1685
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.1.norm1.bias">
1686
+ <dim>4</dim>
1687
+ </port>
1688
+ </output>
1689
+ </layer>
1690
+ <layer id="105" name="__module.decoder.up_blocks.0.resnets.1.norm1/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
1691
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
1692
+ <input>
1693
+ <port id="0" precision="FP32">
1694
+ <dim>-1</dim>
1695
+ <dim>4</dim>
1696
+ <dim>-1</dim>
1697
+ <dim>-1</dim>
1698
+ </port>
1699
+ <port id="1" precision="FP32">
1700
+ <dim>4</dim>
1701
+ </port>
1702
+ <port id="2" precision="FP32">
1703
+ <dim>4</dim>
1704
+ </port>
1705
+ </input>
1706
+ <output>
1707
+ <port id="3" precision="FP32" names="212,input.37">
1708
+ <dim>-1</dim>
1709
+ <dim>4</dim>
1710
+ <dim>-1</dim>
1711
+ <dim>-1</dim>
1712
+ </port>
1713
+ </output>
1714
+ </layer>
1715
+ <layer id="106" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_6" type="Swish" version="opset4">
1716
+ <input>
1717
+ <port id="0" precision="FP32">
1718
+ <dim>-1</dim>
1719
+ <dim>4</dim>
1720
+ <dim>-1</dim>
1721
+ <dim>-1</dim>
1722
+ </port>
1723
+ </input>
1724
+ <output>
1725
+ <port id="1" precision="FP32" names="213">
1726
+ <dim>-1</dim>
1727
+ <dim>4</dim>
1728
+ <dim>-1</dim>
1729
+ <dim>-1</dim>
1730
+ </port>
1731
+ </output>
1732
+ </layer>
1733
+ <layer id="107" name="self.decoder.up_blocks.0.resnets.1.conv1.weight" type="Const" version="opset1">
1734
+ <data element_type="f32" shape="4, 4, 3, 3" offset="4220" size="576" />
1735
+ <output>
1736
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.1.conv1.weight">
1737
+ <dim>4</dim>
1738
+ <dim>4</dim>
1739
+ <dim>3</dim>
1740
+ <dim>3</dim>
1741
+ </port>
1742
+ </output>
1743
+ </layer>
1744
+ <layer id="108" name="__module.decoder.up_blocks.0.resnets.1.conv1/aten::_convolution/Convolution" type="Convolution" version="opset1">
1745
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1746
+ <input>
1747
+ <port id="0" precision="FP32">
1748
+ <dim>-1</dim>
1749
+ <dim>4</dim>
1750
+ <dim>-1</dim>
1751
+ <dim>-1</dim>
1752
+ </port>
1753
+ <port id="1" precision="FP32">
1754
+ <dim>4</dim>
1755
+ <dim>4</dim>
1756
+ <dim>3</dim>
1757
+ <dim>3</dim>
1758
+ </port>
1759
+ </input>
1760
+ <output>
1761
+ <port id="2" precision="FP32">
1762
+ <dim>-1</dim>
1763
+ <dim>4</dim>
1764
+ <dim>-1</dim>
1765
+ <dim>-1</dim>
1766
+ </port>
1767
+ </output>
1768
+ </layer>
1769
+ <layer id="109" name="__module.decoder.up_blocks.0.resnets.1.conv1/aten::_convolution/Reshape" type="Const" version="opset1">
1770
+ <data element_type="f32" shape="1, 4, 1, 1" offset="4796" size="16" />
1771
+ <output>
1772
+ <port id="0" precision="FP32">
1773
+ <dim>1</dim>
1774
+ <dim>4</dim>
1775
+ <dim>1</dim>
1776
+ <dim>1</dim>
1777
+ </port>
1778
+ </output>
1779
+ </layer>
1780
+ <layer id="110" name="__module.decoder.up_blocks.0.resnets.1.conv1/aten::_convolution/Add" type="Add" version="opset1">
1781
+ <data auto_broadcast="numpy" />
1782
+ <input>
1783
+ <port id="0" precision="FP32">
1784
+ <dim>-1</dim>
1785
+ <dim>4</dim>
1786
+ <dim>-1</dim>
1787
+ <dim>-1</dim>
1788
+ </port>
1789
+ <port id="1" precision="FP32">
1790
+ <dim>1</dim>
1791
+ <dim>4</dim>
1792
+ <dim>1</dim>
1793
+ <dim>1</dim>
1794
+ </port>
1795
+ </input>
1796
+ <output>
1797
+ <port id="2" precision="FP32" names="220,input.39">
1798
+ <dim>-1</dim>
1799
+ <dim>4</dim>
1800
+ <dim>-1</dim>
1801
+ <dim>-1</dim>
1802
+ </port>
1803
+ </output>
1804
+ </layer>
1805
+ <layer id="111" name="self.decoder.up_blocks.0.resnets.1.norm2.weight" type="Const" version="opset1">
1806
+ <data element_type="f32" shape="4" offset="160" size="16" />
1807
+ <output>
1808
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.1.norm2.weight">
1809
+ <dim>4</dim>
1810
+ </port>
1811
+ </output>
1812
+ </layer>
1813
+ <layer id="112" name="self.decoder.up_blocks.0.resnets.1.norm2.bias" type="Const" version="opset1">
1814
+ <data element_type="f32" shape="4" offset="176" size="16" />
1815
+ <output>
1816
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.1.norm2.bias">
1817
+ <dim>4</dim>
1818
+ </port>
1819
+ </output>
1820
+ </layer>
1821
+ <layer id="113" name="__module.decoder.up_blocks.0.resnets.1.norm2/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
1822
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
1823
+ <input>
1824
+ <port id="0" precision="FP32">
1825
+ <dim>-1</dim>
1826
+ <dim>4</dim>
1827
+ <dim>-1</dim>
1828
+ <dim>-1</dim>
1829
+ </port>
1830
+ <port id="1" precision="FP32">
1831
+ <dim>4</dim>
1832
+ </port>
1833
+ <port id="2" precision="FP32">
1834
+ <dim>4</dim>
1835
+ </port>
1836
+ </input>
1837
+ <output>
1838
+ <port id="3" precision="FP32" names="223,input.41">
1839
+ <dim>-1</dim>
1840
+ <dim>4</dim>
1841
+ <dim>-1</dim>
1842
+ <dim>-1</dim>
1843
+ </port>
1844
+ </output>
1845
+ </layer>
1846
+ <layer id="114" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_7" type="Swish" version="opset4">
1847
+ <input>
1848
+ <port id="0" precision="FP32">
1849
+ <dim>-1</dim>
1850
+ <dim>4</dim>
1851
+ <dim>-1</dim>
1852
+ <dim>-1</dim>
1853
+ </port>
1854
+ </input>
1855
+ <output>
1856
+ <port id="1" precision="FP32" names="224,input.43">
1857
+ <dim>-1</dim>
1858
+ <dim>4</dim>
1859
+ <dim>-1</dim>
1860
+ <dim>-1</dim>
1861
+ </port>
1862
+ </output>
1863
+ </layer>
1864
+ <layer id="115" name="self.decoder.up_blocks.0.resnets.1.conv2.weight" type="Const" version="opset1">
1865
+ <data element_type="f32" shape="4, 4, 3, 3" offset="4812" size="576" />
1866
+ <output>
1867
+ <port id="0" precision="FP32" names="self.decoder.up_blocks.0.resnets.1.conv2.weight">
1868
+ <dim>4</dim>
1869
+ <dim>4</dim>
1870
+ <dim>3</dim>
1871
+ <dim>3</dim>
1872
+ </port>
1873
+ </output>
1874
+ </layer>
1875
+ <layer id="116" name="__module.decoder.up_blocks.0.resnets.1.conv2/aten::_convolution/Convolution" type="Convolution" version="opset1">
1876
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1877
+ <input>
1878
+ <port id="0" precision="FP32">
1879
+ <dim>-1</dim>
1880
+ <dim>4</dim>
1881
+ <dim>-1</dim>
1882
+ <dim>-1</dim>
1883
+ </port>
1884
+ <port id="1" precision="FP32">
1885
+ <dim>4</dim>
1886
+ <dim>4</dim>
1887
+ <dim>3</dim>
1888
+ <dim>3</dim>
1889
+ </port>
1890
+ </input>
1891
+ <output>
1892
+ <port id="2" precision="FP32">
1893
+ <dim>-1</dim>
1894
+ <dim>4</dim>
1895
+ <dim>-1</dim>
1896
+ <dim>-1</dim>
1897
+ </port>
1898
+ </output>
1899
+ </layer>
1900
+ <layer id="117" name="__module.decoder.up_blocks.0.resnets.1.conv2/aten::_convolution/Reshape" type="Const" version="opset1">
1901
+ <data element_type="f32" shape="1, 4, 1, 1" offset="5388" size="16" />
1902
+ <output>
1903
+ <port id="0" precision="FP32">
1904
+ <dim>1</dim>
1905
+ <dim>4</dim>
1906
+ <dim>1</dim>
1907
+ <dim>1</dim>
1908
+ </port>
1909
+ </output>
1910
+ </layer>
1911
+ <layer id="118" name="__module.decoder.up_blocks.0.resnets.1.conv2/aten::_convolution/Add" type="Add" version="opset1">
1912
+ <data auto_broadcast="numpy" />
1913
+ <input>
1914
+ <port id="0" precision="FP32">
1915
+ <dim>-1</dim>
1916
+ <dim>4</dim>
1917
+ <dim>-1</dim>
1918
+ <dim>-1</dim>
1919
+ </port>
1920
+ <port id="1" precision="FP32">
1921
+ <dim>1</dim>
1922
+ <dim>4</dim>
1923
+ <dim>1</dim>
1924
+ <dim>1</dim>
1925
+ </port>
1926
+ </input>
1927
+ <output>
1928
+ <port id="2" precision="FP32" names="232,hidden_states">
1929
+ <dim>-1</dim>
1930
+ <dim>4</dim>
1931
+ <dim>-1</dim>
1932
+ <dim>-1</dim>
1933
+ </port>
1934
+ </output>
1935
+ </layer>
1936
+ <layer id="119" name="__module.decoder.up_blocks.0.resnets.1/aten::add/Add" type="Add" version="opset1">
1937
+ <data auto_broadcast="numpy" />
1938
+ <input>
1939
+ <port id="0" precision="FP32">
1940
+ <dim>-1</dim>
1941
+ <dim>4</dim>
1942
+ <dim>-1</dim>
1943
+ <dim>-1</dim>
1944
+ </port>
1945
+ <port id="1" precision="FP32">
1946
+ <dim>-1</dim>
1947
+ <dim>4</dim>
1948
+ <dim>-1</dim>
1949
+ <dim>-1</dim>
1950
+ </port>
1951
+ </input>
1952
+ <output>
1953
+ <port id="2" precision="FP32" names="233,234,input.45">
1954
+ <dim>-1</dim>
1955
+ <dim>4</dim>
1956
+ <dim>-1</dim>
1957
+ <dim>-1</dim>
1958
+ </port>
1959
+ </output>
1960
+ </layer>
1961
+ <layer id="120" name="self.decoder.conv_norm_out.weight" type="Const" version="opset1">
1962
+ <data element_type="f32" shape="4" offset="160" size="16" />
1963
+ <output>
1964
+ <port id="0" precision="FP32" names="self.decoder.conv_norm_out.weight">
1965
+ <dim>4</dim>
1966
+ </port>
1967
+ </output>
1968
+ </layer>
1969
+ <layer id="121" name="self.decoder.conv_norm_out.bias" type="Const" version="opset1">
1970
+ <data element_type="f32" shape="4" offset="176" size="16" />
1971
+ <output>
1972
+ <port id="0" precision="FP32" names="self.decoder.conv_norm_out.bias">
1973
+ <dim>4</dim>
1974
+ </port>
1975
+ </output>
1976
+ </layer>
1977
+ <layer id="122" name="__module.decoder.conv_norm_out/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
1978
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
1979
+ <input>
1980
+ <port id="0" precision="FP32">
1981
+ <dim>-1</dim>
1982
+ <dim>4</dim>
1983
+ <dim>-1</dim>
1984
+ <dim>-1</dim>
1985
+ </port>
1986
+ <port id="1" precision="FP32">
1987
+ <dim>4</dim>
1988
+ </port>
1989
+ <port id="2" precision="FP32">
1990
+ <dim>4</dim>
1991
+ </port>
1992
+ </input>
1993
+ <output>
1994
+ <port id="3" precision="FP32" names="237,input">
1995
+ <dim>-1</dim>
1996
+ <dim>4</dim>
1997
+ <dim>-1</dim>
1998
+ <dim>-1</dim>
1999
+ </port>
2000
+ </output>
2001
+ </layer>
2002
+ <layer id="123" name="__module.decoder.conv_act/aten::silu/Swish" type="Swish" version="opset4">
2003
+ <input>
2004
+ <port id="0" precision="FP32">
2005
+ <dim>-1</dim>
2006
+ <dim>4</dim>
2007
+ <dim>-1</dim>
2008
+ <dim>-1</dim>
2009
+ </port>
2010
+ </input>
2011
+ <output>
2012
+ <port id="1" precision="FP32" names="238">
2013
+ <dim>-1</dim>
2014
+ <dim>4</dim>
2015
+ <dim>-1</dim>
2016
+ <dim>-1</dim>
2017
+ </port>
2018
+ </output>
2019
+ </layer>
2020
+ <layer id="124" name="self.decoder.conv_out.weight" type="Const" version="opset1">
2021
+ <data element_type="f32" shape="3, 4, 3, 3" offset="5404" size="432" />
2022
+ <output>
2023
+ <port id="0" precision="FP32" names="self.decoder.conv_out.weight">
2024
+ <dim>3</dim>
2025
+ <dim>4</dim>
2026
+ <dim>3</dim>
2027
+ <dim>3</dim>
2028
+ </port>
2029
+ </output>
2030
+ </layer>
2031
+ <layer id="125" name="__module.decoder.conv_out/aten::_convolution/Convolution" type="Convolution" version="opset1">
2032
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
2033
+ <input>
2034
+ <port id="0" precision="FP32">
2035
+ <dim>-1</dim>
2036
+ <dim>4</dim>
2037
+ <dim>-1</dim>
2038
+ <dim>-1</dim>
2039
+ </port>
2040
+ <port id="1" precision="FP32">
2041
+ <dim>3</dim>
2042
+ <dim>4</dim>
2043
+ <dim>3</dim>
2044
+ <dim>3</dim>
2045
+ </port>
2046
+ </input>
2047
+ <output>
2048
+ <port id="2" precision="FP32">
2049
+ <dim>-1</dim>
2050
+ <dim>3</dim>
2051
+ <dim>-1</dim>
2052
+ <dim>-1</dim>
2053
+ </port>
2054
+ </output>
2055
+ </layer>
2056
+ <layer id="126" name="__module.decoder.conv_out/aten::_convolution/Reshape" type="Const" version="opset1">
2057
+ <data element_type="f32" shape="1, 3, 1, 1" offset="5836" size="12" />
2058
+ <output>
2059
+ <port id="0" precision="FP32">
2060
+ <dim>1</dim>
2061
+ <dim>3</dim>
2062
+ <dim>1</dim>
2063
+ <dim>1</dim>
2064
+ </port>
2065
+ </output>
2066
+ </layer>
2067
+ <layer id="127" name="__module.decoder.conv_out/aten::_convolution/Add" type="Add" version="opset1">
2068
+ <data auto_broadcast="numpy" />
2069
+ <input>
2070
+ <port id="0" precision="FP32">
2071
+ <dim>-1</dim>
2072
+ <dim>3</dim>
2073
+ <dim>-1</dim>
2074
+ <dim>-1</dim>
2075
+ </port>
2076
+ <port id="1" precision="FP32">
2077
+ <dim>1</dim>
2078
+ <dim>3</dim>
2079
+ <dim>1</dim>
2080
+ <dim>1</dim>
2081
+ </port>
2082
+ </input>
2083
+ <output>
2084
+ <port id="2" precision="FP32" names="sample">
2085
+ <dim>-1</dim>
2086
+ <dim>3</dim>
2087
+ <dim>-1</dim>
2088
+ <dim>-1</dim>
2089
+ </port>
2090
+ </output>
2091
+ </layer>
2092
+ <layer id="128" name="Result_26759" type="Result" version="opset1">
2093
+ <input>
2094
+ <port id="0" precision="FP32">
2095
+ <dim>-1</dim>
2096
+ <dim>3</dim>
2097
+ <dim>-1</dim>
2098
+ <dim>-1</dim>
2099
+ </port>
2100
+ </input>
2101
+ </layer>
2102
+ </layers>
2103
+ <edges>
2104
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
2105
+ <edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
2106
+ <edge from-layer="2" from-port="2" to-layer="4" to-port="0" />
2107
+ <edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
2108
+ <edge from-layer="4" from-port="2" to-layer="7" to-port="0" />
2109
+ <edge from-layer="4" from-port="2" to-layer="21" to-port="0" />
2110
+ <edge from-layer="5" from-port="0" to-layer="7" to-port="1" />
2111
+ <edge from-layer="6" from-port="0" to-layer="7" to-port="2" />
2112
+ <edge from-layer="7" from-port="3" to-layer="8" to-port="0" />
2113
+ <edge from-layer="8" from-port="1" to-layer="10" to-port="0" />
2114
+ <edge from-layer="9" from-port="0" to-layer="10" to-port="1" />
2115
+ <edge from-layer="10" from-port="2" to-layer="12" to-port="0" />
2116
+ <edge from-layer="11" from-port="0" to-layer="12" to-port="1" />
2117
+ <edge from-layer="12" from-port="2" to-layer="15" to-port="0" />
2118
+ <edge from-layer="13" from-port="0" to-layer="15" to-port="1" />
2119
+ <edge from-layer="14" from-port="0" to-layer="15" to-port="2" />
2120
+ <edge from-layer="15" from-port="3" to-layer="16" to-port="0" />
2121
+ <edge from-layer="16" from-port="1" to-layer="18" to-port="0" />
2122
+ <edge from-layer="17" from-port="0" to-layer="18" to-port="1" />
2123
+ <edge from-layer="18" from-port="2" to-layer="20" to-port="0" />
2124
+ <edge from-layer="19" from-port="0" to-layer="20" to-port="1" />
2125
+ <edge from-layer="20" from-port="2" to-layer="21" to-port="1" />
2126
+ <edge from-layer="21" from-port="2" to-layer="68" to-port="1" />
2127
+ <edge from-layer="21" from-port="2" to-layer="66" to-port="0" />
2128
+ <edge from-layer="21" from-port="2" to-layer="23" to-port="0" />
2129
+ <edge from-layer="22" from-port="0" to-layer="23" to-port="1" />
2130
+ <edge from-layer="23" from-port="2" to-layer="25" to-port="0" />
2131
+ <edge from-layer="24" from-port="0" to-layer="25" to-port="1" />
2132
+ <edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
2133
+ <edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
2134
+ <edge from-layer="27" from-port="2" to-layer="30" to-port="0" />
2135
+ <edge from-layer="28" from-port="0" to-layer="30" to-port="1" />
2136
+ <edge from-layer="29" from-port="0" to-layer="30" to-port="2" />
2137
+ <edge from-layer="30" from-port="3" to-layer="48" to-port="0" />
2138
+ <edge from-layer="30" from-port="3" to-layer="40" to-port="0" />
2139
+ <edge from-layer="30" from-port="3" to-layer="32" to-port="0" />
2140
+ <edge from-layer="31" from-port="0" to-layer="32" to-port="1" />
2141
+ <edge from-layer="32" from-port="2" to-layer="34" to-port="0" />
2142
+ <edge from-layer="33" from-port="0" to-layer="34" to-port="1" />
2143
+ <edge from-layer="34" from-port="2" to-layer="36" to-port="0" />
2144
+ <edge from-layer="35" from-port="0" to-layer="36" to-port="1" />
2145
+ <edge from-layer="36" from-port="2" to-layer="38" to-port="0" />
2146
+ <edge from-layer="37" from-port="0" to-layer="38" to-port="1" />
2147
+ <edge from-layer="38" from-port="2" to-layer="55" to-port="0" />
2148
+ <edge from-layer="39" from-port="0" to-layer="40" to-port="1" />
2149
+ <edge from-layer="40" from-port="2" to-layer="42" to-port="0" />
2150
+ <edge from-layer="41" from-port="0" to-layer="42" to-port="1" />
2151
+ <edge from-layer="42" from-port="2" to-layer="44" to-port="0" />
2152
+ <edge from-layer="43" from-port="0" to-layer="44" to-port="1" />
2153
+ <edge from-layer="44" from-port="2" to-layer="46" to-port="0" />
2154
+ <edge from-layer="45" from-port="0" to-layer="46" to-port="1" />
2155
+ <edge from-layer="46" from-port="2" to-layer="55" to-port="1" />
2156
+ <edge from-layer="47" from-port="0" to-layer="48" to-port="1" />
2157
+ <edge from-layer="48" from-port="2" to-layer="50" to-port="0" />
2158
+ <edge from-layer="49" from-port="0" to-layer="50" to-port="1" />
2159
+ <edge from-layer="50" from-port="2" to-layer="52" to-port="0" />
2160
+ <edge from-layer="51" from-port="0" to-layer="52" to-port="1" />
2161
+ <edge from-layer="52" from-port="2" to-layer="54" to-port="0" />
2162
+ <edge from-layer="53" from-port="0" to-layer="54" to-port="1" />
2163
+ <edge from-layer="54" from-port="2" to-layer="55" to-port="2" />
2164
+ <edge from-layer="55" from-port="3" to-layer="57" to-port="0" />
2165
+ <edge from-layer="56" from-port="0" to-layer="57" to-port="1" />
2166
+ <edge from-layer="57" from-port="2" to-layer="59" to-port="0" />
2167
+ <edge from-layer="58" from-port="0" to-layer="59" to-port="1" />
2168
+ <edge from-layer="59" from-port="2" to-layer="61" to-port="0" />
2169
+ <edge from-layer="60" from-port="0" to-layer="61" to-port="1" />
2170
+ <edge from-layer="61" from-port="2" to-layer="63" to-port="0" />
2171
+ <edge from-layer="62" from-port="0" to-layer="63" to-port="1" />
2172
+ <edge from-layer="63" from-port="2" to-layer="65" to-port="0" />
2173
+ <edge from-layer="64" from-port="0" to-layer="65" to-port="1" />
2174
+ <edge from-layer="65" from-port="2" to-layer="67" to-port="0" />
2175
+ <edge from-layer="66" from-port="1" to-layer="67" to-port="1" />
2176
+ <edge from-layer="67" from-port="2" to-layer="68" to-port="0" />
2177
+ <edge from-layer="68" from-port="2" to-layer="71" to-port="0" />
2178
+ <edge from-layer="68" from-port="2" to-layer="85" to-port="0" />
2179
+ <edge from-layer="69" from-port="0" to-layer="71" to-port="1" />
2180
+ <edge from-layer="70" from-port="0" to-layer="71" to-port="2" />
2181
+ <edge from-layer="71" from-port="3" to-layer="72" to-port="0" />
2182
+ <edge from-layer="72" from-port="1" to-layer="74" to-port="0" />
2183
+ <edge from-layer="73" from-port="0" to-layer="74" to-port="1" />
2184
+ <edge from-layer="74" from-port="2" to-layer="76" to-port="0" />
2185
+ <edge from-layer="75" from-port="0" to-layer="76" to-port="1" />
2186
+ <edge from-layer="76" from-port="2" to-layer="79" to-port="0" />
2187
+ <edge from-layer="77" from-port="0" to-layer="79" to-port="1" />
2188
+ <edge from-layer="78" from-port="0" to-layer="79" to-port="2" />
2189
+ <edge from-layer="79" from-port="3" to-layer="80" to-port="0" />
2190
+ <edge from-layer="80" from-port="1" to-layer="82" to-port="0" />
2191
+ <edge from-layer="81" from-port="0" to-layer="82" to-port="1" />
2192
+ <edge from-layer="82" from-port="2" to-layer="84" to-port="0" />
2193
+ <edge from-layer="83" from-port="0" to-layer="84" to-port="1" />
2194
+ <edge from-layer="84" from-port="2" to-layer="85" to-port="1" />
2195
+ <edge from-layer="85" from-port="2" to-layer="88" to-port="0" />
2196
+ <edge from-layer="85" from-port="2" to-layer="102" to-port="0" />
2197
+ <edge from-layer="86" from-port="0" to-layer="88" to-port="1" />
2198
+ <edge from-layer="87" from-port="0" to-layer="88" to-port="2" />
2199
+ <edge from-layer="88" from-port="3" to-layer="89" to-port="0" />
2200
+ <edge from-layer="89" from-port="1" to-layer="91" to-port="0" />
2201
+ <edge from-layer="90" from-port="0" to-layer="91" to-port="1" />
2202
+ <edge from-layer="91" from-port="2" to-layer="93" to-port="0" />
2203
+ <edge from-layer="92" from-port="0" to-layer="93" to-port="1" />
2204
+ <edge from-layer="93" from-port="2" to-layer="96" to-port="0" />
2205
+ <edge from-layer="94" from-port="0" to-layer="96" to-port="1" />
2206
+ <edge from-layer="95" from-port="0" to-layer="96" to-port="2" />
2207
+ <edge from-layer="96" from-port="3" to-layer="97" to-port="0" />
2208
+ <edge from-layer="97" from-port="1" to-layer="99" to-port="0" />
2209
+ <edge from-layer="98" from-port="0" to-layer="99" to-port="1" />
2210
+ <edge from-layer="99" from-port="2" to-layer="101" to-port="0" />
2211
+ <edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
2212
+ <edge from-layer="101" from-port="2" to-layer="102" to-port="1" />
2213
+ <edge from-layer="102" from-port="2" to-layer="105" to-port="0" />
2214
+ <edge from-layer="102" from-port="2" to-layer="119" to-port="0" />
2215
+ <edge from-layer="103" from-port="0" to-layer="105" to-port="1" />
2216
+ <edge from-layer="104" from-port="0" to-layer="105" to-port="2" />
2217
+ <edge from-layer="105" from-port="3" to-layer="106" to-port="0" />
2218
+ <edge from-layer="106" from-port="1" to-layer="108" to-port="0" />
2219
+ <edge from-layer="107" from-port="0" to-layer="108" to-port="1" />
2220
+ <edge from-layer="108" from-port="2" to-layer="110" to-port="0" />
2221
+ <edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
2222
+ <edge from-layer="110" from-port="2" to-layer="113" to-port="0" />
2223
+ <edge from-layer="111" from-port="0" to-layer="113" to-port="1" />
2224
+ <edge from-layer="112" from-port="0" to-layer="113" to-port="2" />
2225
+ <edge from-layer="113" from-port="3" to-layer="114" to-port="0" />
2226
+ <edge from-layer="114" from-port="1" to-layer="116" to-port="0" />
2227
+ <edge from-layer="115" from-port="0" to-layer="116" to-port="1" />
2228
+ <edge from-layer="116" from-port="2" to-layer="118" to-port="0" />
2229
+ <edge from-layer="117" from-port="0" to-layer="118" to-port="1" />
2230
+ <edge from-layer="118" from-port="2" to-layer="119" to-port="1" />
2231
+ <edge from-layer="119" from-port="2" to-layer="122" to-port="0" />
2232
+ <edge from-layer="120" from-port="0" to-layer="122" to-port="1" />
2233
+ <edge from-layer="121" from-port="0" to-layer="122" to-port="2" />
2234
+ <edge from-layer="122" from-port="3" to-layer="123" to-port="0" />
2235
+ <edge from-layer="123" from-port="1" to-layer="125" to-port="0" />
2236
+ <edge from-layer="124" from-port="0" to-layer="125" to-port="1" />
2237
+ <edge from-layer="125" from-port="2" to-layer="127" to-port="0" />
2238
+ <edge from-layer="126" from-port="0" to-layer="127" to-port="1" />
2239
+ <edge from-layer="127" from-port="2" to-layer="128" to-port="0" />
2240
+ </edges>
2241
+ <rt_info>
2242
+ <Runtime_version value="2024.6.0-17404-4c0f47d2335-releases/2024/6" />
2243
+ <conversion_parameters>
2244
+ <framework value="pytorch" />
2245
+ <is_python_object value="True" />
2246
+ </conversion_parameters>
2247
+ <optimum>
2248
+ <diffusers_version value="0.32.1" />
2249
+ <optimum_intel_version value="1.22.0.dev0+bb1c68ae" />
2250
+ <optimum_version value="1.24.0.dev0" />
2251
+ <pytorch_version value="2.5.1+cpu" />
2252
+ <transformers_version value="4.46.3" />
2253
+ </optimum>
2254
+ <runtime_options>
2255
+ <ACTIVATIONS_SCALE_FACTOR value="8.0" />
2256
+ </runtime_options>
2257
+ </rt_info>
2258
+ </net>
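Note: the decoder IR above, together with the configs in this commit, should be loadable end-to-end with optimum-intel. A minimal sketch, assuming a local checkout of this repository and that the installed optimum-intel build (1.22.0.dev0 per the rt_info above) exposes OVPipelineForText2Image with Flux support:

from optimum.intel import OVPipelineForText2Image

# "." is a placeholder for a local checkout of this repository; any
# directory containing model_index.json and the OpenVINO IR files works.
pipe = OVPipelineForText2Image.from_pretrained(".")

# The tiny-random weights produce noise, but this exercises the full graph;
# sample_size=32 in the VAE config keeps the run cheap.
image = pipe("test prompt", num_inference_steps=2, height=32, width=32).images[0]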
vae_encoder/config.json ADDED
@@ -0,0 +1,29 @@
1
+ {
2
+ "_class_name": "AutoencoderKL",
3
+ "_diffusers_version": "0.32.1",
4
+ "_name_or_path": "/home/ea/.cache/huggingface/hub/models--katuni4ka--tiny-random-flux/snapshots/36abdcc25faf1a91425f0e38ffa8b5d427534cef/vae",
5
+ "act_fn": "silu",
6
+ "block_out_channels": [
7
+ 4
8
+ ],
9
+ "down_block_types": [
10
+ "DownEncoderBlock2D"
11
+ ],
12
+ "force_upcast": true,
13
+ "in_channels": 3,
14
+ "latent_channels": 1,
15
+ "latents_mean": null,
16
+ "latents_std": null,
17
+ "layers_per_block": 1,
18
+ "mid_block_add_attention": true,
19
+ "norm_num_groups": 1,
20
+ "out_channels": 3,
21
+ "sample_size": 32,
22
+ "scaling_factor": 1.5035,
23
+ "shift_factor": 0.0609,
24
+ "up_block_types": [
25
+ "UpDecoderBlock2D"
26
+ ],
27
+ "use_post_quant_conv": false,
28
+ "use_quant_conv": false
29
+ }
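Note: scaling_factor and shift_factor above are the Flux-style latent normalization constants; since use_quant_conv and use_post_quant_conv are disabled, they apply directly to the encoder output. A hedged sketch of how diffusers-style pipelines consume them (constants copied from the config, function names illustrative):

scaling_factor = 1.5035
shift_factor = 0.0609

def normalize_latents(z):
    # applied to the vae.encode(...) output before it enters the transformer
    return (z - shift_factor) * scaling_factor

def denormalize_latents(z):
    # inverse mapping, applied before vae.decode(...)
    return z / scaling_factor + shift_factor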
vae_encoder/openvino_model.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5ef26912f63198b03405f38dac43f272c3b72ffb011c36e89c49b000cdcd5572
3
+ size 4804
vae_encoder/openvino_model.xml ADDED
@@ -0,0 +1,1953 @@
1
+ <?xml version="1.0"?>
2
+ <net name="Model6" version="11">
3
+ <layers>
4
+ <layer id="0" name="sample" type="Parameter" version="opset1">
5
+ <data shape="?,3,?,?" element_type="f32" />
6
+ <output>
7
+ <port id="0" precision="FP32" names="sample">
8
+ <dim>-1</dim>
9
+ <dim>3</dim>
10
+ <dim>-1</dim>
11
+ <dim>-1</dim>
12
+ </port>
13
+ </output>
14
+ </layer>
15
+ <layer id="1" name="self.encoder.conv_in.weight" type="Const" version="opset1">
16
+ <data element_type="f32" shape="4, 3, 3, 3" offset="0" size="432" />
17
+ <output>
18
+ <port id="0" precision="FP32" names="self.encoder.conv_in.weight">
19
+ <dim>4</dim>
20
+ <dim>3</dim>
21
+ <dim>3</dim>
22
+ <dim>3</dim>
23
+ </port>
24
+ </output>
25
+ </layer>
26
+ <layer id="2" name="__module.encoder.conv_in/aten::_convolution/Convolution" type="Convolution" version="opset1">
27
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
28
+ <input>
29
+ <port id="0" precision="FP32">
30
+ <dim>-1</dim>
31
+ <dim>3</dim>
32
+ <dim>-1</dim>
33
+ <dim>-1</dim>
34
+ </port>
35
+ <port id="1" precision="FP32">
36
+ <dim>4</dim>
37
+ <dim>3</dim>
38
+ <dim>3</dim>
39
+ <dim>3</dim>
40
+ </port>
41
+ </input>
42
+ <output>
43
+ <port id="2" precision="FP32">
44
+ <dim>-1</dim>
45
+ <dim>4</dim>
46
+ <dim>-1</dim>
47
+ <dim>-1</dim>
48
+ </port>
49
+ </output>
50
+ </layer>
51
+ <layer id="3" name="__module.encoder.conv_in/aten::_convolution/Reshape" type="Const" version="opset1">
52
+ <data element_type="f32" shape="1, 4, 1, 1" offset="432" size="16" />
53
+ <output>
54
+ <port id="0" precision="FP32">
55
+ <dim>1</dim>
56
+ <dim>4</dim>
57
+ <dim>1</dim>
58
+ <dim>1</dim>
59
+ </port>
60
+ </output>
61
+ </layer>
62
+ <layer id="4" name="__module.encoder.conv_in/aten::_convolution/Add" type="Add" version="opset1">
63
+ <data auto_broadcast="numpy" />
64
+ <input>
65
+ <port id="0" precision="FP32">
66
+ <dim>-1</dim>
67
+ <dim>4</dim>
68
+ <dim>-1</dim>
69
+ <dim>-1</dim>
70
+ </port>
71
+ <port id="1" precision="FP32">
72
+ <dim>1</dim>
73
+ <dim>4</dim>
74
+ <dim>1</dim>
75
+ <dim>1</dim>
76
+ </port>
77
+ </input>
78
+ <output>
79
+ <port id="2" precision="FP32" names="37,input.1">
80
+ <dim>-1</dim>
81
+ <dim>4</dim>
82
+ <dim>-1</dim>
83
+ <dim>-1</dim>
84
+ </port>
85
+ </output>
86
+ </layer>
87
+ <layer id="5" name="self.encoder.down_blocks.0.resnets.0.norm1.weight" type="Const" version="opset1">
88
+ <data element_type="f32" shape="4" offset="448" size="16" />
89
+ <output>
90
+ <port id="0" precision="FP32" names="self.encoder.down_blocks.0.resnets.0.norm1.weight">
91
+ <dim>4</dim>
92
+ </port>
93
+ </output>
94
+ </layer>
95
+ <layer id="6" name="self.encoder.down_blocks.0.resnets.0.norm1.bias" type="Const" version="opset1">
96
+ <data element_type="f32" shape="4" offset="464" size="16" />
97
+ <output>
98
+ <port id="0" precision="FP32" names="self.encoder.down_blocks.0.resnets.0.norm1.bias">
99
+ <dim>4</dim>
100
+ </port>
101
+ </output>
102
+ </layer>
103
+ <layer id="7" name="__module.encoder.down_blocks.0.resnets.0.norm1/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
104
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
105
+ <input>
106
+ <port id="0" precision="FP32">
107
+ <dim>-1</dim>
108
+ <dim>4</dim>
109
+ <dim>-1</dim>
110
+ <dim>-1</dim>
111
+ </port>
112
+ <port id="1" precision="FP32">
113
+ <dim>4</dim>
114
+ </port>
115
+ <port id="2" precision="FP32">
116
+ <dim>4</dim>
117
+ </port>
118
+ </input>
119
+ <output>
120
+ <port id="3" precision="FP32" names="47,input.3">
121
+ <dim>-1</dim>
122
+ <dim>4</dim>
123
+ <dim>-1</dim>
124
+ <dim>-1</dim>
125
+ </port>
126
+ </output>
127
+ </layer>
128
+ <layer id="8" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish" type="Swish" version="opset4">
129
+ <input>
130
+ <port id="0" precision="FP32">
131
+ <dim>-1</dim>
132
+ <dim>4</dim>
133
+ <dim>-1</dim>
134
+ <dim>-1</dim>
135
+ </port>
136
+ </input>
137
+ <output>
138
+ <port id="1" precision="FP32" names="48">
139
+ <dim>-1</dim>
140
+ <dim>4</dim>
141
+ <dim>-1</dim>
142
+ <dim>-1</dim>
143
+ </port>
144
+ </output>
145
+ </layer>
146
+ <layer id="9" name="self.encoder.down_blocks.0.resnets.0.conv1.weight" type="Const" version="opset1">
147
+ <data element_type="f32" shape="4, 4, 3, 3" offset="480" size="576" />
148
+ <output>
149
+ <port id="0" precision="FP32" names="self.encoder.down_blocks.0.resnets.0.conv1.weight">
150
+ <dim>4</dim>
151
+ <dim>4</dim>
152
+ <dim>3</dim>
153
+ <dim>3</dim>
154
+ </port>
155
+ </output>
156
+ </layer>
157
+ <layer id="10" name="__module.encoder.down_blocks.0.resnets.0.conv1/aten::_convolution/Convolution" type="Convolution" version="opset1">
158
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
159
+ <input>
160
+ <port id="0" precision="FP32">
161
+ <dim>-1</dim>
162
+ <dim>4</dim>
163
+ <dim>-1</dim>
164
+ <dim>-1</dim>
165
+ </port>
166
+ <port id="1" precision="FP32">
167
+ <dim>4</dim>
168
+ <dim>4</dim>
169
+ <dim>3</dim>
170
+ <dim>3</dim>
171
+ </port>
172
+ </input>
173
+ <output>
174
+ <port id="2" precision="FP32">
175
+ <dim>-1</dim>
176
+ <dim>4</dim>
177
+ <dim>-1</dim>
178
+ <dim>-1</dim>
179
+ </port>
180
+ </output>
181
+ </layer>
182
+ <layer id="11" name="__module.encoder.down_blocks.0.resnets.0.conv1/aten::_convolution/Reshape" type="Const" version="opset1">
183
+ <data element_type="f32" shape="1, 4, 1, 1" offset="1056" size="16" />
184
+ <output>
185
+ <port id="0" precision="FP32">
186
+ <dim>1</dim>
187
+ <dim>4</dim>
188
+ <dim>1</dim>
189
+ <dim>1</dim>
190
+ </port>
191
+ </output>
192
+ </layer>
193
+ <layer id="12" name="__module.encoder.down_blocks.0.resnets.0.conv1/aten::_convolution/Add" type="Add" version="opset1">
194
+ <data auto_broadcast="numpy" />
195
+ <input>
196
+ <port id="0" precision="FP32">
197
+ <dim>-1</dim>
198
+ <dim>4</dim>
199
+ <dim>-1</dim>
200
+ <dim>-1</dim>
201
+ </port>
202
+ <port id="1" precision="FP32">
203
+ <dim>1</dim>
204
+ <dim>4</dim>
205
+ <dim>1</dim>
206
+ <dim>1</dim>
207
+ </port>
208
+ </input>
209
+ <output>
210
+ <port id="2" precision="FP32" names="55,input.5">
211
+ <dim>-1</dim>
212
+ <dim>4</dim>
213
+ <dim>-1</dim>
214
+ <dim>-1</dim>
215
+ </port>
216
+ </output>
217
+ </layer>
218
+ <layer id="13" name="self.encoder.down_blocks.0.resnets.0.norm2.weight" type="Const" version="opset1">
219
+ <data element_type="f32" shape="4" offset="448" size="16" />
220
+ <output>
221
+ <port id="0" precision="FP32" names="self.encoder.down_blocks.0.resnets.0.norm2.weight">
222
+ <dim>4</dim>
223
+ </port>
224
+ </output>
225
+ </layer>
226
+ <layer id="14" name="self.encoder.down_blocks.0.resnets.0.norm2.bias" type="Const" version="opset1">
227
+ <data element_type="f32" shape="4" offset="464" size="16" />
228
+ <output>
229
+ <port id="0" precision="FP32" names="self.encoder.down_blocks.0.resnets.0.norm2.bias">
230
+ <dim>4</dim>
231
+ </port>
232
+ </output>
233
+ </layer>
234
+ <layer id="15" name="__module.encoder.down_blocks.0.resnets.0.norm2/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
235
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
236
+ <input>
237
+ <port id="0" precision="FP32">
238
+ <dim>-1</dim>
239
+ <dim>4</dim>
240
+ <dim>-1</dim>
241
+ <dim>-1</dim>
242
+ </port>
243
+ <port id="1" precision="FP32">
244
+ <dim>4</dim>
245
+ </port>
246
+ <port id="2" precision="FP32">
247
+ <dim>4</dim>
248
+ </port>
249
+ </input>
250
+ <output>
251
+ <port id="3" precision="FP32" names="58,input.7">
252
+ <dim>-1</dim>
253
+ <dim>4</dim>
254
+ <dim>-1</dim>
255
+ <dim>-1</dim>
256
+ </port>
257
+ </output>
258
+ </layer>
259
+ <layer id="16" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_1" type="Swish" version="opset4">
260
+ <input>
261
+ <port id="0" precision="FP32">
262
+ <dim>-1</dim>
263
+ <dim>4</dim>
264
+ <dim>-1</dim>
265
+ <dim>-1</dim>
266
+ </port>
267
+ </input>
268
+ <output>
269
+ <port id="1" precision="FP32" names="59,input.9">
270
+ <dim>-1</dim>
271
+ <dim>4</dim>
272
+ <dim>-1</dim>
273
+ <dim>-1</dim>
274
+ </port>
275
+ </output>
276
+ </layer>
277
+ <layer id="17" name="self.encoder.down_blocks.0.resnets.0.conv2.weight" type="Const" version="opset1">
278
+ <data element_type="f32" shape="4, 4, 3, 3" offset="1072" size="576" />
279
+ <output>
280
+ <port id="0" precision="FP32" names="self.encoder.down_blocks.0.resnets.0.conv2.weight">
281
+ <dim>4</dim>
282
+ <dim>4</dim>
283
+ <dim>3</dim>
284
+ <dim>3</dim>
285
+ </port>
286
+ </output>
287
+ </layer>
288
+ <layer id="18" name="__module.encoder.down_blocks.0.resnets.0.conv2/aten::_convolution/Convolution" type="Convolution" version="opset1">
289
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
290
+ <input>
291
+ <port id="0" precision="FP32">
292
+ <dim>-1</dim>
293
+ <dim>4</dim>
294
+ <dim>-1</dim>
295
+ <dim>-1</dim>
296
+ </port>
297
+ <port id="1" precision="FP32">
298
+ <dim>4</dim>
299
+ <dim>4</dim>
300
+ <dim>3</dim>
301
+ <dim>3</dim>
302
+ </port>
303
+ </input>
304
+ <output>
305
+ <port id="2" precision="FP32">
306
+ <dim>-1</dim>
307
+ <dim>4</dim>
308
+ <dim>-1</dim>
309
+ <dim>-1</dim>
310
+ </port>
311
+ </output>
312
+ </layer>
313
+ <layer id="19" name="__module.encoder.down_blocks.0.resnets.0.conv2/aten::_convolution/Reshape" type="Const" version="opset1">
314
+ <data element_type="f32" shape="1, 4, 1, 1" offset="1648" size="16" />
315
+ <output>
316
+ <port id="0" precision="FP32">
317
+ <dim>1</dim>
318
+ <dim>4</dim>
319
+ <dim>1</dim>
320
+ <dim>1</dim>
321
+ </port>
322
+ </output>
323
+ </layer>
324
+ <layer id="20" name="__module.encoder.down_blocks.0.resnets.0.conv2/aten::_convolution/Add" type="Add" version="opset1">
325
+ <data auto_broadcast="numpy" />
326
+ <input>
327
+ <port id="0" precision="FP32">
328
+ <dim>-1</dim>
329
+ <dim>4</dim>
330
+ <dim>-1</dim>
331
+ <dim>-1</dim>
332
+ </port>
333
+ <port id="1" precision="FP32">
334
+ <dim>1</dim>
335
+ <dim>4</dim>
336
+ <dim>1</dim>
337
+ <dim>1</dim>
338
+ </port>
339
+ </input>
340
+ <output>
341
+ <port id="2" precision="FP32" names="67,hidden_states.1">
342
+ <dim>-1</dim>
343
+ <dim>4</dim>
344
+ <dim>-1</dim>
345
+ <dim>-1</dim>
346
+ </port>
347
+ </output>
348
+ </layer>
349
+ <layer id="21" name="__module.encoder.down_blocks.0.resnets.0/aten::add/Add" type="Add" version="opset1">
350
+ <data auto_broadcast="numpy" />
351
+ <input>
352
+ <port id="0" precision="FP32">
353
+ <dim>-1</dim>
354
+ <dim>4</dim>
355
+ <dim>-1</dim>
356
+ <dim>-1</dim>
357
+ </port>
358
+ <port id="1" precision="FP32">
359
+ <dim>-1</dim>
360
+ <dim>4</dim>
361
+ <dim>-1</dim>
362
+ <dim>-1</dim>
363
+ </port>
364
+ </input>
365
+ <output>
366
+ <port id="2" precision="FP32" names="68,69,input.11">
367
+ <dim>-1</dim>
368
+ <dim>4</dim>
369
+ <dim>-1</dim>
370
+ <dim>-1</dim>
371
+ </port>
372
+ </output>
373
+ </layer>
374
+ <layer id="22" name="self.encoder.mid_block.resnets.0.norm1.weight" type="Const" version="opset1">
375
+ <data element_type="f32" shape="4" offset="448" size="16" />
376
+ <output>
377
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.0.norm1.weight">
378
+ <dim>4</dim>
379
+ </port>
380
+ </output>
381
+ </layer>
382
+ <layer id="23" name="self.encoder.mid_block.resnets.0.norm1.bias" type="Const" version="opset1">
383
+ <data element_type="f32" shape="4" offset="464" size="16" />
384
+ <output>
385
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.0.norm1.bias">
386
+ <dim>4</dim>
387
+ </port>
388
+ </output>
389
+ </layer>
390
+ <layer id="24" name="__module.encoder.mid_block.resnets.0.norm1/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
391
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
392
+ <input>
393
+ <port id="0" precision="FP32">
394
+ <dim>-1</dim>
395
+ <dim>4</dim>
396
+ <dim>-1</dim>
397
+ <dim>-1</dim>
398
+ </port>
399
+ <port id="1" precision="FP32">
400
+ <dim>4</dim>
401
+ </port>
402
+ <port id="2" precision="FP32">
403
+ <dim>4</dim>
404
+ </port>
405
+ </input>
406
+ <output>
407
+ <port id="3" precision="FP32" names="83,input.13">
408
+ <dim>-1</dim>
409
+ <dim>4</dim>
410
+ <dim>-1</dim>
411
+ <dim>-1</dim>
412
+ </port>
413
+ </output>
414
+ </layer>
415
+ <layer id="25" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_2" type="Swish" version="opset4">
416
+ <input>
417
+ <port id="0" precision="FP32">
418
+ <dim>-1</dim>
419
+ <dim>4</dim>
420
+ <dim>-1</dim>
421
+ <dim>-1</dim>
422
+ </port>
423
+ </input>
424
+ <output>
425
+ <port id="1" precision="FP32" names="84">
426
+ <dim>-1</dim>
427
+ <dim>4</dim>
428
+ <dim>-1</dim>
429
+ <dim>-1</dim>
430
+ </port>
431
+ </output>
432
+ </layer>
433
+ <layer id="26" name="self.encoder.mid_block.resnets.0.conv1.weight" type="Const" version="opset1">
434
+ <data element_type="f32" shape="4, 4, 3, 3" offset="1664" size="576" />
435
+ <output>
436
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.0.conv1.weight">
437
+ <dim>4</dim>
438
+ <dim>4</dim>
439
+ <dim>3</dim>
440
+ <dim>3</dim>
441
+ </port>
442
+ </output>
443
+ </layer>
444
+ <layer id="27" name="__module.encoder.mid_block.resnets.0.conv1/aten::_convolution/Convolution" type="Convolution" version="opset1">
445
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
446
+ <input>
447
+ <port id="0" precision="FP32">
448
+ <dim>-1</dim>
449
+ <dim>4</dim>
450
+ <dim>-1</dim>
451
+ <dim>-1</dim>
452
+ </port>
453
+ <port id="1" precision="FP32">
454
+ <dim>4</dim>
455
+ <dim>4</dim>
456
+ <dim>3</dim>
457
+ <dim>3</dim>
458
+ </port>
459
+ </input>
460
+ <output>
461
+ <port id="2" precision="FP32">
462
+ <dim>-1</dim>
463
+ <dim>4</dim>
464
+ <dim>-1</dim>
465
+ <dim>-1</dim>
466
+ </port>
467
+ </output>
468
+ </layer>
469
+ <layer id="28" name="__module.encoder.mid_block.resnets.0.conv1/aten::_convolution/Reshape" type="Const" version="opset1">
470
+ <data element_type="f32" shape="1, 4, 1, 1" offset="2240" size="16" />
471
+ <output>
472
+ <port id="0" precision="FP32">
473
+ <dim>1</dim>
474
+ <dim>4</dim>
475
+ <dim>1</dim>
476
+ <dim>1</dim>
477
+ </port>
478
+ </output>
479
+ </layer>
480
+ <layer id="29" name="__module.encoder.mid_block.resnets.0.conv1/aten::_convolution/Add" type="Add" version="opset1">
481
+ <data auto_broadcast="numpy" />
482
+ <input>
483
+ <port id="0" precision="FP32">
484
+ <dim>-1</dim>
485
+ <dim>4</dim>
486
+ <dim>-1</dim>
487
+ <dim>-1</dim>
488
+ </port>
489
+ <port id="1" precision="FP32">
490
+ <dim>1</dim>
491
+ <dim>4</dim>
492
+ <dim>1</dim>
493
+ <dim>1</dim>
494
+ </port>
495
+ </input>
496
+ <output>
497
+ <port id="2" precision="FP32" names="91,input.15">
498
+ <dim>-1</dim>
499
+ <dim>4</dim>
500
+ <dim>-1</dim>
501
+ <dim>-1</dim>
502
+ </port>
503
+ </output>
504
+ </layer>
505
+ <layer id="30" name="self.encoder.mid_block.resnets.0.norm2.weight" type="Const" version="opset1">
506
+ <data element_type="f32" shape="4" offset="448" size="16" />
507
+ <output>
508
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.0.norm2.weight">
509
+ <dim>4</dim>
510
+ </port>
511
+ </output>
512
+ </layer>
513
+ <layer id="31" name="self.encoder.mid_block.resnets.0.norm2.bias" type="Const" version="opset1">
514
+ <data element_type="f32" shape="4" offset="464" size="16" />
515
+ <output>
516
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.0.norm2.bias">
517
+ <dim>4</dim>
518
+ </port>
519
+ </output>
520
+ </layer>
521
+ <layer id="32" name="__module.encoder.mid_block.resnets.0.norm2/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
522
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
523
+ <input>
524
+ <port id="0" precision="FP32">
525
+ <dim>-1</dim>
526
+ <dim>4</dim>
527
+ <dim>-1</dim>
528
+ <dim>-1</dim>
529
+ </port>
530
+ <port id="1" precision="FP32">
531
+ <dim>4</dim>
532
+ </port>
533
+ <port id="2" precision="FP32">
534
+ <dim>4</dim>
535
+ </port>
536
+ </input>
537
+ <output>
538
+ <port id="3" precision="FP32" names="94,input.17">
539
+ <dim>-1</dim>
540
+ <dim>4</dim>
541
+ <dim>-1</dim>
542
+ <dim>-1</dim>
543
+ </port>
544
+ </output>
545
+ </layer>
546
+ <layer id="33" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_3" type="Swish" version="opset4">
547
+ <input>
548
+ <port id="0" precision="FP32">
549
+ <dim>-1</dim>
550
+ <dim>4</dim>
551
+ <dim>-1</dim>
552
+ <dim>-1</dim>
553
+ </port>
554
+ </input>
555
+ <output>
556
+ <port id="1" precision="FP32" names="95,input.19">
557
+ <dim>-1</dim>
558
+ <dim>4</dim>
559
+ <dim>-1</dim>
560
+ <dim>-1</dim>
561
+ </port>
562
+ </output>
563
+ </layer>
564
+ <layer id="34" name="self.encoder.mid_block.resnets.0.conv2.weight" type="Const" version="opset1">
565
+ <data element_type="f32" shape="4, 4, 3, 3" offset="2256" size="576" />
566
+ <output>
567
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.0.conv2.weight">
568
+ <dim>4</dim>
569
+ <dim>4</dim>
570
+ <dim>3</dim>
571
+ <dim>3</dim>
572
+ </port>
573
+ </output>
574
+ </layer>
575
+ <layer id="35" name="__module.encoder.mid_block.resnets.0.conv2/aten::_convolution/Convolution" type="Convolution" version="opset1">
576
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
577
+ <input>
578
+ <port id="0" precision="FP32">
579
+ <dim>-1</dim>
580
+ <dim>4</dim>
581
+ <dim>-1</dim>
582
+ <dim>-1</dim>
583
+ </port>
584
+ <port id="1" precision="FP32">
585
+ <dim>4</dim>
586
+ <dim>4</dim>
587
+ <dim>3</dim>
588
+ <dim>3</dim>
589
+ </port>
590
+ </input>
591
+ <output>
592
+ <port id="2" precision="FP32">
593
+ <dim>-1</dim>
594
+ <dim>4</dim>
595
+ <dim>-1</dim>
596
+ <dim>-1</dim>
597
+ </port>
598
+ </output>
599
+ </layer>
600
+ <layer id="36" name="__module.encoder.mid_block.resnets.0.conv2/aten::_convolution/Reshape" type="Const" version="opset1">
601
+ <data element_type="f32" shape="1, 4, 1, 1" offset="2832" size="16" />
602
+ <output>
603
+ <port id="0" precision="FP32">
604
+ <dim>1</dim>
605
+ <dim>4</dim>
606
+ <dim>1</dim>
607
+ <dim>1</dim>
608
+ </port>
609
+ </output>
610
+ </layer>
611
+ <layer id="37" name="__module.encoder.mid_block.resnets.0.conv2/aten::_convolution/Add" type="Add" version="opset1">
612
+ <data auto_broadcast="numpy" />
613
+ <input>
614
+ <port id="0" precision="FP32">
615
+ <dim>-1</dim>
616
+ <dim>4</dim>
617
+ <dim>-1</dim>
618
+ <dim>-1</dim>
619
+ </port>
620
+ <port id="1" precision="FP32">
621
+ <dim>1</dim>
622
+ <dim>4</dim>
623
+ <dim>1</dim>
624
+ <dim>1</dim>
625
+ </port>
626
+ </input>
627
+ <output>
628
+ <port id="2" precision="FP32" names="103,hidden_states.3">
629
+ <dim>-1</dim>
630
+ <dim>4</dim>
631
+ <dim>-1</dim>
632
+ <dim>-1</dim>
633
+ </port>
634
+ </output>
635
+ </layer>
636
+ <layer id="38" name="__module.encoder.mid_block.resnets.0/aten::add/Add" type="Add" version="opset1">
637
+ <data auto_broadcast="numpy" />
638
+ <input>
639
+ <port id="0" precision="FP32">
640
+ <dim>-1</dim>
641
+ <dim>4</dim>
642
+ <dim>-1</dim>
643
+ <dim>-1</dim>
644
+ </port>
645
+ <port id="1" precision="FP32">
646
+ <dim>-1</dim>
647
+ <dim>4</dim>
648
+ <dim>-1</dim>
649
+ <dim>-1</dim>
650
+ </port>
651
+ </input>
652
+ <output>
653
+ <port id="2" precision="FP32" names="104,105,hidden_states.5">
654
+ <dim>-1</dim>
655
+ <dim>4</dim>
656
+ <dim>-1</dim>
657
+ <dim>-1</dim>
658
+ </port>
659
+ </output>
660
+ </layer>
661
+ <layer id="39" name="Constant_24885" type="Const" version="opset1">
662
+ <data element_type="i64" shape="3" offset="2848" size="24" />
663
+ <output>
664
+ <port id="0" precision="I64">
665
+ <dim>3</dim>
666
+ </port>
667
+ </output>
668
+ </layer>
669
+ <layer id="40" name="__module.encoder.mid_block.attentions.0/aten::view/Reshape" type="Reshape" version="opset1">
670
+ <data special_zero="true" />
671
+ <input>
672
+ <port id="0" precision="FP32">
673
+ <dim>-1</dim>
674
+ <dim>4</dim>
675
+ <dim>-1</dim>
676
+ <dim>-1</dim>
677
+ </port>
678
+ <port id="1" precision="I64">
679
+ <dim>3</dim>
680
+ </port>
681
+ </input>
682
+ <output>
683
+ <port id="2" precision="FP32" names="121">
684
+ <dim>-1</dim>
685
+ <dim>4</dim>
686
+ <dim>-1</dim>
687
+ </port>
688
+ </output>
689
+ </layer>
690
+ <layer id="41" name="__module.encoder.mid_block.attentions.0/aten::transpose/Constant" type="Const" version="opset1">
691
+ <data element_type="i32" shape="3" offset="2872" size="12" />
692
+ <output>
693
+ <port id="0" precision="I32">
694
+ <dim>3</dim>
695
+ </port>
696
+ </output>
697
+ </layer>
698
+ <layer id="42" name="__module.encoder.mid_block.attentions.0/aten::transpose/Transpose" type="Transpose" version="opset1">
699
+ <input>
700
+ <port id="0" precision="FP32">
701
+ <dim>-1</dim>
702
+ <dim>4</dim>
703
+ <dim>-1</dim>
704
+ </port>
705
+ <port id="1" precision="I32">
706
+ <dim>3</dim>
707
+ </port>
708
+ </input>
709
+ <output>
710
+ <port id="2" precision="FP32" names="122,hidden_states.7">
711
+ <dim>-1</dim>
712
+ <dim>-1</dim>
713
+ <dim>4</dim>
714
+ </port>
715
+ </output>
716
+ </layer>
717
+ <layer id="43" name="__module.encoder.mid_block.attentions.0/aten::transpose/Constant_1" type="Const" version="opset1">
718
+ <data element_type="i32" shape="3" offset="2872" size="12" />
719
+ <output>
720
+ <port id="0" precision="I32">
721
+ <dim>3</dim>
722
+ </port>
723
+ </output>
724
+ </layer>
725
+ <layer id="44" name="__module.encoder.mid_block.attentions.0/aten::transpose/Transpose_1" type="Transpose" version="opset1">
726
+ <input>
727
+ <port id="0" precision="FP32">
728
+ <dim>-1</dim>
729
+ <dim>-1</dim>
730
+ <dim>4</dim>
731
+ </port>
732
+ <port id="1" precision="I32">
733
+ <dim>3</dim>
734
+ </port>
735
+ </input>
736
+ <output>
737
+ <port id="2" precision="FP32" names="124,input.21">
738
+ <dim>-1</dim>
739
+ <dim>4</dim>
740
+ <dim>-1</dim>
741
+ </port>
742
+ </output>
743
+ </layer>
744
+ <layer id="45" name="self.encoder.mid_block.attentions.0.group_norm.weight" type="Const" version="opset1">
745
+ <data element_type="f32" shape="4" offset="448" size="16" />
746
+ <output>
747
+ <port id="0" precision="FP32" names="self.encoder.mid_block.attentions.0.group_norm.weight">
748
+ <dim>4</dim>
749
+ </port>
750
+ </output>
751
+ </layer>
752
+ <layer id="46" name="self.encoder.mid_block.attentions.0.group_norm.bias" type="Const" version="opset1">
753
+ <data element_type="f32" shape="4" offset="464" size="16" />
754
+ <output>
755
+ <port id="0" precision="FP32" names="self.encoder.mid_block.attentions.0.group_norm.bias">
756
+ <dim>4</dim>
757
+ </port>
758
+ </output>
759
+ </layer>
760
+ <layer id="47" name="__module.encoder.mid_block.attentions.0.group_norm/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
761
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
762
+ <input>
763
+ <port id="0" precision="FP32">
764
+ <dim>-1</dim>
765
+ <dim>4</dim>
766
+ <dim>-1</dim>
767
+ </port>
768
+ <port id="1" precision="FP32">
769
+ <dim>4</dim>
770
+ </port>
771
+ <port id="2" precision="FP32">
772
+ <dim>4</dim>
773
+ </port>
774
+ </input>
775
+ <output>
776
+ <port id="3" precision="FP32" names="127">
777
+ <dim>-1</dim>
778
+ <dim>4</dim>
779
+ <dim>-1</dim>
780
+ </port>
781
+ </output>
782
+ </layer>
783
+ <layer id="48" name="self.encoder.mid_block.attentions.0.to_q.weight" type="Const" version="opset1">
784
+ <data element_type="f32" shape="4, 4" offset="2884" size="64" />
785
+ <output>
786
+ <port id="0" precision="FP32" names="self.encoder.mid_block.attentions.0.to_q.weight">
787
+ <dim>4</dim>
788
+ <dim>4</dim>
789
+ </port>
790
+ </output>
791
+ </layer>
792
+ <layer id="49" name="__module.encoder.mid_block.attentions.0.to_q/aten::linear/MatMul" type="MatMul" version="opset1">
793
+ <data transpose_a="true" transpose_b="true" />
794
+ <input>
795
+ <port id="0" precision="FP32">
796
+ <dim>-1</dim>
797
+ <dim>4</dim>
798
+ <dim>-1</dim>
799
+ </port>
800
+ <port id="1" precision="FP32">
801
+ <dim>4</dim>
802
+ <dim>4</dim>
803
+ </port>
804
+ </input>
805
+ <output>
806
+ <port id="2" precision="FP32">
807
+ <dim>-1</dim>
808
+ <dim>-1</dim>
809
+ <dim>4</dim>
810
+ </port>
811
+ </output>
812
+ </layer>
813
+ <layer id="50" name="Constant_24770" type="Const" version="opset1">
814
+ <data element_type="f32" shape="1, 1, 4" offset="2948" size="16" />
815
+ <output>
816
+ <port id="0" precision="FP32">
817
+ <dim>1</dim>
818
+ <dim>1</dim>
819
+ <dim>4</dim>
820
+ </port>
821
+ </output>
822
+ </layer>
823
+ <layer id="51" name="__module.encoder.mid_block.attentions.0.to_q/aten::linear/Add" type="Add" version="opset1">
824
+ <data auto_broadcast="numpy" />
825
+ <input>
826
+ <port id="0" precision="FP32">
827
+ <dim>-1</dim>
828
+ <dim>-1</dim>
829
+ <dim>4</dim>
830
+ </port>
831
+ <port id="1" precision="FP32">
832
+ <dim>1</dim>
833
+ <dim>1</dim>
834
+ <dim>4</dim>
835
+ </port>
836
+ </input>
837
+ <output>
838
+ <port id="2" precision="FP32" names="131,query">
839
+ <dim>-1</dim>
840
+ <dim>-1</dim>
841
+ <dim>4</dim>
842
+ </port>
843
+ </output>
844
+ </layer>
845
+ <layer id="52" name="Constant_24886" type="Const" version="opset1">
846
+ <data element_type="i64" shape="4" offset="2964" size="32" />
847
+ <output>
848
+ <port id="0" precision="I64">
849
+ <dim>4</dim>
850
+ </port>
851
+ </output>
852
+ </layer>
853
+ <layer id="53" name="__module.encoder.mid_block.attentions.0/aten::view/Reshape_1" type="Reshape" version="opset1">
854
+ <data special_zero="true" />
855
+ <input>
856
+ <port id="0" precision="FP32">
857
+ <dim>-1</dim>
858
+ <dim>-1</dim>
859
+ <dim>4</dim>
860
+ </port>
861
+ <port id="1" precision="I64">
862
+ <dim>4</dim>
863
+ </port>
864
+ </input>
865
+ <output>
866
+ <port id="2" precision="FP32" names="145">
867
+ <dim>-1</dim>
868
+ <dim>-1</dim>
869
+ <dim>1</dim>
870
+ <dim>4</dim>
871
+ </port>
872
+ </output>
873
+ </layer>
874
+ <layer id="54" name="Constant_24717" type="Const" version="opset1">
875
+ <data element_type="i64" shape="4" offset="2996" size="32" />
876
+ <output>
877
+ <port id="0" precision="I64">
878
+ <dim>4</dim>
879
+ </port>
880
+ </output>
881
+ </layer>
882
+ <layer id="55" name="__module.encoder.mid_block.attentions.0/aten::transpose/Transpose_3" type="Reshape" version="opset1">
883
+ <data special_zero="true" />
884
+ <input>
885
+ <port id="0" precision="FP32">
886
+ <dim>-1</dim>
887
+ <dim>-1</dim>
888
+ <dim>1</dim>
889
+ <dim>4</dim>
890
+ </port>
891
+ <port id="1" precision="I64">
892
+ <dim>4</dim>
893
+ </port>
894
+ </input>
895
+ <output>
896
+ <port id="2" precision="FP32" names="146">
897
+ <dim>-1</dim>
898
+ <dim>1</dim>
899
+ <dim>-1</dim>
900
+ <dim>4</dim>
901
+ </port>
902
+ </output>
903
+ </layer>
904
+ <layer id="56" name="self.encoder.mid_block.attentions.0.to_k.weight" type="Const" version="opset1">
905
+ <data element_type="f32" shape="4, 4" offset="3028" size="64" />
906
+ <output>
907
+ <port id="0" precision="FP32" names="self.encoder.mid_block.attentions.0.to_k.weight">
908
+ <dim>4</dim>
909
+ <dim>4</dim>
910
+ </port>
911
+ </output>
912
+ </layer>
913
+ <layer id="57" name="__module.encoder.mid_block.attentions.0.to_k/aten::linear/MatMul" type="MatMul" version="opset1">
914
+ <data transpose_a="true" transpose_b="true" />
915
+ <input>
916
+ <port id="0" precision="FP32">
917
+ <dim>-1</dim>
918
+ <dim>4</dim>
919
+ <dim>-1</dim>
920
+ </port>
921
+ <port id="1" precision="FP32">
922
+ <dim>4</dim>
923
+ <dim>4</dim>
924
+ </port>
925
+ </input>
926
+ <output>
927
+ <port id="2" precision="FP32">
928
+ <dim>-1</dim>
929
+ <dim>-1</dim>
930
+ <dim>4</dim>
931
+ </port>
932
+ </output>
933
+ </layer>
934
+ <layer id="58" name="Constant_24771" type="Const" version="opset1">
935
+ <data element_type="f32" shape="1, 1, 4" offset="3092" size="16" />
936
+ <output>
937
+ <port id="0" precision="FP32">
938
+ <dim>1</dim>
939
+ <dim>1</dim>
940
+ <dim>4</dim>
941
+ </port>
942
+ </output>
943
+ </layer>
944
+ <layer id="59" name="__module.encoder.mid_block.attentions.0.to_k/aten::linear/Add" type="Add" version="opset1">
945
+ <data auto_broadcast="numpy" />
946
+ <input>
947
+ <port id="0" precision="FP32">
948
+ <dim>-1</dim>
949
+ <dim>-1</dim>
950
+ <dim>4</dim>
951
+ </port>
952
+ <port id="1" precision="FP32">
953
+ <dim>1</dim>
954
+ <dim>1</dim>
955
+ <dim>4</dim>
956
+ </port>
957
+ </input>
958
+ <output>
959
+ <port id="2" precision="FP32" names="134,key">
960
+ <dim>-1</dim>
961
+ <dim>-1</dim>
962
+ <dim>4</dim>
963
+ </port>
964
+ </output>
965
+ </layer>
966
+ <layer id="60" name="Constant_24887" type="Const" version="opset1">
967
+ <data element_type="i64" shape="4" offset="2964" size="32" />
968
+ <output>
969
+ <port id="0" precision="I64">
970
+ <dim>4</dim>
971
+ </port>
972
+ </output>
973
+ </layer>
974
+ <layer id="61" name="__module.encoder.mid_block.attentions.0/aten::view/Reshape_2" type="Reshape" version="opset1">
975
+ <data special_zero="true" />
976
+ <input>
977
+ <port id="0" precision="FP32">
978
+ <dim>-1</dim>
979
+ <dim>-1</dim>
980
+ <dim>4</dim>
981
+ </port>
982
+ <port id="1" precision="I64">
983
+ <dim>4</dim>
984
+ </port>
985
+ </input>
986
+ <output>
987
+ <port id="2" precision="FP32" names="148">
988
+ <dim>-1</dim>
989
+ <dim>-1</dim>
990
+ <dim>1</dim>
991
+ <dim>4</dim>
992
+ </port>
993
+ </output>
994
+ </layer>
995
+ <layer id="62" name="Constant_24721" type="Const" version="opset1">
996
+ <data element_type="i64" shape="4" offset="2996" size="32" />
997
+ <output>
998
+ <port id="0" precision="I64">
999
+ <dim>4</dim>
1000
+ </port>
1001
+ </output>
1002
+ </layer>
1003
+ <layer id="63" name="__module.encoder.mid_block.attentions.0/aten::transpose/Transpose_4" type="Reshape" version="opset1">
1004
+ <data special_zero="true" />
1005
+ <input>
1006
+ <port id="0" precision="FP32">
1007
+ <dim>-1</dim>
1008
+ <dim>-1</dim>
1009
+ <dim>1</dim>
1010
+ <dim>4</dim>
1011
+ </port>
1012
+ <port id="1" precision="I64">
1013
+ <dim>4</dim>
1014
+ </port>
1015
+ </input>
1016
+ <output>
1017
+ <port id="2" precision="FP32" names="149">
1018
+ <dim>-1</dim>
1019
+ <dim>1</dim>
1020
+ <dim>-1</dim>
1021
+ <dim>4</dim>
1022
+ </port>
1023
+ </output>
1024
+ </layer>
1025
+ <layer id="64" name="self.encoder.mid_block.attentions.0.to_v.weight" type="Const" version="opset1">
1026
+ <data element_type="f32" shape="4, 4" offset="3108" size="64" />
1027
+ <output>
1028
+ <port id="0" precision="FP32" names="self.encoder.mid_block.attentions.0.to_v.weight">
1029
+ <dim>4</dim>
1030
+ <dim>4</dim>
1031
+ </port>
1032
+ </output>
1033
+ </layer>
1034
+ <layer id="65" name="__module.encoder.mid_block.attentions.0.to_v/aten::linear/MatMul" type="MatMul" version="opset1">
1035
+ <data transpose_a="true" transpose_b="true" />
1036
+ <input>
1037
+ <port id="0" precision="FP32">
1038
+ <dim>-1</dim>
1039
+ <dim>4</dim>
1040
+ <dim>-1</dim>
1041
+ </port>
1042
+ <port id="1" precision="FP32">
1043
+ <dim>4</dim>
1044
+ <dim>4</dim>
1045
+ </port>
1046
+ </input>
1047
+ <output>
1048
+ <port id="2" precision="FP32">
1049
+ <dim>-1</dim>
1050
+ <dim>-1</dim>
1051
+ <dim>4</dim>
1052
+ </port>
1053
+ </output>
1054
+ </layer>
1055
+ <layer id="66" name="Constant_24772" type="Const" version="opset1">
1056
+ <data element_type="f32" shape="1, 1, 4" offset="3172" size="16" />
1057
+ <output>
1058
+ <port id="0" precision="FP32">
1059
+ <dim>1</dim>
1060
+ <dim>1</dim>
1061
+ <dim>4</dim>
1062
+ </port>
1063
+ </output>
1064
+ </layer>
1065
+ <layer id="67" name="__module.encoder.mid_block.attentions.0.to_v/aten::linear/Add" type="Add" version="opset1">
1066
+ <data auto_broadcast="numpy" />
1067
+ <input>
1068
+ <port id="0" precision="FP32">
1069
+ <dim>-1</dim>
1070
+ <dim>-1</dim>
1071
+ <dim>4</dim>
1072
+ </port>
1073
+ <port id="1" precision="FP32">
1074
+ <dim>1</dim>
1075
+ <dim>1</dim>
1076
+ <dim>4</dim>
1077
+ </port>
1078
+ </input>
1079
+ <output>
1080
+ <port id="2" precision="FP32" names="137,value">
1081
+ <dim>-1</dim>
1082
+ <dim>-1</dim>
1083
+ <dim>4</dim>
1084
+ </port>
1085
+ </output>
1086
+ </layer>
1087
+ <layer id="68" name="Constant_24888" type="Const" version="opset1">
1088
+ <data element_type="i64" shape="4" offset="2964" size="32" />
1089
+ <output>
1090
+ <port id="0" precision="I64">
1091
+ <dim>4</dim>
1092
+ </port>
1093
+ </output>
1094
+ </layer>
1095
+ <layer id="69" name="__module.encoder.mid_block.attentions.0/aten::view/Reshape_3" type="Reshape" version="opset1">
1096
+ <data special_zero="true" />
1097
+ <input>
1098
+ <port id="0" precision="FP32">
1099
+ <dim>-1</dim>
1100
+ <dim>-1</dim>
1101
+ <dim>4</dim>
1102
+ </port>
1103
+ <port id="1" precision="I64">
1104
+ <dim>4</dim>
1105
+ </port>
1106
+ </input>
1107
+ <output>
1108
+ <port id="2" precision="FP32" names="151">
1109
+ <dim>-1</dim>
1110
+ <dim>-1</dim>
1111
+ <dim>1</dim>
1112
+ <dim>4</dim>
1113
+ </port>
1114
+ </output>
1115
+ </layer>
1116
+ <layer id="70" name="Constant_24725" type="Const" version="opset1">
1117
+ <data element_type="i64" shape="4" offset="2996" size="32" />
1118
+ <output>
1119
+ <port id="0" precision="I64">
1120
+ <dim>4</dim>
1121
+ </port>
1122
+ </output>
1123
+ </layer>
1124
+ <layer id="71" name="__module.encoder.mid_block.attentions.0/aten::transpose/Transpose_5" type="Reshape" version="opset1">
1125
+ <data special_zero="true" />
1126
+ <input>
1127
+ <port id="0" precision="FP32">
1128
+ <dim>-1</dim>
1129
+ <dim>-1</dim>
1130
+ <dim>1</dim>
1131
+ <dim>4</dim>
1132
+ </port>
1133
+ <port id="1" precision="I64">
1134
+ <dim>4</dim>
1135
+ </port>
1136
+ </input>
1137
+ <output>
1138
+ <port id="2" precision="FP32" names="152">
1139
+ <dim>-1</dim>
1140
+ <dim>1</dim>
1141
+ <dim>-1</dim>
1142
+ <dim>4</dim>
1143
+ </port>
1144
+ </output>
1145
+ </layer>
1146
+ <layer id="72" name="__module.encoder.mid_block.attentions.0/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
1147
+ <data causal="false" />
1148
+ <input>
1149
+ <port id="0" precision="FP32">
1150
+ <dim>-1</dim>
1151
+ <dim>1</dim>
1152
+ <dim>-1</dim>
1153
+ <dim>4</dim>
1154
+ </port>
1155
+ <port id="1" precision="FP32">
1156
+ <dim>-1</dim>
1157
+ <dim>1</dim>
1158
+ <dim>-1</dim>
1159
+ <dim>4</dim>
1160
+ </port>
1161
+ <port id="2" precision="FP32">
1162
+ <dim>-1</dim>
1163
+ <dim>1</dim>
1164
+ <dim>-1</dim>
1165
+ <dim>4</dim>
1166
+ </port>
1167
+ </input>
1168
+ <output>
1169
+ <port id="3" precision="FP32" names="153,hidden_states.9">
1170
+ <dim>-1</dim>
1171
+ <dim>1</dim>
1172
+ <dim>-1</dim>
1173
+ <dim>4</dim>
1174
+ </port>
1175
+ </output>
1176
+ </layer>
1177
+ <layer id="73" name="Constant_24727" type="Const" version="opset1">
1178
+ <data element_type="i64" shape="4" offset="3188" size="32" />
1179
+ <output>
1180
+ <port id="0" precision="I64">
1181
+ <dim>4</dim>
1182
+ </port>
1183
+ </output>
1184
+ </layer>
1185
+ <layer id="74" name="__module.encoder.mid_block.attentions.0/aten::transpose/Transpose_6" type="Reshape" version="opset1">
1186
+ <data special_zero="true" />
1187
+ <input>
1188
+ <port id="0" precision="FP32">
1189
+ <dim>-1</dim>
1190
+ <dim>1</dim>
1191
+ <dim>-1</dim>
1192
+ <dim>4</dim>
1193
+ </port>
1194
+ <port id="1" precision="I64">
1195
+ <dim>4</dim>
1196
+ </port>
1197
+ </input>
1198
+ <output>
1199
+ <port id="2" precision="FP32" names="154">
1200
+ <dim>-1</dim>
1201
+ <dim>-1</dim>
1202
+ <dim>1</dim>
1203
+ <dim>4</dim>
1204
+ </port>
1205
+ </output>
1206
+ </layer>
1207
+ <layer id="75" name="Constant_24889" type="Const" version="opset1">
1208
+ <data element_type="i64" shape="3" offset="3220" size="24" />
1209
+ <output>
1210
+ <port id="0" precision="I64">
1211
+ <dim>3</dim>
1212
+ </port>
1213
+ </output>
1214
+ </layer>
1215
+ <layer id="76" name="__module.encoder.mid_block.attentions.0/aten::reshape/Reshape" type="Reshape" version="opset1">
1216
+ <data special_zero="true" />
1217
+ <input>
1218
+ <port id="0" precision="FP32">
1219
+ <dim>-1</dim>
1220
+ <dim>-1</dim>
1221
+ <dim>1</dim>
1222
+ <dim>4</dim>
1223
+ </port>
1224
+ <port id="1" precision="I64">
1225
+ <dim>3</dim>
1226
+ </port>
1227
+ </input>
1228
+ <output>
1229
+ <port id="2" precision="FP32" names="158,159,hidden_states.11">
1230
+ <dim>-1</dim>
1231
+ <dim>-1</dim>
1232
+ <dim>4</dim>
1233
+ </port>
1234
+ </output>
1235
+ </layer>
1236
+ <layer id="77" name="self.encoder.mid_block.attentions.0.to_out.0.weight" type="Const" version="opset1">
1237
+ <data element_type="f32" shape="4, 4" offset="3244" size="64" />
1238
+ <output>
1239
+ <port id="0" precision="FP32" names="self.encoder.mid_block.attentions.0.to_out.0.weight">
1240
+ <dim>4</dim>
1241
+ <dim>4</dim>
1242
+ </port>
1243
+ </output>
1244
+ </layer>
1245
+ <layer id="78" name="__module.encoder.mid_block.attentions.0.to_out.0/aten::linear/MatMul" type="MatMul" version="opset1">
1246
+ <data transpose_a="false" transpose_b="true" />
1247
+ <input>
1248
+ <port id="0" precision="FP32">
1249
+ <dim>-1</dim>
1250
+ <dim>-1</dim>
1251
+ <dim>4</dim>
1252
+ </port>
1253
+ <port id="1" precision="FP32">
1254
+ <dim>4</dim>
1255
+ <dim>4</dim>
1256
+ </port>
1257
+ </input>
1258
+ <output>
1259
+ <port id="2" precision="FP32">
1260
+ <dim>-1</dim>
1261
+ <dim>-1</dim>
1262
+ <dim>4</dim>
1263
+ </port>
1264
+ </output>
1265
+ </layer>
1266
+ <layer id="79" name="Constant_24773" type="Const" version="opset1">
1267
+ <data element_type="f32" shape="1, 1, 4" offset="3308" size="16" />
1268
+ <output>
1269
+ <port id="0" precision="FP32">
1270
+ <dim>1</dim>
1271
+ <dim>1</dim>
1272
+ <dim>4</dim>
1273
+ </port>
1274
+ </output>
1275
+ </layer>
1276
+ <layer id="80" name="__module.encoder.mid_block.attentions.0.to_out.0/aten::linear/Add" type="Add" version="opset1">
1277
+ <data auto_broadcast="numpy" />
1278
+ <input>
1279
+ <port id="0" precision="FP32">
1280
+ <dim>-1</dim>
1281
+ <dim>-1</dim>
1282
+ <dim>4</dim>
1283
+ </port>
1284
+ <port id="1" precision="FP32">
1285
+ <dim>1</dim>
1286
+ <dim>1</dim>
1287
+ <dim>4</dim>
1288
+ </port>
1289
+ </input>
1290
+ <output>
1291
+ <port id="2" precision="FP32" names="162,input.23">
1292
+ <dim>-1</dim>
1293
+ <dim>-1</dim>
1294
+ <dim>4</dim>
1295
+ </port>
1296
+ </output>
1297
+ </layer>
1298
+ <layer id="81" name="__module.encoder.mid_block.attentions.0/aten::transpose/Constant_7" type="Const" version="opset1">
1299
+ <data element_type="i32" shape="3" offset="2872" size="12" />
1300
+ <output>
1301
+ <port id="0" precision="I32">
1302
+ <dim>3</dim>
1303
+ </port>
1304
+ </output>
1305
+ </layer>
1306
+ <layer id="82" name="__module.encoder.mid_block.attentions.0/aten::transpose/Transpose_7" type="Transpose" version="opset1">
1307
+ <input>
1308
+ <port id="0" precision="FP32">
1309
+ <dim>-1</dim>
1310
+ <dim>-1</dim>
1311
+ <dim>4</dim>
1312
+ </port>
1313
+ <port id="1" precision="I32">
1314
+ <dim>3</dim>
1315
+ </port>
1316
+ </input>
1317
+ <output>
1318
+ <port id="2" precision="FP32" names="164">
1319
+ <dim>-1</dim>
1320
+ <dim>4</dim>
1321
+ <dim>-1</dim>
1322
+ </port>
1323
+ </output>
1324
+ </layer>
1325
+ <layer id="83" name="__module.encoder.mid_block.attentions.0/aten::size/ShapeOf" type="ShapeOf" version="opset3">
1326
+ <data output_type="i64" />
1327
+ <input>
1328
+ <port id="0" precision="FP32">
1329
+ <dim>-1</dim>
1330
+ <dim>4</dim>
1331
+ <dim>-1</dim>
1332
+ <dim>-1</dim>
1333
+ </port>
1334
+ </input>
1335
+ <output>
1336
+ <port id="1" precision="I64">
1337
+ <dim>4</dim>
1338
+ </port>
1339
+ </output>
1340
+ </layer>
1341
+ <layer id="84" name="__module.encoder.mid_block.attentions.0/aten::reshape/Reshape_1" type="Reshape" version="opset1">
1342
+ <data special_zero="false" />
1343
+ <input>
1344
+ <port id="0" precision="FP32">
1345
+ <dim>-1</dim>
1346
+ <dim>4</dim>
1347
+ <dim>-1</dim>
1348
+ </port>
1349
+ <port id="1" precision="I64">
1350
+ <dim>4</dim>
1351
+ </port>
1352
+ </input>
1353
+ <output>
1354
+ <port id="2" precision="FP32" names="166,hidden_states.15">
1355
+ <dim>-1</dim>
1356
+ <dim>4</dim>
1357
+ <dim>-1</dim>
1358
+ <dim>-1</dim>
1359
+ </port>
1360
+ </output>
1361
+ </layer>
1362
+ <layer id="85" name="__module.encoder.mid_block.attentions.0/aten::add/Add" type="Add" version="opset1">
1363
+ <data auto_broadcast="numpy" />
1364
+ <input>
1365
+ <port id="0" precision="FP32">
1366
+ <dim>-1</dim>
1367
+ <dim>4</dim>
1368
+ <dim>-1</dim>
1369
+ <dim>-1</dim>
1370
+ </port>
1371
+ <port id="1" precision="FP32">
1372
+ <dim>-1</dim>
1373
+ <dim>4</dim>
1374
+ <dim>-1</dim>
1375
+ <dim>-1</dim>
1376
+ </port>
1377
+ </input>
1378
+ <output>
1379
+ <port id="2" precision="FP32" names="167,168,hidden_states.17,input.25">
1380
+ <dim>-1</dim>
1381
+ <dim>4</dim>
1382
+ <dim>-1</dim>
1383
+ <dim>-1</dim>
1384
+ </port>
1385
+ </output>
1386
+ </layer>
1387
+ <layer id="86" name="self.encoder.mid_block.resnets.1.norm1.weight" type="Const" version="opset1">
1388
+ <data element_type="f32" shape="4" offset="448" size="16" />
1389
+ <output>
1390
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.1.norm1.weight">
1391
+ <dim>4</dim>
1392
+ </port>
1393
+ </output>
1394
+ </layer>
1395
+ <layer id="87" name="self.encoder.mid_block.resnets.1.norm1.bias" type="Const" version="opset1">
1396
+ <data element_type="f32" shape="4" offset="464" size="16" />
1397
+ <output>
1398
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.1.norm1.bias">
1399
+ <dim>4</dim>
1400
+ </port>
1401
+ </output>
1402
+ </layer>
1403
+ <layer id="88" name="__module.encoder.mid_block.resnets.1.norm1/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
1404
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
1405
+ <input>
1406
+ <port id="0" precision="FP32">
1407
+ <dim>-1</dim>
1408
+ <dim>4</dim>
1409
+ <dim>-1</dim>
1410
+ <dim>-1</dim>
1411
+ </port>
1412
+ <port id="1" precision="FP32">
1413
+ <dim>4</dim>
1414
+ </port>
1415
+ <port id="2" precision="FP32">
1416
+ <dim>4</dim>
1417
+ </port>
1418
+ </input>
1419
+ <output>
1420
+ <port id="3" precision="FP32" names="176,input.27">
1421
+ <dim>-1</dim>
1422
+ <dim>4</dim>
1423
+ <dim>-1</dim>
1424
+ <dim>-1</dim>
1425
+ </port>
1426
+ </output>
1427
+ </layer>
1428
+ <layer id="89" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_4" type="Swish" version="opset4">
1429
+ <input>
1430
+ <port id="0" precision="FP32">
1431
+ <dim>-1</dim>
1432
+ <dim>4</dim>
1433
+ <dim>-1</dim>
1434
+ <dim>-1</dim>
1435
+ </port>
1436
+ </input>
1437
+ <output>
1438
+ <port id="1" precision="FP32" names="177">
1439
+ <dim>-1</dim>
1440
+ <dim>4</dim>
1441
+ <dim>-1</dim>
1442
+ <dim>-1</dim>
1443
+ </port>
1444
+ </output>
1445
+ </layer>
1446
+ <layer id="90" name="self.encoder.mid_block.resnets.1.conv1.weight" type="Const" version="opset1">
1447
+ <data element_type="f32" shape="4, 4, 3, 3" offset="3324" size="576" />
1448
+ <output>
1449
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.1.conv1.weight">
1450
+ <dim>4</dim>
1451
+ <dim>4</dim>
1452
+ <dim>3</dim>
1453
+ <dim>3</dim>
1454
+ </port>
1455
+ </output>
1456
+ </layer>
1457
+ <layer id="91" name="__module.encoder.mid_block.resnets.1.conv1/aten::_convolution/Convolution" type="Convolution" version="opset1">
1458
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1459
+ <input>
1460
+ <port id="0" precision="FP32">
1461
+ <dim>-1</dim>
1462
+ <dim>4</dim>
1463
+ <dim>-1</dim>
1464
+ <dim>-1</dim>
1465
+ </port>
1466
+ <port id="1" precision="FP32">
1467
+ <dim>4</dim>
1468
+ <dim>4</dim>
1469
+ <dim>3</dim>
1470
+ <dim>3</dim>
1471
+ </port>
1472
+ </input>
1473
+ <output>
1474
+ <port id="2" precision="FP32">
1475
+ <dim>-1</dim>
1476
+ <dim>4</dim>
1477
+ <dim>-1</dim>
1478
+ <dim>-1</dim>
1479
+ </port>
1480
+ </output>
1481
+ </layer>
1482
+ <layer id="92" name="__module.encoder.mid_block.resnets.1.conv1/aten::_convolution/Reshape" type="Const" version="opset1">
1483
+ <data element_type="f32" shape="1, 4, 1, 1" offset="3900" size="16" />
1484
+ <output>
1485
+ <port id="0" precision="FP32">
1486
+ <dim>1</dim>
1487
+ <dim>4</dim>
1488
+ <dim>1</dim>
1489
+ <dim>1</dim>
1490
+ </port>
1491
+ </output>
1492
+ </layer>
1493
+ <layer id="93" name="__module.encoder.mid_block.resnets.1.conv1/aten::_convolution/Add" type="Add" version="opset1">
1494
+ <data auto_broadcast="numpy" />
1495
+ <input>
1496
+ <port id="0" precision="FP32">
1497
+ <dim>-1</dim>
1498
+ <dim>4</dim>
1499
+ <dim>-1</dim>
1500
+ <dim>-1</dim>
1501
+ </port>
1502
+ <port id="1" precision="FP32">
1503
+ <dim>1</dim>
1504
+ <dim>4</dim>
1505
+ <dim>1</dim>
1506
+ <dim>1</dim>
1507
+ </port>
1508
+ </input>
1509
+ <output>
1510
+ <port id="2" precision="FP32" names="184,input.29">
1511
+ <dim>-1</dim>
1512
+ <dim>4</dim>
1513
+ <dim>-1</dim>
1514
+ <dim>-1</dim>
1515
+ </port>
1516
+ </output>
1517
+ </layer>
1518
+ <layer id="94" name="self.encoder.mid_block.resnets.1.norm2.weight" type="Const" version="opset1">
1519
+ <data element_type="f32" shape="4" offset="448" size="16" />
1520
+ <output>
1521
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.1.norm2.weight">
1522
+ <dim>4</dim>
1523
+ </port>
1524
+ </output>
1525
+ </layer>
1526
+ <layer id="95" name="self.encoder.mid_block.resnets.1.norm2.bias" type="Const" version="opset1">
1527
+ <data element_type="f32" shape="4" offset="464" size="16" />
1528
+ <output>
1529
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.1.norm2.bias">
1530
+ <dim>4</dim>
1531
+ </port>
1532
+ </output>
1533
+ </layer>
1534
+ <layer id="96" name="__module.encoder.mid_block.resnets.1.norm2/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
1535
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
1536
+ <input>
1537
+ <port id="0" precision="FP32">
1538
+ <dim>-1</dim>
1539
+ <dim>4</dim>
1540
+ <dim>-1</dim>
1541
+ <dim>-1</dim>
1542
+ </port>
1543
+ <port id="1" precision="FP32">
1544
+ <dim>4</dim>
1545
+ </port>
1546
+ <port id="2" precision="FP32">
1547
+ <dim>4</dim>
1548
+ </port>
1549
+ </input>
1550
+ <output>
1551
+ <port id="3" precision="FP32" names="187,input.31">
1552
+ <dim>-1</dim>
1553
+ <dim>4</dim>
1554
+ <dim>-1</dim>
1555
+ <dim>-1</dim>
1556
+ </port>
1557
+ </output>
1558
+ </layer>
1559
+ <layer id="97" name="__module.decoder.mid_block.resnets.1.nonlinearity/aten::silu/Swish_5" type="Swish" version="opset4">
1560
+ <input>
1561
+ <port id="0" precision="FP32">
1562
+ <dim>-1</dim>
1563
+ <dim>4</dim>
1564
+ <dim>-1</dim>
1565
+ <dim>-1</dim>
1566
+ </port>
1567
+ </input>
1568
+ <output>
1569
+ <port id="1" precision="FP32" names="188,input.33">
1570
+ <dim>-1</dim>
1571
+ <dim>4</dim>
1572
+ <dim>-1</dim>
1573
+ <dim>-1</dim>
1574
+ </port>
1575
+ </output>
1576
+ </layer>
1577
+ <layer id="98" name="self.encoder.mid_block.resnets.1.conv2.weight" type="Const" version="opset1">
1578
+ <data element_type="f32" shape="4, 4, 3, 3" offset="3916" size="576" />
1579
+ <output>
1580
+ <port id="0" precision="FP32" names="self.encoder.mid_block.resnets.1.conv2.weight">
1581
+ <dim>4</dim>
1582
+ <dim>4</dim>
1583
+ <dim>3</dim>
1584
+ <dim>3</dim>
1585
+ </port>
1586
+ </output>
1587
+ </layer>
1588
+ <layer id="99" name="__module.encoder.mid_block.resnets.1.conv2/aten::_convolution/Convolution" type="Convolution" version="opset1">
1589
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1590
+ <input>
1591
+ <port id="0" precision="FP32">
1592
+ <dim>-1</dim>
1593
+ <dim>4</dim>
1594
+ <dim>-1</dim>
1595
+ <dim>-1</dim>
1596
+ </port>
1597
+ <port id="1" precision="FP32">
1598
+ <dim>4</dim>
1599
+ <dim>4</dim>
1600
+ <dim>3</dim>
1601
+ <dim>3</dim>
1602
+ </port>
1603
+ </input>
1604
+ <output>
1605
+ <port id="2" precision="FP32">
1606
+ <dim>-1</dim>
1607
+ <dim>4</dim>
1608
+ <dim>-1</dim>
1609
+ <dim>-1</dim>
1610
+ </port>
1611
+ </output>
1612
+ </layer>
1613
+ <layer id="100" name="__module.encoder.mid_block.resnets.1.conv2/aten::_convolution/Reshape" type="Const" version="opset1">
1614
+ <data element_type="f32" shape="1, 4, 1, 1" offset="4492" size="16" />
1615
+ <output>
1616
+ <port id="0" precision="FP32">
1617
+ <dim>1</dim>
1618
+ <dim>4</dim>
1619
+ <dim>1</dim>
1620
+ <dim>1</dim>
1621
+ </port>
1622
+ </output>
1623
+ </layer>
1624
+ <layer id="101" name="__module.encoder.mid_block.resnets.1.conv2/aten::_convolution/Add" type="Add" version="opset1">
1625
+ <data auto_broadcast="numpy" />
1626
+ <input>
1627
+ <port id="0" precision="FP32">
1628
+ <dim>-1</dim>
1629
+ <dim>4</dim>
1630
+ <dim>-1</dim>
1631
+ <dim>-1</dim>
1632
+ </port>
1633
+ <port id="1" precision="FP32">
1634
+ <dim>1</dim>
1635
+ <dim>4</dim>
1636
+ <dim>1</dim>
1637
+ <dim>1</dim>
1638
+ </port>
1639
+ </input>
1640
+ <output>
1641
+ <port id="2" precision="FP32" names="196,hidden_states">
1642
+ <dim>-1</dim>
1643
+ <dim>4</dim>
1644
+ <dim>-1</dim>
1645
+ <dim>-1</dim>
1646
+ </port>
1647
+ </output>
1648
+ </layer>
1649
+ <layer id="102" name="__module.encoder.mid_block.resnets.1/aten::add/Add" type="Add" version="opset1">
1650
+ <data auto_broadcast="numpy" />
1651
+ <input>
1652
+ <port id="0" precision="FP32">
1653
+ <dim>-1</dim>
1654
+ <dim>4</dim>
1655
+ <dim>-1</dim>
1656
+ <dim>-1</dim>
1657
+ </port>
1658
+ <port id="1" precision="FP32">
1659
+ <dim>-1</dim>
1660
+ <dim>4</dim>
1661
+ <dim>-1</dim>
1662
+ <dim>-1</dim>
1663
+ </port>
1664
+ </input>
1665
+ <output>
1666
+ <port id="2" precision="FP32" names="197,198,input.35">
1667
+ <dim>-1</dim>
1668
+ <dim>4</dim>
1669
+ <dim>-1</dim>
1670
+ <dim>-1</dim>
1671
+ </port>
1672
+ </output>
1673
+ </layer>
1674
+ <layer id="103" name="self.encoder.conv_norm_out.weight" type="Const" version="opset1">
1675
+ <data element_type="f32" shape="4" offset="448" size="16" />
1676
+ <output>
1677
+ <port id="0" precision="FP32" names="self.encoder.conv_norm_out.weight">
1678
+ <dim>4</dim>
1679
+ </port>
1680
+ </output>
1681
+ </layer>
1682
+ <layer id="104" name="self.encoder.conv_norm_out.bias" type="Const" version="opset1">
1683
+ <data element_type="f32" shape="4" offset="464" size="16" />
1684
+ <output>
1685
+ <port id="0" precision="FP32" names="self.encoder.conv_norm_out.bias">
1686
+ <dim>4</dim>
1687
+ </port>
1688
+ </output>
1689
+ </layer>
1690
+ <layer id="105" name="__module.encoder.conv_norm_out/aten::group_norm/GroupNormalization" type="GroupNormalization" version="opset12">
1691
+ <data num_groups="1" epsilon="9.9999999747524271e-07" />
1692
+ <input>
1693
+ <port id="0" precision="FP32">
1694
+ <dim>-1</dim>
1695
+ <dim>4</dim>
1696
+ <dim>-1</dim>
1697
+ <dim>-1</dim>
1698
+ </port>
1699
+ <port id="1" precision="FP32">
1700
+ <dim>4</dim>
1701
+ </port>
1702
+ <port id="2" precision="FP32">
1703
+ <dim>4</dim>
1704
+ </port>
1705
+ </input>
1706
+ <output>
1707
+ <port id="3" precision="FP32" names="201,input">
1708
+ <dim>-1</dim>
1709
+ <dim>4</dim>
1710
+ <dim>-1</dim>
1711
+ <dim>-1</dim>
1712
+ </port>
1713
+ </output>
1714
+ </layer>
1715
+ <layer id="106" name="__module.encoder.conv_act/aten::silu/Swish" type="Swish" version="opset4">
1716
+ <input>
1717
+ <port id="0" precision="FP32">
1718
+ <dim>-1</dim>
1719
+ <dim>4</dim>
1720
+ <dim>-1</dim>
1721
+ <dim>-1</dim>
1722
+ </port>
1723
+ </input>
1724
+ <output>
1725
+ <port id="1" precision="FP32" names="202">
1726
+ <dim>-1</dim>
1727
+ <dim>4</dim>
1728
+ <dim>-1</dim>
1729
+ <dim>-1</dim>
1730
+ </port>
1731
+ </output>
1732
+ </layer>
1733
+ <layer id="107" name="self.encoder.conv_out.weight" type="Const" version="opset1">
1734
+ <data element_type="f32" shape="2, 4, 3, 3" offset="4508" size="288" />
1735
+ <output>
1736
+ <port id="0" precision="FP32" names="self.encoder.conv_out.weight">
1737
+ <dim>2</dim>
1738
+ <dim>4</dim>
1739
+ <dim>3</dim>
1740
+ <dim>3</dim>
1741
+ </port>
1742
+ </output>
1743
+ </layer>
1744
+ <layer id="108" name="__module.encoder.conv_out/aten::_convolution/Convolution" type="Convolution" version="opset1">
1745
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1746
+ <input>
1747
+ <port id="0" precision="FP32">
1748
+ <dim>-1</dim>
1749
+ <dim>4</dim>
1750
+ <dim>-1</dim>
1751
+ <dim>-1</dim>
1752
+ </port>
1753
+ <port id="1" precision="FP32">
1754
+ <dim>2</dim>
1755
+ <dim>4</dim>
1756
+ <dim>3</dim>
1757
+ <dim>3</dim>
1758
+ </port>
1759
+ </input>
1760
+ <output>
1761
+ <port id="2" precision="FP32">
1762
+ <dim>-1</dim>
1763
+ <dim>2</dim>
1764
+ <dim>-1</dim>
1765
+ <dim>-1</dim>
1766
+ </port>
1767
+ </output>
1768
+ </layer>
1769
+ <layer id="109" name="__module.encoder.conv_out/aten::_convolution/Reshape" type="Const" version="opset1">
1770
+ <data element_type="f32" shape="1, 2, 1, 1" offset="4796" size="8" />
1771
+ <output>
1772
+ <port id="0" precision="FP32">
1773
+ <dim>1</dim>
1774
+ <dim>2</dim>
1775
+ <dim>1</dim>
1776
+ <dim>1</dim>
1777
+ </port>
1778
+ </output>
1779
+ </layer>
1780
+ <layer id="110" name="__module.encoder.conv_out/aten::_convolution/Add" type="Add" version="opset1">
1781
+ <data auto_broadcast="numpy" />
1782
+ <input>
1783
+ <port id="0" precision="FP32">
1784
+ <dim>-1</dim>
1785
+ <dim>2</dim>
1786
+ <dim>-1</dim>
1787
+ <dim>-1</dim>
1788
+ </port>
1789
+ <port id="1" precision="FP32">
1790
+ <dim>1</dim>
1791
+ <dim>2</dim>
1792
+ <dim>1</dim>
1793
+ <dim>1</dim>
1794
+ </port>
1795
+ </input>
1796
+ <output>
1797
+ <port id="2" precision="FP32" names="latent_parameters">
1798
+ <dim>-1</dim>
1799
+ <dim>2</dim>
1800
+ <dim>-1</dim>
1801
+ <dim>-1</dim>
1802
+ </port>
1803
+ </output>
1804
+ </layer>
1805
+ <layer id="111" name="Result_22115" type="Result" version="opset1">
1806
+ <input>
1807
+ <port id="0" precision="FP32">
1808
+ <dim>-1</dim>
1809
+ <dim>2</dim>
1810
+ <dim>-1</dim>
1811
+ <dim>-1</dim>
1812
+ </port>
1813
+ </input>
1814
+ </layer>
1815
+ </layers>
1816
+ <edges>
1817
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
1818
+ <edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
1819
+ <edge from-layer="2" from-port="2" to-layer="4" to-port="0" />
1820
+ <edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
1821
+ <edge from-layer="4" from-port="2" to-layer="21" to-port="0" />
1822
+ <edge from-layer="4" from-port="2" to-layer="7" to-port="0" />
1823
+ <edge from-layer="5" from-port="0" to-layer="7" to-port="1" />
1824
+ <edge from-layer="6" from-port="0" to-layer="7" to-port="2" />
1825
+ <edge from-layer="7" from-port="3" to-layer="8" to-port="0" />
1826
+ <edge from-layer="8" from-port="1" to-layer="10" to-port="0" />
1827
+ <edge from-layer="9" from-port="0" to-layer="10" to-port="1" />
1828
+ <edge from-layer="10" from-port="2" to-layer="12" to-port="0" />
1829
+ <edge from-layer="11" from-port="0" to-layer="12" to-port="1" />
1830
+ <edge from-layer="12" from-port="2" to-layer="15" to-port="0" />
1831
+ <edge from-layer="13" from-port="0" to-layer="15" to-port="1" />
1832
+ <edge from-layer="14" from-port="0" to-layer="15" to-port="2" />
1833
+ <edge from-layer="15" from-port="3" to-layer="16" to-port="0" />
1834
+ <edge from-layer="16" from-port="1" to-layer="18" to-port="0" />
1835
+ <edge from-layer="17" from-port="0" to-layer="18" to-port="1" />
1836
+ <edge from-layer="18" from-port="2" to-layer="20" to-port="0" />
1837
+ <edge from-layer="19" from-port="0" to-layer="20" to-port="1" />
1838
+ <edge from-layer="20" from-port="2" to-layer="21" to-port="1" />
1839
+ <edge from-layer="21" from-port="2" to-layer="24" to-port="0" />
1840
+ <edge from-layer="21" from-port="2" to-layer="38" to-port="0" />
1841
+ <edge from-layer="22" from-port="0" to-layer="24" to-port="1" />
1842
+ <edge from-layer="23" from-port="0" to-layer="24" to-port="2" />
1843
+ <edge from-layer="24" from-port="3" to-layer="25" to-port="0" />
1844
+ <edge from-layer="25" from-port="1" to-layer="27" to-port="0" />
1845
+ <edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
1846
+ <edge from-layer="27" from-port="2" to-layer="29" to-port="0" />
1847
+ <edge from-layer="28" from-port="0" to-layer="29" to-port="1" />
1848
+ <edge from-layer="29" from-port="2" to-layer="32" to-port="0" />
1849
+ <edge from-layer="30" from-port="0" to-layer="32" to-port="1" />
1850
+ <edge from-layer="31" from-port="0" to-layer="32" to-port="2" />
1851
+ <edge from-layer="32" from-port="3" to-layer="33" to-port="0" />
1852
+ <edge from-layer="33" from-port="1" to-layer="35" to-port="0" />
1853
+ <edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
1854
+ <edge from-layer="35" from-port="2" to-layer="37" to-port="0" />
1855
+ <edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
1856
+ <edge from-layer="37" from-port="2" to-layer="38" to-port="1" />
1857
+ <edge from-layer="38" from-port="2" to-layer="40" to-port="0" />
1858
+ <edge from-layer="38" from-port="2" to-layer="85" to-port="1" />
1859
+ <edge from-layer="38" from-port="2" to-layer="83" to-port="0" />
1860
+ <edge from-layer="39" from-port="0" to-layer="40" to-port="1" />
1861
+ <edge from-layer="40" from-port="2" to-layer="42" to-port="0" />
1862
+ <edge from-layer="41" from-port="0" to-layer="42" to-port="1" />
1863
+ <edge from-layer="42" from-port="2" to-layer="44" to-port="0" />
1864
+ <edge from-layer="43" from-port="0" to-layer="44" to-port="1" />
1865
+ <edge from-layer="44" from-port="2" to-layer="47" to-port="0" />
1866
+ <edge from-layer="45" from-port="0" to-layer="47" to-port="1" />
1867
+ <edge from-layer="46" from-port="0" to-layer="47" to-port="2" />
1868
+ <edge from-layer="47" from-port="3" to-layer="65" to-port="0" />
1869
+ <edge from-layer="47" from-port="3" to-layer="57" to-port="0" />
1870
+ <edge from-layer="47" from-port="3" to-layer="49" to-port="0" />
1871
+ <edge from-layer="48" from-port="0" to-layer="49" to-port="1" />
1872
+ <edge from-layer="49" from-port="2" to-layer="51" to-port="0" />
1873
+ <edge from-layer="50" from-port="0" to-layer="51" to-port="1" />
1874
+ <edge from-layer="51" from-port="2" to-layer="53" to-port="0" />
1875
+ <edge from-layer="52" from-port="0" to-layer="53" to-port="1" />
1876
+ <edge from-layer="53" from-port="2" to-layer="55" to-port="0" />
1877
+ <edge from-layer="54" from-port="0" to-layer="55" to-port="1" />
1878
+ <edge from-layer="55" from-port="2" to-layer="72" to-port="0" />
1879
+ <edge from-layer="56" from-port="0" to-layer="57" to-port="1" />
1880
+ <edge from-layer="57" from-port="2" to-layer="59" to-port="0" />
1881
+ <edge from-layer="58" from-port="0" to-layer="59" to-port="1" />
1882
+ <edge from-layer="59" from-port="2" to-layer="61" to-port="0" />
1883
+ <edge from-layer="60" from-port="0" to-layer="61" to-port="1" />
1884
+ <edge from-layer="61" from-port="2" to-layer="63" to-port="0" />
1885
+ <edge from-layer="62" from-port="0" to-layer="63" to-port="1" />
1886
+ <edge from-layer="63" from-port="2" to-layer="72" to-port="1" />
1887
+ <edge from-layer="64" from-port="0" to-layer="65" to-port="1" />
1888
+ <edge from-layer="65" from-port="2" to-layer="67" to-port="0" />
1889
+ <edge from-layer="66" from-port="0" to-layer="67" to-port="1" />
1890
+ <edge from-layer="67" from-port="2" to-layer="69" to-port="0" />
1891
+ <edge from-layer="68" from-port="0" to-layer="69" to-port="1" />
1892
+ <edge from-layer="69" from-port="2" to-layer="71" to-port="0" />
1893
+ <edge from-layer="70" from-port="0" to-layer="71" to-port="1" />
1894
+ <edge from-layer="71" from-port="2" to-layer="72" to-port="2" />
1895
+ <edge from-layer="72" from-port="3" to-layer="74" to-port="0" />
1896
+ <edge from-layer="73" from-port="0" to-layer="74" to-port="1" />
1897
+ <edge from-layer="74" from-port="2" to-layer="76" to-port="0" />
1898
+ <edge from-layer="75" from-port="0" to-layer="76" to-port="1" />
1899
+ <edge from-layer="76" from-port="2" to-layer="78" to-port="0" />
1900
+ <edge from-layer="77" from-port="0" to-layer="78" to-port="1" />
1901
+ <edge from-layer="78" from-port="2" to-layer="80" to-port="0" />
1902
+ <edge from-layer="79" from-port="0" to-layer="80" to-port="1" />
1903
+ <edge from-layer="80" from-port="2" to-layer="82" to-port="0" />
1904
+ <edge from-layer="81" from-port="0" to-layer="82" to-port="1" />
1905
+ <edge from-layer="82" from-port="2" to-layer="84" to-port="0" />
1906
+ <edge from-layer="83" from-port="1" to-layer="84" to-port="1" />
1907
+ <edge from-layer="84" from-port="2" to-layer="85" to-port="0" />
1908
+ <edge from-layer="85" from-port="2" to-layer="88" to-port="0" />
1909
+ <edge from-layer="85" from-port="2" to-layer="102" to-port="0" />
1910
+ <edge from-layer="86" from-port="0" to-layer="88" to-port="1" />
1911
+ <edge from-layer="87" from-port="0" to-layer="88" to-port="2" />
1912
+ <edge from-layer="88" from-port="3" to-layer="89" to-port="0" />
1913
+ <edge from-layer="89" from-port="1" to-layer="91" to-port="0" />
1914
+ <edge from-layer="90" from-port="0" to-layer="91" to-port="1" />
1915
+ <edge from-layer="91" from-port="2" to-layer="93" to-port="0" />
1916
+ <edge from-layer="92" from-port="0" to-layer="93" to-port="1" />
1917
+ <edge from-layer="93" from-port="2" to-layer="96" to-port="0" />
1918
+ <edge from-layer="94" from-port="0" to-layer="96" to-port="1" />
1919
+ <edge from-layer="95" from-port="0" to-layer="96" to-port="2" />
1920
+ <edge from-layer="96" from-port="3" to-layer="97" to-port="0" />
1921
+ <edge from-layer="97" from-port="1" to-layer="99" to-port="0" />
1922
+ <edge from-layer="98" from-port="0" to-layer="99" to-port="1" />
1923
+ <edge from-layer="99" from-port="2" to-layer="101" to-port="0" />
1924
+ <edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
1925
+ <edge from-layer="101" from-port="2" to-layer="102" to-port="1" />
1926
+ <edge from-layer="102" from-port="2" to-layer="105" to-port="0" />
1927
+ <edge from-layer="103" from-port="0" to-layer="105" to-port="1" />
1928
+ <edge from-layer="104" from-port="0" to-layer="105" to-port="2" />
1929
+ <edge from-layer="105" from-port="3" to-layer="106" to-port="0" />
1930
+ <edge from-layer="106" from-port="1" to-layer="108" to-port="0" />
1931
+ <edge from-layer="107" from-port="0" to-layer="108" to-port="1" />
1932
+ <edge from-layer="108" from-port="2" to-layer="110" to-port="0" />
1933
+ <edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
1934
+ <edge from-layer="110" from-port="2" to-layer="111" to-port="0" />
1935
+ </edges>
1936
+ <rt_info>
1937
+ <Runtime_version value="2024.6.0-17404-4c0f47d2335-releases/2024/6" />
1938
+ <conversion_parameters>
1939
+ <framework value="pytorch" />
1940
+ <is_python_object value="True" />
1941
+ </conversion_parameters>
1942
+ <optimum>
1943
+ <diffusers_version value="0.32.1" />
1944
+ <optimum_intel_version value="1.22.0.dev0+bb1c68ae" />
1945
+ <optimum_version value="1.24.0.dev0" />
1946
+ <pytorch_version value="2.5.1+cpu" />
1947
+ <transformers_version value="4.46.3" />
1948
+ </optimum>
1949
+ <runtime_options>
1950
+ <ACTIVATIONS_SCALE_FACTOR value="8.0" />
1951
+ </runtime_options>
1952
+ </rt_info>
1953
+ </net>
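
For reference, a minimal sketch of loading this export with the OpenVINO runtime. It is a sketch under stated assumptions, not part of the upload: it assumes this XML is the pipeline's VAE-encoder component (the path below is an assumption; the file header naming it sits outside this excerpt), that the openvino and numpy packages are installed, and that the encoder takes the usual 3-channel image input of an AutoencoderKL (the input layer appears earlier in the file than this excerpt).

import numpy as np
import openvino as ov

core = ov.Core()
# read_model() picks up the openvino_model.bin weights file stored next to the XML
model = core.read_model("vae_encoder/openvino_model.xml")  # path is an assumption

# the <rt_info> block above (conversion_parameters, optimum versions,
# runtime_options) is exposed programmatically:
print(model.get_rt_info())

# every batch and spatial dim in the graph is dynamic (-1), so any H x W works
compiled = core.compile_model(model, "CPU")
image = np.random.rand(1, 3, 32, 32).astype(np.float32)  # 3 input channels: assumption
result = compiled([image])[compiled.output(0)]

# the final Result layer above returns "latent_parameters" with 2 channels:
# the mean/logvar halves of the AutoencoderKL posterior (1 latent channel
# in this tiny random model)
print(result.shape)

The ACTIVATIONS_SCALE_FACTOR value recorded under runtime_options in <rt_info> is a hint the runtime can pick up when the model is compiled; it appears to rescale activations to keep reduced-precision (FP16) inference from overflowing, so the caller does not need to handle it explicitly.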