openvino/openvino_model.xml
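A minimal loading-and-inference sketch for this IR with the OpenVINO Python runtime. It assumes the companion weights file openvino_model.bin (which the offset/size attributes below index into) sits next to this XML, as in the standard IR layout, and the placeholder token IDs stand in for the output of whatever tokenizer produced the 30522-entry vocabulary; the input names and dtypes are taken from the Parameter layers in the graph.

# Minimal sketch: load and run this IR (assumes openvino_model.bin sits
# alongside this XML; the token IDs below are placeholders for a real tokenizer).
import numpy as np
import openvino as ov

core = ov.Core()
model = core.read_model("openvino/openvino_model.xml")  # also picks up openvino_model.bin
compiled = core.compile_model(model, "CPU")

ids = np.array([[101, 7592, 2088, 102]], dtype=np.int64)  # hypothetical [batch, seq] IDs
outputs = compiled({
    "input_ids": ids,                      # i64, shape [?, ?], per the Parameter layers
    "attention_mask": np.ones_like(ids),   # 1 = real token, 0 = padding
    "token_type_ids": np.zeros_like(ids),  # single-segment input
})
hidden = outputs[compiled.output(0)]  # final hidden states, [batch, seq, 384]
print(hidden.shape)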
<?xml version="1.0"?>
<net name="Model0" version="11">
	<layers>
		<layer id="2" name="input_ids" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="input_ids">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="1" name="attention_mask" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="attention_mask">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="0" name="token_type_ids" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="token_type_ids">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
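		<!-- The three Parameter layers above are the dynamic [batch, seq] i64 inputs of a
		     BERT-style encoder. The layers that follow build the embeddings: word
		     (30522 x 384), token-type (2 x 384) and position (512 x 384) lookups,
		     summed and layer-normalized. -->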
		<layer id="3" name="self.embeddings.word_embeddings.weight" type="Const" version="opset1">
			<data element_type="f32" shape="30522, 384" offset="0" size="46881792" />
			<output>
				<port id="0" precision="FP32" names="self.embeddings.word_embeddings.weight">
					<dim>30522</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="4" name="__module.embeddings.word_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="5" name="__module.embeddings.word_embeddings/aten::embedding/Constant" type="Const" version="opset1">
			<data element_type="i32" shape="" offset="46881792" size="4" />
			<output>
				<port id="0" precision="I32" />
			</output>
		</layer>
		<layer id="6" name="__module.embeddings.word_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="FP32">
					<dim>30522</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="2" precision="I32" />
			</input>
			<output>
				<port id="3" precision="FP32" names="79,inputs_embeds">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="7" name="self.embeddings.token_type_embeddings.weight" type="Const" version="opset1">
			<data element_type="f32" shape="2, 384" offset="46881796" size="3072" />
			<output>
				<port id="0" precision="FP32" names="self.embeddings.token_type_embeddings.weight">
					<dim>2</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="8" name="__module.embeddings.token_type_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="9" name="__module.embeddings.token_type_embeddings/aten::embedding/Constant" type="Const" version="opset1">
			<data element_type="i32" shape="" offset="46881792" size="4" />
			<output>
				<port id="0" precision="I32" />
			</output>
		</layer>
		<layer id="10" name="__module.embeddings.token_type_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="FP32">
					<dim>2</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="2" precision="I32" />
			</input>
			<output>
				<port id="3" precision="FP32" names="81,token_type_embeddings.1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="11" name="__module.embeddings/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="82_1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="12" name="self.embeddings.position_embeddings.weight" type="Const" version="opset1">
			<data element_type="f32" shape="512, 384" offset="46884868" size="786432" />
			<output>
				<port id="0" precision="FP32" names="self.embeddings.position_embeddings.weight">
					<dim>512</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="13" name="__module.embeddings/aten::slice/Slice" type="Const" version="opset1">
			<data element_type="i64" shape="1, 512" offset="47671300" size="4096" />
			<output>
				<port id="0" precision="I64" names="76">
					<dim>1</dim>
					<dim>512</dim>
				</port>
			</output>
		</layer>
		<layer id="14" name="__module.embeddings/aten::slice/Reshape" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="47675396" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="15" name="ShapeOf_6355" type="ShapeOf" version="opset3">
			<data output_type="i64" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="16" name="Constant_6476" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="47675404" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="17" name="Constant_6357" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="47675396" size="8" />
			<output>
				<port id="0" precision="I64" />
			</output>
		</layer>
		<layer id="18" name="Gather_6358" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
				<port id="2" precision="I64" />
			</input>
			<output>
				<port id="3" precision="I64" names="10,72,74,75,8">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="19" name="__module.embeddings/aten::slice/Reshape_2" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="47675404" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="20" name="__module.embeddings/aten::slice/Reshape_3" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="47675404" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="21" name="__module.embeddings/aten::slice/Slice_1" type="Slice" version="opset8">
			<input>
				<port id="0" precision="I64">
					<dim>1</dim>
					<dim>512</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
				<port id="2" precision="I64">
					<dim>1</dim>
				</port>
				<port id="3" precision="I64">
					<dim>1</dim>
				</port>
				<port id="4" precision="I64">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="5" precision="I64" names="77">
					<dim>1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="22" name="__module.embeddings.position_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="23" name="__module.embeddings.position_embeddings/aten::embedding/Constant" type="Const" version="opset1">
			<data element_type="i32" shape="" offset="46881792" size="4" />
			<output>
				<port id="0" precision="I32" />
			</output>
		</layer>
		<layer id="24" name="__module.embeddings.position_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="FP32">
					<dim>512</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
					<dim>-1</dim>
				</port>
				<port id="2" precision="I32" />
			</input>
			<output>
				<port id="3" precision="FP32" names="84,position_embeddings.1">
					<dim>1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="25" name="__module.embeddings/aten::add_/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="82,embeddings.1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="26" name="__module.embeddings.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="47675412" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="27" name="__module.embeddings.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="28" name="Constant_6230" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="47675416" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="29" name="__module.embeddings.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="30" name="Constant_6231" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="47676952" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="31" name="__module.embeddings.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="89,input.1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
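		<!-- Encoder layer 0, self-attention: independent 384 x 384 query/key/value
		     projections, each viewed as [batch, seq, 12, 32] and permuted to
		     [batch, 12, seq, 32], i.e. 12 heads of width 32. -->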
		<layer id="32" name="self.encoder.layer.0.attention.self.query.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="47678488" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.query.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="33" name="__module.encoder.layer.0.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="34" name="Constant_6232" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="48268312" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="35" name="__module.encoder.layer.0.attention.self.query/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="129,x.1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="36" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="48269848" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="37" name="__module.encoder.layer.0.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="133,x.3">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="38" name="Constant_247" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="48269880" size="32" />
			<output>
				<port id="0" precision="I64" names="134">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="39" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="135">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="40" name="self.encoder.layer.0.attention.self.key.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="48269912" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.key.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="41" name="__module.encoder.layer.0.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="42" name="Constant_6233" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="48859736" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="43" name="__module.encoder.layer.0.attention.self.key/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="138,x.5">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="44" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="48269848" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="45" name="__module.encoder.layer.0.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="142,x.7">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="46" name="Constant_272" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="48269880" size="32" />
			<output>
				<port id="0" precision="I64" names="143">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="47" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="144">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="48" name="self.encoder.layer.0.attention.self.value.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="48861272" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.value.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="49" name="__module.encoder.layer.0.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="50" name="Constant_6234" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="49451096" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="51" name="__module.encoder.layer.0.attention.self.value/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="147,x.9">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="52" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="48269848" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="53" name="__module.encoder.layer.0.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="151,x.11">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="54" name="Constant_297" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="48269880" size="32" />
			<output>
				<port id="0" precision="I64" names="152">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="55" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="153">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
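		<!-- Attention-mask preprocessing feeding every ScaledDotProductAttention:
		     attention_mask is unsqueezed to [batch, 1, 1, seq], broadcast, converted to
		     f32 and inverted (1 - mask); masked positions are then filled via Select with
		     a scalar constant read from the weights blob (conventionally a large negative
		     value, so those positions vanish under softmax). -->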
		<layer id="56" name="Constant_6236" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1, 1" offset="49452632" size="4" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="57" name="25" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="47675404" size="8" />
			<output>
				<port id="0" precision="I64" names="25" />
			</output>
		</layer>
		<layer id="58" name="aten::unsqueeze/Unsqueeze" type="Unsqueeze" version="opset1">
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="I64" />
			</input>
			<output>
				<port id="2" precision="I64" names="26">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="59" name="27" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="49452636" size="8" />
			<output>
				<port id="0" precision="I64" names="27" />
			</output>
		</layer>
		<layer id="60" name="aten::unsqueeze/Unsqueeze_1" type="Unsqueeze" version="opset1">
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="I64" />
			</input>
			<output>
				<port id="2" precision="I64" names="28,33">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="61" name="ShapeOf_6363" type="ShapeOf" version="opset3">
			<data output_type="i64" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="62" name="Constant_6479" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="47675396" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="63" name="Constant_6365" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="47675396" size="8" />
			<output>
				<port id="0" precision="I64" />
			</output>
		</layer>
		<layer id="64" name="Gather_6366" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
				<port id="2" precision="I64" />
			</input>
			<output>
				<port id="3" precision="I64" names="13,15">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="65" name="Constant_5460" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="47675404" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="66" name="Constant_6482" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="47675404" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="67" name="Constant_6373" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="47675396" size="8" />
			<output>
				<port id="0" precision="I64" />
			</output>
		</layer>
		<layer id="68" name="Gather_6374" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
				<port id="2" precision="I64" />
			</input>
			<output>
				<port id="3" precision="I64" names="17,19">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="69" name="prim::ListConstruct/Concat" type="Concat" version="opset1">
			<data axis="0" />
			<input>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
				<port id="2" precision="I64">
					<dim>1</dim>
				</port>
				<port id="3" precision="I64">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="I64" names="35">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="70" name="aten::expand/Broadcast" type="Broadcast" version="opset3">
			<data mode="bidirectional" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="I64" names="37">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="71" name="aten::to/Convert" type="Convert" version="opset1">
			<data destination_type="f32" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="42">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="72" name="Constant_6235" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1, 1" offset="49452632" size="4" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="73" name="aten::rsub/Multiply" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="74" name="aten::rsub/Subtract" type="Subtract" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="45,inverted_mask">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="75" name="aten::to/Convert_1" type="Convert" version="opset1">
			<data destination_type="boolean" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="BOOL" names="50">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="76" name="aten::masked_fill/ConvertLike" type="Const" version="opset1">
			<data element_type="f32" shape="" offset="49452644" size="4" />
			<output>
				<port id="0" precision="FP32" />
			</output>
		</layer>
		<layer id="77" name="aten::masked_fill/Select" type="Select" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="BOOL">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="FP32" />
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="3" precision="FP32" names="52">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="78" name="__module.encoder.layer.0.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
			<data causal="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="3" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="FP32" names="154,attn_output.1">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="79" name="__module.encoder.layer.0.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="49452648" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="80" name="__module.encoder.layer.0.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="155,attn_output.3">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="81" name="__module.encoder.layer.0.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
			<data output_type="i64" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="82" name="Constant_5737" type="Const" version="opset1">
			<data element_type="i64" shape="2" offset="49452664" size="16" />
			<output>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="83" name="Constant_5738" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="47675396" size="8" />
			<output>
				<port id="0" precision="I64" />
			</output>
		</layer>
		<layer id="84" name="Gather_5739" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
				<port id="1" precision="I64">
					<dim>2</dim>
				</port>
				<port id="2" precision="I64" />
			</input>
			<output>
				<port id="3" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="85" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Reshape_1_3" type="Const" version="opset1">
			<data element_type="i64" shape="1" offset="49452680" size="8" />
			<output>
				<port id="0" precision="I64">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="86" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
			<data axis="0" />
			<input>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="I64" names="156">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="87" name="__module.encoder.layer.0.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="157">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="88" name="self.encoder.layer.0.attention.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="49452688" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.0.attention.output.dense.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="89" name="__module.encoder.layer.0.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="90" name="Constant_6237" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="50042512" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="91" name="__module.encoder.layer.0.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="163,input.3">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="92" name="__module.encoder.layer.0.attention.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="165">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="93" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="47675412" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="94" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="95" name="Constant_6238" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="50044048" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="96" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="97" name="Constant_6239" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="50045584" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="98" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="169,input_tensor.1">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
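		<!-- Encoder layer 0, feed-forward block: 384 -> 1536 dense, exact (ERF) GELU,
		     1536 -> 384 dense, then residual add and LayerNorm. -->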
		<layer id="99" name="self.encoder.layer.0.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1536, 384" offset="50047120" size="2359296" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.0.intermediate.dense.weight">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="100" name="__module.encoder.layer.0.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="101" name="Constant_6240" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1536" offset="52406416" size="6144" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="102" name="__module.encoder.layer.0.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="174">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="103" name="__module.encoder.layer.0.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="175">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="104" name="self.encoder.layer.0.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 1536" offset="52412560" size="2359296" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.0.output.dense.weight">
					<dim>384</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="105" name="__module.encoder.layer.0.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="106" name="Constant_6241" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="54771856" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="107" name="__module.encoder.layer.0.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="181,input.5">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="108" name="__module.encoder.layer.0.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="183">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="109" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="47675412" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="110" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="111" name="Constant_6242" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="54773392" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="112" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="113" name="Constant_6243" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="54774928" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="114" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="187,hidden_states.7">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
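		<!-- Encoder layer 1 begins here and repeats the layer-0 pattern (self-attention,
		     then feed-forward) with its own weight tensors. -->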
1688 <layer id="115" name="self.encoder.layer.1.attention.self.query.weight" type="Const" version="opset1">
1689 <data element_type="f32" shape="384, 384" offset="54776464" size="589824" />
1690 <output>
1691 <port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.query.weight">
1692 <dim>384</dim>
1693 <dim>384</dim>
1694 </port>
1695 </output>
1696 </layer>
1697 <layer id="116" name="__module.encoder.layer.1.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
1698 <data transpose_a="false" transpose_b="true" />
1699 <input>
1700 <port id="0" precision="FP32">
1701 <dim>-1</dim>
1702 <dim>-1</dim>
1703 <dim>384</dim>
1704 </port>
1705 <port id="1" precision="FP32">
1706 <dim>384</dim>
1707 <dim>384</dim>
1708 </port>
1709 </input>
1710 <output>
1711 <port id="2" precision="FP32">
1712 <dim>-1</dim>
1713 <dim>-1</dim>
1714 <dim>384</dim>
1715 </port>
1716 </output>
1717 </layer>
1718 <layer id="117" name="Constant_6244" type="Const" version="opset1">
1719 <data element_type="f32" shape="1, 1, 384" offset="55366288" size="1536" />
1720 <output>
1721 <port id="0" precision="FP32">
1722 <dim>1</dim>
1723 <dim>1</dim>
1724 <dim>384</dim>
1725 </port>
1726 </output>
1727 </layer>
1728 <layer id="118" name="__module.encoder.layer.1.attention.self.query/aten::linear/Add" type="Add" version="opset1">
1729 <data auto_broadcast="numpy" />
1730 <input>
1731 <port id="0" precision="FP32">
1732 <dim>-1</dim>
1733 <dim>-1</dim>
1734 <dim>384</dim>
1735 </port>
1736 <port id="1" precision="FP32">
1737 <dim>1</dim>
1738 <dim>1</dim>
1739 <dim>384</dim>
1740 </port>
1741 </input>
1742 <output>
1743 <port id="2" precision="FP32" names="200,x.13">
1744 <dim>-1</dim>
1745 <dim>-1</dim>
1746 <dim>384</dim>
1747 </port>
1748 </output>
1749 </layer>
1750 <layer id="119" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
1751 <data element_type="i64" shape="4" offset="48269848" size="32" />
1752 <output>
1753 <port id="0" precision="I64">
1754 <dim>4</dim>
1755 </port>
1756 </output>
1757 </layer>
1758 <layer id="120" name="__module.encoder.layer.1.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
1759 <data special_zero="true" />
1760 <input>
1761 <port id="0" precision="FP32">
1762 <dim>-1</dim>
1763 <dim>-1</dim>
1764 <dim>384</dim>
1765 </port>
1766 <port id="1" precision="I64">
1767 <dim>4</dim>
1768 </port>
1769 </input>
1770 <output>
1771 <port id="2" precision="FP32" names="204,x.15">
1772 <dim>-1</dim>
1773 <dim>-1</dim>
1774 <dim>12</dim>
1775 <dim>32</dim>
1776 </port>
1777 </output>
1778 </layer>
1779 <layer id="121" name="Constant_479" type="Const" version="opset1">
1780 <data element_type="i64" shape="4" offset="48269880" size="32" />
1781 <output>
1782 <port id="0" precision="I64" names="205">
1783 <dim>4</dim>
1784 </port>
1785 </output>
1786 </layer>
1787 <layer id="122" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
1788 <input>
1789 <port id="0" precision="FP32">
1790 <dim>-1</dim>
1791 <dim>-1</dim>
1792 <dim>12</dim>
1793 <dim>32</dim>
1794 </port>
1795 <port id="1" precision="I64">
1796 <dim>4</dim>
1797 </port>
1798 </input>
1799 <output>
1800 <port id="2" precision="FP32" names="206">
1801 <dim>-1</dim>
1802 <dim>12</dim>
1803 <dim>-1</dim>
1804 <dim>32</dim>
1805 </port>
1806 </output>
1807 </layer>
1808 <layer id="123" name="self.encoder.layer.1.attention.self.key.weight" type="Const" version="opset1">
1809 <data element_type="f32" shape="384, 384" offset="55367824" size="589824" />
1810 <output>
1811 <port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.key.weight">
1812 <dim>384</dim>
1813 <dim>384</dim>
1814 </port>
1815 </output>
1816 </layer>
1817 <layer id="124" name="__module.encoder.layer.1.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
1818 <data transpose_a="false" transpose_b="true" />
1819 <input>
1820 <port id="0" precision="FP32">
1821 <dim>-1</dim>
1822 <dim>-1</dim>
1823 <dim>384</dim>
1824 </port>
1825 <port id="1" precision="FP32">
1826 <dim>384</dim>
1827 <dim>384</dim>
1828 </port>
1829 </input>
1830 <output>
1831 <port id="2" precision="FP32">
1832 <dim>-1</dim>
1833 <dim>-1</dim>
1834 <dim>384</dim>
1835 </port>
1836 </output>
1837 </layer>
1838 <layer id="125" name="Constant_6245" type="Const" version="opset1">
1839 <data element_type="f32" shape="1, 1, 384" offset="55957648" size="1536" />
1840 <output>
1841 <port id="0" precision="FP32">
1842 <dim>1</dim>
1843 <dim>1</dim>
1844 <dim>384</dim>
1845 </port>
1846 </output>
1847 </layer>
1848 <layer id="126" name="__module.encoder.layer.1.attention.self.key/aten::linear/Add" type="Add" version="opset1">
1849 <data auto_broadcast="numpy" />
1850 <input>
1851 <port id="0" precision="FP32">
1852 <dim>-1</dim>
1853 <dim>-1</dim>
1854 <dim>384</dim>
1855 </port>
1856 <port id="1" precision="FP32">
1857 <dim>1</dim>
1858 <dim>1</dim>
1859 <dim>384</dim>
1860 </port>
1861 </input>
1862 <output>
1863 <port id="2" precision="FP32" names="209,x.17">
1864 <dim>-1</dim>
1865 <dim>-1</dim>
1866 <dim>384</dim>
1867 </port>
1868 </output>
1869 </layer>
1870 <layer id="127" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
1871 <data element_type="i64" shape="4" offset="48269848" size="32" />
1872 <output>
1873 <port id="0" precision="I64">
1874 <dim>4</dim>
1875 </port>
1876 </output>
1877 </layer>
1878 <layer id="128" name="__module.encoder.layer.1.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
1879 <data special_zero="true" />
1880 <input>
1881 <port id="0" precision="FP32">
1882 <dim>-1</dim>
1883 <dim>-1</dim>
1884 <dim>384</dim>
1885 </port>
1886 <port id="1" precision="I64">
1887 <dim>4</dim>
1888 </port>
1889 </input>
1890 <output>
1891 <port id="2" precision="FP32" names="213,x.19">
1892 <dim>-1</dim>
1893 <dim>-1</dim>
1894 <dim>12</dim>
1895 <dim>32</dim>
1896 </port>
1897 </output>
1898 </layer>
1899 <layer id="129" name="Constant_502" type="Const" version="opset1">
1900 <data element_type="i64" shape="4" offset="48269880" size="32" />
1901 <output>
1902 <port id="0" precision="I64" names="214">
1903 <dim>4</dim>
1904 </port>
1905 </output>
1906 </layer>
1907 <layer id="130" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
1908 <input>
1909 <port id="0" precision="FP32">
1910 <dim>-1</dim>
1911 <dim>-1</dim>
1912 <dim>12</dim>
1913 <dim>32</dim>
1914 </port>
1915 <port id="1" precision="I64">
1916 <dim>4</dim>
1917 </port>
1918 </input>
1919 <output>
1920 <port id="2" precision="FP32" names="215">
1921 <dim>-1</dim>
1922 <dim>12</dim>
1923 <dim>-1</dim>
1924 <dim>32</dim>
1925 </port>
1926 </output>
1927 </layer>
1928 <layer id="131" name="self.encoder.layer.1.attention.self.value.weight" type="Const" version="opset1">
1929 <data element_type="f32" shape="384, 384" offset="55959184" size="589824" />
1930 <output>
1931 <port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.value.weight">
1932 <dim>384</dim>
1933 <dim>384</dim>
1934 </port>
1935 </output>
1936 </layer>
1937 <layer id="132" name="__module.encoder.layer.1.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
1938 <data transpose_a="false" transpose_b="true" />
1939 <input>
1940 <port id="0" precision="FP32">
1941 <dim>-1</dim>
1942 <dim>-1</dim>
1943 <dim>384</dim>
1944 </port>
1945 <port id="1" precision="FP32">
1946 <dim>384</dim>
1947 <dim>384</dim>
1948 </port>
1949 </input>
1950 <output>
1951 <port id="2" precision="FP32">
1952 <dim>-1</dim>
1953 <dim>-1</dim>
1954 <dim>384</dim>
1955 </port>
1956 </output>
1957 </layer>
1958 <layer id="133" name="Constant_6246" type="Const" version="opset1">
1959 <data element_type="f32" shape="1, 1, 384" offset="56549008" size="1536" />
1960 <output>
1961 <port id="0" precision="FP32">
1962 <dim>1</dim>
1963 <dim>1</dim>
1964 <dim>384</dim>
1965 </port>
1966 </output>
1967 </layer>
1968 <layer id="134" name="__module.encoder.layer.1.attention.self.value/aten::linear/Add" type="Add" version="opset1">
1969 <data auto_broadcast="numpy" />
1970 <input>
1971 <port id="0" precision="FP32">
1972 <dim>-1</dim>
1973 <dim>-1</dim>
1974 <dim>384</dim>
1975 </port>
1976 <port id="1" precision="FP32">
1977 <dim>1</dim>
1978 <dim>1</dim>
1979 <dim>384</dim>
1980 </port>
1981 </input>
1982 <output>
1983 <port id="2" precision="FP32" names="218,x.21">
1984 <dim>-1</dim>
1985 <dim>-1</dim>
1986 <dim>384</dim>
1987 </port>
1988 </output>
1989 </layer>
1990 <layer id="135" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
1991 <data element_type="i64" shape="4" offset="48269848" size="32" />
1992 <output>
1993 <port id="0" precision="I64">
1994 <dim>4</dim>
1995 </port>
1996 </output>
1997 </layer>
1998 <layer id="136" name="__module.encoder.layer.1.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
1999 <data special_zero="true" />
2000 <input>
2001 <port id="0" precision="FP32">
2002 <dim>-1</dim>
2003 <dim>-1</dim>
2004 <dim>384</dim>
2005 </port>
2006 <port id="1" precision="I64">
2007 <dim>4</dim>
2008 </port>
2009 </input>
2010 <output>
2011 <port id="2" precision="FP32" names="222,x.23">
2012 <dim>-1</dim>
2013 <dim>-1</dim>
2014 <dim>12</dim>
2015 <dim>32</dim>
2016 </port>
2017 </output>
2018 </layer>
2019 <layer id="137" name="Constant_525" type="Const" version="opset1">
2020 <data element_type="i64" shape="4" offset="48269880" size="32" />
2021 <output>
2022 <port id="0" precision="I64" names="223">
2023 <dim>4</dim>
2024 </port>
2025 </output>
2026 </layer>
2027 <layer id="138" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
2028 <input>
2029 <port id="0" precision="FP32">
2030 <dim>-1</dim>
2031 <dim>-1</dim>
2032 <dim>12</dim>
2033 <dim>32</dim>
2034 </port>
2035 <port id="1" precision="I64">
2036 <dim>4</dim>
2037 </port>
2038 </input>
2039 <output>
2040 <port id="2" precision="FP32" names="224">
2041 <dim>-1</dim>
2042 <dim>12</dim>
2043 <dim>-1</dim>
2044 <dim>32</dim>
2045 </port>
2046 </output>
2047 </layer>
2048 <layer id="139" name="__module.encoder.layer.1.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
2049 <data causal="false" />
2050 <input>
2051 <port id="0" precision="FP32">
2052 <dim>-1</dim>
2053 <dim>12</dim>
2054 <dim>-1</dim>
2055 <dim>32</dim>
2056 </port>
2057 <port id="1" precision="FP32">
2058 <dim>-1</dim>
2059 <dim>12</dim>
2060 <dim>-1</dim>
2061 <dim>32</dim>
2062 </port>
2063 <port id="2" precision="FP32">
2064 <dim>-1</dim>
2065 <dim>12</dim>
2066 <dim>-1</dim>
2067 <dim>32</dim>
2068 </port>
2069 <port id="3" precision="FP32">
2070 <dim>-1</dim>
2071 <dim>1</dim>
2072 <dim>-1</dim>
2073 <dim>-1</dim>
2074 </port>
2075 </input>
2076 <output>
2077 <port id="4" precision="FP32" names="225,attn_output.5">
2078 <dim>-1</dim>
2079 <dim>12</dim>
2080 <dim>-1</dim>
2081 <dim>32</dim>
2082 </port>
2083 </output>
2084 </layer>
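<!-- Fused non-causal scaled dot-product attention over 12 heads of width 32 (12 x 32 = 384).
     Port 3 is the FP32 additive attention mask, broadcast as [batch, 1, *, *]; with no explicit
     scale input, the opset13 default of 1/sqrt(head_size) = 1/sqrt(32) should apply. -->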
2085 <layer id="140" name="__module.encoder.layer.1.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
2086 <data element_type="i32" shape="4" offset="49452648" size="16" />
2087 <output>
2088 <port id="0" precision="I32">
2089 <dim>4</dim>
2090 </port>
2091 </output>
2092 </layer>
2093 <layer id="141" name="__module.encoder.layer.1.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
2094 <input>
2095 <port id="0" precision="FP32">
2096 <dim>-1</dim>
2097 <dim>12</dim>
2098 <dim>-1</dim>
2099 <dim>32</dim>
2100 </port>
2101 <port id="1" precision="I32">
2102 <dim>4</dim>
2103 </port>
2104 </input>
2105 <output>
2106 <port id="2" precision="FP32" names="226,attn_output.7">
2107 <dim>-1</dim>
2108 <dim>-1</dim>
2109 <dim>12</dim>
2110 <dim>32</dim>
2111 </port>
2112 </output>
2113 </layer>
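<!-- Transpose back to [batch, seq, 12, 32]. The Const feeding it is a plain i32 permutation
     (presumably [0, 2, 1, 3]); its "aten::transpose/ScatterElementsUpdate" name appears to be
     a leftover of the PyTorch-to-OpenVINO conversion, not an actual scatter op. -->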
2114 <layer id="142" name="__module.encoder.layer.1.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
2115 <data output_type="i64" />
2116 <input>
2117 <port id="0" precision="FP32">
2118 <dim>-1</dim>
2119 <dim>-1</dim>
2120 <dim>384</dim>
2121 </port>
2122 </input>
2123 <output>
2124 <port id="1" precision="I64">
2125 <dim>3</dim>
2126 </port>
2127 </output>
2128 </layer>
2129 <layer id="143" name="Constant_5757" type="Const" version="opset1">
2130 <data element_type="i64" shape="2" offset="49452664" size="16" />
2131 <output>
2132 <port id="0" precision="I64">
2133 <dim>2</dim>
2134 </port>
2135 </output>
2136 </layer>
2137 <layer id="144" name="Constant_5758" type="Const" version="opset1">
2138 <data element_type="i64" shape="" offset="47675396" size="8" />
2139 <output>
2140 <port id="0" precision="I64" />
2141 </output>
2142 </layer>
2143 <layer id="145" name="Gather_5759" type="Gather" version="opset8">
2144 <data batch_dims="0" />
2145 <input>
2146 <port id="0" precision="I64">
2147 <dim>3</dim>
2148 </port>
2149 <port id="1" precision="I64">
2150 <dim>2</dim>
2151 </port>
2152 <port id="2" precision="I64" />
2153 </input>
2154 <output>
2155 <port id="3" precision="I64">
2156 <dim>2</dim>
2157 </port>
2158 </output>
2159 </layer>
2160 <layer id="146" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
2161 <data axis="0" />
2162 <input>
2163 <port id="0" precision="I64">
2164 <dim>2</dim>
2165 </port>
2166 <port id="1" precision="I64">
2167 <dim>1</dim>
2168 </port>
2169 </input>
2170 <output>
2171 <port id="2" precision="I64" names="227">
2172 <dim>3</dim>
2173 </port>
2174 </output>
2175 </layer>
2176 <layer id="147" name="__module.encoder.layer.1.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
2177 <data special_zero="false" />
2178 <input>
2179 <port id="0" precision="FP32">
2180 <dim>-1</dim>
2181 <dim>-1</dim>
2182 <dim>12</dim>
2183 <dim>32</dim>
2184 </port>
2185 <port id="1" precision="I64">
2186 <dim>3</dim>
2187 </port>
2188 </input>
2189 <output>
2190 <port id="2" precision="FP32" names="228">
2191 <dim>-1</dim>
2192 <dim>-1</dim>
2193 <dim>384</dim>
2194 </port>
2195 </output>
2196 </layer>
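<!-- Heads merged back to 384: the target shape is assembled at runtime as
     Concat(Gather(ShapeOf(x), indices presumably [0, 1]), a one-element constant that is
     presumably [384] judging by the output shape); special_zero="false" since all three
     target dims are explicit. -->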
2197 <layer id="148" name="self.encoder.layer.1.attention.output.dense.weight" type="Const" version="opset1">
2198 <data element_type="f32" shape="384, 384" offset="56550544" size="589824" />
2199 <output>
2200 <port id="0" precision="FP32" names="self.encoder.layer.1.attention.output.dense.weight">
2201 <dim>384</dim>
2202 <dim>384</dim>
2203 </port>
2204 </output>
2205 </layer>
2206 <layer id="149" name="__module.encoder.layer.1.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2207 <data transpose_a="false" transpose_b="true" />
2208 <input>
2209 <port id="0" precision="FP32">
2210 <dim>-1</dim>
2211 <dim>-1</dim>
2212 <dim>384</dim>
2213 </port>
2214 <port id="1" precision="FP32">
2215 <dim>384</dim>
2216 <dim>384</dim>
2217 </port>
2218 </input>
2219 <output>
2220 <port id="2" precision="FP32">
2221 <dim>-1</dim>
2222 <dim>-1</dim>
2223 <dim>384</dim>
2224 </port>
2225 </output>
2226 </layer>
2227 <layer id="150" name="Constant_6247" type="Const" version="opset1">
2228 <data element_type="f32" shape="1, 1, 384" offset="57140368" size="1536" />
2229 <output>
2230 <port id="0" precision="FP32">
2231 <dim>1</dim>
2232 <dim>1</dim>
2233 <dim>384</dim>
2234 </port>
2235 </output>
2236 </layer>
2237 <layer id="151" name="__module.encoder.layer.1.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
2238 <data auto_broadcast="numpy" />
2239 <input>
2240 <port id="0" precision="FP32">
2241 <dim>-1</dim>
2242 <dim>-1</dim>
2243 <dim>384</dim>
2244 </port>
2245 <port id="1" precision="FP32">
2246 <dim>1</dim>
2247 <dim>1</dim>
2248 <dim>384</dim>
2249 </port>
2250 </input>
2251 <output>
2252 <port id="2" precision="FP32" names="234,input.7">
2253 <dim>-1</dim>
2254 <dim>-1</dim>
2255 <dim>384</dim>
2256 </port>
2257 </output>
2258 </layer>
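<!-- Attention output projection: 384 -> 384 linear (MatMul + broadcast bias) producing
     "234,input.7", which is added back onto the block input in the residual Add below. -->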
2259 <layer id="152" name="__module.encoder.layer.1.attention.output/aten::add/Add" type="Add" version="opset1">
2260 <data auto_broadcast="numpy" />
2261 <input>
2262 <port id="0" precision="FP32">
2263 <dim>-1</dim>
2264 <dim>-1</dim>
2265 <dim>384</dim>
2266 </port>
2267 <port id="1" precision="FP32">
2268 <dim>-1</dim>
2269 <dim>-1</dim>
2270 <dim>384</dim>
2271 </port>
2272 </input>
2273 <output>
2274 <port id="2" precision="FP32" names="236">
2275 <dim>-1</dim>
2276 <dim>-1</dim>
2277 <dim>384</dim>
2278 </port>
2279 </output>
2280 </layer>
2281 <layer id="153" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
2282 <data element_type="i32" shape="1" offset="47675412" size="4" />
2283 <output>
2284 <port id="0" precision="I32">
2285 <dim>1</dim>
2286 </port>
2287 </output>
2288 </layer>
2289 <layer id="154" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
2290 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
2291 <input>
2292 <port id="0" precision="FP32">
2293 <dim>-1</dim>
2294 <dim>-1</dim>
2295 <dim>384</dim>
2296 </port>
2297 <port id="1" precision="I32">
2298 <dim>1</dim>
2299 </port>
2300 </input>
2301 <output>
2302 <port id="2" precision="FP32">
2303 <dim>-1</dim>
2304 <dim>-1</dim>
2305 <dim>384</dim>
2306 </port>
2307 </output>
2308 </layer>
2309 <layer id="155" name="Constant_6248" type="Const" version="opset1">
2310 <data element_type="f32" shape="1, 1, 384" offset="57141904" size="1536" />
2311 <output>
2312 <port id="0" precision="FP32">
2313 <dim>1</dim>
2314 <dim>1</dim>
2315 <dim>384</dim>
2316 </port>
2317 </output>
2318 </layer>
2319 <layer id="156" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
2320 <data auto_broadcast="numpy" />
2321 <input>
2322 <port id="0" precision="FP32">
2323 <dim>-1</dim>
2324 <dim>-1</dim>
2325 <dim>384</dim>
2326 </port>
2327 <port id="1" precision="FP32">
2328 <dim>1</dim>
2329 <dim>1</dim>
2330 <dim>384</dim>
2331 </port>
2332 </input>
2333 <output>
2334 <port id="2" precision="FP32">
2335 <dim>-1</dim>
2336 <dim>-1</dim>
2337 <dim>384</dim>
2338 </port>
2339 </output>
2340 </layer>
2341 <layer id="157" name="Constant_6249" type="Const" version="opset1">
2342 <data element_type="f32" shape="1, 1, 384" offset="57143440" size="1536" />
2343 <output>
2344 <port id="0" precision="FP32">
2345 <dim>1</dim>
2346 <dim>1</dim>
2347 <dim>384</dim>
2348 </port>
2349 </output>
2350 </layer>
2351 <layer id="158" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
2352 <data auto_broadcast="numpy" />
2353 <input>
2354 <port id="0" precision="FP32">
2355 <dim>-1</dim>
2356 <dim>-1</dim>
2357 <dim>384</dim>
2358 </port>
2359 <port id="1" precision="FP32">
2360 <dim>1</dim>
2361 <dim>1</dim>
2362 <dim>384</dim>
2363 </port>
2364 </input>
2365 <output>
2366 <port id="2" precision="FP32" names="240,input_tensor.3">
2367 <dim>-1</dim>
2368 <dim>-1</dim>
2369 <dim>384</dim>
2370 </port>
2371 </output>
2372 </layer>
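<!-- Post-attention LayerNorm, decomposed as MVN (normalizing over the axis given by the shared
     i32 const at offset 47675412, presumably the last axis; eps ~= 1e-12 as float32,
     INSIDE_SQRT) followed by a [1, 1, 384] scale Multiply and shift Add, yielding
     "240,input_tensor.3". -->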
2373 <layer id="159" name="self.encoder.layer.1.intermediate.dense.weight" type="Const" version="opset1">
2374 <data element_type="f32" shape="1536, 384" offset="57144976" size="2359296" />
2375 <output>
2376 <port id="0" precision="FP32" names="self.encoder.layer.1.intermediate.dense.weight">
2377 <dim>1536</dim>
2378 <dim>384</dim>
2379 </port>
2380 </output>
2381 </layer>
2382 <layer id="160" name="__module.encoder.layer.1.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2383 <data transpose_a="false" transpose_b="true" />
2384 <input>
2385 <port id="0" precision="FP32">
2386 <dim>-1</dim>
2387 <dim>-1</dim>
2388 <dim>384</dim>
2389 </port>
2390 <port id="1" precision="FP32">
2391 <dim>1536</dim>
2392 <dim>384</dim>
2393 </port>
2394 </input>
2395 <output>
2396 <port id="2" precision="FP32">
2397 <dim>-1</dim>
2398 <dim>-1</dim>
2399 <dim>1536</dim>
2400 </port>
2401 </output>
2402 </layer>
2403 <layer id="161" name="Constant_6250" type="Const" version="opset1">
2404 <data element_type="f32" shape="1, 1, 1536" offset="59504272" size="6144" />
2405 <output>
2406 <port id="0" precision="FP32">
2407 <dim>1</dim>
2408 <dim>1</dim>
2409 <dim>1536</dim>
2410 </port>
2411 </output>
2412 </layer>
2413 <layer id="162" name="__module.encoder.layer.1.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
2414 <data auto_broadcast="numpy" />
2415 <input>
2416 <port id="0" precision="FP32">
2417 <dim>-1</dim>
2418 <dim>-1</dim>
2419 <dim>1536</dim>
2420 </port>
2421 <port id="1" precision="FP32">
2422 <dim>1</dim>
2423 <dim>1</dim>
2424 <dim>1536</dim>
2425 </port>
2426 </input>
2427 <output>
2428 <port id="2" precision="FP32" names="245">
2429 <dim>-1</dim>
2430 <dim>-1</dim>
2431 <dim>1536</dim>
2432 </port>
2433 </output>
2434 </layer>
2435 <layer id="163" name="__module.encoder.layer.1.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
2436 <data approximation_mode="ERF" />
2437 <input>
2438 <port id="0" precision="FP32">
2439 <dim>-1</dim>
2440 <dim>-1</dim>
2441 <dim>1536</dim>
2442 </port>
2443 </input>
2444 <output>
2445 <port id="1" precision="FP32" names="246">
2446 <dim>-1</dim>
2447 <dim>-1</dim>
2448 <dim>1536</dim>
2449 </port>
2450 </output>
2451 </layer>
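<!-- Feed-forward up-projection: 384 -> 1536 linear followed by Gelu with
     approximation_mode="ERF", i.e. the exact (non-tanh) GELU. -->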
2452 <layer id="164" name="self.encoder.layer.1.output.dense.weight" type="Const" version="opset1">
2453 <data element_type="f32" shape="384, 1536" offset="59510416" size="2359296" />
2454 <output>
2455 <port id="0" precision="FP32" names="self.encoder.layer.1.output.dense.weight">
2456 <dim>384</dim>
2457 <dim>1536</dim>
2458 </port>
2459 </output>
2460 </layer>
2461 <layer id="165" name="__module.encoder.layer.1.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2462 <data transpose_a="false" transpose_b="true" />
2463 <input>
2464 <port id="0" precision="FP32">
2465 <dim>-1</dim>
2466 <dim>-1</dim>
2467 <dim>1536</dim>
2468 </port>
2469 <port id="1" precision="FP32">
2470 <dim>384</dim>
2471 <dim>1536</dim>
2472 </port>
2473 </input>
2474 <output>
2475 <port id="2" precision="FP32">
2476 <dim>-1</dim>
2477 <dim>-1</dim>
2478 <dim>384</dim>
2479 </port>
2480 </output>
2481 </layer>
2482 <layer id="166" name="Constant_6251" type="Const" version="opset1">
2483 <data element_type="f32" shape="1, 1, 384" offset="61869712" size="1536" />
2484 <output>
2485 <port id="0" precision="FP32">
2486 <dim>1</dim>
2487 <dim>1</dim>
2488 <dim>384</dim>
2489 </port>
2490 </output>
2491 </layer>
2492 <layer id="167" name="__module.encoder.layer.1.output.dense/aten::linear/Add" type="Add" version="opset1">
2493 <data auto_broadcast="numpy" />
2494 <input>
2495 <port id="0" precision="FP32">
2496 <dim>-1</dim>
2497 <dim>-1</dim>
2498 <dim>384</dim>
2499 </port>
2500 <port id="1" precision="FP32">
2501 <dim>1</dim>
2502 <dim>1</dim>
2503 <dim>384</dim>
2504 </port>
2505 </input>
2506 <output>
2507 <port id="2" precision="FP32" names="252,input.9">
2508 <dim>-1</dim>
2509 <dim>-1</dim>
2510 <dim>384</dim>
2511 </port>
2512 </output>
2513 </layer>
2514 <layer id="168" name="__module.encoder.layer.1.output/aten::add/Add" type="Add" version="opset1">
2515 <data auto_broadcast="numpy" />
2516 <input>
2517 <port id="0" precision="FP32">
2518 <dim>-1</dim>
2519 <dim>-1</dim>
2520 <dim>384</dim>
2521 </port>
2522 <port id="1" precision="FP32">
2523 <dim>-1</dim>
2524 <dim>-1</dim>
2525 <dim>384</dim>
2526 </port>
2527 </input>
2528 <output>
2529 <port id="2" precision="FP32" names="254">
2530 <dim>-1</dim>
2531 <dim>-1</dim>
2532 <dim>384</dim>
2533 </port>
2534 </output>
2535 </layer>
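<!-- Feed-forward down-projection 1536 -> 384 plus residual add, producing "254"; per the usual
     BERT wiring this adds onto the post-attention LayerNorm output "240,input_tensor.3" before
     the output LayerNorm below. -->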
2536 <layer id="169" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
2537 <data element_type="i32" shape="1" offset="47675412" size="4" />
2538 <output>
2539 <port id="0" precision="I32">
2540 <dim>1</dim>
2541 </port>
2542 </output>
2543 </layer>
2544 <layer id="170" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
2545 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
2546 <input>
2547 <port id="0" precision="FP32">
2548 <dim>-1</dim>
2549 <dim>-1</dim>
2550 <dim>384</dim>
2551 </port>
2552 <port id="1" precision="I32">
2553 <dim>1</dim>
2554 </port>
2555 </input>
2556 <output>
2557 <port id="2" precision="FP32">
2558 <dim>-1</dim>
2559 <dim>-1</dim>
2560 <dim>384</dim>
2561 </port>
2562 </output>
2563 </layer>
2564 <layer id="171" name="Constant_6252" type="Const" version="opset1">
2565 <data element_type="f32" shape="1, 1, 384" offset="61871248" size="1536" />
2566 <output>
2567 <port id="0" precision="FP32">
2568 <dim>1</dim>
2569 <dim>1</dim>
2570 <dim>384</dim>
2571 </port>
2572 </output>
2573 </layer>
2574 <layer id="172" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
2575 <data auto_broadcast="numpy" />
2576 <input>
2577 <port id="0" precision="FP32">
2578 <dim>-1</dim>
2579 <dim>-1</dim>
2580 <dim>384</dim>
2581 </port>
2582 <port id="1" precision="FP32">
2583 <dim>1</dim>
2584 <dim>1</dim>
2585 <dim>384</dim>
2586 </port>
2587 </input>
2588 <output>
2589 <port id="2" precision="FP32">
2590 <dim>-1</dim>
2591 <dim>-1</dim>
2592 <dim>384</dim>
2593 </port>
2594 </output>
2595 </layer>
2596 <layer id="173" name="Constant_6253" type="Const" version="opset1">
2597 <data element_type="f32" shape="1, 1, 384" offset="61872784" size="1536" />
2598 <output>
2599 <port id="0" precision="FP32">
2600 <dim>1</dim>
2601 <dim>1</dim>
2602 <dim>384</dim>
2603 </port>
2604 </output>
2605 </layer>
2606 <layer id="174" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
2607 <data auto_broadcast="numpy" />
2608 <input>
2609 <port id="0" precision="FP32">
2610 <dim>-1</dim>
2611 <dim>-1</dim>
2612 <dim>384</dim>
2613 </port>
2614 <port id="1" precision="FP32">
2615 <dim>1</dim>
2616 <dim>1</dim>
2617 <dim>384</dim>
2618 </port>
2619 </input>
2620 <output>
2621 <port id="2" precision="FP32" names="258,hidden_states.13">
2622 <dim>-1</dim>
2623 <dim>-1</dim>
2624 <dim>384</dim>
2625 </port>
2626 </output>
2627 </layer>
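<!-- End of encoder layer 1 ("258,hidden_states.13"). Hidden size 384, 12 heads of 32, FFN width
     1536 and eps ~1e-12 are consistent with a MiniLM-style BERT encoder. The layers below repeat
     the same linear -> SDPA -> projection -> LayerNorm -> GELU FFN -> LayerNorm pattern. -->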
2628 <layer id="175" name="self.encoder.layer.2.attention.self.query.weight" type="Const" version="opset1">
2629 <data element_type="f32" shape="384, 384" offset="61874320" size="589824" />
2630 <output>
2631 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.query.weight">
2632 <dim>384</dim>
2633 <dim>384</dim>
2634 </port>
2635 </output>
2636 </layer>
2637 <layer id="176" name="__module.encoder.layer.2.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
2638 <data transpose_a="false" transpose_b="true" />
2639 <input>
2640 <port id="0" precision="FP32">
2641 <dim>-1</dim>
2642 <dim>-1</dim>
2643 <dim>384</dim>
2644 </port>
2645 <port id="1" precision="FP32">
2646 <dim>384</dim>
2647 <dim>384</dim>
2648 </port>
2649 </input>
2650 <output>
2651 <port id="2" precision="FP32">
2652 <dim>-1</dim>
2653 <dim>-1</dim>
2654 <dim>384</dim>
2655 </port>
2656 </output>
2657 </layer>
2658 <layer id="177" name="Constant_6254" type="Const" version="opset1">
2659 <data element_type="f32" shape="1, 1, 384" offset="62464144" size="1536" />
2660 <output>
2661 <port id="0" precision="FP32">
2662 <dim>1</dim>
2663 <dim>1</dim>
2664 <dim>384</dim>
2665 </port>
2666 </output>
2667 </layer>
2668 <layer id="178" name="__module.encoder.layer.2.attention.self.query/aten::linear/Add" type="Add" version="opset1">
2669 <data auto_broadcast="numpy" />
2670 <input>
2671 <port id="0" precision="FP32">
2672 <dim>-1</dim>
2673 <dim>-1</dim>
2674 <dim>384</dim>
2675 </port>
2676 <port id="1" precision="FP32">
2677 <dim>1</dim>
2678 <dim>1</dim>
2679 <dim>384</dim>
2680 </port>
2681 </input>
2682 <output>
2683 <port id="2" precision="FP32" names="271,x.25">
2684 <dim>-1</dim>
2685 <dim>-1</dim>
2686 <dim>384</dim>
2687 </port>
2688 </output>
2689 </layer>
2690 <layer id="179" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
2691 <data element_type="i64" shape="4" offset="48269848" size="32" />
2692 <output>
2693 <port id="0" precision="I64">
2694 <dim>4</dim>
2695 </port>
2696 </output>
2697 </layer>
2698 <layer id="180" name="__module.encoder.layer.2.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
2699 <data special_zero="true" />
2700 <input>
2701 <port id="0" precision="FP32">
2702 <dim>-1</dim>
2703 <dim>-1</dim>
2704 <dim>384</dim>
2705 </port>
2706 <port id="1" precision="I64">
2707 <dim>4</dim>
2708 </port>
2709 </input>
2710 <output>
2711 <port id="2" precision="FP32" names="275,x.27">
2712 <dim>-1</dim>
2713 <dim>-1</dim>
2714 <dim>12</dim>
2715 <dim>32</dim>
2716 </port>
2717 </output>
2718 </layer>
2719 <layer id="181" name="Constant_705" type="Const" version="opset1">
2720 <data element_type="i64" shape="4" offset="48269880" size="32" />
2721 <output>
2722 <port id="0" precision="I64" names="276">
2723 <dim>4</dim>
2724 </port>
2725 </output>
2726 </layer>
2727 <layer id="182" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
2728 <input>
2729 <port id="0" precision="FP32">
2730 <dim>-1</dim>
2731 <dim>-1</dim>
2732 <dim>12</dim>
2733 <dim>32</dim>
2734 </port>
2735 <port id="1" precision="I64">
2736 <dim>4</dim>
2737 </port>
2738 </input>
2739 <output>
2740 <port id="2" precision="FP32" names="277">
2741 <dim>-1</dim>
2742 <dim>12</dim>
2743 <dim>-1</dim>
2744 <dim>32</dim>
2745 </port>
2746 </output>
2747 </layer>
2748 <layer id="183" name="self.encoder.layer.2.attention.self.key.weight" type="Const" version="opset1">
2749 <data element_type="f32" shape="384, 384" offset="62465680" size="589824" />
2750 <output>
2751 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.key.weight">
2752 <dim>384</dim>
2753 <dim>384</dim>
2754 </port>
2755 </output>
2756 </layer>
2757 <layer id="184" name="__module.encoder.layer.2.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
2758 <data transpose_a="false" transpose_b="true" />
2759 <input>
2760 <port id="0" precision="FP32">
2761 <dim>-1</dim>
2762 <dim>-1</dim>
2763 <dim>384</dim>
2764 </port>
2765 <port id="1" precision="FP32">
2766 <dim>384</dim>
2767 <dim>384</dim>
2768 </port>
2769 </input>
2770 <output>
2771 <port id="2" precision="FP32">
2772 <dim>-1</dim>
2773 <dim>-1</dim>
2774 <dim>384</dim>
2775 </port>
2776 </output>
2777 </layer>
2778 <layer id="185" name="Constant_6255" type="Const" version="opset1">
2779 <data element_type="f32" shape="1, 1, 384" offset="63055504" size="1536" />
2780 <output>
2781 <port id="0" precision="FP32">
2782 <dim>1</dim>
2783 <dim>1</dim>
2784 <dim>384</dim>
2785 </port>
2786 </output>
2787 </layer>
2788 <layer id="186" name="__module.encoder.layer.2.attention.self.key/aten::linear/Add" type="Add" version="opset1">
2789 <data auto_broadcast="numpy" />
2790 <input>
2791 <port id="0" precision="FP32">
2792 <dim>-1</dim>
2793 <dim>-1</dim>
2794 <dim>384</dim>
2795 </port>
2796 <port id="1" precision="FP32">
2797 <dim>1</dim>
2798 <dim>1</dim>
2799 <dim>384</dim>
2800 </port>
2801 </input>
2802 <output>
2803 <port id="2" precision="FP32" names="280,x.29">
2804 <dim>-1</dim>
2805 <dim>-1</dim>
2806 <dim>384</dim>
2807 </port>
2808 </output>
2809 </layer>
2810 <layer id="187" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
2811 <data element_type="i64" shape="4" offset="48269848" size="32" />
2812 <output>
2813 <port id="0" precision="I64">
2814 <dim>4</dim>
2815 </port>
2816 </output>
2817 </layer>
2818 <layer id="188" name="__module.encoder.layer.2.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
2819 <data special_zero="true" />
2820 <input>
2821 <port id="0" precision="FP32">
2822 <dim>-1</dim>
2823 <dim>-1</dim>
2824 <dim>384</dim>
2825 </port>
2826 <port id="1" precision="I64">
2827 <dim>4</dim>
2828 </port>
2829 </input>
2830 <output>
2831 <port id="2" precision="FP32" names="284,x.31">
2832 <dim>-1</dim>
2833 <dim>-1</dim>
2834 <dim>12</dim>
2835 <dim>32</dim>
2836 </port>
2837 </output>
2838 </layer>
2839 <layer id="189" name="Constant_728" type="Const" version="opset1">
2840 <data element_type="i64" shape="4" offset="48269880" size="32" />
2841 <output>
2842 <port id="0" precision="I64" names="285">
2843 <dim>4</dim>
2844 </port>
2845 </output>
2846 </layer>
2847 <layer id="190" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
2848 <input>
2849 <port id="0" precision="FP32">
2850 <dim>-1</dim>
2851 <dim>-1</dim>
2852 <dim>12</dim>
2853 <dim>32</dim>
2854 </port>
2855 <port id="1" precision="I64">
2856 <dim>4</dim>
2857 </port>
2858 </input>
2859 <output>
2860 <port id="2" precision="FP32" names="286">
2861 <dim>-1</dim>
2862 <dim>12</dim>
2863 <dim>-1</dim>
2864 <dim>32</dim>
2865 </port>
2866 </output>
2867 </layer>
2868 <layer id="191" name="self.encoder.layer.2.attention.self.value.weight" type="Const" version="opset1">
2869 <data element_type="f32" shape="384, 384" offset="63057040" size="589824" />
2870 <output>
2871 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.value.weight">
2872 <dim>384</dim>
2873 <dim>384</dim>
2874 </port>
2875 </output>
2876 </layer>
2877 <layer id="192" name="__module.encoder.layer.2.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
2878 <data transpose_a="false" transpose_b="true" />
2879 <input>
2880 <port id="0" precision="FP32">
2881 <dim>-1</dim>
2882 <dim>-1</dim>
2883 <dim>384</dim>
2884 </port>
2885 <port id="1" precision="FP32">
2886 <dim>384</dim>
2887 <dim>384</dim>
2888 </port>
2889 </input>
2890 <output>
2891 <port id="2" precision="FP32">
2892 <dim>-1</dim>
2893 <dim>-1</dim>
2894 <dim>384</dim>
2895 </port>
2896 </output>
2897 </layer>
2898 <layer id="193" name="Constant_6256" type="Const" version="opset1">
2899 <data element_type="f32" shape="1, 1, 384" offset="63646864" size="1536" />
2900 <output>
2901 <port id="0" precision="FP32">
2902 <dim>1</dim>
2903 <dim>1</dim>
2904 <dim>384</dim>
2905 </port>
2906 </output>
2907 </layer>
2908 <layer id="194" name="__module.encoder.layer.2.attention.self.value/aten::linear/Add" type="Add" version="opset1">
2909 <data auto_broadcast="numpy" />
2910 <input>
2911 <port id="0" precision="FP32">
2912 <dim>-1</dim>
2913 <dim>-1</dim>
2914 <dim>384</dim>
2915 </port>
2916 <port id="1" precision="FP32">
2917 <dim>1</dim>
2918 <dim>1</dim>
2919 <dim>384</dim>
2920 </port>
2921 </input>
2922 <output>
2923 <port id="2" precision="FP32" names="289,x.33">
2924 <dim>-1</dim>
2925 <dim>-1</dim>
2926 <dim>384</dim>
2927 </port>
2928 </output>
2929 </layer>
2930 <layer id="195" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
2931 <data element_type="i64" shape="4" offset="48269848" size="32" />
2932 <output>
2933 <port id="0" precision="I64">
2934 <dim>4</dim>
2935 </port>
2936 </output>
2937 </layer>
2938 <layer id="196" name="__module.encoder.layer.2.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
2939 <data special_zero="true" />
2940 <input>
2941 <port id="0" precision="FP32">
2942 <dim>-1</dim>
2943 <dim>-1</dim>
2944 <dim>384</dim>
2945 </port>
2946 <port id="1" precision="I64">
2947 <dim>4</dim>
2948 </port>
2949 </input>
2950 <output>
2951 <port id="2" precision="FP32" names="293,x.35">
2952 <dim>-1</dim>
2953 <dim>-1</dim>
2954 <dim>12</dim>
2955 <dim>32</dim>
2956 </port>
2957 </output>
2958 </layer>
2959 <layer id="197" name="Constant_751" type="Const" version="opset1">
2960 <data element_type="i64" shape="4" offset="48269880" size="32" />
2961 <output>
2962 <port id="0" precision="I64" names="294">
2963 <dim>4</dim>
2964 </port>
2965 </output>
2966 </layer>
2967 <layer id="198" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
2968 <input>
2969 <port id="0" precision="FP32">
2970 <dim>-1</dim>
2971 <dim>-1</dim>
2972 <dim>12</dim>
2973 <dim>32</dim>
2974 </port>
2975 <port id="1" precision="I64">
2976 <dim>4</dim>
2977 </port>
2978 </input>
2979 <output>
2980 <port id="2" precision="FP32" names="295">
2981 <dim>-1</dim>
2982 <dim>12</dim>
2983 <dim>-1</dim>
2984 <dim>32</dim>
2985 </port>
2986 </output>
2987 </layer>
2988 <layer id="199" name="__module.encoder.layer.2.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
2989 <data causal="false" />
2990 <input>
2991 <port id="0" precision="FP32">
2992 <dim>-1</dim>
2993 <dim>12</dim>
2994 <dim>-1</dim>
2995 <dim>32</dim>
2996 </port>
2997 <port id="1" precision="FP32">
2998 <dim>-1</dim>
2999 <dim>12</dim>
3000 <dim>-1</dim>
3001 <dim>32</dim>
3002 </port>
3003 <port id="2" precision="FP32">
3004 <dim>-1</dim>
3005 <dim>12</dim>
3006 <dim>-1</dim>
3007 <dim>32</dim>
3008 </port>
3009 <port id="3" precision="FP32">
3010 <dim>-1</dim>
3011 <dim>1</dim>
3012 <dim>-1</dim>
3013 <dim>-1</dim>
3014 </port>
3015 </input>
3016 <output>
3017 <port id="4" precision="FP32" names="296,attn_output.9">
3018 <dim>-1</dim>
3019 <dim>12</dim>
3020 <dim>-1</dim>
3021 <dim>32</dim>
3022 </port>
3023 </output>
3024 </layer>
3025 <layer id="200" name="__module.encoder.layer.2.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
3026 <data element_type="i32" shape="4" offset="49452648" size="16" />
3027 <output>
3028 <port id="0" precision="I32">
3029 <dim>4</dim>
3030 </port>
3031 </output>
3032 </layer>
3033 <layer id="201" name="__module.encoder.layer.2.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
3034 <input>
3035 <port id="0" precision="FP32">
3036 <dim>-1</dim>
3037 <dim>12</dim>
3038 <dim>-1</dim>
3039 <dim>32</dim>
3040 </port>
3041 <port id="1" precision="I32">
3042 <dim>4</dim>
3043 </port>
3044 </input>
3045 <output>
3046 <port id="2" precision="FP32" names="297,attn_output.11">
3047 <dim>-1</dim>
3048 <dim>-1</dim>
3049 <dim>12</dim>
3050 <dim>32</dim>
3051 </port>
3052 </output>
3053 </layer>
3054 <layer id="202" name="__module.encoder.layer.2.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
3055 <data output_type="i64" />
3056 <input>
3057 <port id="0" precision="FP32">
3058 <dim>-1</dim>
3059 <dim>-1</dim>
3060 <dim>384</dim>
3061 </port>
3062 </input>
3063 <output>
3064 <port id="1" precision="I64">
3065 <dim>3</dim>
3066 </port>
3067 </output>
3068 </layer>
3069 <layer id="203" name="Constant_5777" type="Const" version="opset1">
3070 <data element_type="i64" shape="2" offset="49452664" size="16" />
3071 <output>
3072 <port id="0" precision="I64">
3073 <dim>2</dim>
3074 </port>
3075 </output>
3076 </layer>
3077 <layer id="204" name="Constant_5778" type="Const" version="opset1">
3078 <data element_type="i64" shape="" offset="47675396" size="8" />
3079 <output>
3080 <port id="0" precision="I64" />
3081 </output>
3082 </layer>
3083 <layer id="205" name="Gather_5779" type="Gather" version="opset8">
3084 <data batch_dims="0" />
3085 <input>
3086 <port id="0" precision="I64">
3087 <dim>3</dim>
3088 </port>
3089 <port id="1" precision="I64">
3090 <dim>2</dim>
3091 </port>
3092 <port id="2" precision="I64" />
3093 </input>
3094 <output>
3095 <port id="3" precision="I64">
3096 <dim>2</dim>
3097 </port>
3098 </output>
3099 </layer>
3100 <layer id="206" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
3101 <data axis="0" />
3102 <input>
3103 <port id="0" precision="I64">
3104 <dim>2</dim>
3105 </port>
3106 <port id="1" precision="I64">
3107 <dim>1</dim>
3108 </port>
3109 </input>
3110 <output>
3111 <port id="2" precision="I64" names="298">
3112 <dim>3</dim>
3113 </port>
3114 </output>
3115 </layer>
3116 <layer id="207" name="__module.encoder.layer.2.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
3117 <data special_zero="false" />
3118 <input>
3119 <port id="0" precision="FP32">
3120 <dim>-1</dim>
3121 <dim>-1</dim>
3122 <dim>12</dim>
3123 <dim>32</dim>
3124 </port>
3125 <port id="1" precision="I64">
3126 <dim>3</dim>
3127 </port>
3128 </input>
3129 <output>
3130 <port id="2" precision="FP32" names="299">
3131 <dim>-1</dim>
3132 <dim>-1</dim>
3133 <dim>384</dim>
3134 </port>
3135 </output>
3136 </layer>
3137 <layer id="208" name="self.encoder.layer.2.attention.output.dense.weight" type="Const" version="opset1">
3138 <data element_type="f32" shape="384, 384" offset="63648400" size="589824" />
3139 <output>
3140 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.output.dense.weight">
3141 <dim>384</dim>
3142 <dim>384</dim>
3143 </port>
3144 </output>
3145 </layer>
3146 <layer id="209" name="__module.encoder.layer.2.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
3147 <data transpose_a="false" transpose_b="true" />
3148 <input>
3149 <port id="0" precision="FP32">
3150 <dim>-1</dim>
3151 <dim>-1</dim>
3152 <dim>384</dim>
3153 </port>
3154 <port id="1" precision="FP32">
3155 <dim>384</dim>
3156 <dim>384</dim>
3157 </port>
3158 </input>
3159 <output>
3160 <port id="2" precision="FP32">
3161 <dim>-1</dim>
3162 <dim>-1</dim>
3163 <dim>384</dim>
3164 </port>
3165 </output>
3166 </layer>
3167 <layer id="210" name="Constant_6257" type="Const" version="opset1">
3168 <data element_type="f32" shape="1, 1, 384" offset="64238224" size="1536" />
3169 <output>
3170 <port id="0" precision="FP32">
3171 <dim>1</dim>
3172 <dim>1</dim>
3173 <dim>384</dim>
3174 </port>
3175 </output>
3176 </layer>
3177 <layer id="211" name="__module.encoder.layer.2.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
3178 <data auto_broadcast="numpy" />
3179 <input>
3180 <port id="0" precision="FP32">
3181 <dim>-1</dim>
3182 <dim>-1</dim>
3183 <dim>384</dim>
3184 </port>
3185 <port id="1" precision="FP32">
3186 <dim>1</dim>
3187 <dim>1</dim>
3188 <dim>384</dim>
3189 </port>
3190 </input>
3191 <output>
3192 <port id="2" precision="FP32" names="305,input.11">
3193 <dim>-1</dim>
3194 <dim>-1</dim>
3195 <dim>384</dim>
3196 </port>
3197 </output>
3198 </layer>
3199 <layer id="212" name="__module.encoder.layer.2.attention.output/aten::add/Add" type="Add" version="opset1">
3200 <data auto_broadcast="numpy" />
3201 <input>
3202 <port id="0" precision="FP32">
3203 <dim>-1</dim>
3204 <dim>-1</dim>
3205 <dim>384</dim>
3206 </port>
3207 <port id="1" precision="FP32">
3208 <dim>-1</dim>
3209 <dim>-1</dim>
3210 <dim>384</dim>
3211 </port>
3212 </input>
3213 <output>
3214 <port id="2" precision="FP32" names="307">
3215 <dim>-1</dim>
3216 <dim>-1</dim>
3217 <dim>384</dim>
3218 </port>
3219 </output>
3220 </layer>
3221 <layer id="213" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
3222 <data element_type="i32" shape="1" offset="47675412" size="4" />
3223 <output>
3224 <port id="0" precision="I32">
3225 <dim>1</dim>
3226 </port>
3227 </output>
3228 </layer>
3229 <layer id="214" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
3230 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
3231 <input>
3232 <port id="0" precision="FP32">
3233 <dim>-1</dim>
3234 <dim>-1</dim>
3235 <dim>384</dim>
3236 </port>
3237 <port id="1" precision="I32">
3238 <dim>1</dim>
3239 </port>
3240 </input>
3241 <output>
3242 <port id="2" precision="FP32">
3243 <dim>-1</dim>
3244 <dim>-1</dim>
3245 <dim>384</dim>
3246 </port>
3247 </output>
3248 </layer>
3249 <layer id="215" name="Constant_6258" type="Const" version="opset1">
3250 <data element_type="f32" shape="1, 1, 384" offset="64239760" size="1536" />
3251 <output>
3252 <port id="0" precision="FP32">
3253 <dim>1</dim>
3254 <dim>1</dim>
3255 <dim>384</dim>
3256 </port>
3257 </output>
3258 </layer>
3259 <layer id="216" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
3260 <data auto_broadcast="numpy" />
3261 <input>
3262 <port id="0" precision="FP32">
3263 <dim>-1</dim>
3264 <dim>-1</dim>
3265 <dim>384</dim>
3266 </port>
3267 <port id="1" precision="FP32">
3268 <dim>1</dim>
3269 <dim>1</dim>
3270 <dim>384</dim>
3271 </port>
3272 </input>
3273 <output>
3274 <port id="2" precision="FP32">
3275 <dim>-1</dim>
3276 <dim>-1</dim>
3277 <dim>384</dim>
3278 </port>
3279 </output>
3280 </layer>
3281 <layer id="217" name="Constant_6259" type="Const" version="opset1">
3282 <data element_type="f32" shape="1, 1, 384" offset="64241296" size="1536" />
3283 <output>
3284 <port id="0" precision="FP32">
3285 <dim>1</dim>
3286 <dim>1</dim>
3287 <dim>384</dim>
3288 </port>
3289 </output>
3290 </layer>
3291 <layer id="218" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
3292 <data auto_broadcast="numpy" />
3293 <input>
3294 <port id="0" precision="FP32">
3295 <dim>-1</dim>
3296 <dim>-1</dim>
3297 <dim>384</dim>
3298 </port>
3299 <port id="1" precision="FP32">
3300 <dim>1</dim>
3301 <dim>1</dim>
3302 <dim>384</dim>
3303 </port>
3304 </input>
3305 <output>
3306 <port id="2" precision="FP32" names="311,input_tensor.5">
3307 <dim>-1</dim>
3308 <dim>-1</dim>
3309 <dim>384</dim>
3310 </port>
3311 </output>
3312 </layer>
3313 <layer id="219" name="self.encoder.layer.2.intermediate.dense.weight" type="Const" version="opset1">
3314 <data element_type="f32" shape="1536, 384" offset="64242832" size="2359296" />
3315 <output>
3316 <port id="0" precision="FP32" names="self.encoder.layer.2.intermediate.dense.weight">
3317 <dim>1536</dim>
3318 <dim>384</dim>
3319 </port>
3320 </output>
3321 </layer>
3322 <layer id="220" name="__module.encoder.layer.2.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
3323 <data transpose_a="false" transpose_b="true" />
3324 <input>
3325 <port id="0" precision="FP32">
3326 <dim>-1</dim>
3327 <dim>-1</dim>
3328 <dim>384</dim>
3329 </port>
3330 <port id="1" precision="FP32">
3331 <dim>1536</dim>
3332 <dim>384</dim>
3333 </port>
3334 </input>
3335 <output>
3336 <port id="2" precision="FP32">
3337 <dim>-1</dim>
3338 <dim>-1</dim>
3339 <dim>1536</dim>
3340 </port>
3341 </output>
3342 </layer>
3343 <layer id="221" name="Constant_6260" type="Const" version="opset1">
3344 <data element_type="f32" shape="1, 1, 1536" offset="66602128" size="6144" />
3345 <output>
3346 <port id="0" precision="FP32">
3347 <dim>1</dim>
3348 <dim>1</dim>
3349 <dim>1536</dim>
3350 </port>
3351 </output>
3352 </layer>
3353 <layer id="222" name="__module.encoder.layer.2.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
3354 <data auto_broadcast="numpy" />
3355 <input>
3356 <port id="0" precision="FP32">
3357 <dim>-1</dim>
3358 <dim>-1</dim>
3359 <dim>1536</dim>
3360 </port>
3361 <port id="1" precision="FP32">
3362 <dim>1</dim>
3363 <dim>1</dim>
3364 <dim>1536</dim>
3365 </port>
3366 </input>
3367 <output>
3368 <port id="2" precision="FP32" names="316">
3369 <dim>-1</dim>
3370 <dim>-1</dim>
3371 <dim>1536</dim>
3372 </port>
3373 </output>
3374 </layer>
3375 <layer id="223" name="__module.encoder.layer.2.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
3376 <data approximation_mode="ERF" />
3377 <input>
3378 <port id="0" precision="FP32">
3379 <dim>-1</dim>
3380 <dim>-1</dim>
3381 <dim>1536</dim>
3382 </port>
3383 </input>
3384 <output>
3385 <port id="1" precision="FP32" names="317">
3386 <dim>-1</dim>
3387 <dim>-1</dim>
3388 <dim>1536</dim>
3389 </port>
3390 </output>
3391 </layer>
3392 <layer id="224" name="self.encoder.layer.2.output.dense.weight" type="Const" version="opset1">
3393 <data element_type="f32" shape="384, 1536" offset="66608272" size="2359296" />
3394 <output>
3395 <port id="0" precision="FP32" names="self.encoder.layer.2.output.dense.weight">
3396 <dim>384</dim>
3397 <dim>1536</dim>
3398 </port>
3399 </output>
3400 </layer>
3401 <layer id="225" name="__module.encoder.layer.2.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
3402 <data transpose_a="false" transpose_b="true" />
3403 <input>
3404 <port id="0" precision="FP32">
3405 <dim>-1</dim>
3406 <dim>-1</dim>
3407 <dim>1536</dim>
3408 </port>
3409 <port id="1" precision="FP32">
3410 <dim>384</dim>
3411 <dim>1536</dim>
3412 </port>
3413 </input>
3414 <output>
3415 <port id="2" precision="FP32">
3416 <dim>-1</dim>
3417 <dim>-1</dim>
3418 <dim>384</dim>
3419 </port>
3420 </output>
3421 </layer>
3422 <layer id="226" name="Constant_6261" type="Const" version="opset1">
3423 <data element_type="f32" shape="1, 1, 384" offset="68967568" size="1536" />
3424 <output>
3425 <port id="0" precision="FP32">
3426 <dim>1</dim>
3427 <dim>1</dim>
3428 <dim>384</dim>
3429 </port>
3430 </output>
3431 </layer>
3432 <layer id="227" name="__module.encoder.layer.2.output.dense/aten::linear/Add" type="Add" version="opset1">
3433 <data auto_broadcast="numpy" />
3434 <input>
3435 <port id="0" precision="FP32">
3436 <dim>-1</dim>
3437 <dim>-1</dim>
3438 <dim>384</dim>
3439 </port>
3440 <port id="1" precision="FP32">
3441 <dim>1</dim>
3442 <dim>1</dim>
3443 <dim>384</dim>
3444 </port>
3445 </input>
3446 <output>
3447 <port id="2" precision="FP32" names="323,input.13">
3448 <dim>-1</dim>
3449 <dim>-1</dim>
3450 <dim>384</dim>
3451 </port>
3452 </output>
3453 </layer>
3454 <layer id="228" name="__module.encoder.layer.2.output/aten::add/Add" type="Add" version="opset1">
3455 <data auto_broadcast="numpy" />
3456 <input>
3457 <port id="0" precision="FP32">
3458 <dim>-1</dim>
3459 <dim>-1</dim>
3460 <dim>384</dim>
3461 </port>
3462 <port id="1" precision="FP32">
3463 <dim>-1</dim>
3464 <dim>-1</dim>
3465 <dim>384</dim>
3466 </port>
3467 </input>
3468 <output>
3469 <port id="2" precision="FP32" names="325">
3470 <dim>-1</dim>
3471 <dim>-1</dim>
3472 <dim>384</dim>
3473 </port>
3474 </output>
3475 </layer>
3476 <layer id="229" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
3477 <data element_type="i32" shape="1" offset="47675412" size="4" />
3478 <output>
3479 <port id="0" precision="I32">
3480 <dim>1</dim>
3481 </port>
3482 </output>
3483 </layer>
3484 <layer id="230" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
3485 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
3486 <input>
3487 <port id="0" precision="FP32">
3488 <dim>-1</dim>
3489 <dim>-1</dim>
3490 <dim>384</dim>
3491 </port>
3492 <port id="1" precision="I32">
3493 <dim>1</dim>
3494 </port>
3495 </input>
3496 <output>
3497 <port id="2" precision="FP32">
3498 <dim>-1</dim>
3499 <dim>-1</dim>
3500 <dim>384</dim>
3501 </port>
3502 </output>
3503 </layer>
3504 <layer id="231" name="Constant_6262" type="Const" version="opset1">
3505 <data element_type="f32" shape="1, 1, 384" offset="68969104" size="1536" />
3506 <output>
3507 <port id="0" precision="FP32">
3508 <dim>1</dim>
3509 <dim>1</dim>
3510 <dim>384</dim>
3511 </port>
3512 </output>
3513 </layer>
3514 <layer id="232" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
3515 <data auto_broadcast="numpy" />
3516 <input>
3517 <port id="0" precision="FP32">
3518 <dim>-1</dim>
3519 <dim>-1</dim>
3520 <dim>384</dim>
3521 </port>
3522 <port id="1" precision="FP32">
3523 <dim>1</dim>
3524 <dim>1</dim>
3525 <dim>384</dim>
3526 </port>
3527 </input>
3528 <output>
3529 <port id="2" precision="FP32">
3530 <dim>-1</dim>
3531 <dim>-1</dim>
3532 <dim>384</dim>
3533 </port>
3534 </output>
3535 </layer>
3536 <layer id="233" name="Constant_6263" type="Const" version="opset1">
3537 <data element_type="f32" shape="1, 1, 384" offset="68970640" size="1536" />
3538 <output>
3539 <port id="0" precision="FP32">
3540 <dim>1</dim>
3541 <dim>1</dim>
3542 <dim>384</dim>
3543 </port>
3544 </output>
3545 </layer>
3546 <layer id="234" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
3547 <data auto_broadcast="numpy" />
3548 <input>
3549 <port id="0" precision="FP32">
3550 <dim>-1</dim>
3551 <dim>-1</dim>
3552 <dim>384</dim>
3553 </port>
3554 <port id="1" precision="FP32">
3555 <dim>1</dim>
3556 <dim>1</dim>
3557 <dim>384</dim>
3558 </port>
3559 </input>
3560 <output>
3561 <port id="2" precision="FP32" names="329,hidden_states.19">
3562 <dim>-1</dim>
3563 <dim>-1</dim>
3564 <dim>384</dim>
3565 </port>
3566 </output>
3567 </layer>
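<!-- End of encoder layer 2 ("329,hidden_states.19"); structurally identical to layer 1, with only
     the weight/bias offsets into the .bin advancing, while the shape, permutation, and axis
     constants remain shared by offset. Encoder layer 3 begins below. -->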
3568 <layer id="235" name="self.encoder.layer.3.attention.self.query.weight" type="Const" version="opset1">
3569 <data element_type="f32" shape="384, 384" offset="68972176" size="589824" />
3570 <output>
3571 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.query.weight">
3572 <dim>384</dim>
3573 <dim>384</dim>
3574 </port>
3575 </output>
3576 </layer>
3577 <layer id="236" name="__module.encoder.layer.3.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
3578 <data transpose_a="false" transpose_b="true" />
3579 <input>
3580 <port id="0" precision="FP32">
3581 <dim>-1</dim>
3582 <dim>-1</dim>
3583 <dim>384</dim>
3584 </port>
3585 <port id="1" precision="FP32">
3586 <dim>384</dim>
3587 <dim>384</dim>
3588 </port>
3589 </input>
3590 <output>
3591 <port id="2" precision="FP32">
3592 <dim>-1</dim>
3593 <dim>-1</dim>
3594 <dim>384</dim>
3595 </port>
3596 </output>
3597 </layer>
3598 <layer id="237" name="Constant_6264" type="Const" version="opset1">
3599 <data element_type="f32" shape="1, 1, 384" offset="69562000" size="1536" />
3600 <output>
3601 <port id="0" precision="FP32">
3602 <dim>1</dim>
3603 <dim>1</dim>
3604 <dim>384</dim>
3605 </port>
3606 </output>
3607 </layer>
3608 <layer id="238" name="__module.encoder.layer.3.attention.self.query/aten::linear/Add" type="Add" version="opset1">
3609 <data auto_broadcast="numpy" />
3610 <input>
3611 <port id="0" precision="FP32">
3612 <dim>-1</dim>
3613 <dim>-1</dim>
3614 <dim>384</dim>
3615 </port>
3616 <port id="1" precision="FP32">
3617 <dim>1</dim>
3618 <dim>1</dim>
3619 <dim>384</dim>
3620 </port>
3621 </input>
3622 <output>
3623 <port id="2" precision="FP32" names="342,x.37">
3624 <dim>-1</dim>
3625 <dim>-1</dim>
3626 <dim>384</dim>
3627 </port>
3628 </output>
3629 </layer>
3630 <layer id="239" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
3631 <data element_type="i64" shape="4" offset="48269848" size="32" />
3632 <output>
3633 <port id="0" precision="I64">
3634 <dim>4</dim>
3635 </port>
3636 </output>
3637 </layer>
3638 <layer id="240" name="__module.encoder.layer.3.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
3639 <data special_zero="true" />
3640 <input>
3641 <port id="0" precision="FP32">
3642 <dim>-1</dim>
3643 <dim>-1</dim>
3644 <dim>384</dim>
3645 </port>
3646 <port id="1" precision="I64">
3647 <dim>4</dim>
3648 </port>
3649 </input>
3650 <output>
3651 <port id="2" precision="FP32" names="346,x.39">
3652 <dim>-1</dim>
3653 <dim>-1</dim>
3654 <dim>12</dim>
3655 <dim>32</dim>
3656 </port>
3657 </output>
3658 </layer>
3659 <layer id="241" name="Constant_931" type="Const" version="opset1">
3660 <data element_type="i64" shape="4" offset="48269880" size="32" />
3661 <output>
3662 <port id="0" precision="I64" names="347">
3663 <dim>4</dim>
3664 </port>
3665 </output>
3666 </layer>
3667 <layer id="242" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
3668 <input>
3669 <port id="0" precision="FP32">
3670 <dim>-1</dim>
3671 <dim>-1</dim>
3672 <dim>12</dim>
3673 <dim>32</dim>
3674 </port>
3675 <port id="1" precision="I64">
3676 <dim>4</dim>
3677 </port>
3678 </input>
3679 <output>
3680 <port id="2" precision="FP32" names="348">
3681 <dim>-1</dim>
3682 <dim>12</dim>
3683 <dim>-1</dim>
3684 <dim>32</dim>
3685 </port>
3686 </output>
3687 </layer>
3688 <layer id="243" name="self.encoder.layer.3.attention.self.key.weight" type="Const" version="opset1">
3689 <data element_type="f32" shape="384, 384" offset="69563536" size="589824" />
3690 <output>
3691 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.key.weight">
3692 <dim>384</dim>
3693 <dim>384</dim>
3694 </port>
3695 </output>
3696 </layer>
3697 <layer id="244" name="__module.encoder.layer.3.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
3698 <data transpose_a="false" transpose_b="true" />
3699 <input>
3700 <port id="0" precision="FP32">
3701 <dim>-1</dim>
3702 <dim>-1</dim>
3703 <dim>384</dim>
3704 </port>
3705 <port id="1" precision="FP32">
3706 <dim>384</dim>
3707 <dim>384</dim>
3708 </port>
3709 </input>
3710 <output>
3711 <port id="2" precision="FP32">
3712 <dim>-1</dim>
3713 <dim>-1</dim>
3714 <dim>384</dim>
3715 </port>
3716 </output>
3717 </layer>
3718 <layer id="245" name="Constant_6265" type="Const" version="opset1">
3719 <data element_type="f32" shape="1, 1, 384" offset="70153360" size="1536" />
3720 <output>
3721 <port id="0" precision="FP32">
3722 <dim>1</dim>
3723 <dim>1</dim>
3724 <dim>384</dim>
3725 </port>
3726 </output>
3727 </layer>
3728 <layer id="246" name="__module.encoder.layer.3.attention.self.key/aten::linear/Add" type="Add" version="opset1">
3729 <data auto_broadcast="numpy" />
3730 <input>
3731 <port id="0" precision="FP32">
3732 <dim>-1</dim>
3733 <dim>-1</dim>
3734 <dim>384</dim>
3735 </port>
3736 <port id="1" precision="FP32">
3737 <dim>1</dim>
3738 <dim>1</dim>
3739 <dim>384</dim>
3740 </port>
3741 </input>
3742 <output>
3743 <port id="2" precision="FP32" names="351,x.41">
3744 <dim>-1</dim>
3745 <dim>-1</dim>
3746 <dim>384</dim>
3747 </port>
3748 </output>
3749 </layer>
3750 <layer id="247" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
3751 <data element_type="i64" shape="4" offset="48269848" size="32" />
3752 <output>
3753 <port id="0" precision="I64">
3754 <dim>4</dim>
3755 </port>
3756 </output>
3757 </layer>
3758 <layer id="248" name="__module.encoder.layer.3.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
3759 <data special_zero="true" />
3760 <input>
3761 <port id="0" precision="FP32">
3762 <dim>-1</dim>
3763 <dim>-1</dim>
3764 <dim>384</dim>
3765 </port>
3766 <port id="1" precision="I64">
3767 <dim>4</dim>
3768 </port>
3769 </input>
3770 <output>
3771 <port id="2" precision="FP32" names="355,x.43">
3772 <dim>-1</dim>
3773 <dim>-1</dim>
3774 <dim>12</dim>
3775 <dim>32</dim>
3776 </port>
3777 </output>
3778 </layer>
3779 <layer id="249" name="Constant_954" type="Const" version="opset1">
3780 <data element_type="i64" shape="4" offset="48269880" size="32" />
3781 <output>
3782 <port id="0" precision="I64" names="356">
3783 <dim>4</dim>
3784 </port>
3785 </output>
3786 </layer>
3787 <layer id="250" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
3788 <input>
3789 <port id="0" precision="FP32">
3790 <dim>-1</dim>
3791 <dim>-1</dim>
3792 <dim>12</dim>
3793 <dim>32</dim>
3794 </port>
3795 <port id="1" precision="I64">
3796 <dim>4</dim>
3797 </port>
3798 </input>
3799 <output>
3800 <port id="2" precision="FP32" names="357">
3801 <dim>-1</dim>
3802 <dim>12</dim>
3803 <dim>-1</dim>
3804 <dim>32</dim>
3805 </port>
3806 </output>
3807 </layer>
3808 <layer id="251" name="self.encoder.layer.3.attention.self.value.weight" type="Const" version="opset1">
3809 <data element_type="f32" shape="384, 384" offset="70154896" size="589824" />
3810 <output>
3811 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.value.weight">
3812 <dim>384</dim>
3813 <dim>384</dim>
3814 </port>
3815 </output>
3816 </layer>
3817 <layer id="252" name="__module.encoder.layer.3.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
3818 <data transpose_a="false" transpose_b="true" />
3819 <input>
3820 <port id="0" precision="FP32">
3821 <dim>-1</dim>
3822 <dim>-1</dim>
3823 <dim>384</dim>
3824 </port>
3825 <port id="1" precision="FP32">
3826 <dim>384</dim>
3827 <dim>384</dim>
3828 </port>
3829 </input>
3830 <output>
3831 <port id="2" precision="FP32">
3832 <dim>-1</dim>
3833 <dim>-1</dim>
3834 <dim>384</dim>
3835 </port>
3836 </output>
3837 </layer>
3838 <layer id="253" name="Constant_6266" type="Const" version="opset1">
3839 <data element_type="f32" shape="1, 1, 384" offset="70744720" size="1536" />
3840 <output>
3841 <port id="0" precision="FP32">
3842 <dim>1</dim>
3843 <dim>1</dim>
3844 <dim>384</dim>
3845 </port>
3846 </output>
3847 </layer>
3848 <layer id="254" name="__module.encoder.layer.3.attention.self.value/aten::linear/Add" type="Add" version="opset1">
3849 <data auto_broadcast="numpy" />
3850 <input>
3851 <port id="0" precision="FP32">
3852 <dim>-1</dim>
3853 <dim>-1</dim>
3854 <dim>384</dim>
3855 </port>
3856 <port id="1" precision="FP32">
3857 <dim>1</dim>
3858 <dim>1</dim>
3859 <dim>384</dim>
3860 </port>
3861 </input>
3862 <output>
3863 <port id="2" precision="FP32" names="360,x.45">
3864 <dim>-1</dim>
3865 <dim>-1</dim>
3866 <dim>384</dim>
3867 </port>
3868 </output>
3869 </layer>
3870 <layer id="255" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
3871 <data element_type="i64" shape="4" offset="48269848" size="32" />
3872 <output>
3873 <port id="0" precision="I64">
3874 <dim>4</dim>
3875 </port>
3876 </output>
3877 </layer>
3878 <layer id="256" name="__module.encoder.layer.3.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
3879 <data special_zero="true" />
3880 <input>
3881 <port id="0" precision="FP32">
3882 <dim>-1</dim>
3883 <dim>-1</dim>
3884 <dim>384</dim>
3885 </port>
3886 <port id="1" precision="I64">
3887 <dim>4</dim>
3888 </port>
3889 </input>
3890 <output>
3891 <port id="2" precision="FP32" names="364,x.47">
3892 <dim>-1</dim>
3893 <dim>-1</dim>
3894 <dim>12</dim>
3895 <dim>32</dim>
3896 </port>
3897 </output>
3898 </layer>
3899 <layer id="257" name="Constant_977" type="Const" version="opset1">
3900 <data element_type="i64" shape="4" offset="48269880" size="32" />
3901 <output>
3902 <port id="0" precision="I64" names="365">
3903 <dim>4</dim>
3904 </port>
3905 </output>
3906 </layer>
3907 <layer id="258" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
3908 <input>
3909 <port id="0" precision="FP32">
3910 <dim>-1</dim>
3911 <dim>-1</dim>
3912 <dim>12</dim>
3913 <dim>32</dim>
3914 </port>
3915 <port id="1" precision="I64">
3916 <dim>4</dim>
3917 </port>
3918 </input>
3919 <output>
3920 <port id="2" precision="FP32" names="366">
3921 <dim>-1</dim>
3922 <dim>12</dim>
3923 <dim>-1</dim>
3924 <dim>32</dim>
3925 </port>
3926 </output>
3927 </layer>
3928 <layer id="259" name="__module.encoder.layer.3.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
3929 <data causal="false" />
3930 <input>
3931 <port id="0" precision="FP32">
3932 <dim>-1</dim>
3933 <dim>12</dim>
3934 <dim>-1</dim>
3935 <dim>32</dim>
3936 </port>
3937 <port id="1" precision="FP32">
3938 <dim>-1</dim>
3939 <dim>12</dim>
3940 <dim>-1</dim>
3941 <dim>32</dim>
3942 </port>
3943 <port id="2" precision="FP32">
3944 <dim>-1</dim>
3945 <dim>12</dim>
3946 <dim>-1</dim>
3947 <dim>32</dim>
3948 </port>
3949 <port id="3" precision="FP32">
3950 <dim>-1</dim>
3951 <dim>1</dim>
3952 <dim>-1</dim>
3953 <dim>-1</dim>
3954 </port>
3955 </input>
3956 <output>
3957 <port id="4" precision="FP32" names="367,attn_output.13">
3958 <dim>-1</dim>
3959 <dim>12</dim>
3960 <dim>-1</dim>
3961 <dim>32</dim>
3962 </port>
3963 </output>
3964 </layer>
3965 <layer id="260" name="__module.encoder.layer.3.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
3966 <data element_type="i32" shape="4" offset="49452648" size="16" />
3967 <output>
3968 <port id="0" precision="I32">
3969 <dim>4</dim>
3970 </port>
3971 </output>
3972 </layer>
3973 <layer id="261" name="__module.encoder.layer.3.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
3974 <input>
3975 <port id="0" precision="FP32">
3976 <dim>-1</dim>
3977 <dim>12</dim>
3978 <dim>-1</dim>
3979 <dim>32</dim>
3980 </port>
3981 <port id="1" precision="I32">
3982 <dim>4</dim>
3983 </port>
3984 </input>
3985 <output>
3986 <port id="2" precision="FP32" names="368,attn_output.15">
3987 <dim>-1</dim>
3988 <dim>-1</dim>
3989 <dim>12</dim>
3990 <dim>32</dim>
3991 </port>
3992 </output>
3993 </layer>
3994 <layer id="262" name="__module.encoder.layer.3.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
3995 <data output_type="i64" />
3996 <input>
3997 <port id="0" precision="FP32">
3998 <dim>-1</dim>
3999 <dim>-1</dim>
4000 <dim>384</dim>
4001 </port>
4002 </input>
4003 <output>
4004 <port id="1" precision="I64">
4005 <dim>3</dim>
4006 </port>
4007 </output>
4008 </layer>
4009 <layer id="263" name="Constant_5797" type="Const" version="opset1">
4010 <data element_type="i64" shape="2" offset="49452664" size="16" />
4011 <output>
4012 <port id="0" precision="I64">
4013 <dim>2</dim>
4014 </port>
4015 </output>
4016 </layer>
4017 <layer id="264" name="Constant_5798" type="Const" version="opset1">
4018 <data element_type="i64" shape="" offset="47675396" size="8" />
4019 <output>
4020 <port id="0" precision="I64" />
4021 </output>
4022 </layer>
4023 <layer id="265" name="Gather_5799" type="Gather" version="opset8">
4024 <data batch_dims="0" />
4025 <input>
4026 <port id="0" precision="I64">
4027 <dim>3</dim>
4028 </port>
4029 <port id="1" precision="I64">
4030 <dim>2</dim>
4031 </port>
4032 <port id="2" precision="I64" />
4033 </input>
4034 <output>
4035 <port id="3" precision="I64">
4036 <dim>2</dim>
4037 </port>
4038 </output>
4039 </layer>
4040 <layer id="266" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
4041 <data axis="0" />
4042 <input>
4043 <port id="0" precision="I64">
4044 <dim>2</dim>
4045 </port>
4046 <port id="1" precision="I64">
4047 <dim>1</dim>
4048 </port>
4049 </input>
4050 <output>
4051 <port id="2" precision="I64" names="369">
4052 <dim>3</dim>
4053 </port>
4054 </output>
4055 </layer>
4056 <layer id="267" name="__module.encoder.layer.3.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
4057 <data special_zero="false" />
4058 <input>
4059 <port id="0" precision="FP32">
4060 <dim>-1</dim>
4061 <dim>-1</dim>
4062 <dim>12</dim>
4063 <dim>32</dim>
4064 </port>
4065 <port id="1" precision="I64">
4066 <dim>3</dim>
4067 </port>
4068 </input>
4069 <output>
4070 <port id="2" precision="FP32" names="370">
4071 <dim>-1</dim>
4072 <dim>-1</dim>
4073 <dim>384</dim>
4074 </port>
4075 </output>
4076 </layer>
4077 <layer id="268" name="self.encoder.layer.3.attention.output.dense.weight" type="Const" version="opset1">
4078 <data element_type="f32" shape="384, 384" offset="70746256" size="589824" />
4079 <output>
4080 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.output.dense.weight">
4081 <dim>384</dim>
4082 <dim>384</dim>
4083 </port>
4084 </output>
4085 </layer>
4086 <layer id="269" name="__module.encoder.layer.3.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
4087 <data transpose_a="false" transpose_b="true" />
4088 <input>
4089 <port id="0" precision="FP32">
4090 <dim>-1</dim>
4091 <dim>-1</dim>
4092 <dim>384</dim>
4093 </port>
4094 <port id="1" precision="FP32">
4095 <dim>384</dim>
4096 <dim>384</dim>
4097 </port>
4098 </input>
4099 <output>
4100 <port id="2" precision="FP32">
4101 <dim>-1</dim>
4102 <dim>-1</dim>
4103 <dim>384</dim>
4104 </port>
4105 </output>
4106 </layer>
4107 <layer id="270" name="Constant_6267" type="Const" version="opset1">
4108 <data element_type="f32" shape="1, 1, 384" offset="71336080" size="1536" />
4109 <output>
4110 <port id="0" precision="FP32">
4111 <dim>1</dim>
4112 <dim>1</dim>
4113 <dim>384</dim>
4114 </port>
4115 </output>
4116 </layer>
4117 <layer id="271" name="__module.encoder.layer.3.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
4118 <data auto_broadcast="numpy" />
4119 <input>
4120 <port id="0" precision="FP32">
4121 <dim>-1</dim>
4122 <dim>-1</dim>
4123 <dim>384</dim>
4124 </port>
4125 <port id="1" precision="FP32">
4126 <dim>1</dim>
4127 <dim>1</dim>
4128 <dim>384</dim>
4129 </port>
4130 </input>
4131 <output>
4132 <port id="2" precision="FP32" names="376,input.15">
4133 <dim>-1</dim>
4134 <dim>-1</dim>
4135 <dim>384</dim>
4136 </port>
4137 </output>
4138 </layer>
4139 <layer id="272" name="__module.encoder.layer.3.attention.output/aten::add/Add" type="Add" version="opset1">
4140 <data auto_broadcast="numpy" />
4141 <input>
4142 <port id="0" precision="FP32">
4143 <dim>-1</dim>
4144 <dim>-1</dim>
4145 <dim>384</dim>
4146 </port>
4147 <port id="1" precision="FP32">
4148 <dim>-1</dim>
4149 <dim>-1</dim>
4150 <dim>384</dim>
4151 </port>
4152 </input>
4153 <output>
4154 <port id="2" precision="FP32" names="378">
4155 <dim>-1</dim>
4156 <dim>-1</dim>
4157 <dim>384</dim>
4158 </port>
4159 </output>
4160 </layer>
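	<!-- Layers 273-278: the exported aten::layer_norm, decomposed into MVN (normalization
	     over the hidden axis, eps ~ 1e-12, INSIDE_SQRT) followed by an elementwise
	     Multiply (gamma) and Add (beta), i.e. y = gamma * (x - mean) / sqrt(var + eps) + beta. -->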
4161 <layer id="273" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
4162 <data element_type="i32" shape="1" offset="47675412" size="4" />
4163 <output>
4164 <port id="0" precision="I32">
4165 <dim>1</dim>
4166 </port>
4167 </output>
4168 </layer>
4169 <layer id="274" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
4170 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
4171 <input>
4172 <port id="0" precision="FP32">
4173 <dim>-1</dim>
4174 <dim>-1</dim>
4175 <dim>384</dim>
4176 </port>
4177 <port id="1" precision="I32">
4178 <dim>1</dim>
4179 </port>
4180 </input>
4181 <output>
4182 <port id="2" precision="FP32">
4183 <dim>-1</dim>
4184 <dim>-1</dim>
4185 <dim>384</dim>
4186 </port>
4187 </output>
4188 </layer>
4189 <layer id="275" name="Constant_6268" type="Const" version="opset1">
4190 <data element_type="f32" shape="1, 1, 384" offset="71337616" size="1536" />
4191 <output>
4192 <port id="0" precision="FP32">
4193 <dim>1</dim>
4194 <dim>1</dim>
4195 <dim>384</dim>
4196 </port>
4197 </output>
4198 </layer>
4199 <layer id="276" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
4200 <data auto_broadcast="numpy" />
4201 <input>
4202 <port id="0" precision="FP32">
4203 <dim>-1</dim>
4204 <dim>-1</dim>
4205 <dim>384</dim>
4206 </port>
4207 <port id="1" precision="FP32">
4208 <dim>1</dim>
4209 <dim>1</dim>
4210 <dim>384</dim>
4211 </port>
4212 </input>
4213 <output>
4214 <port id="2" precision="FP32">
4215 <dim>-1</dim>
4216 <dim>-1</dim>
4217 <dim>384</dim>
4218 </port>
4219 </output>
4220 </layer>
4221 <layer id="277" name="Constant_6269" type="Const" version="opset1">
4222 <data element_type="f32" shape="1, 1, 384" offset="71339152" size="1536" />
4223 <output>
4224 <port id="0" precision="FP32">
4225 <dim>1</dim>
4226 <dim>1</dim>
4227 <dim>384</dim>
4228 </port>
4229 </output>
4230 </layer>
4231 <layer id="278" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
4232 <data auto_broadcast="numpy" />
4233 <input>
4234 <port id="0" precision="FP32">
4235 <dim>-1</dim>
4236 <dim>-1</dim>
4237 <dim>384</dim>
4238 </port>
4239 <port id="1" precision="FP32">
4240 <dim>1</dim>
4241 <dim>1</dim>
4242 <dim>384</dim>
4243 </port>
4244 </input>
4245 <output>
4246 <port id="2" precision="FP32" names="382,input_tensor.7">
4247 <dim>-1</dim>
4248 <dim>-1</dim>
4249 <dim>384</dim>
4250 </port>
4251 </output>
4252 </layer>
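	<!-- Layers 279-287: the layer-3 feed-forward block. intermediate.dense expands
	     384 -> 1536 (MatMul with transpose_b plus a bias Add), Gelu applies the ERF-form
	     activation, and output.dense projects 1536 -> 384; layers 288-294 then add the
	     residual and apply the second LayerNorm of the block. -->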
4253 <layer id="279" name="self.encoder.layer.3.intermediate.dense.weight" type="Const" version="opset1">
4254 <data element_type="f32" shape="1536, 384" offset="71340688" size="2359296" />
4255 <output>
4256 <port id="0" precision="FP32" names="self.encoder.layer.3.intermediate.dense.weight">
4257 <dim>1536</dim>
4258 <dim>384</dim>
4259 </port>
4260 </output>
4261 </layer>
4262 <layer id="280" name="__module.encoder.layer.3.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
4263 <data transpose_a="false" transpose_b="true" />
4264 <input>
4265 <port id="0" precision="FP32">
4266 <dim>-1</dim>
4267 <dim>-1</dim>
4268 <dim>384</dim>
4269 </port>
4270 <port id="1" precision="FP32">
4271 <dim>1536</dim>
4272 <dim>384</dim>
4273 </port>
4274 </input>
4275 <output>
4276 <port id="2" precision="FP32">
4277 <dim>-1</dim>
4278 <dim>-1</dim>
4279 <dim>1536</dim>
4280 </port>
4281 </output>
4282 </layer>
4283 <layer id="281" name="Constant_6270" type="Const" version="opset1">
4284 <data element_type="f32" shape="1, 1, 1536" offset="73699984" size="6144" />
4285 <output>
4286 <port id="0" precision="FP32">
4287 <dim>1</dim>
4288 <dim>1</dim>
4289 <dim>1536</dim>
4290 </port>
4291 </output>
4292 </layer>
4293 <layer id="282" name="__module.encoder.layer.3.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
4294 <data auto_broadcast="numpy" />
4295 <input>
4296 <port id="0" precision="FP32">
4297 <dim>-1</dim>
4298 <dim>-1</dim>
4299 <dim>1536</dim>
4300 </port>
4301 <port id="1" precision="FP32">
4302 <dim>1</dim>
4303 <dim>1</dim>
4304 <dim>1536</dim>
4305 </port>
4306 </input>
4307 <output>
4308 <port id="2" precision="FP32" names="387">
4309 <dim>-1</dim>
4310 <dim>-1</dim>
4311 <dim>1536</dim>
4312 </port>
4313 </output>
4314 </layer>
4315 <layer id="283" name="__module.encoder.layer.3.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
4316 <data approximation_mode="ERF" />
4317 <input>
4318 <port id="0" precision="FP32">
4319 <dim>-1</dim>
4320 <dim>-1</dim>
4321 <dim>1536</dim>
4322 </port>
4323 </input>
4324 <output>
4325 <port id="1" precision="FP32" names="388">
4326 <dim>-1</dim>
4327 <dim>-1</dim>
4328 <dim>1536</dim>
4329 </port>
4330 </output>
4331 </layer>
4332 <layer id="284" name="self.encoder.layer.3.output.dense.weight" type="Const" version="opset1">
4333 <data element_type="f32" shape="384, 1536" offset="73706128" size="2359296" />
4334 <output>
4335 <port id="0" precision="FP32" names="self.encoder.layer.3.output.dense.weight">
4336 <dim>384</dim>
4337 <dim>1536</dim>
4338 </port>
4339 </output>
4340 </layer>
4341 <layer id="285" name="__module.encoder.layer.3.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
4342 <data transpose_a="false" transpose_b="true" />
4343 <input>
4344 <port id="0" precision="FP32">
4345 <dim>-1</dim>
4346 <dim>-1</dim>
4347 <dim>1536</dim>
4348 </port>
4349 <port id="1" precision="FP32">
4350 <dim>384</dim>
4351 <dim>1536</dim>
4352 </port>
4353 </input>
4354 <output>
4355 <port id="2" precision="FP32">
4356 <dim>-1</dim>
4357 <dim>-1</dim>
4358 <dim>384</dim>
4359 </port>
4360 </output>
4361 </layer>
4362 <layer id="286" name="Constant_6271" type="Const" version="opset1">
4363 <data element_type="f32" shape="1, 1, 384" offset="76065424" size="1536" />
4364 <output>
4365 <port id="0" precision="FP32">
4366 <dim>1</dim>
4367 <dim>1</dim>
4368 <dim>384</dim>
4369 </port>
4370 </output>
4371 </layer>
4372 <layer id="287" name="__module.encoder.layer.3.output.dense/aten::linear/Add" type="Add" version="opset1">
4373 <data auto_broadcast="numpy" />
4374 <input>
4375 <port id="0" precision="FP32">
4376 <dim>-1</dim>
4377 <dim>-1</dim>
4378 <dim>384</dim>
4379 </port>
4380 <port id="1" precision="FP32">
4381 <dim>1</dim>
4382 <dim>1</dim>
4383 <dim>384</dim>
4384 </port>
4385 </input>
4386 <output>
4387 <port id="2" precision="FP32" names="394,input.17">
4388 <dim>-1</dim>
4389 <dim>-1</dim>
4390 <dim>384</dim>
4391 </port>
4392 </output>
4393 </layer>
4394 <layer id="288" name="__module.encoder.layer.3.output/aten::add/Add" type="Add" version="opset1">
4395 <data auto_broadcast="numpy" />
4396 <input>
4397 <port id="0" precision="FP32">
4398 <dim>-1</dim>
4399 <dim>-1</dim>
4400 <dim>384</dim>
4401 </port>
4402 <port id="1" precision="FP32">
4403 <dim>-1</dim>
4404 <dim>-1</dim>
4405 <dim>384</dim>
4406 </port>
4407 </input>
4408 <output>
4409 <port id="2" precision="FP32" names="396">
4410 <dim>-1</dim>
4411 <dim>-1</dim>
4412 <dim>384</dim>
4413 </port>
4414 </output>
4415 </layer>
4416 <layer id="289" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
4417 <data element_type="i32" shape="1" offset="47675412" size="4" />
4418 <output>
4419 <port id="0" precision="I32">
4420 <dim>1</dim>
4421 </port>
4422 </output>
4423 </layer>
4424 <layer id="290" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
4425 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
4426 <input>
4427 <port id="0" precision="FP32">
4428 <dim>-1</dim>
4429 <dim>-1</dim>
4430 <dim>384</dim>
4431 </port>
4432 <port id="1" precision="I32">
4433 <dim>1</dim>
4434 </port>
4435 </input>
4436 <output>
4437 <port id="2" precision="FP32">
4438 <dim>-1</dim>
4439 <dim>-1</dim>
4440 <dim>384</dim>
4441 </port>
4442 </output>
4443 </layer>
4444 <layer id="291" name="Constant_6272" type="Const" version="opset1">
4445 <data element_type="f32" shape="1, 1, 384" offset="76066960" size="1536" />
4446 <output>
4447 <port id="0" precision="FP32">
4448 <dim>1</dim>
4449 <dim>1</dim>
4450 <dim>384</dim>
4451 </port>
4452 </output>
4453 </layer>
4454 <layer id="292" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
4455 <data auto_broadcast="numpy" />
4456 <input>
4457 <port id="0" precision="FP32">
4458 <dim>-1</dim>
4459 <dim>-1</dim>
4460 <dim>384</dim>
4461 </port>
4462 <port id="1" precision="FP32">
4463 <dim>1</dim>
4464 <dim>1</dim>
4465 <dim>384</dim>
4466 </port>
4467 </input>
4468 <output>
4469 <port id="2" precision="FP32">
4470 <dim>-1</dim>
4471 <dim>-1</dim>
4472 <dim>384</dim>
4473 </port>
4474 </output>
4475 </layer>
4476 <layer id="293" name="Constant_6273" type="Const" version="opset1">
4477 <data element_type="f32" shape="1, 1, 384" offset="76068496" size="1536" />
4478 <output>
4479 <port id="0" precision="FP32">
4480 <dim>1</dim>
4481 <dim>1</dim>
4482 <dim>384</dim>
4483 </port>
4484 </output>
4485 </layer>
4486 <layer id="294" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
4487 <data auto_broadcast="numpy" />
4488 <input>
4489 <port id="0" precision="FP32">
4490 <dim>-1</dim>
4491 <dim>-1</dim>
4492 <dim>384</dim>
4493 </port>
4494 <port id="1" precision="FP32">
4495 <dim>1</dim>
4496 <dim>1</dim>
4497 <dim>384</dim>
4498 </port>
4499 </input>
4500 <output>
4501 <port id="2" precision="FP32" names="400,hidden_states.25">
4502 <dim>-1</dim>
4503 <dim>-1</dim>
4504 <dim>384</dim>
4505 </port>
4506 </output>
4507 </layer>
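	<!-- Encoder layer 4 (layers 295-354) repeats the standard block: Q/K/V projections
	     split into 12 heads of 32 (12 x 32 = 384), scaled dot-product attention, output
	     projection with residual + LayerNorm, then the GELU feed-forward with its own
	     residual + LayerNorm. The shape constants at offsets 48269848 and 48269880 are
	     the same ones referenced by earlier layers; identical constants appear to be
	     deduplicated in the weights file. -->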
4508 <layer id="295" name="self.encoder.layer.4.attention.self.query.weight" type="Const" version="opset1">
4509 <data element_type="f32" shape="384, 384" offset="76070032" size="589824" />
4510 <output>
4511 <port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.query.weight">
4512 <dim>384</dim>
4513 <dim>384</dim>
4514 </port>
4515 </output>
4516 </layer>
4517 <layer id="296" name="__module.encoder.layer.4.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
4518 <data transpose_a="false" transpose_b="true" />
4519 <input>
4520 <port id="0" precision="FP32">
4521 <dim>-1</dim>
4522 <dim>-1</dim>
4523 <dim>384</dim>
4524 </port>
4525 <port id="1" precision="FP32">
4526 <dim>384</dim>
4527 <dim>384</dim>
4528 </port>
4529 </input>
4530 <output>
4531 <port id="2" precision="FP32">
4532 <dim>-1</dim>
4533 <dim>-1</dim>
4534 <dim>384</dim>
4535 </port>
4536 </output>
4537 </layer>
4538 <layer id="297" name="Constant_6274" type="Const" version="opset1">
4539 <data element_type="f32" shape="1, 1, 384" offset="76659856" size="1536" />
4540 <output>
4541 <port id="0" precision="FP32">
4542 <dim>1</dim>
4543 <dim>1</dim>
4544 <dim>384</dim>
4545 </port>
4546 </output>
4547 </layer>
4548 <layer id="298" name="__module.encoder.layer.4.attention.self.query/aten::linear/Add" type="Add" version="opset1">
4549 <data auto_broadcast="numpy" />
4550 <input>
4551 <port id="0" precision="FP32">
4552 <dim>-1</dim>
4553 <dim>-1</dim>
4554 <dim>384</dim>
4555 </port>
4556 <port id="1" precision="FP32">
4557 <dim>1</dim>
4558 <dim>1</dim>
4559 <dim>384</dim>
4560 </port>
4561 </input>
4562 <output>
4563 <port id="2" precision="FP32" names="413,x.49">
4564 <dim>-1</dim>
4565 <dim>-1</dim>
4566 <dim>384</dim>
4567 </port>
4568 </output>
4569 </layer>
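	<!-- Layers 299-302: split the query into heads. With special_zero=true the Reshape
	     target (presumably [0, 0, 12, 32]; the raw values live in the weights file, so
	     this is an inference from special_zero semantics and the output shape) keeps the
	     batch and sequence dims, giving [B, S, 12, 32]; the following Transpose yields
	     [B, 12, S, 32]. Layers 303-318 do the same for key and value. -->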
4570 <layer id="299" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
4571 <data element_type="i64" shape="4" offset="48269848" size="32" />
4572 <output>
4573 <port id="0" precision="I64">
4574 <dim>4</dim>
4575 </port>
4576 </output>
4577 </layer>
4578 <layer id="300" name="__module.encoder.layer.4.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
4579 <data special_zero="true" />
4580 <input>
4581 <port id="0" precision="FP32">
4582 <dim>-1</dim>
4583 <dim>-1</dim>
4584 <dim>384</dim>
4585 </port>
4586 <port id="1" precision="I64">
4587 <dim>4</dim>
4588 </port>
4589 </input>
4590 <output>
4591 <port id="2" precision="FP32" names="417,x.51">
4592 <dim>-1</dim>
4593 <dim>-1</dim>
4594 <dim>12</dim>
4595 <dim>32</dim>
4596 </port>
4597 </output>
4598 </layer>
4599 <layer id="301" name="Constant_1157" type="Const" version="opset1">
4600 <data element_type="i64" shape="4" offset="48269880" size="32" />
4601 <output>
4602 <port id="0" precision="I64" names="418">
4603 <dim>4</dim>
4604 </port>
4605 </output>
4606 </layer>
4607 <layer id="302" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
4608 <input>
4609 <port id="0" precision="FP32">
4610 <dim>-1</dim>
4611 <dim>-1</dim>
4612 <dim>12</dim>
4613 <dim>32</dim>
4614 </port>
4615 <port id="1" precision="I64">
4616 <dim>4</dim>
4617 </port>
4618 </input>
4619 <output>
4620 <port id="2" precision="FP32" names="419">
4621 <dim>-1</dim>
4622 <dim>12</dim>
4623 <dim>-1</dim>
4624 <dim>32</dim>
4625 </port>
4626 </output>
4627 </layer>
4628 <layer id="303" name="self.encoder.layer.4.attention.self.key.weight" type="Const" version="opset1">
4629 <data element_type="f32" shape="384, 384" offset="76661392" size="589824" />
4630 <output>
4631 <port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.key.weight">
4632 <dim>384</dim>
4633 <dim>384</dim>
4634 </port>
4635 </output>
4636 </layer>
4637 <layer id="304" name="__module.encoder.layer.4.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
4638 <data transpose_a="false" transpose_b="true" />
4639 <input>
4640 <port id="0" precision="FP32">
4641 <dim>-1</dim>
4642 <dim>-1</dim>
4643 <dim>384</dim>
4644 </port>
4645 <port id="1" precision="FP32">
4646 <dim>384</dim>
4647 <dim>384</dim>
4648 </port>
4649 </input>
4650 <output>
4651 <port id="2" precision="FP32">
4652 <dim>-1</dim>
4653 <dim>-1</dim>
4654 <dim>384</dim>
4655 </port>
4656 </output>
4657 </layer>
4658 <layer id="305" name="Constant_6275" type="Const" version="opset1">
4659 <data element_type="f32" shape="1, 1, 384" offset="77251216" size="1536" />
4660 <output>
4661 <port id="0" precision="FP32">
4662 <dim>1</dim>
4663 <dim>1</dim>
4664 <dim>384</dim>
4665 </port>
4666 </output>
4667 </layer>
4668 <layer id="306" name="__module.encoder.layer.4.attention.self.key/aten::linear/Add" type="Add" version="opset1">
4669 <data auto_broadcast="numpy" />
4670 <input>
4671 <port id="0" precision="FP32">
4672 <dim>-1</dim>
4673 <dim>-1</dim>
4674 <dim>384</dim>
4675 </port>
4676 <port id="1" precision="FP32">
4677 <dim>1</dim>
4678 <dim>1</dim>
4679 <dim>384</dim>
4680 </port>
4681 </input>
4682 <output>
4683 <port id="2" precision="FP32" names="422,x.53">
4684 <dim>-1</dim>
4685 <dim>-1</dim>
4686 <dim>384</dim>
4687 </port>
4688 </output>
4689 </layer>
4690 <layer id="307" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
4691 <data element_type="i64" shape="4" offset="48269848" size="32" />
4692 <output>
4693 <port id="0" precision="I64">
4694 <dim>4</dim>
4695 </port>
4696 </output>
4697 </layer>
4698 <layer id="308" name="__module.encoder.layer.4.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
4699 <data special_zero="true" />
4700 <input>
4701 <port id="0" precision="FP32">
4702 <dim>-1</dim>
4703 <dim>-1</dim>
4704 <dim>384</dim>
4705 </port>
4706 <port id="1" precision="I64">
4707 <dim>4</dim>
4708 </port>
4709 </input>
4710 <output>
4711 <port id="2" precision="FP32" names="426,x.55">
4712 <dim>-1</dim>
4713 <dim>-1</dim>
4714 <dim>12</dim>
4715 <dim>32</dim>
4716 </port>
4717 </output>
4718 </layer>
4719 <layer id="309" name="Constant_1180" type="Const" version="opset1">
4720 <data element_type="i64" shape="4" offset="48269880" size="32" />
4721 <output>
4722 <port id="0" precision="I64" names="427">
4723 <dim>4</dim>
4724 </port>
4725 </output>
4726 </layer>
4727 <layer id="310" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
4728 <input>
4729 <port id="0" precision="FP32">
4730 <dim>-1</dim>
4731 <dim>-1</dim>
4732 <dim>12</dim>
4733 <dim>32</dim>
4734 </port>
4735 <port id="1" precision="I64">
4736 <dim>4</dim>
4737 </port>
4738 </input>
4739 <output>
4740 <port id="2" precision="FP32" names="428">
4741 <dim>-1</dim>
4742 <dim>12</dim>
4743 <dim>-1</dim>
4744 <dim>32</dim>
4745 </port>
4746 </output>
4747 </layer>
4748 <layer id="311" name="self.encoder.layer.4.attention.self.value.weight" type="Const" version="opset1">
4749 <data element_type="f32" shape="384, 384" offset="77252752" size="589824" />
4750 <output>
4751 <port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.value.weight">
4752 <dim>384</dim>
4753 <dim>384</dim>
4754 </port>
4755 </output>
4756 </layer>
4757 <layer id="312" name="__module.encoder.layer.4.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
4758 <data transpose_a="false" transpose_b="true" />
4759 <input>
4760 <port id="0" precision="FP32">
4761 <dim>-1</dim>
4762 <dim>-1</dim>
4763 <dim>384</dim>
4764 </port>
4765 <port id="1" precision="FP32">
4766 <dim>384</dim>
4767 <dim>384</dim>
4768 </port>
4769 </input>
4770 <output>
4771 <port id="2" precision="FP32">
4772 <dim>-1</dim>
4773 <dim>-1</dim>
4774 <dim>384</dim>
4775 </port>
4776 </output>
4777 </layer>
4778 <layer id="313" name="Constant_6276" type="Const" version="opset1">
4779 <data element_type="f32" shape="1, 1, 384" offset="77842576" size="1536" />
4780 <output>
4781 <port id="0" precision="FP32">
4782 <dim>1</dim>
4783 <dim>1</dim>
4784 <dim>384</dim>
4785 </port>
4786 </output>
4787 </layer>
4788 <layer id="314" name="__module.encoder.layer.4.attention.self.value/aten::linear/Add" type="Add" version="opset1">
4789 <data auto_broadcast="numpy" />
4790 <input>
4791 <port id="0" precision="FP32">
4792 <dim>-1</dim>
4793 <dim>-1</dim>
4794 <dim>384</dim>
4795 </port>
4796 <port id="1" precision="FP32">
4797 <dim>1</dim>
4798 <dim>1</dim>
4799 <dim>384</dim>
4800 </port>
4801 </input>
4802 <output>
4803 <port id="2" precision="FP32" names="431,x.57">
4804 <dim>-1</dim>
4805 <dim>-1</dim>
4806 <dim>384</dim>
4807 </port>
4808 </output>
4809 </layer>
4810 <layer id="315" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
4811 <data element_type="i64" shape="4" offset="48269848" size="32" />
4812 <output>
4813 <port id="0" precision="I64">
4814 <dim>4</dim>
4815 </port>
4816 </output>
4817 </layer>
4818 <layer id="316" name="__module.encoder.layer.4.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
4819 <data special_zero="true" />
4820 <input>
4821 <port id="0" precision="FP32">
4822 <dim>-1</dim>
4823 <dim>-1</dim>
4824 <dim>384</dim>
4825 </port>
4826 <port id="1" precision="I64">
4827 <dim>4</dim>
4828 </port>
4829 </input>
4830 <output>
4831 <port id="2" precision="FP32" names="435,x.59">
4832 <dim>-1</dim>
4833 <dim>-1</dim>
4834 <dim>12</dim>
4835 <dim>32</dim>
4836 </port>
4837 </output>
4838 </layer>
4839 <layer id="317" name="Constant_1203" type="Const" version="opset1">
4840 <data element_type="i64" shape="4" offset="48269880" size="32" />
4841 <output>
4842 <port id="0" precision="I64" names="436">
4843 <dim>4</dim>
4844 </port>
4845 </output>
4846 </layer>
4847 <layer id="318" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
4848 <input>
4849 <port id="0" precision="FP32">
4850 <dim>-1</dim>
4851 <dim>-1</dim>
4852 <dim>12</dim>
4853 <dim>32</dim>
4854 </port>
4855 <port id="1" precision="I64">
4856 <dim>4</dim>
4857 </port>
4858 </input>
4859 <output>
4860 <port id="2" precision="FP32" names="437">
4861 <dim>-1</dim>
4862 <dim>12</dim>
4863 <dim>-1</dim>
4864 <dim>32</dim>
4865 </port>
4866 </output>
4867 </layer>
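	<!-- Layer 319: ScaledDotProductAttention (opset13, causal=false) over Q, K, V of
	     shape [B, 12, S, 32] plus the broadcastable 4-D attention-mask bias on port 3.
	     With no explicit scale input this should compute softmax(Q K^T / sqrt(32) + mask) V,
	     since 1/sqrt(head_size) is the opset default. -->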
4868 <layer id="319" name="__module.encoder.layer.4.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
4869 <data causal="false" />
4870 <input>
4871 <port id="0" precision="FP32">
4872 <dim>-1</dim>
4873 <dim>12</dim>
4874 <dim>-1</dim>
4875 <dim>32</dim>
4876 </port>
4877 <port id="1" precision="FP32">
4878 <dim>-1</dim>
4879 <dim>12</dim>
4880 <dim>-1</dim>
4881 <dim>32</dim>
4882 </port>
4883 <port id="2" precision="FP32">
4884 <dim>-1</dim>
4885 <dim>12</dim>
4886 <dim>-1</dim>
4887 <dim>32</dim>
4888 </port>
4889 <port id="3" precision="FP32">
4890 <dim>-1</dim>
4891 <dim>1</dim>
4892 <dim>-1</dim>
4893 <dim>-1</dim>
4894 </port>
4895 </input>
4896 <output>
4897 <port id="4" precision="FP32" names="438,attn_output.17">
4898 <dim>-1</dim>
4899 <dim>12</dim>
4900 <dim>-1</dim>
4901 <dim>32</dim>
4902 </port>
4903 </output>
4904 </layer>
4905 <layer id="320" name="__module.encoder.layer.4.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
4906 <data element_type="i32" shape="4" offset="49452648" size="16" />
4907 <output>
4908 <port id="0" precision="I32">
4909 <dim>4</dim>
4910 </port>
4911 </output>
4912 </layer>
4913 <layer id="321" name="__module.encoder.layer.4.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
4914 <input>
4915 <port id="0" precision="FP32">
4916 <dim>-1</dim>
4917 <dim>12</dim>
4918 <dim>-1</dim>
4919 <dim>32</dim>
4920 </port>
4921 <port id="1" precision="I32">
4922 <dim>4</dim>
4923 </port>
4924 </input>
4925 <output>
4926 <port id="2" precision="FP32" names="439,attn_output.19">
4927 <dim>-1</dim>
4928 <dim>-1</dim>
4929 <dim>12</dim>
4930 <dim>32</dim>
4931 </port>
4932 </output>
4933 </layer>
4934 <layer id="322" name="__module.encoder.layer.4.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
4935 <data output_type="i64" />
4936 <input>
4937 <port id="0" precision="FP32">
4938 <dim>-1</dim>
4939 <dim>-1</dim>
4940 <dim>384</dim>
4941 </port>
4942 </input>
4943 <output>
4944 <port id="1" precision="I64">
4945 <dim>3</dim>
4946 </port>
4947 </output>
4948 </layer>
4949 <layer id="323" name="Constant_5817" type="Const" version="opset1">
4950 <data element_type="i64" shape="2" offset="49452664" size="16" />
4951 <output>
4952 <port id="0" precision="I64">
4953 <dim>2</dim>
4954 </port>
4955 </output>
4956 </layer>
4957 <layer id="324" name="Constant_5818" type="Const" version="opset1">
4958 <data element_type="i64" shape="" offset="47675396" size="8" />
4959 <output>
4960 <port id="0" precision="I64" />
4961 </output>
4962 </layer>
4963 <layer id="325" name="Gather_5819" type="Gather" version="opset8">
4964 <data batch_dims="0" />
4965 <input>
4966 <port id="0" precision="I64">
4967 <dim>3</dim>
4968 </port>
4969 <port id="1" precision="I64">
4970 <dim>2</dim>
4971 </port>
4972 <port id="2" precision="I64" />
4973 </input>
4974 <output>
4975 <port id="3" precision="I64">
4976 <dim>2</dim>
4977 </port>
4978 </output>
4979 </layer>
4980 <layer id="326" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
4981 <data axis="0" />
4982 <input>
4983 <port id="0" precision="I64">
4984 <dim>2</dim>
4985 </port>
4986 <port id="1" precision="I64">
4987 <dim>1</dim>
4988 </port>
4989 </input>
4990 <output>
4991 <port id="2" precision="I64" names="440">
4992 <dim>3</dim>
4993 </port>
4994 </output>
4995 </layer>
4996 <layer id="327" name="__module.encoder.layer.4.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
4997 <data special_zero="false" />
4998 <input>
4999 <port id="0" precision="FP32">
5000 <dim>-1</dim>
5001 <dim>-1</dim>
5002 <dim>12</dim>
5003 <dim>32</dim>
5004 </port>
5005 <port id="1" precision="I64">
5006 <dim>3</dim>
5007 </port>
5008 </input>
5009 <output>
5010 <port id="2" precision="FP32" names="441">
5011 <dim>-1</dim>
5012 <dim>-1</dim>
5013 <dim>384</dim>
5014 </port>
5015 </output>
5016 </layer>
5017 <layer id="328" name="self.encoder.layer.4.attention.output.dense.weight" type="Const" version="opset1">
5018 <data element_type="f32" shape="384, 384" offset="77844112" size="589824" />
5019 <output>
5020 <port id="0" precision="FP32" names="self.encoder.layer.4.attention.output.dense.weight">
5021 <dim>384</dim>
5022 <dim>384</dim>
5023 </port>
5024 </output>
5025 </layer>
5026 <layer id="329" name="__module.encoder.layer.4.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
5027 <data transpose_a="false" transpose_b="true" />
5028 <input>
5029 <port id="0" precision="FP32">
5030 <dim>-1</dim>
5031 <dim>-1</dim>
5032 <dim>384</dim>
5033 </port>
5034 <port id="1" precision="FP32">
5035 <dim>384</dim>
5036 <dim>384</dim>
5037 </port>
5038 </input>
5039 <output>
5040 <port id="2" precision="FP32">
5041 <dim>-1</dim>
5042 <dim>-1</dim>
5043 <dim>384</dim>
5044 </port>
5045 </output>
5046 </layer>
5047 <layer id="330" name="Constant_6277" type="Const" version="opset1">
5048 <data element_type="f32" shape="1, 1, 384" offset="78433936" size="1536" />
5049 <output>
5050 <port id="0" precision="FP32">
5051 <dim>1</dim>
5052 <dim>1</dim>
5053 <dim>384</dim>
5054 </port>
5055 </output>
5056 </layer>
5057 <layer id="331" name="__module.encoder.layer.4.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
5058 <data auto_broadcast="numpy" />
5059 <input>
5060 <port id="0" precision="FP32">
5061 <dim>-1</dim>
5062 <dim>-1</dim>
5063 <dim>384</dim>
5064 </port>
5065 <port id="1" precision="FP32">
5066 <dim>1</dim>
5067 <dim>1</dim>
5068 <dim>384</dim>
5069 </port>
5070 </input>
5071 <output>
5072 <port id="2" precision="FP32" names="447,input.19">
5073 <dim>-1</dim>
5074 <dim>-1</dim>
5075 <dim>384</dim>
5076 </port>
5077 </output>
5078 </layer>
5079 <layer id="332" name="__module.encoder.layer.4.attention.output/aten::add/Add" type="Add" version="opset1">
5080 <data auto_broadcast="numpy" />
5081 <input>
5082 <port id="0" precision="FP32">
5083 <dim>-1</dim>
5084 <dim>-1</dim>
5085 <dim>384</dim>
5086 </port>
5087 <port id="1" precision="FP32">
5088 <dim>-1</dim>
5089 <dim>-1</dim>
5090 <dim>384</dim>
5091 </port>
5092 </input>
5093 <output>
5094 <port id="2" precision="FP32" names="449">
5095 <dim>-1</dim>
5096 <dim>-1</dim>
5097 <dim>384</dim>
5098 </port>
5099 </output>
5100 </layer>
5101 <layer id="333" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
5102 <data element_type="i32" shape="1" offset="47675412" size="4" />
5103 <output>
5104 <port id="0" precision="I32">
5105 <dim>1</dim>
5106 </port>
5107 </output>
5108 </layer>
5109 <layer id="334" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
5110 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
5111 <input>
5112 <port id="0" precision="FP32">
5113 <dim>-1</dim>
5114 <dim>-1</dim>
5115 <dim>384</dim>
5116 </port>
5117 <port id="1" precision="I32">
5118 <dim>1</dim>
5119 </port>
5120 </input>
5121 <output>
5122 <port id="2" precision="FP32">
5123 <dim>-1</dim>
5124 <dim>-1</dim>
5125 <dim>384</dim>
5126 </port>
5127 </output>
5128 </layer>
5129 <layer id="335" name="Constant_6278" type="Const" version="opset1">
5130 <data element_type="f32" shape="1, 1, 384" offset="78435472" size="1536" />
5131 <output>
5132 <port id="0" precision="FP32">
5133 <dim>1</dim>
5134 <dim>1</dim>
5135 <dim>384</dim>
5136 </port>
5137 </output>
5138 </layer>
5139 <layer id="336" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
5140 <data auto_broadcast="numpy" />
5141 <input>
5142 <port id="0" precision="FP32">
5143 <dim>-1</dim>
5144 <dim>-1</dim>
5145 <dim>384</dim>
5146 </port>
5147 <port id="1" precision="FP32">
5148 <dim>1</dim>
5149 <dim>1</dim>
5150 <dim>384</dim>
5151 </port>
5152 </input>
5153 <output>
5154 <port id="2" precision="FP32">
5155 <dim>-1</dim>
5156 <dim>-1</dim>
5157 <dim>384</dim>
5158 </port>
5159 </output>
5160 </layer>
5161 <layer id="337" name="Constant_6279" type="Const" version="opset1">
5162 <data element_type="f32" shape="1, 1, 384" offset="78437008" size="1536" />
5163 <output>
5164 <port id="0" precision="FP32">
5165 <dim>1</dim>
5166 <dim>1</dim>
5167 <dim>384</dim>
5168 </port>
5169 </output>
5170 </layer>
5171 <layer id="338" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
5172 <data auto_broadcast="numpy" />
5173 <input>
5174 <port id="0" precision="FP32">
5175 <dim>-1</dim>
5176 <dim>-1</dim>
5177 <dim>384</dim>
5178 </port>
5179 <port id="1" precision="FP32">
5180 <dim>1</dim>
5181 <dim>1</dim>
5182 <dim>384</dim>
5183 </port>
5184 </input>
5185 <output>
5186 <port id="2" precision="FP32" names="453,input_tensor.9">
5187 <dim>-1</dim>
5188 <dim>-1</dim>
5189 <dim>384</dim>
5190 </port>
5191 </output>
5192 </layer>
5193 <layer id="339" name="self.encoder.layer.4.intermediate.dense.weight" type="Const" version="opset1">
5194 <data element_type="f32" shape="1536, 384" offset="78438544" size="2359296" />
5195 <output>
5196 <port id="0" precision="FP32" names="self.encoder.layer.4.intermediate.dense.weight">
5197 <dim>1536</dim>
5198 <dim>384</dim>
5199 </port>
5200 </output>
5201 </layer>
5202 <layer id="340" name="__module.encoder.layer.4.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
5203 <data transpose_a="false" transpose_b="true" />
5204 <input>
5205 <port id="0" precision="FP32">
5206 <dim>-1</dim>
5207 <dim>-1</dim>
5208 <dim>384</dim>
5209 </port>
5210 <port id="1" precision="FP32">
5211 <dim>1536</dim>
5212 <dim>384</dim>
5213 </port>
5214 </input>
5215 <output>
5216 <port id="2" precision="FP32">
5217 <dim>-1</dim>
5218 <dim>-1</dim>
5219 <dim>1536</dim>
5220 </port>
5221 </output>
5222 </layer>
5223 <layer id="341" name="Constant_6280" type="Const" version="opset1">
5224 <data element_type="f32" shape="1, 1, 1536" offset="80797840" size="6144" />
5225 <output>
5226 <port id="0" precision="FP32">
5227 <dim>1</dim>
5228 <dim>1</dim>
5229 <dim>1536</dim>
5230 </port>
5231 </output>
5232 </layer>
5233 <layer id="342" name="__module.encoder.layer.4.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
5234 <data auto_broadcast="numpy" />
5235 <input>
5236 <port id="0" precision="FP32">
5237 <dim>-1</dim>
5238 <dim>-1</dim>
5239 <dim>1536</dim>
5240 </port>
5241 <port id="1" precision="FP32">
5242 <dim>1</dim>
5243 <dim>1</dim>
5244 <dim>1536</dim>
5245 </port>
5246 </input>
5247 <output>
5248 <port id="2" precision="FP32" names="458">
5249 <dim>-1</dim>
5250 <dim>-1</dim>
5251 <dim>1536</dim>
5252 </port>
5253 </output>
5254 </layer>
5255 <layer id="343" name="__module.encoder.layer.4.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
5256 <data approximation_mode="ERF" />
5257 <input>
5258 <port id="0" precision="FP32">
5259 <dim>-1</dim>
5260 <dim>-1</dim>
5261 <dim>1536</dim>
5262 </port>
5263 </input>
5264 <output>
5265 <port id="1" precision="FP32" names="459">
5266 <dim>-1</dim>
5267 <dim>-1</dim>
5268 <dim>1536</dim>
5269 </port>
5270 </output>
5271 </layer>
5272 <layer id="344" name="self.encoder.layer.4.output.dense.weight" type="Const" version="opset1">
5273 <data element_type="f32" shape="384, 1536" offset="80803984" size="2359296" />
5274 <output>
5275 <port id="0" precision="FP32" names="self.encoder.layer.4.output.dense.weight">
5276 <dim>384</dim>
5277 <dim>1536</dim>
5278 </port>
5279 </output>
5280 </layer>
5281 <layer id="345" name="__module.encoder.layer.4.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
5282 <data transpose_a="false" transpose_b="true" />
5283 <input>
5284 <port id="0" precision="FP32">
5285 <dim>-1</dim>
5286 <dim>-1</dim>
5287 <dim>1536</dim>
5288 </port>
5289 <port id="1" precision="FP32">
5290 <dim>384</dim>
5291 <dim>1536</dim>
5292 </port>
5293 </input>
5294 <output>
5295 <port id="2" precision="FP32">
5296 <dim>-1</dim>
5297 <dim>-1</dim>
5298 <dim>384</dim>
5299 </port>
5300 </output>
5301 </layer>
5302 <layer id="346" name="Constant_6281" type="Const" version="opset1">
5303 <data element_type="f32" shape="1, 1, 384" offset="83163280" size="1536" />
5304 <output>
5305 <port id="0" precision="FP32">
5306 <dim>1</dim>
5307 <dim>1</dim>
5308 <dim>384</dim>
5309 </port>
5310 </output>
5311 </layer>
5312 <layer id="347" name="__module.encoder.layer.4.output.dense/aten::linear/Add" type="Add" version="opset1">
5313 <data auto_broadcast="numpy" />
5314 <input>
5315 <port id="0" precision="FP32">
5316 <dim>-1</dim>
5317 <dim>-1</dim>
5318 <dim>384</dim>
5319 </port>
5320 <port id="1" precision="FP32">
5321 <dim>1</dim>
5322 <dim>1</dim>
5323 <dim>384</dim>
5324 </port>
5325 </input>
5326 <output>
5327 <port id="2" precision="FP32" names="465,input.21">
5328 <dim>-1</dim>
5329 <dim>-1</dim>
5330 <dim>384</dim>
5331 </port>
5332 </output>
5333 </layer>
5334 <layer id="348" name="__module.encoder.layer.4.output/aten::add/Add" type="Add" version="opset1">
5335 <data auto_broadcast="numpy" />
5336 <input>
5337 <port id="0" precision="FP32">
5338 <dim>-1</dim>
5339 <dim>-1</dim>
5340 <dim>384</dim>
5341 </port>
5342 <port id="1" precision="FP32">
5343 <dim>-1</dim>
5344 <dim>-1</dim>
5345 <dim>384</dim>
5346 </port>
5347 </input>
5348 <output>
5349 <port id="2" precision="FP32" names="467">
5350 <dim>-1</dim>
5351 <dim>-1</dim>
5352 <dim>384</dim>
5353 </port>
5354 </output>
5355 </layer>
5356 <layer id="349" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
5357 <data element_type="i32" shape="1" offset="47675412" size="4" />
5358 <output>
5359 <port id="0" precision="I32">
5360 <dim>1</dim>
5361 </port>
5362 </output>
5363 </layer>
5364 <layer id="350" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
5365 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
5366 <input>
5367 <port id="0" precision="FP32">
5368 <dim>-1</dim>
5369 <dim>-1</dim>
5370 <dim>384</dim>
5371 </port>
5372 <port id="1" precision="I32">
5373 <dim>1</dim>
5374 </port>
5375 </input>
5376 <output>
5377 <port id="2" precision="FP32">
5378 <dim>-1</dim>
5379 <dim>-1</dim>
5380 <dim>384</dim>
5381 </port>
5382 </output>
5383 </layer>
5384 <layer id="351" name="Constant_6282" type="Const" version="opset1">
5385 <data element_type="f32" shape="1, 1, 384" offset="83164816" size="1536" />
5386 <output>
5387 <port id="0" precision="FP32">
5388 <dim>1</dim>
5389 <dim>1</dim>
5390 <dim>384</dim>
5391 </port>
5392 </output>
5393 </layer>
5394 <layer id="352" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
5395 <data auto_broadcast="numpy" />
5396 <input>
5397 <port id="0" precision="FP32">
5398 <dim>-1</dim>
5399 <dim>-1</dim>
5400 <dim>384</dim>
5401 </port>
5402 <port id="1" precision="FP32">
5403 <dim>1</dim>
5404 <dim>1</dim>
5405 <dim>384</dim>
5406 </port>
5407 </input>
5408 <output>
5409 <port id="2" precision="FP32">
5410 <dim>-1</dim>
5411 <dim>-1</dim>
5412 <dim>384</dim>
5413 </port>
5414 </output>
5415 </layer>
5416 <layer id="353" name="Constant_6283" type="Const" version="opset1">
5417 <data element_type="f32" shape="1, 1, 384" offset="83166352" size="1536" />
5418 <output>
5419 <port id="0" precision="FP32">
5420 <dim>1</dim>
5421 <dim>1</dim>
5422 <dim>384</dim>
5423 </port>
5424 </output>
5425 </layer>
5426 <layer id="354" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
5427 <data auto_broadcast="numpy" />
5428 <input>
5429 <port id="0" precision="FP32">
5430 <dim>-1</dim>
5431 <dim>-1</dim>
5432 <dim>384</dim>
5433 </port>
5434 <port id="1" precision="FP32">
5435 <dim>1</dim>
5436 <dim>1</dim>
5437 <dim>384</dim>
5438 </port>
5439 </input>
5440 <output>
5441 <port id="2" precision="FP32" names="471,hidden_states.31">
5442 <dim>-1</dim>
5443 <dim>-1</dim>
5444 <dim>384</dim>
5445 </port>
5446 </output>
5447 </layer>
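	<!-- Encoder layer 5 begins here: apparently the final encoder block of a 6-layer,
	     384-hidden model, since the tensor names drop their numeric suffixes from this
	     point on ("x", "attn_output", "input_tensor"). -->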
5448 <layer id="355" name="self.encoder.layer.5.attention.self.query.weight" type="Const" version="opset1">
5449 <data element_type="f32" shape="384, 384" offset="83167888" size="589824" />
5450 <output>
5451 <port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.query.weight">
5452 <dim>384</dim>
5453 <dim>384</dim>
5454 </port>
5455 </output>
5456 </layer>
5457 <layer id="356" name="__module.encoder.layer.5.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
5458 <data transpose_a="false" transpose_b="true" />
5459 <input>
5460 <port id="0" precision="FP32">
5461 <dim>-1</dim>
5462 <dim>-1</dim>
5463 <dim>384</dim>
5464 </port>
5465 <port id="1" precision="FP32">
5466 <dim>384</dim>
5467 <dim>384</dim>
5468 </port>
5469 </input>
5470 <output>
5471 <port id="2" precision="FP32">
5472 <dim>-1</dim>
5473 <dim>-1</dim>
5474 <dim>384</dim>
5475 </port>
5476 </output>
5477 </layer>
5478 <layer id="357" name="Constant_6284" type="Const" version="opset1">
5479 <data element_type="f32" shape="1, 1, 384" offset="83757712" size="1536" />
5480 <output>
5481 <port id="0" precision="FP32">
5482 <dim>1</dim>
5483 <dim>1</dim>
5484 <dim>384</dim>
5485 </port>
5486 </output>
5487 </layer>
5488 <layer id="358" name="__module.encoder.layer.5.attention.self.query/aten::linear/Add" type="Add" version="opset1">
5489 <data auto_broadcast="numpy" />
5490 <input>
5491 <port id="0" precision="FP32">
5492 <dim>-1</dim>
5493 <dim>-1</dim>
5494 <dim>384</dim>
5495 </port>
5496 <port id="1" precision="FP32">
5497 <dim>1</dim>
5498 <dim>1</dim>
5499 <dim>384</dim>
5500 </port>
5501 </input>
5502 <output>
5503 <port id="2" precision="FP32" names="484,x.61">
5504 <dim>-1</dim>
5505 <dim>-1</dim>
5506 <dim>384</dim>
5507 </port>
5508 </output>
5509 </layer>
5510 <layer id="359" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
5511 <data element_type="i64" shape="4" offset="48269848" size="32" />
5512 <output>
5513 <port id="0" precision="I64">
5514 <dim>4</dim>
5515 </port>
5516 </output>
5517 </layer>
5518 <layer id="360" name="__module.encoder.layer.5.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
5519 <data special_zero="true" />
5520 <input>
5521 <port id="0" precision="FP32">
5522 <dim>-1</dim>
5523 <dim>-1</dim>
5524 <dim>384</dim>
5525 </port>
5526 <port id="1" precision="I64">
5527 <dim>4</dim>
5528 </port>
5529 </input>
5530 <output>
5531 <port id="2" precision="FP32" names="488,x.63">
5532 <dim>-1</dim>
5533 <dim>-1</dim>
5534 <dim>12</dim>
5535 <dim>32</dim>
5536 </port>
5537 </output>
5538 </layer>
5539 <layer id="361" name="Constant_1383" type="Const" version="opset1">
5540 <data element_type="i64" shape="4" offset="48269880" size="32" />
5541 <output>
5542 <port id="0" precision="I64" names="489">
5543 <dim>4</dim>
5544 </port>
5545 </output>
5546 </layer>
5547 <layer id="362" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
5548 <input>
5549 <port id="0" precision="FP32">
5550 <dim>-1</dim>
5551 <dim>-1</dim>
5552 <dim>12</dim>
5553 <dim>32</dim>
5554 </port>
5555 <port id="1" precision="I64">
5556 <dim>4</dim>
5557 </port>
5558 </input>
5559 <output>
5560 <port id="2" precision="FP32" names="490">
5561 <dim>-1</dim>
5562 <dim>12</dim>
5563 <dim>-1</dim>
5564 <dim>32</dim>
5565 </port>
5566 </output>
5567 </layer>
5568 <layer id="363" name="self.encoder.layer.5.attention.self.key.weight" type="Const" version="opset1">
5569 <data element_type="f32" shape="384, 384" offset="83759248" size="589824" />
5570 <output>
5571 <port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.key.weight">
5572 <dim>384</dim>
5573 <dim>384</dim>
5574 </port>
5575 </output>
5576 </layer>
5577 <layer id="364" name="__module.encoder.layer.5.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
5578 <data transpose_a="false" transpose_b="true" />
5579 <input>
5580 <port id="0" precision="FP32">
5581 <dim>-1</dim>
5582 <dim>-1</dim>
5583 <dim>384</dim>
5584 </port>
5585 <port id="1" precision="FP32">
5586 <dim>384</dim>
5587 <dim>384</dim>
5588 </port>
5589 </input>
5590 <output>
5591 <port id="2" precision="FP32">
5592 <dim>-1</dim>
5593 <dim>-1</dim>
5594 <dim>384</dim>
5595 </port>
5596 </output>
5597 </layer>
5598 <layer id="365" name="Constant_6285" type="Const" version="opset1">
5599 <data element_type="f32" shape="1, 1, 384" offset="84349072" size="1536" />
5600 <output>
5601 <port id="0" precision="FP32">
5602 <dim>1</dim>
5603 <dim>1</dim>
5604 <dim>384</dim>
5605 </port>
5606 </output>
5607 </layer>
5608 <layer id="366" name="__module.encoder.layer.5.attention.self.key/aten::linear/Add" type="Add" version="opset1">
5609 <data auto_broadcast="numpy" />
5610 <input>
5611 <port id="0" precision="FP32">
5612 <dim>-1</dim>
5613 <dim>-1</dim>
5614 <dim>384</dim>
5615 </port>
5616 <port id="1" precision="FP32">
5617 <dim>1</dim>
5618 <dim>1</dim>
5619 <dim>384</dim>
5620 </port>
5621 </input>
5622 <output>
5623 <port id="2" precision="FP32" names="493,x.65">
5624 <dim>-1</dim>
5625 <dim>-1</dim>
5626 <dim>384</dim>
5627 </port>
5628 </output>
5629 </layer>
5630 <layer id="367" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
5631 <data element_type="i64" shape="4" offset="48269848" size="32" />
5632 <output>
5633 <port id="0" precision="I64">
5634 <dim>4</dim>
5635 </port>
5636 </output>
5637 </layer>
5638 <layer id="368" name="__module.encoder.layer.5.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
5639 <data special_zero="true" />
5640 <input>
5641 <port id="0" precision="FP32">
5642 <dim>-1</dim>
5643 <dim>-1</dim>
5644 <dim>384</dim>
5645 </port>
5646 <port id="1" precision="I64">
5647 <dim>4</dim>
5648 </port>
5649 </input>
5650 <output>
5651 <port id="2" precision="FP32" names="497,x.67">
5652 <dim>-1</dim>
5653 <dim>-1</dim>
5654 <dim>12</dim>
5655 <dim>32</dim>
5656 </port>
5657 </output>
5658 </layer>
5659 <layer id="369" name="Constant_1406" type="Const" version="opset1">
5660 <data element_type="i64" shape="4" offset="48269880" size="32" />
5661 <output>
5662 <port id="0" precision="I64" names="498">
5663 <dim>4</dim>
5664 </port>
5665 </output>
5666 </layer>
5667 <layer id="370" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
5668 <input>
5669 <port id="0" precision="FP32">
5670 <dim>-1</dim>
5671 <dim>-1</dim>
5672 <dim>12</dim>
5673 <dim>32</dim>
5674 </port>
5675 <port id="1" precision="I64">
5676 <dim>4</dim>
5677 </port>
5678 </input>
5679 <output>
5680 <port id="2" precision="FP32" names="499">
5681 <dim>-1</dim>
5682 <dim>12</dim>
5683 <dim>-1</dim>
5684 <dim>32</dim>
5685 </port>
5686 </output>
5687 </layer>
5688 <layer id="371" name="self.encoder.layer.5.attention.self.value.weight" type="Const" version="opset1">
5689 <data element_type="f32" shape="384, 384" offset="84350608" size="589824" />
5690 <output>
5691 <port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.value.weight">
5692 <dim>384</dim>
5693 <dim>384</dim>
5694 </port>
5695 </output>
5696 </layer>
5697 <layer id="372" name="__module.encoder.layer.5.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
5698 <data transpose_a="false" transpose_b="true" />
5699 <input>
5700 <port id="0" precision="FP32">
5701 <dim>-1</dim>
5702 <dim>-1</dim>
5703 <dim>384</dim>
5704 </port>
5705 <port id="1" precision="FP32">
5706 <dim>384</dim>
5707 <dim>384</dim>
5708 </port>
5709 </input>
5710 <output>
5711 <port id="2" precision="FP32">
5712 <dim>-1</dim>
5713 <dim>-1</dim>
5714 <dim>384</dim>
5715 </port>
5716 </output>
5717 </layer>
5718 <layer id="373" name="Constant_6286" type="Const" version="opset1">
5719 <data element_type="f32" shape="1, 1, 384" offset="84940432" size="1536" />
5720 <output>
5721 <port id="0" precision="FP32">
5722 <dim>1</dim>
5723 <dim>1</dim>
5724 <dim>384</dim>
5725 </port>
5726 </output>
5727 </layer>
5728 <layer id="374" name="__module.encoder.layer.5.attention.self.value/aten::linear/Add" type="Add" version="opset1">
5729 <data auto_broadcast="numpy" />
5730 <input>
5731 <port id="0" precision="FP32">
5732 <dim>-1</dim>
5733 <dim>-1</dim>
5734 <dim>384</dim>
5735 </port>
5736 <port id="1" precision="FP32">
5737 <dim>1</dim>
5738 <dim>1</dim>
5739 <dim>384</dim>
5740 </port>
5741 </input>
5742 <output>
5743 <port id="2" precision="FP32" names="502,x.69">
5744 <dim>-1</dim>
5745 <dim>-1</dim>
5746 <dim>384</dim>
5747 </port>
5748 </output>
5749 </layer>
5750 <layer id="375" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
5751 <data element_type="i64" shape="4" offset="48269848" size="32" />
5752 <output>
5753 <port id="0" precision="I64">
5754 <dim>4</dim>
5755 </port>
5756 </output>
5757 </layer>
5758 <layer id="376" name="__module.encoder.layer.5.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
5759 <data special_zero="true" />
5760 <input>
5761 <port id="0" precision="FP32">
5762 <dim>-1</dim>
5763 <dim>-1</dim>
5764 <dim>384</dim>
5765 </port>
5766 <port id="1" precision="I64">
5767 <dim>4</dim>
5768 </port>
5769 </input>
5770 <output>
5771 <port id="2" precision="FP32" names="506,x">
5772 <dim>-1</dim>
5773 <dim>-1</dim>
5774 <dim>12</dim>
5775 <dim>32</dim>
5776 </port>
5777 </output>
5778 </layer>
5779 <layer id="377" name="Constant_1429" type="Const" version="opset1">
5780 <data element_type="i64" shape="4" offset="48269880" size="32" />
5781 <output>
5782 <port id="0" precision="I64" names="507">
5783 <dim>4</dim>
5784 </port>
5785 </output>
5786 </layer>
5787 <layer id="378" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
5788 <input>
5789 <port id="0" precision="FP32">
5790 <dim>-1</dim>
5791 <dim>-1</dim>
5792 <dim>12</dim>
5793 <dim>32</dim>
5794 </port>
5795 <port id="1" precision="I64">
5796 <dim>4</dim>
5797 </port>
5798 </input>
5799 <output>
5800 <port id="2" precision="FP32" names="508">
5801 <dim>-1</dim>
5802 <dim>12</dim>
5803 <dim>-1</dim>
5804 <dim>32</dim>
5805 </port>
5806 </output>
5807 </layer>
5808 <layer id="379" name="__module.encoder.layer.5.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
5809 <data causal="false" />
5810 <input>
5811 <port id="0" precision="FP32">
5812 <dim>-1</dim>
5813 <dim>12</dim>
5814 <dim>-1</dim>
5815 <dim>32</dim>
5816 </port>
5817 <port id="1" precision="FP32">
5818 <dim>-1</dim>
5819 <dim>12</dim>
5820 <dim>-1</dim>
5821 <dim>32</dim>
5822 </port>
5823 <port id="2" precision="FP32">
5824 <dim>-1</dim>
5825 <dim>12</dim>
5826 <dim>-1</dim>
5827 <dim>32</dim>
5828 </port>
5829 <port id="3" precision="FP32">
5830 <dim>-1</dim>
5831 <dim>1</dim>
5832 <dim>-1</dim>
5833 <dim>-1</dim>
5834 </port>
5835 </input>
5836 <output>
5837 <port id="4" precision="FP32" names="509,attn_output.21">
5838 <dim>-1</dim>
5839 <dim>12</dim>
5840 <dim>-1</dim>
5841 <dim>32</dim>
5842 </port>
5843 </output>
5844 </layer>
5845 <layer id="380" name="__module.encoder.layer.5.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
5846 <data element_type="i32" shape="4" offset="49452648" size="16" />
5847 <output>
5848 <port id="0" precision="I32">
5849 <dim>4</dim>
5850 </port>
5851 </output>
5852 </layer>
5853 <layer id="381" name="__module.encoder.layer.5.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
5854 <input>
5855 <port id="0" precision="FP32">
5856 <dim>-1</dim>
5857 <dim>12</dim>
5858 <dim>-1</dim>
5859 <dim>32</dim>
5860 </port>
5861 <port id="1" precision="I32">
5862 <dim>4</dim>
5863 </port>
5864 </input>
5865 <output>
5866 <port id="2" precision="FP32" names="510,attn_output">
5867 <dim>-1</dim>
5868 <dim>-1</dim>
5869 <dim>12</dim>
5870 <dim>32</dim>
5871 </port>
5872 </output>
5873 </layer>
5874 <layer id="382" name="__module.encoder.layer.5.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
5875 <data output_type="i64" />
5876 <input>
5877 <port id="0" precision="FP32">
5878 <dim>-1</dim>
5879 <dim>-1</dim>
5880 <dim>384</dim>
5881 </port>
5882 </input>
5883 <output>
5884 <port id="1" precision="I64">
5885 <dim>3</dim>
5886 </port>
5887 </output>
5888 </layer>
5889 <layer id="383" name="Constant_5837" type="Const" version="opset1">
5890 <data element_type="i64" shape="2" offset="49452664" size="16" />
5891 <output>
5892 <port id="0" precision="I64">
5893 <dim>2</dim>
5894 </port>
5895 </output>
5896 </layer>
5897 <layer id="384" name="Constant_5838" type="Const" version="opset1">
5898 <data element_type="i64" shape="" offset="47675396" size="8" />
5899 <output>
5900 <port id="0" precision="I64" />
5901 </output>
5902 </layer>
5903 <layer id="385" name="Gather_5839" type="Gather" version="opset8">
5904 <data batch_dims="0" />
5905 <input>
5906 <port id="0" precision="I64">
5907 <dim>3</dim>
5908 </port>
5909 <port id="1" precision="I64">
5910 <dim>2</dim>
5911 </port>
5912 <port id="2" precision="I64" />
5913 </input>
5914 <output>
5915 <port id="3" precision="I64">
5916 <dim>2</dim>
5917 </port>
5918 </output>
5919 </layer>
5920 <layer id="386" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
5921 <data axis="0" />
5922 <input>
5923 <port id="0" precision="I64">
5924 <dim>2</dim>
5925 </port>
5926 <port id="1" precision="I64">
5927 <dim>1</dim>
5928 </port>
5929 </input>
5930 <output>
5931 <port id="2" precision="I64" names="511">
5932 <dim>3</dim>
5933 </port>
5934 </output>
5935 </layer>
5936 <layer id="387" name="__module.encoder.layer.5.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
5937 <data special_zero="false" />
5938 <input>
5939 <port id="0" precision="FP32">
5940 <dim>-1</dim>
5941 <dim>-1</dim>
5942 <dim>12</dim>
5943 <dim>32</dim>
5944 </port>
5945 <port id="1" precision="I64">
5946 <dim>3</dim>
5947 </port>
5948 </input>
5949 <output>
5950 <port id="2" precision="FP32" names="512">
5951 <dim>-1</dim>
5952 <dim>-1</dim>
5953 <dim>384</dim>
5954 </port>
5955 </output>
5956 </layer>
5957 <layer id="388" name="self.encoder.layer.5.attention.output.dense.weight" type="Const" version="opset1">
5958 <data element_type="f32" shape="384, 384" offset="84941968" size="589824" />
5959 <output>
5960 <port id="0" precision="FP32" names="self.encoder.layer.5.attention.output.dense.weight">
5961 <dim>384</dim>
5962 <dim>384</dim>
5963 </port>
5964 </output>
5965 </layer>
5966 <layer id="389" name="__module.encoder.layer.5.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
5967 <data transpose_a="false" transpose_b="true" />
5968 <input>
5969 <port id="0" precision="FP32">
5970 <dim>-1</dim>
5971 <dim>-1</dim>
5972 <dim>384</dim>
5973 </port>
5974 <port id="1" precision="FP32">
5975 <dim>384</dim>
5976 <dim>384</dim>
5977 </port>
5978 </input>
5979 <output>
5980 <port id="2" precision="FP32">
5981 <dim>-1</dim>
5982 <dim>-1</dim>
5983 <dim>384</dim>
5984 </port>
5985 </output>
5986 </layer>
5987 <layer id="390" name="Constant_6287" type="Const" version="opset1">
5988 <data element_type="f32" shape="1, 1, 384" offset="85531792" size="1536" />
5989 <output>
5990 <port id="0" precision="FP32">
5991 <dim>1</dim>
5992 <dim>1</dim>
5993 <dim>384</dim>
5994 </port>
5995 </output>
5996 </layer>
5997 <layer id="391" name="__module.encoder.layer.5.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
5998 <data auto_broadcast="numpy" />
5999 <input>
6000 <port id="0" precision="FP32">
6001 <dim>-1</dim>
6002 <dim>-1</dim>
6003 <dim>384</dim>
6004 </port>
6005 <port id="1" precision="FP32">
6006 <dim>1</dim>
6007 <dim>1</dim>
6008 <dim>384</dim>
6009 </port>
6010 </input>
6011 <output>
6012 <port id="2" precision="FP32" names="518,input.23">
6013 <dim>-1</dim>
6014 <dim>-1</dim>
6015 <dim>384</dim>
6016 </port>
6017 </output>
6018 </layer>
6019 <layer id="392" name="__module.encoder.layer.5.attention.output/aten::add/Add" type="Add" version="opset1">
6020 <data auto_broadcast="numpy" />
6021 <input>
6022 <port id="0" precision="FP32">
6023 <dim>-1</dim>
6024 <dim>-1</dim>
6025 <dim>384</dim>
6026 </port>
6027 <port id="1" precision="FP32">
6028 <dim>-1</dim>
6029 <dim>-1</dim>
6030 <dim>384</dim>
6031 </port>
6032 </input>
6033 <output>
6034 <port id="2" precision="FP32" names="520">
6035 <dim>-1</dim>
6036 <dim>-1</dim>
6037 <dim>384</dim>
6038 </port>
6039 </output>
6040 </layer>
		<layer id="393" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="47675412" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="394" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="395" name="Constant_6288" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="85533328" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="396" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="397" name="Constant_6289" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="85534864" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="398" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="524,input_tensor">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
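		<!-- Annotation: feed-forward (intermediate) sub-block of encoder layer 5. The dense MatMul below expands the hidden size from 384 to 1536 (the usual 4x ratio), a bias is added, GELU is applied, and the output.dense layers project back to 384. -->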
		<layer id="399" name="self.encoder.layer.5.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1536, 384" offset="85536400" size="2359296" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.5.intermediate.dense.weight">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="400" name="__module.encoder.layer.5.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="401" name="Constant_6290" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1536" offset="87895696" size="6144" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="402" name="__module.encoder.layer.5.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="529">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="403" name="__module.encoder.layer.5.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="530">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
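		<!-- Annotation: Gelu with approximation_mode="ERF" computes the exact GELU, 0.5 * x * (1 + erf(x / sqrt(2))), rather than the tanh approximation. -->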
		<layer id="404" name="self.encoder.layer.5.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 1536" offset="87901840" size="2359296" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.5.output.dense.weight">
					<dim>384</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="405" name="__module.encoder.layer.5.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="406" name="Constant_6291" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="90261136" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="407" name="__module.encoder.layer.5.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="536,input">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="408" name="__module.encoder.layer.5.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="538">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="409" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="47675412" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="410" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="411" name="Constant_6292" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="90262672" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="412" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="413" name="Constant_6293" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="90264208" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="414" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="last_hidden_state">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
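		<!-- Annotation: layer 414 is the final LayerNorm of the last encoder block (layer.5). Its output tensor is named last_hidden_state and feeds the Result node below, so the model emits hidden states of dynamic shape (batch, sequence, 384). -->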
		<layer id="415" name="Result_2691" type="Result" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
		</layer>
	</layers>
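	<!-- Annotation: each edge wires one producer port to one consumer port, with data flowing from from-layer/from-port to to-layer/to-port. Together with the layer list above, this is the complete dataflow graph of the model. -->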
	<edges>
		<edge from-layer="0" from-port="0" to-layer="8" to-port="0" />
		<edge from-layer="1" from-port="0" to-layer="58" to-port="0" />
		<edge from-layer="1" from-port="0" to-layer="61" to-port="0" />
		<edge from-layer="2" from-port="0" to-layer="15" to-port="0" />
		<edge from-layer="2" from-port="0" to-layer="4" to-port="0" />
		<edge from-layer="3" from-port="0" to-layer="6" to-port="0" />
		<edge from-layer="4" from-port="1" to-layer="6" to-port="1" />
		<edge from-layer="5" from-port="0" to-layer="6" to-port="2" />
		<edge from-layer="6" from-port="3" to-layer="11" to-port="0" />
		<edge from-layer="7" from-port="0" to-layer="10" to-port="0" />
		<edge from-layer="8" from-port="1" to-layer="10" to-port="1" />
		<edge from-layer="9" from-port="0" to-layer="10" to-port="2" />
		<edge from-layer="10" from-port="3" to-layer="11" to-port="1" />
		<edge from-layer="11" from-port="2" to-layer="25" to-port="0" />
		<edge from-layer="12" from-port="0" to-layer="24" to-port="0" />
		<edge from-layer="13" from-port="0" to-layer="21" to-port="0" />
		<edge from-layer="14" from-port="0" to-layer="21" to-port="1" />
		<edge from-layer="15" from-port="1" to-layer="18" to-port="0" />
		<edge from-layer="16" from-port="0" to-layer="18" to-port="1" />
		<edge from-layer="17" from-port="0" to-layer="18" to-port="2" />
		<edge from-layer="18" from-port="3" to-layer="69" to-port="2" />
		<edge from-layer="18" from-port="3" to-layer="21" to-port="2" />
		<edge from-layer="19" from-port="0" to-layer="21" to-port="3" />
		<edge from-layer="20" from-port="0" to-layer="21" to-port="4" />
		<edge from-layer="21" from-port="5" to-layer="22" to-port="0" />
		<edge from-layer="22" from-port="1" to-layer="24" to-port="1" />
		<edge from-layer="23" from-port="0" to-layer="24" to-port="2" />
		<edge from-layer="24" from-port="3" to-layer="25" to-port="1" />
		<edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
		<edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
		<edge from-layer="27" from-port="2" to-layer="29" to-port="0" />
		<edge from-layer="28" from-port="0" to-layer="29" to-port="1" />
		<edge from-layer="29" from-port="2" to-layer="31" to-port="0" />
		<edge from-layer="30" from-port="0" to-layer="31" to-port="1" />
		<edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
		<edge from-layer="31" from-port="2" to-layer="49" to-port="0" />
		<edge from-layer="31" from-port="2" to-layer="81" to-port="0" />
		<edge from-layer="31" from-port="2" to-layer="92" to-port="1" />
		<edge from-layer="31" from-port="2" to-layer="41" to-port="0" />
		<edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
		<edge from-layer="33" from-port="2" to-layer="35" to-port="0" />
		<edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
		<edge from-layer="35" from-port="2" to-layer="37" to-port="0" />
		<edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
		<edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
		<edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
		<edge from-layer="39" from-port="2" to-layer="78" to-port="0" />
		<edge from-layer="40" from-port="0" to-layer="41" to-port="1" />
		<edge from-layer="41" from-port="2" to-layer="43" to-port="0" />
		<edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
		<edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
		<edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
		<edge from-layer="45" from-port="2" to-layer="47" to-port="0" />
		<edge from-layer="46" from-port="0" to-layer="47" to-port="1" />
		<edge from-layer="47" from-port="2" to-layer="78" to-port="1" />
		<edge from-layer="48" from-port="0" to-layer="49" to-port="1" />
		<edge from-layer="49" from-port="2" to-layer="51" to-port="0" />
		<edge from-layer="50" from-port="0" to-layer="51" to-port="1" />
		<edge from-layer="51" from-port="2" to-layer="53" to-port="0" />
		<edge from-layer="52" from-port="0" to-layer="53" to-port="1" />
		<edge from-layer="53" from-port="2" to-layer="55" to-port="0" />
		<edge from-layer="54" from-port="0" to-layer="55" to-port="1" />
		<edge from-layer="55" from-port="2" to-layer="78" to-port="2" />
		<edge from-layer="56" from-port="0" to-layer="74" to-port="0" />
		<edge from-layer="57" from-port="0" to-layer="58" to-port="1" />
		<edge from-layer="58" from-port="2" to-layer="60" to-port="0" />
		<edge from-layer="59" from-port="0" to-layer="60" to-port="1" />
		<edge from-layer="60" from-port="2" to-layer="70" to-port="0" />
		<edge from-layer="61" from-port="1" to-layer="64" to-port="0" />
		<edge from-layer="61" from-port="1" to-layer="68" to-port="0" />
		<edge from-layer="62" from-port="0" to-layer="64" to-port="1" />
		<edge from-layer="63" from-port="0" to-layer="64" to-port="2" />
		<edge from-layer="64" from-port="3" to-layer="69" to-port="0" />
		<edge from-layer="65" from-port="0" to-layer="69" to-port="1" />
		<edge from-layer="66" from-port="0" to-layer="68" to-port="1" />
		<edge from-layer="67" from-port="0" to-layer="68" to-port="2" />
		<edge from-layer="68" from-port="3" to-layer="69" to-port="3" />
		<edge from-layer="69" from-port="4" to-layer="70" to-port="1" />
		<edge from-layer="70" from-port="2" to-layer="71" to-port="0" />
		<edge from-layer="71" from-port="1" to-layer="73" to-port="0" />
		<edge from-layer="72" from-port="0" to-layer="73" to-port="1" />
		<edge from-layer="73" from-port="2" to-layer="74" to-port="1" />
		<edge from-layer="74" from-port="2" to-layer="75" to-port="0" />
		<edge from-layer="74" from-port="2" to-layer="77" to-port="2" />
		<edge from-layer="75" from-port="1" to-layer="77" to-port="0" />
		<edge from-layer="76" from-port="0" to-layer="77" to-port="1" />
		<edge from-layer="77" from-port="3" to-layer="78" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="259" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="139" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="199" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="379" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="319" to-port="3" />
		<edge from-layer="78" from-port="4" to-layer="80" to-port="0" />
		<edge from-layer="79" from-port="0" to-layer="80" to-port="1" />
		<edge from-layer="80" from-port="2" to-layer="87" to-port="0" />
		<edge from-layer="81" from-port="1" to-layer="84" to-port="0" />
		<edge from-layer="82" from-port="0" to-layer="84" to-port="1" />
		<edge from-layer="83" from-port="0" to-layer="84" to-port="2" />
		<edge from-layer="84" from-port="3" to-layer="86" to-port="0" />
		<edge from-layer="85" from-port="0" to-layer="86" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="146" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="386" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="206" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="266" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="326" to-port="1" />
		<edge from-layer="86" from-port="2" to-layer="87" to-port="1" />
		<edge from-layer="87" from-port="2" to-layer="89" to-port="0" />
		<edge from-layer="88" from-port="0" to-layer="89" to-port="1" />
		<edge from-layer="89" from-port="2" to-layer="91" to-port="0" />
		<edge from-layer="90" from-port="0" to-layer="91" to-port="1" />
		<edge from-layer="91" from-port="2" to-layer="92" to-port="0" />
		<edge from-layer="92" from-port="2" to-layer="94" to-port="0" />
		<edge from-layer="93" from-port="0" to-layer="94" to-port="1" />
		<edge from-layer="94" from-port="2" to-layer="96" to-port="0" />
		<edge from-layer="95" from-port="0" to-layer="96" to-port="1" />
		<edge from-layer="96" from-port="2" to-layer="98" to-port="0" />
		<edge from-layer="97" from-port="0" to-layer="98" to-port="1" />
		<edge from-layer="98" from-port="2" to-layer="108" to-port="1" />
		<edge from-layer="98" from-port="2" to-layer="100" to-port="0" />
		<edge from-layer="99" from-port="0" to-layer="100" to-port="1" />
		<edge from-layer="100" from-port="2" to-layer="102" to-port="0" />
		<edge from-layer="101" from-port="0" to-layer="102" to-port="1" />
		<edge from-layer="102" from-port="2" to-layer="103" to-port="0" />
		<edge from-layer="103" from-port="1" to-layer="105" to-port="0" />
		<edge from-layer="104" from-port="0" to-layer="105" to-port="1" />
		<edge from-layer="105" from-port="2" to-layer="107" to-port="0" />
		<edge from-layer="106" from-port="0" to-layer="107" to-port="1" />
		<edge from-layer="107" from-port="2" to-layer="108" to-port="0" />
		<edge from-layer="108" from-port="2" to-layer="110" to-port="0" />
		<edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
		<edge from-layer="110" from-port="2" to-layer="112" to-port="0" />
		<edge from-layer="111" from-port="0" to-layer="112" to-port="1" />
		<edge from-layer="112" from-port="2" to-layer="114" to-port="0" />
		<edge from-layer="113" from-port="0" to-layer="114" to-port="1" />
		<edge from-layer="114" from-port="2" to-layer="142" to-port="0" />
		<edge from-layer="114" from-port="2" to-layer="152" to-port="1" />
		<edge from-layer="114" from-port="2" to-layer="124" to-port="0" />
		<edge from-layer="114" from-port="2" to-layer="116" to-port="0" />
		<edge from-layer="114" from-port="2" to-layer="132" to-port="0" />
		<edge from-layer="115" from-port="0" to-layer="116" to-port="1" />
		<edge from-layer="116" from-port="2" to-layer="118" to-port="0" />
		<edge from-layer="117" from-port="0" to-layer="118" to-port="1" />
		<edge from-layer="118" from-port="2" to-layer="120" to-port="0" />
		<edge from-layer="119" from-port="0" to-layer="120" to-port="1" />
		<edge from-layer="120" from-port="2" to-layer="122" to-port="0" />
		<edge from-layer="121" from-port="0" to-layer="122" to-port="1" />
		<edge from-layer="122" from-port="2" to-layer="139" to-port="0" />
		<edge from-layer="123" from-port="0" to-layer="124" to-port="1" />
		<edge from-layer="124" from-port="2" to-layer="126" to-port="0" />
		<edge from-layer="125" from-port="0" to-layer="126" to-port="1" />
		<edge from-layer="126" from-port="2" to-layer="128" to-port="0" />
		<edge from-layer="127" from-port="0" to-layer="128" to-port="1" />
		<edge from-layer="128" from-port="2" to-layer="130" to-port="0" />
		<edge from-layer="129" from-port="0" to-layer="130" to-port="1" />
		<edge from-layer="130" from-port="2" to-layer="139" to-port="1" />
		<edge from-layer="131" from-port="0" to-layer="132" to-port="1" />
		<edge from-layer="132" from-port="2" to-layer="134" to-port="0" />
		<edge from-layer="133" from-port="0" to-layer="134" to-port="1" />
		<edge from-layer="134" from-port="2" to-layer="136" to-port="0" />
		<edge from-layer="135" from-port="0" to-layer="136" to-port="1" />
		<edge from-layer="136" from-port="2" to-layer="138" to-port="0" />
		<edge from-layer="137" from-port="0" to-layer="138" to-port="1" />
		<edge from-layer="138" from-port="2" to-layer="139" to-port="2" />
		<edge from-layer="139" from-port="4" to-layer="141" to-port="0" />
		<edge from-layer="140" from-port="0" to-layer="141" to-port="1" />
		<edge from-layer="141" from-port="2" to-layer="147" to-port="0" />
		<edge from-layer="142" from-port="1" to-layer="145" to-port="0" />
		<edge from-layer="143" from-port="0" to-layer="145" to-port="1" />
		<edge from-layer="144" from-port="0" to-layer="145" to-port="2" />
		<edge from-layer="145" from-port="3" to-layer="146" to-port="0" />
		<edge from-layer="146" from-port="2" to-layer="147" to-port="1" />
		<edge from-layer="147" from-port="2" to-layer="149" to-port="0" />
		<edge from-layer="148" from-port="0" to-layer="149" to-port="1" />
		<edge from-layer="149" from-port="2" to-layer="151" to-port="0" />
		<edge from-layer="150" from-port="0" to-layer="151" to-port="1" />
		<edge from-layer="151" from-port="2" to-layer="152" to-port="0" />
		<edge from-layer="152" from-port="2" to-layer="154" to-port="0" />
		<edge from-layer="153" from-port="0" to-layer="154" to-port="1" />
		<edge from-layer="154" from-port="2" to-layer="156" to-port="0" />
		<edge from-layer="155" from-port="0" to-layer="156" to-port="1" />
		<edge from-layer="156" from-port="2" to-layer="158" to-port="0" />
		<edge from-layer="157" from-port="0" to-layer="158" to-port="1" />
		<edge from-layer="158" from-port="2" to-layer="160" to-port="0" />
		<edge from-layer="158" from-port="2" to-layer="168" to-port="1" />
		<edge from-layer="159" from-port="0" to-layer="160" to-port="1" />
		<edge from-layer="160" from-port="2" to-layer="162" to-port="0" />
		<edge from-layer="161" from-port="0" to-layer="162" to-port="1" />
		<edge from-layer="162" from-port="2" to-layer="163" to-port="0" />
		<edge from-layer="163" from-port="1" to-layer="165" to-port="0" />
		<edge from-layer="164" from-port="0" to-layer="165" to-port="1" />
		<edge from-layer="165" from-port="2" to-layer="167" to-port="0" />
		<edge from-layer="166" from-port="0" to-layer="167" to-port="1" />
		<edge from-layer="167" from-port="2" to-layer="168" to-port="0" />
		<edge from-layer="168" from-port="2" to-layer="170" to-port="0" />
		<edge from-layer="169" from-port="0" to-layer="170" to-port="1" />
		<edge from-layer="170" from-port="2" to-layer="172" to-port="0" />
		<edge from-layer="171" from-port="0" to-layer="172" to-port="1" />
		<edge from-layer="172" from-port="2" to-layer="174" to-port="0" />
		<edge from-layer="173" from-port="0" to-layer="174" to-port="1" />
		<edge from-layer="174" from-port="2" to-layer="184" to-port="0" />
		<edge from-layer="174" from-port="2" to-layer="192" to-port="0" />
		<edge from-layer="174" from-port="2" to-layer="212" to-port="1" />
		<edge from-layer="174" from-port="2" to-layer="202" to-port="0" />
		<edge from-layer="174" from-port="2" to-layer="176" to-port="0" />
		<edge from-layer="175" from-port="0" to-layer="176" to-port="1" />
		<edge from-layer="176" from-port="2" to-layer="178" to-port="0" />
		<edge from-layer="177" from-port="0" to-layer="178" to-port="1" />
		<edge from-layer="178" from-port="2" to-layer="180" to-port="0" />
		<edge from-layer="179" from-port="0" to-layer="180" to-port="1" />
		<edge from-layer="180" from-port="2" to-layer="182" to-port="0" />
		<edge from-layer="181" from-port="0" to-layer="182" to-port="1" />
		<edge from-layer="182" from-port="2" to-layer="199" to-port="0" />
		<edge from-layer="183" from-port="0" to-layer="184" to-port="1" />
		<edge from-layer="184" from-port="2" to-layer="186" to-port="0" />
		<edge from-layer="185" from-port="0" to-layer="186" to-port="1" />
		<edge from-layer="186" from-port="2" to-layer="188" to-port="0" />
		<edge from-layer="187" from-port="0" to-layer="188" to-port="1" />
		<edge from-layer="188" from-port="2" to-layer="190" to-port="0" />
		<edge from-layer="189" from-port="0" to-layer="190" to-port="1" />
		<edge from-layer="190" from-port="2" to-layer="199" to-port="1" />
		<edge from-layer="191" from-port="0" to-layer="192" to-port="1" />
		<edge from-layer="192" from-port="2" to-layer="194" to-port="0" />
		<edge from-layer="193" from-port="0" to-layer="194" to-port="1" />
		<edge from-layer="194" from-port="2" to-layer="196" to-port="0" />
		<edge from-layer="195" from-port="0" to-layer="196" to-port="1" />
		<edge from-layer="196" from-port="2" to-layer="198" to-port="0" />
		<edge from-layer="197" from-port="0" to-layer="198" to-port="1" />
		<edge from-layer="198" from-port="2" to-layer="199" to-port="2" />
		<edge from-layer="199" from-port="4" to-layer="201" to-port="0" />
		<edge from-layer="200" from-port="0" to-layer="201" to-port="1" />
		<edge from-layer="201" from-port="2" to-layer="207" to-port="0" />
		<edge from-layer="202" from-port="1" to-layer="205" to-port="0" />
		<edge from-layer="203" from-port="0" to-layer="205" to-port="1" />
		<edge from-layer="204" from-port="0" to-layer="205" to-port="2" />
		<edge from-layer="205" from-port="3" to-layer="206" to-port="0" />
		<edge from-layer="206" from-port="2" to-layer="207" to-port="1" />
		<edge from-layer="207" from-port="2" to-layer="209" to-port="0" />
		<edge from-layer="208" from-port="0" to-layer="209" to-port="1" />
		<edge from-layer="209" from-port="2" to-layer="211" to-port="0" />
		<edge from-layer="210" from-port="0" to-layer="211" to-port="1" />
		<edge from-layer="211" from-port="2" to-layer="212" to-port="0" />
		<edge from-layer="212" from-port="2" to-layer="214" to-port="0" />
		<edge from-layer="213" from-port="0" to-layer="214" to-port="1" />
		<edge from-layer="214" from-port="2" to-layer="216" to-port="0" />
		<edge from-layer="215" from-port="0" to-layer="216" to-port="1" />
		<edge from-layer="216" from-port="2" to-layer="218" to-port="0" />
		<edge from-layer="217" from-port="0" to-layer="218" to-port="1" />
		<edge from-layer="218" from-port="2" to-layer="220" to-port="0" />
		<edge from-layer="218" from-port="2" to-layer="228" to-port="1" />
		<edge from-layer="219" from-port="0" to-layer="220" to-port="1" />
		<edge from-layer="220" from-port="2" to-layer="222" to-port="0" />
		<edge from-layer="221" from-port="0" to-layer="222" to-port="1" />
		<edge from-layer="222" from-port="2" to-layer="223" to-port="0" />
		<edge from-layer="223" from-port="1" to-layer="225" to-port="0" />
		<edge from-layer="224" from-port="0" to-layer="225" to-port="1" />
		<edge from-layer="225" from-port="2" to-layer="227" to-port="0" />
		<edge from-layer="226" from-port="0" to-layer="227" to-port="1" />
		<edge from-layer="227" from-port="2" to-layer="228" to-port="0" />
		<edge from-layer="228" from-port="2" to-layer="230" to-port="0" />
		<edge from-layer="229" from-port="0" to-layer="230" to-port="1" />
		<edge from-layer="230" from-port="2" to-layer="232" to-port="0" />
		<edge from-layer="231" from-port="0" to-layer="232" to-port="1" />
		<edge from-layer="232" from-port="2" to-layer="234" to-port="0" />
		<edge from-layer="233" from-port="0" to-layer="234" to-port="1" />
		<edge from-layer="234" from-port="2" to-layer="252" to-port="0" />
		<edge from-layer="234" from-port="2" to-layer="272" to-port="1" />
		<edge from-layer="234" from-port="2" to-layer="262" to-port="0" />
		<edge from-layer="234" from-port="2" to-layer="244" to-port="0" />
		<edge from-layer="234" from-port="2" to-layer="236" to-port="0" />
		<edge from-layer="235" from-port="0" to-layer="236" to-port="1" />
		<edge from-layer="236" from-port="2" to-layer="238" to-port="0" />
		<edge from-layer="237" from-port="0" to-layer="238" to-port="1" />
		<edge from-layer="238" from-port="2" to-layer="240" to-port="0" />
		<edge from-layer="239" from-port="0" to-layer="240" to-port="1" />
		<edge from-layer="240" from-port="2" to-layer="242" to-port="0" />
		<edge from-layer="241" from-port="0" to-layer="242" to-port="1" />
		<edge from-layer="242" from-port="2" to-layer="259" to-port="0" />
		<edge from-layer="243" from-port="0" to-layer="244" to-port="1" />
		<edge from-layer="244" from-port="2" to-layer="246" to-port="0" />
		<edge from-layer="245" from-port="0" to-layer="246" to-port="1" />
		<edge from-layer="246" from-port="2" to-layer="248" to-port="0" />
		<edge from-layer="247" from-port="0" to-layer="248" to-port="1" />
		<edge from-layer="248" from-port="2" to-layer="250" to-port="0" />
		<edge from-layer="249" from-port="0" to-layer="250" to-port="1" />
		<edge from-layer="250" from-port="2" to-layer="259" to-port="1" />
		<edge from-layer="251" from-port="0" to-layer="252" to-port="1" />
		<edge from-layer="252" from-port="2" to-layer="254" to-port="0" />
		<edge from-layer="253" from-port="0" to-layer="254" to-port="1" />
		<edge from-layer="254" from-port="2" to-layer="256" to-port="0" />
		<edge from-layer="255" from-port="0" to-layer="256" to-port="1" />
		<edge from-layer="256" from-port="2" to-layer="258" to-port="0" />
		<edge from-layer="257" from-port="0" to-layer="258" to-port="1" />
		<edge from-layer="258" from-port="2" to-layer="259" to-port="2" />
		<edge from-layer="259" from-port="4" to-layer="261" to-port="0" />
		<edge from-layer="260" from-port="0" to-layer="261" to-port="1" />
		<edge from-layer="261" from-port="2" to-layer="267" to-port="0" />
		<edge from-layer="262" from-port="1" to-layer="265" to-port="0" />
		<edge from-layer="263" from-port="0" to-layer="265" to-port="1" />
		<edge from-layer="264" from-port="0" to-layer="265" to-port="2" />
		<edge from-layer="265" from-port="3" to-layer="266" to-port="0" />
		<edge from-layer="266" from-port="2" to-layer="267" to-port="1" />
		<edge from-layer="267" from-port="2" to-layer="269" to-port="0" />
		<edge from-layer="268" from-port="0" to-layer="269" to-port="1" />
		<edge from-layer="269" from-port="2" to-layer="271" to-port="0" />
		<edge from-layer="270" from-port="0" to-layer="271" to-port="1" />
		<edge from-layer="271" from-port="2" to-layer="272" to-port="0" />
		<edge from-layer="272" from-port="2" to-layer="274" to-port="0" />
		<edge from-layer="273" from-port="0" to-layer="274" to-port="1" />
		<edge from-layer="274" from-port="2" to-layer="276" to-port="0" />
		<edge from-layer="275" from-port="0" to-layer="276" to-port="1" />
		<edge from-layer="276" from-port="2" to-layer="278" to-port="0" />
		<edge from-layer="277" from-port="0" to-layer="278" to-port="1" />
		<edge from-layer="278" from-port="2" to-layer="280" to-port="0" />
		<edge from-layer="278" from-port="2" to-layer="288" to-port="1" />
		<edge from-layer="279" from-port="0" to-layer="280" to-port="1" />
		<edge from-layer="280" from-port="2" to-layer="282" to-port="0" />
		<edge from-layer="281" from-port="0" to-layer="282" to-port="1" />
		<edge from-layer="282" from-port="2" to-layer="283" to-port="0" />
		<edge from-layer="283" from-port="1" to-layer="285" to-port="0" />
		<edge from-layer="284" from-port="0" to-layer="285" to-port="1" />
		<edge from-layer="285" from-port="2" to-layer="287" to-port="0" />
		<edge from-layer="286" from-port="0" to-layer="287" to-port="1" />
		<edge from-layer="287" from-port="2" to-layer="288" to-port="0" />
		<edge from-layer="288" from-port="2" to-layer="290" to-port="0" />
		<edge from-layer="289" from-port="0" to-layer="290" to-port="1" />
		<edge from-layer="290" from-port="2" to-layer="292" to-port="0" />
		<edge from-layer="291" from-port="0" to-layer="292" to-port="1" />
		<edge from-layer="292" from-port="2" to-layer="294" to-port="0" />
		<edge from-layer="293" from-port="0" to-layer="294" to-port="1" />
		<edge from-layer="294" from-port="2" to-layer="296" to-port="0" />
		<edge from-layer="294" from-port="2" to-layer="322" to-port="0" />
		<edge from-layer="294" from-port="2" to-layer="304" to-port="0" />
		<edge from-layer="294" from-port="2" to-layer="312" to-port="0" />
		<edge from-layer="294" from-port="2" to-layer="332" to-port="1" />
		<edge from-layer="295" from-port="0" to-layer="296" to-port="1" />
		<edge from-layer="296" from-port="2" to-layer="298" to-port="0" />
		<edge from-layer="297" from-port="0" to-layer="298" to-port="1" />
		<edge from-layer="298" from-port="2" to-layer="300" to-port="0" />
		<edge from-layer="299" from-port="0" to-layer="300" to-port="1" />
		<edge from-layer="300" from-port="2" to-layer="302" to-port="0" />
		<edge from-layer="301" from-port="0" to-layer="302" to-port="1" />
		<edge from-layer="302" from-port="2" to-layer="319" to-port="0" />
		<edge from-layer="303" from-port="0" to-layer="304" to-port="1" />
		<edge from-layer="304" from-port="2" to-layer="306" to-port="0" />
		<edge from-layer="305" from-port="0" to-layer="306" to-port="1" />
		<edge from-layer="306" from-port="2" to-layer="308" to-port="0" />
		<edge from-layer="307" from-port="0" to-layer="308" to-port="1" />
		<edge from-layer="308" from-port="2" to-layer="310" to-port="0" />
		<edge from-layer="309" from-port="0" to-layer="310" to-port="1" />
		<edge from-layer="310" from-port="2" to-layer="319" to-port="1" />
		<edge from-layer="311" from-port="0" to-layer="312" to-port="1" />
		<edge from-layer="312" from-port="2" to-layer="314" to-port="0" />
		<edge from-layer="313" from-port="0" to-layer="314" to-port="1" />
		<edge from-layer="314" from-port="2" to-layer="316" to-port="0" />
		<edge from-layer="315" from-port="0" to-layer="316" to-port="1" />
		<edge from-layer="316" from-port="2" to-layer="318" to-port="0" />
		<edge from-layer="317" from-port="0" to-layer="318" to-port="1" />
		<edge from-layer="318" from-port="2" to-layer="319" to-port="2" />
		<edge from-layer="319" from-port="4" to-layer="321" to-port="0" />
		<edge from-layer="320" from-port="0" to-layer="321" to-port="1" />
		<edge from-layer="321" from-port="2" to-layer="327" to-port="0" />
		<edge from-layer="322" from-port="1" to-layer="325" to-port="0" />
		<edge from-layer="323" from-port="0" to-layer="325" to-port="1" />
		<edge from-layer="324" from-port="0" to-layer="325" to-port="2" />
		<edge from-layer="325" from-port="3" to-layer="326" to-port="0" />
		<edge from-layer="326" from-port="2" to-layer="327" to-port="1" />
		<edge from-layer="327" from-port="2" to-layer="329" to-port="0" />
		<edge from-layer="328" from-port="0" to-layer="329" to-port="1" />
		<edge from-layer="329" from-port="2" to-layer="331" to-port="0" />
		<edge from-layer="330" from-port="0" to-layer="331" to-port="1" />
		<edge from-layer="331" from-port="2" to-layer="332" to-port="0" />
		<edge from-layer="332" from-port="2" to-layer="334" to-port="0" />
		<edge from-layer="333" from-port="0" to-layer="334" to-port="1" />
		<edge from-layer="334" from-port="2" to-layer="336" to-port="0" />
		<edge from-layer="335" from-port="0" to-layer="336" to-port="1" />
		<edge from-layer="336" from-port="2" to-layer="338" to-port="0" />
		<edge from-layer="337" from-port="0" to-layer="338" to-port="1" />
		<edge from-layer="338" from-port="2" to-layer="348" to-port="1" />
		<edge from-layer="338" from-port="2" to-layer="340" to-port="0" />
		<edge from-layer="339" from-port="0" to-layer="340" to-port="1" />
		<edge from-layer="340" from-port="2" to-layer="342" to-port="0" />
		<edge from-layer="341" from-port="0" to-layer="342" to-port="1" />
		<edge from-layer="342" from-port="2" to-layer="343" to-port="0" />
		<edge from-layer="343" from-port="1" to-layer="345" to-port="0" />
		<edge from-layer="344" from-port="0" to-layer="345" to-port="1" />
		<edge from-layer="345" from-port="2" to-layer="347" to-port="0" />
		<edge from-layer="346" from-port="0" to-layer="347" to-port="1" />
		<edge from-layer="347" from-port="2" to-layer="348" to-port="0" />
		<edge from-layer="348" from-port="2" to-layer="350" to-port="0" />
		<edge from-layer="349" from-port="0" to-layer="350" to-port="1" />
		<edge from-layer="350" from-port="2" to-layer="352" to-port="0" />
		<edge from-layer="351" from-port="0" to-layer="352" to-port="1" />
		<edge from-layer="352" from-port="2" to-layer="354" to-port="0" />
		<edge from-layer="353" from-port="0" to-layer="354" to-port="1" />
		<edge from-layer="354" from-port="2" to-layer="356" to-port="0" />
		<edge from-layer="354" from-port="2" to-layer="392" to-port="1" />
		<edge from-layer="354" from-port="2" to-layer="364" to-port="0" />
		<edge from-layer="354" from-port="2" to-layer="372" to-port="0" />
		<edge from-layer="354" from-port="2" to-layer="382" to-port="0" />
		<edge from-layer="355" from-port="0" to-layer="356" to-port="1" />
		<edge from-layer="356" from-port="2" to-layer="358" to-port="0" />
		<edge from-layer="357" from-port="0" to-layer="358" to-port="1" />
		<edge from-layer="358" from-port="2" to-layer="360" to-port="0" />
		<edge from-layer="359" from-port="0" to-layer="360" to-port="1" />
		<edge from-layer="360" from-port="2" to-layer="362" to-port="0" />
		<edge from-layer="361" from-port="0" to-layer="362" to-port="1" />
		<edge from-layer="362" from-port="2" to-layer="379" to-port="0" />
		<edge from-layer="363" from-port="0" to-layer="364" to-port="1" />
		<edge from-layer="364" from-port="2" to-layer="366" to-port="0" />
		<edge from-layer="365" from-port="0" to-layer="366" to-port="1" />
		<edge from-layer="366" from-port="2" to-layer="368" to-port="0" />
		<edge from-layer="367" from-port="0" to-layer="368" to-port="1" />
		<edge from-layer="368" from-port="2" to-layer="370" to-port="0" />
		<edge from-layer="369" from-port="0" to-layer="370" to-port="1" />
		<edge from-layer="370" from-port="2" to-layer="379" to-port="1" />
		<edge from-layer="371" from-port="0" to-layer="372" to-port="1" />
		<edge from-layer="372" from-port="2" to-layer="374" to-port="0" />
		<edge from-layer="373" from-port="0" to-layer="374" to-port="1" />
		<edge from-layer="374" from-port="2" to-layer="376" to-port="0" />
		<edge from-layer="375" from-port="0" to-layer="376" to-port="1" />
		<edge from-layer="376" from-port="2" to-layer="378" to-port="0" />
		<edge from-layer="377" from-port="0" to-layer="378" to-port="1" />
		<edge from-layer="378" from-port="2" to-layer="379" to-port="2" />
		<edge from-layer="379" from-port="4" to-layer="381" to-port="0" />
		<edge from-layer="380" from-port="0" to-layer="381" to-port="1" />
		<edge from-layer="381" from-port="2" to-layer="387" to-port="0" />
		<edge from-layer="382" from-port="1" to-layer="385" to-port="0" />
		<edge from-layer="383" from-port="0" to-layer="385" to-port="1" />
		<edge from-layer="384" from-port="0" to-layer="385" to-port="2" />
		<edge from-layer="385" from-port="3" to-layer="386" to-port="0" />
		<edge from-layer="386" from-port="2" to-layer="387" to-port="1" />
		<edge from-layer="387" from-port="2" to-layer="389" to-port="0" />
		<edge from-layer="388" from-port="0" to-layer="389" to-port="1" />
		<edge from-layer="389" from-port="2" to-layer="391" to-port="0" />
		<edge from-layer="390" from-port="0" to-layer="391" to-port="1" />
		<edge from-layer="391" from-port="2" to-layer="392" to-port="0" />
		<edge from-layer="392" from-port="2" to-layer="394" to-port="0" />
		<edge from-layer="393" from-port="0" to-layer="394" to-port="1" />
		<edge from-layer="394" from-port="2" to-layer="396" to-port="0" />
		<edge from-layer="395" from-port="0" to-layer="396" to-port="1" />
		<edge from-layer="396" from-port="2" to-layer="398" to-port="0" />
		<edge from-layer="397" from-port="0" to-layer="398" to-port="1" />
		<edge from-layer="398" from-port="2" to-layer="400" to-port="0" />
		<edge from-layer="398" from-port="2" to-layer="408" to-port="1" />
		<edge from-layer="399" from-port="0" to-layer="400" to-port="1" />
		<edge from-layer="400" from-port="2" to-layer="402" to-port="0" />
		<edge from-layer="401" from-port="0" to-layer="402" to-port="1" />
		<edge from-layer="402" from-port="2" to-layer="403" to-port="0" />
		<edge from-layer="403" from-port="1" to-layer="405" to-port="0" />
		<edge from-layer="404" from-port="0" to-layer="405" to-port="1" />
		<edge from-layer="405" from-port="2" to-layer="407" to-port="0" />
		<edge from-layer="406" from-port="0" to-layer="407" to-port="1" />
		<edge from-layer="407" from-port="2" to-layer="408" to-port="0" />
		<edge from-layer="408" from-port="2" to-layer="410" to-port="0" />
		<edge from-layer="409" from-port="0" to-layer="410" to-port="1" />
		<edge from-layer="410" from-port="2" to-layer="412" to-port="0" />
		<edge from-layer="411" from-port="0" to-layer="412" to-port="1" />
		<edge from-layer="412" from-port="2" to-layer="414" to-port="0" />
		<edge from-layer="413" from-port="0" to-layer="414" to-port="1" />
		<edge from-layer="414" from-port="2" to-layer="415" to-port="0" />
	</edges>
	<rt_info>
		<Runtime_version value="2024.4.1-16618-643f23d1318-releases/2024/4" />
		<conversion_parameters>
			<framework value="pytorch" />
			<is_python_object value="True" />
		</conversion_parameters>
		<optimum>
			<optimum_intel_version value="1.20.0.dev0+b31524c" />
			<optimum_version value="1.23.0" />
			<pytorch_version value="2.5.0.dev20240807+cu121" />
			<transformers_version value="4.43.4" />
		</optimum>
	</rt_info>
</net>
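<!--
A minimal sketch of loading and running this IR with the OpenVINO Python API.
The token ids below are toy values chosen for illustration; everything else
follows the input and output names declared in this file (input_ids,
attention_mask, token_type_ids, last_hidden_state). Assumes the weights file
openvino_model.bin sits next to this XML.

	import numpy as np
	import openvino as ov

	core = ov.Core()
	model = core.read_model("openvino/openvino_model.xml")  # locates the matching .bin
	compiled = core.compile_model(model, "CPU")

	ids = np.array([[101, 7592, 102]], dtype=np.int64)      # toy ids; real ids come from a tokenizer
	inputs = {
	    "input_ids": ids,
	    "attention_mask": np.ones_like(ids),
	    "token_type_ids": np.zeros_like(ids),
	}
	result = compiled(inputs)
	hidden = result[compiled.output(0)]                     # last_hidden_state, shape (1, 3, 384)
-->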