openvino/openvino_model.xml
<?xml version="1.0"?>
<net name="Model84" version="11">
<layers>
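<!-- Graph inputs: input_ids, attention_mask, token_type_ids; all dynamic [batch, seq_len] tensors of element type i64 -->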
<layer id="2" name="input_ids" type="Parameter" version="opset1">
<data shape="?,?" element_type="i64" />
<output>
<port id="0" precision="I64" names="input_ids">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1" name="attention_mask" type="Parameter" version="opset1">
<data shape="?,?" element_type="i64" />
<output>
<port id="0" precision="I64" names="attention_mask">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="0" name="token_type_ids" type="Parameter" version="opset1">
<data shape="?,?" element_type="i64" />
<output>
<port id="0" precision="I64" names="token_type_ids">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
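<!-- Word embeddings: vocabulary 250037 x hidden size 384 (f32), looked up from input_ids via Convert (i64 to i32) + Gather -->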
<layer id="3" name="self.embeddings.word_embeddings.weight" type="Const" version="opset1">
<data element_type="f32" shape="250037, 384" offset="0" size="384056832" />
<output>
<port id="0" precision="FP32" names="self.embeddings.word_embeddings.weight">
<dim>250037</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="4" name="__module.embeddings.word_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
<data destination_type="i32" />
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="5" name="__module.embeddings.word_embeddings/aten::embedding/Constant" type="Const" version="opset1">
<data element_type="i32" shape="" offset="384056832" size="4" />
<output>
<port id="0" precision="I32" />
</output>
</layer>
<layer id="6" name="__module.embeddings.word_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="FP32">
<dim>250037</dim>
<dim>384</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="2" precision="I32" />
</input>
<output>
<port id="3" precision="FP32" names="79,inputs_embeds">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
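<!-- Token-type embeddings (2 x 384), looked up from token_type_ids and added to the word embeddings -->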
<layer id="7" name="self.embeddings.token_type_embeddings.weight" type="Const" version="opset1">
<data element_type="f32" shape="2, 384" offset="384056836" size="3072" />
<output>
<port id="0" precision="FP32" names="self.embeddings.token_type_embeddings.weight">
<dim>2</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="8" name="__module.embeddings.token_type_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
<data destination_type="i32" />
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="9" name="__module.embeddings.token_type_embeddings/aten::embedding/Constant" type="Const" version="opset1">
<data element_type="i32" shape="" offset="384056832" size="4" />
<output>
<port id="0" precision="I32" />
</output>
</layer>
<layer id="10" name="__module.embeddings.token_type_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="FP32">
<dim>2</dim>
<dim>384</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="2" precision="I32" />
</input>
<output>
<port id="3" precision="FP32" names="81,token_type_embeddings.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="11" name="__module.embeddings/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="82_1">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
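<!-- Position embeddings (512 x 384): a constant position-id row [1, 512] is sliced to the current sequence length (taken from ShapeOf(input_ids)), gathered, and added -->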
<layer id="12" name="self.embeddings.position_embeddings.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 384" offset="384059908" size="786432" />
<output>
<port id="0" precision="FP32" names="self.embeddings.position_embeddings.weight">
<dim>512</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="13" name="__module.embeddings/aten::slice/Slice" type="Const" version="opset1">
<data element_type="i64" shape="1, 512" offset="384846340" size="4096" />
<output>
<port id="0" precision="I64" names="76">
<dim>1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="14" name="__module.embeddings/aten::slice/Reshape" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="384850436" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="15" name="ShapeOf_103909" type="ShapeOf" version="opset3">
<data output_type="i64" />
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="16" name="Constant_104030" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="384850444" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="17" name="Constant_103911" type="Const" version="opset1">
<data element_type="i64" shape="" offset="384850436" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="18" name="Gather_103912" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="I64">
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="10,72,74,75,8">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="19" name="__module.embeddings/aten::slice/Reshape_2" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="384850444" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="20" name="__module.embeddings/aten::slice/Reshape_3" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="384850444" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="21" name="__module.embeddings/aten::slice/Slice_1" type="Slice" version="opset8">
<input>
<port id="0" precision="I64">
<dim>1</dim>
<dim>512</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
<port id="4" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="5" precision="I64" names="77">
<dim>1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="22" name="__module.embeddings.position_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
<data destination_type="i32" />
<input>
<port id="0" precision="I64">
<dim>1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I32">
<dim>1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="23" name="__module.embeddings.position_embeddings/aten::embedding/Constant" type="Const" version="opset1">
<data element_type="i32" shape="" offset="384056832" size="4" />
<output>
<port id="0" precision="I32" />
</output>
</layer>
<layer id="24" name="__module.embeddings.position_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="FP32">
<dim>512</dim>
<dim>384</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
<dim>-1</dim>
</port>
<port id="2" precision="I32" />
</input>
<output>
<port id="3" precision="FP32" names="84,position_embeddings.1">
<dim>1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="25" name="__module.embeddings/aten::add_/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="82,embeddings.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
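<!-- Embedding LayerNorm, decomposed into MVN (eps ~1e-12) followed by a per-channel scale (Multiply) and shift (Add) -->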
<layer id="26" name="__module.embeddings.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="384850452" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="27" name="__module.embeddings.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="28" name="Constant_103682" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="384850456" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="29" name="__module.embeddings.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="30" name="Constant_103683" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="384851992" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="31" name="__module.embeddings.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="89,input.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
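<!-- Encoder layer 0, self-attention: Q/K/V linear projections (384 x 384 weights, transpose_b MatMul + bias Add), each reshaped to [batch, seq_len, 12, 32] and permuted to [batch, 12 heads, seq_len, 32] -->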
<layer id="32" name="self.encoder.layer.0.attention.self.query.weight" type="Const" version="opset1">
<data element_type="f32" shape="384, 384" offset="384853528" size="589824" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.query.weight">
<dim>384</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="33" name="__module.encoder.layer.0.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>384</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="34" name="Constant_103684" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="385443352" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="35" name="__module.encoder.layer.0.attention.self.query/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="141,x.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="36" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444888" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="37" name="__module.encoder.layer.0.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="145,x.3">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="38" name="Constant_94561" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444920" size="32" />
<output>
<port id="0" precision="I64" names="146">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="39" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="147">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="40" name="self.encoder.layer.0.attention.self.key.weight" type="Const" version="opset1">
<data element_type="f32" shape="384, 384" offset="385444952" size="589824" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.key.weight">
<dim>384</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="41" name="__module.encoder.layer.0.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>384</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="42" name="Constant_103685" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="386034776" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="43" name="__module.encoder.layer.0.attention.self.key/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="150,x.5">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="44" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444888" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="45" name="__module.encoder.layer.0.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="154,x.7">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="46" name="Constant_94586" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444920" size="32" />
<output>
<port id="0" precision="I64" names="155">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="47" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="156">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="48" name="self.encoder.layer.0.attention.self.value.weight" type="Const" version="opset1">
<data element_type="f32" shape="384, 384" offset="386036312" size="589824" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.value.weight">
<dim>384</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="49" name="__module.encoder.layer.0.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>384</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="50" name="Constant_103686" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="386626136" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="51" name="__module.encoder.layer.0.attention.self.value/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="159,x.9">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="52" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444888" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="53" name="__module.encoder.layer.0.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="163,x.11">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="54" name="Constant_94611" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444920" size="32" />
<output>
<port id="0" precision="I64" names="164">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="55" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="165">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
</output>
</layer>
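<!-- Additive attention mask: attention_mask is unsqueezed to [batch, 1, 1, seq_len], broadcast to the 4-D mask shape, inverted (1 - mask), and the padded positions are filled via Select with a scalar constant from the weights blob (conventionally a large negative value) before being passed to ScaledDotProductAttention -->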
<layer id="56" name="Constant_103688" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1, 1" offset="386627672" size="4" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="57" name="25" type="Const" version="opset1">
<data element_type="i64" shape="" offset="384850444" size="8" />
<output>
<port id="0" precision="I64" names="25" />
</output>
</layer>
<layer id="58" name="aten::unsqueeze/Unsqueeze" type="Unsqueeze" version="opset1">
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="26">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="59" name="27" type="Const" version="opset1">
<data element_type="i64" shape="" offset="386627676" size="8" />
<output>
<port id="0" precision="I64" names="27" />
</output>
</layer>
<layer id="60" name="aten::unsqueeze/Unsqueeze_1" type="Unsqueeze" version="opset1">
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="28,33">
<dim>-1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="61" name="ShapeOf_103917" type="ShapeOf" version="opset3">
<data output_type="i64" />
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="62" name="Constant_104033" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="384850436" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="63" name="Constant_103919" type="Const" version="opset1">
<data element_type="i64" shape="" offset="384850436" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="64" name="Gather_103920" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="I64">
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="13,15">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="65" name="Constant_102306" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="384850444" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="66" name="Constant_104036" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="384850444" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="67" name="Constant_103927" type="Const" version="opset1">
<data element_type="i64" shape="" offset="384850436" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="68" name="Gather_103928" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="I64">
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="17,19">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="69" name="prim::ListConstruct/Concat" type="Concat" version="opset1">
<data axis="0" />
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="35">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="70" name="aten::expand/Broadcast" type="Broadcast" version="opset3">
<data mode="bidirectional" />
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="37">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="71" name="aten::to/Convert" type="Convert" version="opset1">
<data destination_type="f32" />
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="42">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="72" name="Constant_103687" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1, 1" offset="386627672" size="4" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="73" name="aten::rsub/Multiply" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="74" name="aten::rsub/Subtract" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="45,inverted_mask">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="75" name="aten::to/Convert_1" type="Convert" version="opset1">
<data destination_type="boolean" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="BOOL" names="50">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="76" name="aten::masked_fill/ConvertLike" type="Const" version="opset1">
<data element_type="f32" shape="" offset="386627684" size="4" />
<output>
<port id="0" precision="FP32" />
</output>
</layer>
<layer id="77" name="aten::masked_fill/Select" type="Select" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="BOOL">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="3" precision="FP32" names="52">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
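<!-- Scaled dot-product attention over the 12 heads, then transposed back to [batch, seq_len, 12, 32] and reshaped to [batch, seq_len, 384] -->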
<layer id="78" name="__module.encoder.layer.0.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="4" precision="FP32" names="166,attn_output.1">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="79" name="__module.encoder.layer.0.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="386627688" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="80" name="__module.encoder.layer.0.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="167,attn_output.3">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="81" name="__module.encoder.layer.0.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
<data output_type="i64" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="82" name="Constant_102751" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="386627704" size="16" />
<output>
<port id="0" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="83" name="Constant_102752" type="Const" version="opset1">
<data element_type="i64" shape="" offset="384850436" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="84" name="Gather_102753" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="85" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Reshape_1_3" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="386627720" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="86" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
<data axis="0" />
<input>
<port id="0" precision="I64">
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="168">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="87" name="__module.encoder.layer.0.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
<data special_zero="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="169">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
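<!-- Attention output: dense projection (384 x 384) + bias, residual Add with the block input, then LayerNorm (MVN + scale + shift) -->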
<layer id="88" name="self.encoder.layer.0.attention.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="384, 384" offset="386627728" size="589824" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.0.attention.output.dense.weight">
<dim>384</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="89" name="__module.encoder.layer.0.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>384</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="90" name="Constant_103689" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="387217552" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="91" name="__module.encoder.layer.0.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="175,input.3">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="92" name="__module.encoder.layer.0.attention.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="177">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="93" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="384850452" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="94" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="95" name="Constant_103690" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="387219088" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="96" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="97" name="Constant_103691" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="387220624" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="98" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="181,input_tensor.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
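<!-- Feed-forward: intermediate dense (1536 x 384) + GELU (ERF variant), output dense (384 x 1536), residual Add, LayerNorm -->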
<layer id="99" name="self.encoder.layer.0.intermediate.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="1536, 384" offset="387222160" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.0.intermediate.dense.weight">
<dim>1536</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="100" name="__module.encoder.layer.0.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1536</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="101" name="Constant_103692" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1536" offset="389581456" size="6144" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="102" name="__module.encoder.layer.0.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1536</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1536</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="186">
<dim>-1</dim>
<dim>-1</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="103" name="__module.encoder.layer.0.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="ERF" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1536</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="187">
<dim>-1</dim>
<dim>-1</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="104" name="self.encoder.layer.0.output.dense.weight" type="Const" version="opset1">
<data element_type="f32" shape="384, 1536" offset="389587600" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.0.output.dense.weight">
<dim>384</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="105" name="__module.encoder.layer.0.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1536</dim>
</port>
<port id="1" precision="FP32">
<dim>384</dim>
<dim>1536</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="106" name="Constant_103693" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="391946896" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="107" name="__module.encoder.layer.0.output.dense/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="193,input.5">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="108" name="__module.encoder.layer.0.output/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="195">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="109" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="384850452" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="110" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="111" name="Constant_103694" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="391948432" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="112" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="113" name="Constant_103695" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="391949968" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="114" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="199,hidden_states.7">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
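<!-- Encoder layer 1: identical structure to layer 0 with its own weights; this pattern presumably repeats for the remaining encoder layers in the rest of the file -->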
<layer id="115" name="self.encoder.layer.1.attention.self.query.weight" type="Const" version="opset1">
<data element_type="f32" shape="384, 384" offset="391951504" size="589824" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.query.weight">
<dim>384</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="116" name="__module.encoder.layer.1.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>384</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="117" name="Constant_103696" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="392541328" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="118" name="__module.encoder.layer.1.attention.self.query/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="212,x.13">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="119" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444888" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="120" name="__module.encoder.layer.1.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="216,x.15">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="121" name="Constant_94793" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444920" size="32" />
<output>
<port id="0" precision="I64" names="217">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="122" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="218">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="123" name="self.encoder.layer.1.attention.self.key.weight" type="Const" version="opset1">
<data element_type="f32" shape="384, 384" offset="392542864" size="589824" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.key.weight">
<dim>384</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="124" name="__module.encoder.layer.1.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>384</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="125" name="Constant_103697" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="393132688" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="126" name="__module.encoder.layer.1.attention.self.key/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="221,x.17">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="127" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444888" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="128" name="__module.encoder.layer.1.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="225,x.19">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="129" name="Constant_94816" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444920" size="32" />
<output>
<port id="0" precision="I64" names="226">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="130" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="227">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="131" name="self.encoder.layer.1.attention.self.value.weight" type="Const" version="opset1">
<data element_type="f32" shape="384, 384" offset="393134224" size="589824" />
<output>
<port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.value.weight">
<dim>384</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="132" name="__module.encoder.layer.1.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>384</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="133" name="Constant_103698" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 384" offset="393724048" size="1536" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="134" name="__module.encoder.layer.1.attention.self.value/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="230,x.21">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</output>
</layer>
<layer id="135" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444888" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="136" name="__module.encoder.layer.1.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="234,x.23">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="137" name="Constant_94839" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="385444920" size="32" />
<output>
<port id="0" precision="I64" names="235">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="138" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="236">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="139" name="__module.encoder.layer.1.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="4" precision="FP32" names="237,attn_output.5">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="140" name="__module.encoder.layer.1.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="386627688" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="141" name="__module.encoder.layer.1.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>32</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="238,attn_output.7">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
</output>
</layer>
<layer id="142" name="__module.encoder.layer.1.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
<data output_type="i64" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>384</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="143" name="Constant_102771" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="386627704" size="16" />
<output>
<port id="0" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="144" name="Constant_102772" type="Const" version="opset1">
<data element_type="i64" shape="" offset="384850436" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="145" name="Gather_102773" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="146" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
<data axis="0" />
<input>
<port id="0" precision="I64">
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="239">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="147" name="__module.encoder.layer.1.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
<data special_zero="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>32</dim>
</port>
<port id="1" precision="I64">
2186 <dim>3</dim>
2187 </port>
2188 </input>
2189 <output>
2190 <port id="2" precision="FP32" names="240">
2191 <dim>-1</dim>
2192 <dim>-1</dim>
2193 <dim>384</dim>
2194 </port>
2195 </output>
2196 </layer>
2197 <layer id="148" name="self.encoder.layer.1.attention.output.dense.weight" type="Const" version="opset1">
2198 <data element_type="f32" shape="384, 384" offset="393725584" size="589824" />
2199 <output>
2200 <port id="0" precision="FP32" names="self.encoder.layer.1.attention.output.dense.weight">
2201 <dim>384</dim>
2202 <dim>384</dim>
2203 </port>
2204 </output>
2205 </layer>
2206 <layer id="149" name="__module.encoder.layer.1.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2207 <data transpose_a="false" transpose_b="true" />
2208 <input>
2209 <port id="0" precision="FP32">
2210 <dim>-1</dim>
2211 <dim>-1</dim>
2212 <dim>384</dim>
2213 </port>
2214 <port id="1" precision="FP32">
2215 <dim>384</dim>
2216 <dim>384</dim>
2217 </port>
2218 </input>
2219 <output>
2220 <port id="2" precision="FP32">
2221 <dim>-1</dim>
2222 <dim>-1</dim>
2223 <dim>384</dim>
2224 </port>
2225 </output>
2226 </layer>
2227 <layer id="150" name="Constant_103699" type="Const" version="opset1">
2228 <data element_type="f32" shape="1, 1, 384" offset="394315408" size="1536" />
2229 <output>
2230 <port id="0" precision="FP32">
2231 <dim>1</dim>
2232 <dim>1</dim>
2233 <dim>384</dim>
2234 </port>
2235 </output>
2236 </layer>
2237 <layer id="151" name="__module.encoder.layer.1.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
2238 <data auto_broadcast="numpy" />
2239 <input>
2240 <port id="0" precision="FP32">
2241 <dim>-1</dim>
2242 <dim>-1</dim>
2243 <dim>384</dim>
2244 </port>
2245 <port id="1" precision="FP32">
2246 <dim>1</dim>
2247 <dim>1</dim>
2248 <dim>384</dim>
2249 </port>
2250 </input>
2251 <output>
2252 <port id="2" precision="FP32" names="246,input.7">
2253 <dim>-1</dim>
2254 <dim>-1</dim>
2255 <dim>384</dim>
2256 </port>
2257 </output>
2258 </layer>
2259 <layer id="152" name="__module.encoder.layer.1.attention.output/aten::add/Add" type="Add" version="opset1">
2260 <data auto_broadcast="numpy" />
2261 <input>
2262 <port id="0" precision="FP32">
2263 <dim>-1</dim>
2264 <dim>-1</dim>
2265 <dim>384</dim>
2266 </port>
2267 <port id="1" precision="FP32">
2268 <dim>-1</dim>
2269 <dim>-1</dim>
2270 <dim>384</dim>
2271 </port>
2272 </input>
2273 <output>
2274 <port id="2" precision="FP32" names="248">
2275 <dim>-1</dim>
2276 <dim>-1</dim>
2277 <dim>384</dim>
2278 </port>
2279 </output>
2280 </layer>
2281 <layer id="153" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
2282 <data element_type="i32" shape="1" offset="384850452" size="4" />
2283 <output>
2284 <port id="0" precision="I32">
2285 <dim>1</dim>
2286 </port>
2287 </output>
2288 </layer>
2289 <layer id="154" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
2290 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
2291 <input>
2292 <port id="0" precision="FP32">
2293 <dim>-1</dim>
2294 <dim>-1</dim>
2295 <dim>384</dim>
2296 </port>
2297 <port id="1" precision="I32">
2298 <dim>1</dim>
2299 </port>
2300 </input>
2301 <output>
2302 <port id="2" precision="FP32">
2303 <dim>-1</dim>
2304 <dim>-1</dim>
2305 <dim>384</dim>
2306 </port>
2307 </output>
2308 </layer>
2309 <layer id="155" name="Constant_103700" type="Const" version="opset1">
2310 <data element_type="f32" shape="1, 1, 384" offset="394316944" size="1536" />
2311 <output>
2312 <port id="0" precision="FP32">
2313 <dim>1</dim>
2314 <dim>1</dim>
2315 <dim>384</dim>
2316 </port>
2317 </output>
2318 </layer>
2319 <layer id="156" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
2320 <data auto_broadcast="numpy" />
2321 <input>
2322 <port id="0" precision="FP32">
2323 <dim>-1</dim>
2324 <dim>-1</dim>
2325 <dim>384</dim>
2326 </port>
2327 <port id="1" precision="FP32">
2328 <dim>1</dim>
2329 <dim>1</dim>
2330 <dim>384</dim>
2331 </port>
2332 </input>
2333 <output>
2334 <port id="2" precision="FP32">
2335 <dim>-1</dim>
2336 <dim>-1</dim>
2337 <dim>384</dim>
2338 </port>
2339 </output>
2340 </layer>
2341 <layer id="157" name="Constant_103701" type="Const" version="opset1">
2342 <data element_type="f32" shape="1, 1, 384" offset="394318480" size="1536" />
2343 <output>
2344 <port id="0" precision="FP32">
2345 <dim>1</dim>
2346 <dim>1</dim>
2347 <dim>384</dim>
2348 </port>
2349 </output>
2350 </layer>
2351 <layer id="158" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
2352 <data auto_broadcast="numpy" />
2353 <input>
2354 <port id="0" precision="FP32">
2355 <dim>-1</dim>
2356 <dim>-1</dim>
2357 <dim>384</dim>
2358 </port>
2359 <port id="1" precision="FP32">
2360 <dim>1</dim>
2361 <dim>1</dim>
2362 <dim>384</dim>
2363 </port>
2364 </input>
2365 <output>
2366 <port id="2" precision="FP32" names="252,input_tensor.3">
2367 <dim>-1</dim>
2368 <dim>-1</dim>
2369 <dim>384</dim>
2370 </port>
2371 </output>
2372 </layer>
2373 <layer id="159" name="self.encoder.layer.1.intermediate.dense.weight" type="Const" version="opset1">
2374 <data element_type="f32" shape="1536, 384" offset="394320016" size="2359296" />
2375 <output>
2376 <port id="0" precision="FP32" names="self.encoder.layer.1.intermediate.dense.weight">
2377 <dim>1536</dim>
2378 <dim>384</dim>
2379 </port>
2380 </output>
2381 </layer>
2382 <layer id="160" name="__module.encoder.layer.1.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2383 <data transpose_a="false" transpose_b="true" />
2384 <input>
2385 <port id="0" precision="FP32">
2386 <dim>-1</dim>
2387 <dim>-1</dim>
2388 <dim>384</dim>
2389 </port>
2390 <port id="1" precision="FP32">
2391 <dim>1536</dim>
2392 <dim>384</dim>
2393 </port>
2394 </input>
2395 <output>
2396 <port id="2" precision="FP32">
2397 <dim>-1</dim>
2398 <dim>-1</dim>
2399 <dim>1536</dim>
2400 </port>
2401 </output>
2402 </layer>
2403 <layer id="161" name="Constant_103702" type="Const" version="opset1">
2404 <data element_type="f32" shape="1, 1, 1536" offset="396679312" size="6144" />
2405 <output>
2406 <port id="0" precision="FP32">
2407 <dim>1</dim>
2408 <dim>1</dim>
2409 <dim>1536</dim>
2410 </port>
2411 </output>
2412 </layer>
2413 <layer id="162" name="__module.encoder.layer.1.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
2414 <data auto_broadcast="numpy" />
2415 <input>
2416 <port id="0" precision="FP32">
2417 <dim>-1</dim>
2418 <dim>-1</dim>
2419 <dim>1536</dim>
2420 </port>
2421 <port id="1" precision="FP32">
2422 <dim>1</dim>
2423 <dim>1</dim>
2424 <dim>1536</dim>
2425 </port>
2426 </input>
2427 <output>
2428 <port id="2" precision="FP32" names="257">
2429 <dim>-1</dim>
2430 <dim>-1</dim>
2431 <dim>1536</dim>
2432 </port>
2433 </output>
2434 </layer>
2435 <layer id="163" name="__module.encoder.layer.1.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
2436 <data approximation_mode="ERF" />
2437 <input>
2438 <port id="0" precision="FP32">
2439 <dim>-1</dim>
2440 <dim>-1</dim>
2441 <dim>1536</dim>
2442 </port>
2443 </input>
2444 <output>
2445 <port id="1" precision="FP32" names="258">
2446 <dim>-1</dim>
2447 <dim>-1</dim>
2448 <dim>1536</dim>
2449 </port>
2450 </output>
2451 </layer>
2452 <layer id="164" name="self.encoder.layer.1.output.dense.weight" type="Const" version="opset1">
2453 <data element_type="f32" shape="384, 1536" offset="396685456" size="2359296" />
2454 <output>
2455 <port id="0" precision="FP32" names="self.encoder.layer.1.output.dense.weight">
2456 <dim>384</dim>
2457 <dim>1536</dim>
2458 </port>
2459 </output>
2460 </layer>
2461 <layer id="165" name="__module.encoder.layer.1.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2462 <data transpose_a="false" transpose_b="true" />
2463 <input>
2464 <port id="0" precision="FP32">
2465 <dim>-1</dim>
2466 <dim>-1</dim>
2467 <dim>1536</dim>
2468 </port>
2469 <port id="1" precision="FP32">
2470 <dim>384</dim>
2471 <dim>1536</dim>
2472 </port>
2473 </input>
2474 <output>
2475 <port id="2" precision="FP32">
2476 <dim>-1</dim>
2477 <dim>-1</dim>
2478 <dim>384</dim>
2479 </port>
2480 </output>
2481 </layer>
2482 <layer id="166" name="Constant_103703" type="Const" version="opset1">
2483 <data element_type="f32" shape="1, 1, 384" offset="399044752" size="1536" />
2484 <output>
2485 <port id="0" precision="FP32">
2486 <dim>1</dim>
2487 <dim>1</dim>
2488 <dim>384</dim>
2489 </port>
2490 </output>
2491 </layer>
2492 <layer id="167" name="__module.encoder.layer.1.output.dense/aten::linear/Add" type="Add" version="opset1">
2493 <data auto_broadcast="numpy" />
2494 <input>
2495 <port id="0" precision="FP32">
2496 <dim>-1</dim>
2497 <dim>-1</dim>
2498 <dim>384</dim>
2499 </port>
2500 <port id="1" precision="FP32">
2501 <dim>1</dim>
2502 <dim>1</dim>
2503 <dim>384</dim>
2504 </port>
2505 </input>
2506 <output>
2507 <port id="2" precision="FP32" names="264,input.9">
2508 <dim>-1</dim>
2509 <dim>-1</dim>
2510 <dim>384</dim>
2511 </port>
2512 </output>
2513 </layer>
2514 <layer id="168" name="__module.encoder.layer.1.output/aten::add/Add" type="Add" version="opset1">
2515 <data auto_broadcast="numpy" />
2516 <input>
2517 <port id="0" precision="FP32">
2518 <dim>-1</dim>
2519 <dim>-1</dim>
2520 <dim>384</dim>
2521 </port>
2522 <port id="1" precision="FP32">
2523 <dim>-1</dim>
2524 <dim>-1</dim>
2525 <dim>384</dim>
2526 </port>
2527 </input>
2528 <output>
2529 <port id="2" precision="FP32" names="266">
2530 <dim>-1</dim>
2531 <dim>-1</dim>
2532 <dim>384</dim>
2533 </port>
2534 </output>
2535 </layer>
2536 <layer id="169" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
2537 <data element_type="i32" shape="1" offset="384850452" size="4" />
2538 <output>
2539 <port id="0" precision="I32">
2540 <dim>1</dim>
2541 </port>
2542 </output>
2543 </layer>
2544 <layer id="170" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
2545 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
2546 <input>
2547 <port id="0" precision="FP32">
2548 <dim>-1</dim>
2549 <dim>-1</dim>
2550 <dim>384</dim>
2551 </port>
2552 <port id="1" precision="I32">
2553 <dim>1</dim>
2554 </port>
2555 </input>
2556 <output>
2557 <port id="2" precision="FP32">
2558 <dim>-1</dim>
2559 <dim>-1</dim>
2560 <dim>384</dim>
2561 </port>
2562 </output>
2563 </layer>
2564 <layer id="171" name="Constant_103704" type="Const" version="opset1">
2565 <data element_type="f32" shape="1, 1, 384" offset="399046288" size="1536" />
2566 <output>
2567 <port id="0" precision="FP32">
2568 <dim>1</dim>
2569 <dim>1</dim>
2570 <dim>384</dim>
2571 </port>
2572 </output>
2573 </layer>
2574 <layer id="172" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
2575 <data auto_broadcast="numpy" />
2576 <input>
2577 <port id="0" precision="FP32">
2578 <dim>-1</dim>
2579 <dim>-1</dim>
2580 <dim>384</dim>
2581 </port>
2582 <port id="1" precision="FP32">
2583 <dim>1</dim>
2584 <dim>1</dim>
2585 <dim>384</dim>
2586 </port>
2587 </input>
2588 <output>
2589 <port id="2" precision="FP32">
2590 <dim>-1</dim>
2591 <dim>-1</dim>
2592 <dim>384</dim>
2593 </port>
2594 </output>
2595 </layer>
2596 <layer id="173" name="Constant_103705" type="Const" version="opset1">
2597 <data element_type="f32" shape="1, 1, 384" offset="399047824" size="1536" />
2598 <output>
2599 <port id="0" precision="FP32">
2600 <dim>1</dim>
2601 <dim>1</dim>
2602 <dim>384</dim>
2603 </port>
2604 </output>
2605 </layer>
2606 <layer id="174" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
2607 <data auto_broadcast="numpy" />
2608 <input>
2609 <port id="0" precision="FP32">
2610 <dim>-1</dim>
2611 <dim>-1</dim>
2612 <dim>384</dim>
2613 </port>
2614 <port id="1" precision="FP32">
2615 <dim>1</dim>
2616 <dim>1</dim>
2617 <dim>384</dim>
2618 </port>
2619 </input>
2620 <output>
2621 <port id="2" precision="FP32" names="270,hidden_states.13">
2622 <dim>-1</dim>
2623 <dim>-1</dim>
2624 <dim>384</dim>
2625 </port>
2626 </output>
2627 </layer>
2628 <layer id="175" name="self.encoder.layer.2.attention.self.query.weight" type="Const" version="opset1">
2629 <data element_type="f32" shape="384, 384" offset="399049360" size="589824" />
2630 <output>
2631 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.query.weight">
2632 <dim>384</dim>
2633 <dim>384</dim>
2634 </port>
2635 </output>
2636 </layer>
2637 <layer id="176" name="__module.encoder.layer.2.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
2638 <data transpose_a="false" transpose_b="true" />
2639 <input>
2640 <port id="0" precision="FP32">
2641 <dim>-1</dim>
2642 <dim>-1</dim>
2643 <dim>384</dim>
2644 </port>
2645 <port id="1" precision="FP32">
2646 <dim>384</dim>
2647 <dim>384</dim>
2648 </port>
2649 </input>
2650 <output>
2651 <port id="2" precision="FP32">
2652 <dim>-1</dim>
2653 <dim>-1</dim>
2654 <dim>384</dim>
2655 </port>
2656 </output>
2657 </layer>
2658 <layer id="177" name="Constant_103706" type="Const" version="opset1">
2659 <data element_type="f32" shape="1, 1, 384" offset="399639184" size="1536" />
2660 <output>
2661 <port id="0" precision="FP32">
2662 <dim>1</dim>
2663 <dim>1</dim>
2664 <dim>384</dim>
2665 </port>
2666 </output>
2667 </layer>
2668 <layer id="178" name="__module.encoder.layer.2.attention.self.query/aten::linear/Add" type="Add" version="opset1">
2669 <data auto_broadcast="numpy" />
2670 <input>
2671 <port id="0" precision="FP32">
2672 <dim>-1</dim>
2673 <dim>-1</dim>
2674 <dim>384</dim>
2675 </port>
2676 <port id="1" precision="FP32">
2677 <dim>1</dim>
2678 <dim>1</dim>
2679 <dim>384</dim>
2680 </port>
2681 </input>
2682 <output>
2683 <port id="2" precision="FP32" names="283,x.25">
2684 <dim>-1</dim>
2685 <dim>-1</dim>
2686 <dim>384</dim>
2687 </port>
2688 </output>
2689 </layer>
2690 <layer id="179" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
2691 <data element_type="i64" shape="4" offset="385444888" size="32" />
2692 <output>
2693 <port id="0" precision="I64">
2694 <dim>4</dim>
2695 </port>
2696 </output>
2697 </layer>
2698 <layer id="180" name="__module.encoder.layer.2.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
2699 <data special_zero="true" />
2700 <input>
2701 <port id="0" precision="FP32">
2702 <dim>-1</dim>
2703 <dim>-1</dim>
2704 <dim>384</dim>
2705 </port>
2706 <port id="1" precision="I64">
2707 <dim>4</dim>
2708 </port>
2709 </input>
2710 <output>
2711 <port id="2" precision="FP32" names="287,x.27">
2712 <dim>-1</dim>
2713 <dim>-1</dim>
2714 <dim>12</dim>
2715 <dim>32</dim>
2716 </port>
2717 </output>
2718 </layer>
2719 <layer id="181" name="Constant_95019" type="Const" version="opset1">
2720 <data element_type="i64" shape="4" offset="385444920" size="32" />
2721 <output>
2722 <port id="0" precision="I64" names="288">
2723 <dim>4</dim>
2724 </port>
2725 </output>
2726 </layer>
2727 <layer id="182" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
2728 <input>
2729 <port id="0" precision="FP32">
2730 <dim>-1</dim>
2731 <dim>-1</dim>
2732 <dim>12</dim>
2733 <dim>32</dim>
2734 </port>
2735 <port id="1" precision="I64">
2736 <dim>4</dim>
2737 </port>
2738 </input>
2739 <output>
2740 <port id="2" precision="FP32" names="289">
2741 <dim>-1</dim>
2742 <dim>12</dim>
2743 <dim>-1</dim>
2744 <dim>32</dim>
2745 </port>
2746 </output>
2747 </layer>
2748 <layer id="183" name="self.encoder.layer.2.attention.self.key.weight" type="Const" version="opset1">
2749 <data element_type="f32" shape="384, 384" offset="399640720" size="589824" />
2750 <output>
2751 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.key.weight">
2752 <dim>384</dim>
2753 <dim>384</dim>
2754 </port>
2755 </output>
2756 </layer>
2757 <layer id="184" name="__module.encoder.layer.2.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
2758 <data transpose_a="false" transpose_b="true" />
2759 <input>
2760 <port id="0" precision="FP32">
2761 <dim>-1</dim>
2762 <dim>-1</dim>
2763 <dim>384</dim>
2764 </port>
2765 <port id="1" precision="FP32">
2766 <dim>384</dim>
2767 <dim>384</dim>
2768 </port>
2769 </input>
2770 <output>
2771 <port id="2" precision="FP32">
2772 <dim>-1</dim>
2773 <dim>-1</dim>
2774 <dim>384</dim>
2775 </port>
2776 </output>
2777 </layer>
2778 <layer id="185" name="Constant_103707" type="Const" version="opset1">
2779 <data element_type="f32" shape="1, 1, 384" offset="400230544" size="1536" />
2780 <output>
2781 <port id="0" precision="FP32">
2782 <dim>1</dim>
2783 <dim>1</dim>
2784 <dim>384</dim>
2785 </port>
2786 </output>
2787 </layer>
2788 <layer id="186" name="__module.encoder.layer.2.attention.self.key/aten::linear/Add" type="Add" version="opset1">
2789 <data auto_broadcast="numpy" />
2790 <input>
2791 <port id="0" precision="FP32">
2792 <dim>-1</dim>
2793 <dim>-1</dim>
2794 <dim>384</dim>
2795 </port>
2796 <port id="1" precision="FP32">
2797 <dim>1</dim>
2798 <dim>1</dim>
2799 <dim>384</dim>
2800 </port>
2801 </input>
2802 <output>
2803 <port id="2" precision="FP32" names="292,x.29">
2804 <dim>-1</dim>
2805 <dim>-1</dim>
2806 <dim>384</dim>
2807 </port>
2808 </output>
2809 </layer>
2810 <layer id="187" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
2811 <data element_type="i64" shape="4" offset="385444888" size="32" />
2812 <output>
2813 <port id="0" precision="I64">
2814 <dim>4</dim>
2815 </port>
2816 </output>
2817 </layer>
2818 <layer id="188" name="__module.encoder.layer.2.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
2819 <data special_zero="true" />
2820 <input>
2821 <port id="0" precision="FP32">
2822 <dim>-1</dim>
2823 <dim>-1</dim>
2824 <dim>384</dim>
2825 </port>
2826 <port id="1" precision="I64">
2827 <dim>4</dim>
2828 </port>
2829 </input>
2830 <output>
2831 <port id="2" precision="FP32" names="296,x.31">
2832 <dim>-1</dim>
2833 <dim>-1</dim>
2834 <dim>12</dim>
2835 <dim>32</dim>
2836 </port>
2837 </output>
2838 </layer>
2839 <layer id="189" name="Constant_95042" type="Const" version="opset1">
2840 <data element_type="i64" shape="4" offset="385444920" size="32" />
2841 <output>
2842 <port id="0" precision="I64" names="297">
2843 <dim>4</dim>
2844 </port>
2845 </output>
2846 </layer>
2847 <layer id="190" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
2848 <input>
2849 <port id="0" precision="FP32">
2850 <dim>-1</dim>
2851 <dim>-1</dim>
2852 <dim>12</dim>
2853 <dim>32</dim>
2854 </port>
2855 <port id="1" precision="I64">
2856 <dim>4</dim>
2857 </port>
2858 </input>
2859 <output>
2860 <port id="2" precision="FP32" names="298">
2861 <dim>-1</dim>
2862 <dim>12</dim>
2863 <dim>-1</dim>
2864 <dim>32</dim>
2865 </port>
2866 </output>
2867 </layer>
2868 <layer id="191" name="self.encoder.layer.2.attention.self.value.weight" type="Const" version="opset1">
2869 <data element_type="f32" shape="384, 384" offset="400232080" size="589824" />
2870 <output>
2871 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.value.weight">
2872 <dim>384</dim>
2873 <dim>384</dim>
2874 </port>
2875 </output>
2876 </layer>
2877 <layer id="192" name="__module.encoder.layer.2.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
2878 <data transpose_a="false" transpose_b="true" />
2879 <input>
2880 <port id="0" precision="FP32">
2881 <dim>-1</dim>
2882 <dim>-1</dim>
2883 <dim>384</dim>
2884 </port>
2885 <port id="1" precision="FP32">
2886 <dim>384</dim>
2887 <dim>384</dim>
2888 </port>
2889 </input>
2890 <output>
2891 <port id="2" precision="FP32">
2892 <dim>-1</dim>
2893 <dim>-1</dim>
2894 <dim>384</dim>
2895 </port>
2896 </output>
2897 </layer>
2898 <layer id="193" name="Constant_103708" type="Const" version="opset1">
2899 <data element_type="f32" shape="1, 1, 384" offset="400821904" size="1536" />
2900 <output>
2901 <port id="0" precision="FP32">
2902 <dim>1</dim>
2903 <dim>1</dim>
2904 <dim>384</dim>
2905 </port>
2906 </output>
2907 </layer>
2908 <layer id="194" name="__module.encoder.layer.2.attention.self.value/aten::linear/Add" type="Add" version="opset1">
2909 <data auto_broadcast="numpy" />
2910 <input>
2911 <port id="0" precision="FP32">
2912 <dim>-1</dim>
2913 <dim>-1</dim>
2914 <dim>384</dim>
2915 </port>
2916 <port id="1" precision="FP32">
2917 <dim>1</dim>
2918 <dim>1</dim>
2919 <dim>384</dim>
2920 </port>
2921 </input>
2922 <output>
2923 <port id="2" precision="FP32" names="301,x.33">
2924 <dim>-1</dim>
2925 <dim>-1</dim>
2926 <dim>384</dim>
2927 </port>
2928 </output>
2929 </layer>
2930 <layer id="195" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
2931 <data element_type="i64" shape="4" offset="385444888" size="32" />
2932 <output>
2933 <port id="0" precision="I64">
2934 <dim>4</dim>
2935 </port>
2936 </output>
2937 </layer>
2938 <layer id="196" name="__module.encoder.layer.2.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
2939 <data special_zero="true" />
2940 <input>
2941 <port id="0" precision="FP32">
2942 <dim>-1</dim>
2943 <dim>-1</dim>
2944 <dim>384</dim>
2945 </port>
2946 <port id="1" precision="I64">
2947 <dim>4</dim>
2948 </port>
2949 </input>
2950 <output>
2951 <port id="2" precision="FP32" names="305,x.35">
2952 <dim>-1</dim>
2953 <dim>-1</dim>
2954 <dim>12</dim>
2955 <dim>32</dim>
2956 </port>
2957 </output>
2958 </layer>
2959 <layer id="197" name="Constant_95065" type="Const" version="opset1">
2960 <data element_type="i64" shape="4" offset="385444920" size="32" />
2961 <output>
2962 <port id="0" precision="I64" names="306">
2963 <dim>4</dim>
2964 </port>
2965 </output>
2966 </layer>
2967 <layer id="198" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
2968 <input>
2969 <port id="0" precision="FP32">
2970 <dim>-1</dim>
2971 <dim>-1</dim>
2972 <dim>12</dim>
2973 <dim>32</dim>
2974 </port>
2975 <port id="1" precision="I64">
2976 <dim>4</dim>
2977 </port>
2978 </input>
2979 <output>
2980 <port id="2" precision="FP32" names="307">
2981 <dim>-1</dim>
2982 <dim>12</dim>
2983 <dim>-1</dim>
2984 <dim>32</dim>
2985 </port>
2986 </output>
2987 </layer>
2988 <layer id="199" name="__module.encoder.layer.2.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
2989 <data causal="false" />
2990 <input>
2991 <port id="0" precision="FP32">
2992 <dim>-1</dim>
2993 <dim>12</dim>
2994 <dim>-1</dim>
2995 <dim>32</dim>
2996 </port>
2997 <port id="1" precision="FP32">
2998 <dim>-1</dim>
2999 <dim>12</dim>
3000 <dim>-1</dim>
3001 <dim>32</dim>
3002 </port>
3003 <port id="2" precision="FP32">
3004 <dim>-1</dim>
3005 <dim>12</dim>
3006 <dim>-1</dim>
3007 <dim>32</dim>
3008 </port>
3009 <port id="3" precision="FP32">
3010 <dim>-1</dim>
3011 <dim>1</dim>
3012 <dim>-1</dim>
3013 <dim>-1</dim>
3014 </port>
3015 </input>
3016 <output>
3017 <port id="4" precision="FP32" names="308,attn_output.9">
3018 <dim>-1</dim>
3019 <dim>12</dim>
3020 <dim>-1</dim>
3021 <dim>32</dim>
3022 </port>
3023 </output>
3024 </layer>
3025 <layer id="200" name="__module.encoder.layer.2.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
3026 <data element_type="i32" shape="4" offset="386627688" size="16" />
3027 <output>
3028 <port id="0" precision="I32">
3029 <dim>4</dim>
3030 </port>
3031 </output>
3032 </layer>
3033 <layer id="201" name="__module.encoder.layer.2.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
3034 <input>
3035 <port id="0" precision="FP32">
3036 <dim>-1</dim>
3037 <dim>12</dim>
3038 <dim>-1</dim>
3039 <dim>32</dim>
3040 </port>
3041 <port id="1" precision="I32">
3042 <dim>4</dim>
3043 </port>
3044 </input>
3045 <output>
3046 <port id="2" precision="FP32" names="309,attn_output.11">
3047 <dim>-1</dim>
3048 <dim>-1</dim>
3049 <dim>12</dim>
3050 <dim>32</dim>
3051 </port>
3052 </output>
3053 </layer>
3054 <layer id="202" name="__module.encoder.layer.2.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
3055 <data output_type="i64" />
3056 <input>
3057 <port id="0" precision="FP32">
3058 <dim>-1</dim>
3059 <dim>-1</dim>
3060 <dim>384</dim>
3061 </port>
3062 </input>
3063 <output>
3064 <port id="1" precision="I64">
3065 <dim>3</dim>
3066 </port>
3067 </output>
3068 </layer>
3069 <layer id="203" name="Constant_102791" type="Const" version="opset1">
3070 <data element_type="i64" shape="2" offset="386627704" size="16" />
3071 <output>
3072 <port id="0" precision="I64">
3073 <dim>2</dim>
3074 </port>
3075 </output>
3076 </layer>
3077 <layer id="204" name="Constant_102792" type="Const" version="opset1">
3078 <data element_type="i64" shape="" offset="384850436" size="8" />
3079 <output>
3080 <port id="0" precision="I64" />
3081 </output>
3082 </layer>
3083 <layer id="205" name="Gather_102793" type="Gather" version="opset8">
3084 <data batch_dims="0" />
3085 <input>
3086 <port id="0" precision="I64">
3087 <dim>3</dim>
3088 </port>
3089 <port id="1" precision="I64">
3090 <dim>2</dim>
3091 </port>
3092 <port id="2" precision="I64" />
3093 </input>
3094 <output>
3095 <port id="3" precision="I64">
3096 <dim>2</dim>
3097 </port>
3098 </output>
3099 </layer>
3100 <layer id="206" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
3101 <data axis="0" />
3102 <input>
3103 <port id="0" precision="I64">
3104 <dim>2</dim>
3105 </port>
3106 <port id="1" precision="I64">
3107 <dim>1</dim>
3108 </port>
3109 </input>
3110 <output>
3111 <port id="2" precision="I64" names="310">
3112 <dim>3</dim>
3113 </port>
3114 </output>
3115 </layer>
3116 <layer id="207" name="__module.encoder.layer.2.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
3117 <data special_zero="false" />
3118 <input>
3119 <port id="0" precision="FP32">
3120 <dim>-1</dim>
3121 <dim>-1</dim>
3122 <dim>12</dim>
3123 <dim>32</dim>
3124 </port>
3125 <port id="1" precision="I64">
3126 <dim>3</dim>
3127 </port>
3128 </input>
3129 <output>
3130 <port id="2" precision="FP32" names="311">
3131 <dim>-1</dim>
3132 <dim>-1</dim>
3133 <dim>384</dim>
3134 </port>
3135 </output>
3136 </layer>
3137 <layer id="208" name="self.encoder.layer.2.attention.output.dense.weight" type="Const" version="opset1">
3138 <data element_type="f32" shape="384, 384" offset="400823440" size="589824" />
3139 <output>
3140 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.output.dense.weight">
3141 <dim>384</dim>
3142 <dim>384</dim>
3143 </port>
3144 </output>
3145 </layer>
3146 <layer id="209" name="__module.encoder.layer.2.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
3147 <data transpose_a="false" transpose_b="true" />
3148 <input>
3149 <port id="0" precision="FP32">
3150 <dim>-1</dim>
3151 <dim>-1</dim>
3152 <dim>384</dim>
3153 </port>
3154 <port id="1" precision="FP32">
3155 <dim>384</dim>
3156 <dim>384</dim>
3157 </port>
3158 </input>
3159 <output>
3160 <port id="2" precision="FP32">
3161 <dim>-1</dim>
3162 <dim>-1</dim>
3163 <dim>384</dim>
3164 </port>
3165 </output>
3166 </layer>
3167 <layer id="210" name="Constant_103709" type="Const" version="opset1">
3168 <data element_type="f32" shape="1, 1, 384" offset="401413264" size="1536" />
3169 <output>
3170 <port id="0" precision="FP32">
3171 <dim>1</dim>
3172 <dim>1</dim>
3173 <dim>384</dim>
3174 </port>
3175 </output>
3176 </layer>
3177 <layer id="211" name="__module.encoder.layer.2.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
3178 <data auto_broadcast="numpy" />
3179 <input>
3180 <port id="0" precision="FP32">
3181 <dim>-1</dim>
3182 <dim>-1</dim>
3183 <dim>384</dim>
3184 </port>
3185 <port id="1" precision="FP32">
3186 <dim>1</dim>
3187 <dim>1</dim>
3188 <dim>384</dim>
3189 </port>
3190 </input>
3191 <output>
3192 <port id="2" precision="FP32" names="317,input.11">
3193 <dim>-1</dim>
3194 <dim>-1</dim>
3195 <dim>384</dim>
3196 </port>
3197 </output>
3198 </layer>
3199 <layer id="212" name="__module.encoder.layer.2.attention.output/aten::add/Add" type="Add" version="opset1">
3200 <data auto_broadcast="numpy" />
3201 <input>
3202 <port id="0" precision="FP32">
3203 <dim>-1</dim>
3204 <dim>-1</dim>
3205 <dim>384</dim>
3206 </port>
3207 <port id="1" precision="FP32">
3208 <dim>-1</dim>
3209 <dim>-1</dim>
3210 <dim>384</dim>
3211 </port>
3212 </input>
3213 <output>
3214 <port id="2" precision="FP32" names="319">
3215 <dim>-1</dim>
3216 <dim>-1</dim>
3217 <dim>384</dim>
3218 </port>
3219 </output>
3220 </layer>
3221 <layer id="213" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
3222 <data element_type="i32" shape="1" offset="384850452" size="4" />
3223 <output>
3224 <port id="0" precision="I32">
3225 <dim>1</dim>
3226 </port>
3227 </output>
3228 </layer>
3229 <layer id="214" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
3230 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
3231 <input>
3232 <port id="0" precision="FP32">
3233 <dim>-1</dim>
3234 <dim>-1</dim>
3235 <dim>384</dim>
3236 </port>
3237 <port id="1" precision="I32">
3238 <dim>1</dim>
3239 </port>
3240 </input>
3241 <output>
3242 <port id="2" precision="FP32">
3243 <dim>-1</dim>
3244 <dim>-1</dim>
3245 <dim>384</dim>
3246 </port>
3247 </output>
3248 </layer>
3249 <layer id="215" name="Constant_103710" type="Const" version="opset1">
3250 <data element_type="f32" shape="1, 1, 384" offset="401414800" size="1536" />
3251 <output>
3252 <port id="0" precision="FP32">
3253 <dim>1</dim>
3254 <dim>1</dim>
3255 <dim>384</dim>
3256 </port>
3257 </output>
3258 </layer>
3259 <layer id="216" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
3260 <data auto_broadcast="numpy" />
3261 <input>
3262 <port id="0" precision="FP32">
3263 <dim>-1</dim>
3264 <dim>-1</dim>
3265 <dim>384</dim>
3266 </port>
3267 <port id="1" precision="FP32">
3268 <dim>1</dim>
3269 <dim>1</dim>
3270 <dim>384</dim>
3271 </port>
3272 </input>
3273 <output>
3274 <port id="2" precision="FP32">
3275 <dim>-1</dim>
3276 <dim>-1</dim>
3277 <dim>384</dim>
3278 </port>
3279 </output>
3280 </layer>
3281 <layer id="217" name="Constant_103711" type="Const" version="opset1">
3282 <data element_type="f32" shape="1, 1, 384" offset="401416336" size="1536" />
3283 <output>
3284 <port id="0" precision="FP32">
3285 <dim>1</dim>
3286 <dim>1</dim>
3287 <dim>384</dim>
3288 </port>
3289 </output>
3290 </layer>
3291 <layer id="218" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
3292 <data auto_broadcast="numpy" />
3293 <input>
3294 <port id="0" precision="FP32">
3295 <dim>-1</dim>
3296 <dim>-1</dim>
3297 <dim>384</dim>
3298 </port>
3299 <port id="1" precision="FP32">
3300 <dim>1</dim>
3301 <dim>1</dim>
3302 <dim>384</dim>
3303 </port>
3304 </input>
3305 <output>
3306 <port id="2" precision="FP32" names="323,input_tensor.5">
3307 <dim>-1</dim>
3308 <dim>-1</dim>
3309 <dim>384</dim>
3310 </port>
3311 </output>
3312 </layer>
3313 <layer id="219" name="self.encoder.layer.2.intermediate.dense.weight" type="Const" version="opset1">
3314 <data element_type="f32" shape="1536, 384" offset="401417872" size="2359296" />
3315 <output>
3316 <port id="0" precision="FP32" names="self.encoder.layer.2.intermediate.dense.weight">
3317 <dim>1536</dim>
3318 <dim>384</dim>
3319 </port>
3320 </output>
3321 </layer>
3322 <layer id="220" name="__module.encoder.layer.2.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
3323 <data transpose_a="false" transpose_b="true" />
3324 <input>
3325 <port id="0" precision="FP32">
3326 <dim>-1</dim>
3327 <dim>-1</dim>
3328 <dim>384</dim>
3329 </port>
3330 <port id="1" precision="FP32">
3331 <dim>1536</dim>
3332 <dim>384</dim>
3333 </port>
3334 </input>
3335 <output>
3336 <port id="2" precision="FP32">
3337 <dim>-1</dim>
3338 <dim>-1</dim>
3339 <dim>1536</dim>
3340 </port>
3341 </output>
3342 </layer>
3343 <layer id="221" name="Constant_103712" type="Const" version="opset1">
3344 <data element_type="f32" shape="1, 1, 1536" offset="403777168" size="6144" />
3345 <output>
3346 <port id="0" precision="FP32">
3347 <dim>1</dim>
3348 <dim>1</dim>
3349 <dim>1536</dim>
3350 </port>
3351 </output>
3352 </layer>
3353 <layer id="222" name="__module.encoder.layer.2.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
3354 <data auto_broadcast="numpy" />
3355 <input>
3356 <port id="0" precision="FP32">
3357 <dim>-1</dim>
3358 <dim>-1</dim>
3359 <dim>1536</dim>
3360 </port>
3361 <port id="1" precision="FP32">
3362 <dim>1</dim>
3363 <dim>1</dim>
3364 <dim>1536</dim>
3365 </port>
3366 </input>
3367 <output>
3368 <port id="2" precision="FP32" names="328">
3369 <dim>-1</dim>
3370 <dim>-1</dim>
3371 <dim>1536</dim>
3372 </port>
3373 </output>
3374 </layer>
3375 <layer id="223" name="__module.encoder.layer.2.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
3376 <data approximation_mode="ERF" />
3377 <input>
3378 <port id="0" precision="FP32">
3379 <dim>-1</dim>
3380 <dim>-1</dim>
3381 <dim>1536</dim>
3382 </port>
3383 </input>
3384 <output>
3385 <port id="1" precision="FP32" names="329">
3386 <dim>-1</dim>
3387 <dim>-1</dim>
3388 <dim>1536</dim>
3389 </port>
3390 </output>
3391 </layer>
3392 <layer id="224" name="self.encoder.layer.2.output.dense.weight" type="Const" version="opset1">
3393 <data element_type="f32" shape="384, 1536" offset="403783312" size="2359296" />
3394 <output>
3395 <port id="0" precision="FP32" names="self.encoder.layer.2.output.dense.weight">
3396 <dim>384</dim>
3397 <dim>1536</dim>
3398 </port>
3399 </output>
3400 </layer>
3401 <layer id="225" name="__module.encoder.layer.2.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
3402 <data transpose_a="false" transpose_b="true" />
3403 <input>
3404 <port id="0" precision="FP32">
3405 <dim>-1</dim>
3406 <dim>-1</dim>
3407 <dim>1536</dim>
3408 </port>
3409 <port id="1" precision="FP32">
3410 <dim>384</dim>
3411 <dim>1536</dim>
3412 </port>
3413 </input>
3414 <output>
3415 <port id="2" precision="FP32">
3416 <dim>-1</dim>
3417 <dim>-1</dim>
3418 <dim>384</dim>
3419 </port>
3420 </output>
3421 </layer>
3422 <layer id="226" name="Constant_103713" type="Const" version="opset1">
3423 <data element_type="f32" shape="1, 1, 384" offset="406142608" size="1536" />
3424 <output>
3425 <port id="0" precision="FP32">
3426 <dim>1</dim>
3427 <dim>1</dim>
3428 <dim>384</dim>
3429 </port>
3430 </output>
3431 </layer>
3432 <layer id="227" name="__module.encoder.layer.2.output.dense/aten::linear/Add" type="Add" version="opset1">
3433 <data auto_broadcast="numpy" />
3434 <input>
3435 <port id="0" precision="FP32">
3436 <dim>-1</dim>
3437 <dim>-1</dim>
3438 <dim>384</dim>
3439 </port>
3440 <port id="1" precision="FP32">
3441 <dim>1</dim>
3442 <dim>1</dim>
3443 <dim>384</dim>
3444 </port>
3445 </input>
3446 <output>
3447 <port id="2" precision="FP32" names="335,input.13">
3448 <dim>-1</dim>
3449 <dim>-1</dim>
3450 <dim>384</dim>
3451 </port>
3452 </output>
3453 </layer>
3454 <layer id="228" name="__module.encoder.layer.2.output/aten::add/Add" type="Add" version="opset1">
3455 <data auto_broadcast="numpy" />
3456 <input>
3457 <port id="0" precision="FP32">
3458 <dim>-1</dim>
3459 <dim>-1</dim>
3460 <dim>384</dim>
3461 </port>
3462 <port id="1" precision="FP32">
3463 <dim>-1</dim>
3464 <dim>-1</dim>
3465 <dim>384</dim>
3466 </port>
3467 </input>
3468 <output>
3469 <port id="2" precision="FP32" names="337">
3470 <dim>-1</dim>
3471 <dim>-1</dim>
3472 <dim>384</dim>
3473 </port>
3474 </output>
3475 </layer>
3476 <layer id="229" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
3477 <data element_type="i32" shape="1" offset="384850452" size="4" />
3478 <output>
3479 <port id="0" precision="I32">
3480 <dim>1</dim>
3481 </port>
3482 </output>
3483 </layer>
3484 <layer id="230" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
3485 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
3486 <input>
3487 <port id="0" precision="FP32">
3488 <dim>-1</dim>
3489 <dim>-1</dim>
3490 <dim>384</dim>
3491 </port>
3492 <port id="1" precision="I32">
3493 <dim>1</dim>
3494 </port>
3495 </input>
3496 <output>
3497 <port id="2" precision="FP32">
3498 <dim>-1</dim>
3499 <dim>-1</dim>
3500 <dim>384</dim>
3501 </port>
3502 </output>
3503 </layer>
3504 <layer id="231" name="Constant_103714" type="Const" version="opset1">
3505 <data element_type="f32" shape="1, 1, 384" offset="406144144" size="1536" />
3506 <output>
3507 <port id="0" precision="FP32">
3508 <dim>1</dim>
3509 <dim>1</dim>
3510 <dim>384</dim>
3511 </port>
3512 </output>
3513 </layer>
3514 <layer id="232" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
3515 <data auto_broadcast="numpy" />
3516 <input>
3517 <port id="0" precision="FP32">
3518 <dim>-1</dim>
3519 <dim>-1</dim>
3520 <dim>384</dim>
3521 </port>
3522 <port id="1" precision="FP32">
3523 <dim>1</dim>
3524 <dim>1</dim>
3525 <dim>384</dim>
3526 </port>
3527 </input>
3528 <output>
3529 <port id="2" precision="FP32">
3530 <dim>-1</dim>
3531 <dim>-1</dim>
3532 <dim>384</dim>
3533 </port>
3534 </output>
3535 </layer>
3536 <layer id="233" name="Constant_103715" type="Const" version="opset1">
3537 <data element_type="f32" shape="1, 1, 384" offset="406145680" size="1536" />
3538 <output>
3539 <port id="0" precision="FP32">
3540 <dim>1</dim>
3541 <dim>1</dim>
3542 <dim>384</dim>
3543 </port>
3544 </output>
3545 </layer>
3546 <layer id="234" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
3547 <data auto_broadcast="numpy" />
3548 <input>
3549 <port id="0" precision="FP32">
3550 <dim>-1</dim>
3551 <dim>-1</dim>
3552 <dim>384</dim>
3553 </port>
3554 <port id="1" precision="FP32">
3555 <dim>1</dim>
3556 <dim>1</dim>
3557 <dim>384</dim>
3558 </port>
3559 </input>
3560 <output>
3561 <port id="2" precision="FP32" names="341,hidden_states.19">
3562 <dim>-1</dim>
3563 <dim>-1</dim>
3564 <dim>384</dim>
3565 </port>
3566 </output>
3567 </layer>
3568 <layer id="235" name="self.encoder.layer.3.attention.self.query.weight" type="Const" version="opset1">
3569 <data element_type="f32" shape="384, 384" offset="406147216" size="589824" />
3570 <output>
3571 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.query.weight">
3572 <dim>384</dim>
3573 <dim>384</dim>
3574 </port>
3575 </output>
3576 </layer>
3577 <layer id="236" name="__module.encoder.layer.3.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
3578 <data transpose_a="false" transpose_b="true" />
3579 <input>
3580 <port id="0" precision="FP32">
3581 <dim>-1</dim>
3582 <dim>-1</dim>
3583 <dim>384</dim>
3584 </port>
3585 <port id="1" precision="FP32">
3586 <dim>384</dim>
3587 <dim>384</dim>
3588 </port>
3589 </input>
3590 <output>
3591 <port id="2" precision="FP32">
3592 <dim>-1</dim>
3593 <dim>-1</dim>
3594 <dim>384</dim>
3595 </port>
3596 </output>
3597 </layer>
3598 <layer id="237" name="Constant_103716" type="Const" version="opset1">
3599 <data element_type="f32" shape="1, 1, 384" offset="406737040" size="1536" />
3600 <output>
3601 <port id="0" precision="FP32">
3602 <dim>1</dim>
3603 <dim>1</dim>
3604 <dim>384</dim>
3605 </port>
3606 </output>
3607 </layer>
3608 <layer id="238" name="__module.encoder.layer.3.attention.self.query/aten::linear/Add" type="Add" version="opset1">
3609 <data auto_broadcast="numpy" />
3610 <input>
3611 <port id="0" precision="FP32">
3612 <dim>-1</dim>
3613 <dim>-1</dim>
3614 <dim>384</dim>
3615 </port>
3616 <port id="1" precision="FP32">
3617 <dim>1</dim>
3618 <dim>1</dim>
3619 <dim>384</dim>
3620 </port>
3621 </input>
3622 <output>
3623 <port id="2" precision="FP32" names="354,x.37">
3624 <dim>-1</dim>
3625 <dim>-1</dim>
3626 <dim>384</dim>
3627 </port>
3628 </output>
3629 </layer>
3630 <layer id="239" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
3631 <data element_type="i64" shape="4" offset="385444888" size="32" />
3632 <output>
3633 <port id="0" precision="I64">
3634 <dim>4</dim>
3635 </port>
3636 </output>
3637 </layer>
3638 <layer id="240" name="__module.encoder.layer.3.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
3639 <data special_zero="true" />
3640 <input>
3641 <port id="0" precision="FP32">
3642 <dim>-1</dim>
3643 <dim>-1</dim>
3644 <dim>384</dim>
3645 </port>
3646 <port id="1" precision="I64">
3647 <dim>4</dim>
3648 </port>
3649 </input>
3650 <output>
3651 <port id="2" precision="FP32" names="358,x.39">
3652 <dim>-1</dim>
3653 <dim>-1</dim>
3654 <dim>12</dim>
3655 <dim>32</dim>
3656 </port>
3657 </output>
3658 </layer>
3659 <layer id="241" name="Constant_95245" type="Const" version="opset1">
3660 <data element_type="i64" shape="4" offset="385444920" size="32" />
3661 <output>
3662 <port id="0" precision="I64" names="359">
3663 <dim>4</dim>
3664 </port>
3665 </output>
3666 </layer>
3667 <layer id="242" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
3668 <input>
3669 <port id="0" precision="FP32">
3670 <dim>-1</dim>
3671 <dim>-1</dim>
3672 <dim>12</dim>
3673 <dim>32</dim>
3674 </port>
3675 <port id="1" precision="I64">
3676 <dim>4</dim>
3677 </port>
3678 </input>
3679 <output>
3680 <port id="2" precision="FP32" names="360">
3681 <dim>-1</dim>
3682 <dim>12</dim>
3683 <dim>-1</dim>
3684 <dim>32</dim>
3685 </port>
3686 </output>
3687 </layer>
3688 <layer id="243" name="self.encoder.layer.3.attention.self.key.weight" type="Const" version="opset1">
3689 <data element_type="f32" shape="384, 384" offset="406738576" size="589824" />
3690 <output>
3691 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.key.weight">
3692 <dim>384</dim>
3693 <dim>384</dim>
3694 </port>
3695 </output>
3696 </layer>
3697 <layer id="244" name="__module.encoder.layer.3.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
3698 <data transpose_a="false" transpose_b="true" />
3699 <input>
3700 <port id="0" precision="FP32">
3701 <dim>-1</dim>
3702 <dim>-1</dim>
3703 <dim>384</dim>
3704 </port>
3705 <port id="1" precision="FP32">
3706 <dim>384</dim>
3707 <dim>384</dim>
3708 </port>
3709 </input>
3710 <output>
3711 <port id="2" precision="FP32">
3712 <dim>-1</dim>
3713 <dim>-1</dim>
3714 <dim>384</dim>
3715 </port>
3716 </output>
3717 </layer>
3718 <layer id="245" name="Constant_103717" type="Const" version="opset1">
3719 <data element_type="f32" shape="1, 1, 384" offset="407328400" size="1536" />
3720 <output>
3721 <port id="0" precision="FP32">
3722 <dim>1</dim>
3723 <dim>1</dim>
3724 <dim>384</dim>
3725 </port>
3726 </output>
3727 </layer>
3728 <layer id="246" name="__module.encoder.layer.3.attention.self.key/aten::linear/Add" type="Add" version="opset1">
3729 <data auto_broadcast="numpy" />
3730 <input>
3731 <port id="0" precision="FP32">
3732 <dim>-1</dim>
3733 <dim>-1</dim>
3734 <dim>384</dim>
3735 </port>
3736 <port id="1" precision="FP32">
3737 <dim>1</dim>
3738 <dim>1</dim>
3739 <dim>384</dim>
3740 </port>
3741 </input>
3742 <output>
3743 <port id="2" precision="FP32" names="363,x.41">
3744 <dim>-1</dim>
3745 <dim>-1</dim>
3746 <dim>384</dim>
3747 </port>
3748 </output>
3749 </layer>
3750 <layer id="247" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
3751 <data element_type="i64" shape="4" offset="385444888" size="32" />
3752 <output>
3753 <port id="0" precision="I64">
3754 <dim>4</dim>
3755 </port>
3756 </output>
3757 </layer>
3758 <layer id="248" name="__module.encoder.layer.3.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
3759 <data special_zero="true" />
3760 <input>
3761 <port id="0" precision="FP32">
3762 <dim>-1</dim>
3763 <dim>-1</dim>
3764 <dim>384</dim>
3765 </port>
3766 <port id="1" precision="I64">
3767 <dim>4</dim>
3768 </port>
3769 </input>
3770 <output>
3771 <port id="2" precision="FP32" names="367,x.43">
3772 <dim>-1</dim>
3773 <dim>-1</dim>
3774 <dim>12</dim>
3775 <dim>32</dim>
3776 </port>
3777 </output>
3778 </layer>
3779 <layer id="249" name="Constant_95268" type="Const" version="opset1">
3780 <data element_type="i64" shape="4" offset="385444920" size="32" />
3781 <output>
3782 <port id="0" precision="I64" names="368">
3783 <dim>4</dim>
3784 </port>
3785 </output>
3786 </layer>
3787 <layer id="250" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
3788 <input>
3789 <port id="0" precision="FP32">
3790 <dim>-1</dim>
3791 <dim>-1</dim>
3792 <dim>12</dim>
3793 <dim>32</dim>
3794 </port>
3795 <port id="1" precision="I64">
3796 <dim>4</dim>
3797 </port>
3798 </input>
3799 <output>
3800 <port id="2" precision="FP32" names="369">
3801 <dim>-1</dim>
3802 <dim>12</dim>
3803 <dim>-1</dim>
3804 <dim>32</dim>
3805 </port>
3806 </output>
3807 </layer>
3808 <layer id="251" name="self.encoder.layer.3.attention.self.value.weight" type="Const" version="opset1">
3809 <data element_type="f32" shape="384, 384" offset="407329936" size="589824" />
3810 <output>
3811 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.value.weight">
3812 <dim>384</dim>
3813 <dim>384</dim>
3814 </port>
3815 </output>
3816 </layer>
3817 <layer id="252" name="__module.encoder.layer.3.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
3818 <data transpose_a="false" transpose_b="true" />
3819 <input>
3820 <port id="0" precision="FP32">
3821 <dim>-1</dim>
3822 <dim>-1</dim>
3823 <dim>384</dim>
3824 </port>
3825 <port id="1" precision="FP32">
3826 <dim>384</dim>
3827 <dim>384</dim>
3828 </port>
3829 </input>
3830 <output>
3831 <port id="2" precision="FP32">
3832 <dim>-1</dim>
3833 <dim>-1</dim>
3834 <dim>384</dim>
3835 </port>
3836 </output>
3837 </layer>
3838 <layer id="253" name="Constant_103718" type="Const" version="opset1">
3839 <data element_type="f32" shape="1, 1, 384" offset="407919760" size="1536" />
3840 <output>
3841 <port id="0" precision="FP32">
3842 <dim>1</dim>
3843 <dim>1</dim>
3844 <dim>384</dim>
3845 </port>
3846 </output>
3847 </layer>
3848 <layer id="254" name="__module.encoder.layer.3.attention.self.value/aten::linear/Add" type="Add" version="opset1">
3849 <data auto_broadcast="numpy" />
3850 <input>
3851 <port id="0" precision="FP32">
3852 <dim>-1</dim>
3853 <dim>-1</dim>
3854 <dim>384</dim>
3855 </port>
3856 <port id="1" precision="FP32">
3857 <dim>1</dim>
3858 <dim>1</dim>
3859 <dim>384</dim>
3860 </port>
3861 </input>
3862 <output>
3863 <port id="2" precision="FP32" names="372,x.45">
3864 <dim>-1</dim>
3865 <dim>-1</dim>
3866 <dim>384</dim>
3867 </port>
3868 </output>
3869 </layer>
3870 <layer id="255" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
3871 <data element_type="i64" shape="4" offset="385444888" size="32" />
3872 <output>
3873 <port id="0" precision="I64">
3874 <dim>4</dim>
3875 </port>
3876 </output>
3877 </layer>
3878 <layer id="256" name="__module.encoder.layer.3.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
3879 <data special_zero="true" />
3880 <input>
3881 <port id="0" precision="FP32">
3882 <dim>-1</dim>
3883 <dim>-1</dim>
3884 <dim>384</dim>
3885 </port>
3886 <port id="1" precision="I64">
3887 <dim>4</dim>
3888 </port>
3889 </input>
3890 <output>
3891 <port id="2" precision="FP32" names="376,x.47">
3892 <dim>-1</dim>
3893 <dim>-1</dim>
3894 <dim>12</dim>
3895 <dim>32</dim>
3896 </port>
3897 </output>
3898 </layer>
3899 <layer id="257" name="Constant_95291" type="Const" version="opset1">
3900 <data element_type="i64" shape="4" offset="385444920" size="32" />
3901 <output>
3902 <port id="0" precision="I64" names="377">
3903 <dim>4</dim>
3904 </port>
3905 </output>
3906 </layer>
3907 <layer id="258" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
3908 <input>
3909 <port id="0" precision="FP32">
3910 <dim>-1</dim>
3911 <dim>-1</dim>
3912 <dim>12</dim>
3913 <dim>32</dim>
3914 </port>
3915 <port id="1" precision="I64">
3916 <dim>4</dim>
3917 </port>
3918 </input>
3919 <output>
3920 <port id="2" precision="FP32" names="378">
3921 <dim>-1</dim>
3922 <dim>12</dim>
3923 <dim>-1</dim>
3924 <dim>32</dim>
3925 </port>
3926 </output>
3927 </layer>
3928 <layer id="259" name="__module.encoder.layer.3.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
3929 <data causal="false" />
3930 <input>
3931 <port id="0" precision="FP32">
3932 <dim>-1</dim>
3933 <dim>12</dim>
3934 <dim>-1</dim>
3935 <dim>32</dim>
3936 </port>
3937 <port id="1" precision="FP32">
3938 <dim>-1</dim>
3939 <dim>12</dim>
3940 <dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="3" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="FP32" names="379,attn_output.13">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="260" name="__module.encoder.layer.3.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="386627688" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="261" name="__module.encoder.layer.3.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="380,attn_output.15">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="262" name="__module.encoder.layer.3.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
			<data output_type="i64" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="263" name="Constant_102811" type="Const" version="opset1">
			<data element_type="i64" shape="2" offset="386627704" size="16" />
			<output>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="264" name="Constant_102812" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="384850436" size="8" />
			<output>
				<port id="0" precision="I64" />
			</output>
		</layer>
		<layer id="265" name="Gather_102813" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
				<port id="1" precision="I64">
					<dim>2</dim>
				</port>
				<port id="2" precision="I64" />
			</input>
			<output>
				<port id="3" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="266" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
			<data axis="0" />
			<input>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="I64" names="381">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="267" name="__module.encoder.layer.3.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="382">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="268" name="self.encoder.layer.3.attention.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="407921296" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.3.attention.output.dense.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="269" name="__module.encoder.layer.3.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="270" name="Constant_103719" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="408511120" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="271" name="__module.encoder.layer.3.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="388,input.15">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="272" name="__module.encoder.layer.3.attention.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="390">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="273" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="384850452" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="274" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="275" name="Constant_103720" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="408512656" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="276" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="277" name="Constant_103721" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="408514192" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="278" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="394,input_tensor.7">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="279" name="self.encoder.layer.3.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1536, 384" offset="408515728" size="2359296" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.3.intermediate.dense.weight">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="280" name="__module.encoder.layer.3.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="281" name="Constant_103722" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1536" offset="410875024" size="6144" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="282" name="__module.encoder.layer.3.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="399">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="283" name="__module.encoder.layer.3.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="400">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="284" name="self.encoder.layer.3.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 1536" offset="410881168" size="2359296" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.3.output.dense.weight">
					<dim>384</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="285" name="__module.encoder.layer.3.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="286" name="Constant_103723" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="413240464" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="287" name="__module.encoder.layer.3.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="406,input.17">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="288" name="__module.encoder.layer.3.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="408">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="289" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="384850452" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="290" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="291" name="Constant_103724" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="413242000" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="292" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="293" name="Constant_103725" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="413243536" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="294" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="412,hidden_states.25">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="295" name="self.encoder.layer.4.attention.self.query.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="413245072" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.query.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="296" name="__module.encoder.layer.4.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="297" name="Constant_103726" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="413834896" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="298" name="__module.encoder.layer.4.attention.self.query/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="425,x.49">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="299" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444888" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="300" name="__module.encoder.layer.4.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="429,x.51">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="301" name="Constant_95471" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444920" size="32" />
			<output>
				<port id="0" precision="I64" names="430">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="302" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="431">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="303" name="self.encoder.layer.4.attention.self.key.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="413836432" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.key.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="304" name="__module.encoder.layer.4.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="305" name="Constant_103727" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="414426256" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="306" name="__module.encoder.layer.4.attention.self.key/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="434,x.53">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="307" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444888" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="308" name="__module.encoder.layer.4.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="438,x.55">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="309" name="Constant_95494" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444920" size="32" />
			<output>
				<port id="0" precision="I64" names="439">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="310" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="440">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="311" name="self.encoder.layer.4.attention.self.value.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="414427792" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.value.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="312" name="__module.encoder.layer.4.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="313" name="Constant_103728" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="415017616" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="314" name="__module.encoder.layer.4.attention.self.value/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="443,x.57">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="315" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444888" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="316" name="__module.encoder.layer.4.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="447,x.59">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="317" name="Constant_95517" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444920" size="32" />
			<output>
				<port id="0" precision="I64" names="448">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="318" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="449">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="319" name="__module.encoder.layer.4.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
			<data causal="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="3" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="FP32" names="450,attn_output.17">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="320" name="__module.encoder.layer.4.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="386627688" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="321" name="__module.encoder.layer.4.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="451,attn_output.19">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="322" name="__module.encoder.layer.4.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
			<data output_type="i64" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="323" name="Constant_102831" type="Const" version="opset1">
			<data element_type="i64" shape="2" offset="386627704" size="16" />
			<output>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="324" name="Constant_102832" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="384850436" size="8" />
			<output>
				<port id="0" precision="I64" />
			</output>
		</layer>
		<layer id="325" name="Gather_102833" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
				<port id="1" precision="I64">
					<dim>2</dim>
				</port>
				<port id="2" precision="I64" />
			</input>
			<output>
				<port id="3" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="326" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
			<data axis="0" />
			<input>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="I64" names="452">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="327" name="__module.encoder.layer.4.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="453">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="328" name="self.encoder.layer.4.attention.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="415019152" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.4.attention.output.dense.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="329" name="__module.encoder.layer.4.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="330" name="Constant_103729" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="415608976" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="331" name="__module.encoder.layer.4.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="459,input.19">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="332" name="__module.encoder.layer.4.attention.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="461">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="333" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="384850452" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="334" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="335" name="Constant_103730" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="415610512" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="336" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="337" name="Constant_103731" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="415612048" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="338" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="465,input_tensor.9">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="339" name="self.encoder.layer.4.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1536, 384" offset="415613584" size="2359296" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.4.intermediate.dense.weight">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="340" name="__module.encoder.layer.4.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="341" name="Constant_103732" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1536" offset="417972880" size="6144" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="342" name="__module.encoder.layer.4.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="470">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="343" name="__module.encoder.layer.4.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="471">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="344" name="self.encoder.layer.4.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 1536" offset="417979024" size="2359296" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.4.output.dense.weight">
					<dim>384</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="345" name="__module.encoder.layer.4.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="346" name="Constant_103733" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="420338320" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="347" name="__module.encoder.layer.4.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="477,input.21">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="348" name="__module.encoder.layer.4.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="479">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="349" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="384850452" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="350" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="351" name="Constant_103734" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="420339856" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="352" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="353" name="Constant_103735" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="420341392" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="354" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="483,hidden_states.31">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="355" name="self.encoder.layer.5.attention.self.query.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="420342928" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.query.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="356" name="__module.encoder.layer.5.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="357" name="Constant_103736" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="420932752" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="358" name="__module.encoder.layer.5.attention.self.query/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="496,x.61">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="359" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444888" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="360" name="__module.encoder.layer.5.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="500,x.63">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="361" name="Constant_95697" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444920" size="32" />
			<output>
				<port id="0" precision="I64" names="501">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="362" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="502">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="363" name="self.encoder.layer.5.attention.self.key.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="420934288" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.key.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="364" name="__module.encoder.layer.5.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="365" name="Constant_103737" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="421524112" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="366" name="__module.encoder.layer.5.attention.self.key/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="505,x.65">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="367" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444888" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="368" name="__module.encoder.layer.5.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="509,x.67">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="369" name="Constant_95720" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444920" size="32" />
			<output>
				<port id="0" precision="I64" names="510">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="370" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="511">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="371" name="self.encoder.layer.5.attention.self.value.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="421525648" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.value.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="372" name="__module.encoder.layer.5.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="373" name="Constant_103738" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="422115472" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="374" name="__module.encoder.layer.5.attention.self.value/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="514,x.69">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="375" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444888" size="32" />
			<output>
				<port id="0" precision="I64">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="376" name="__module.encoder.layer.5.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
			<data special_zero="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="518,x.71">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="377" name="Constant_95743" type="Const" version="opset1">
			<data element_type="i64" shape="4" offset="385444920" size="32" />
			<output>
				<port id="0" precision="I64" names="519">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="378" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="520">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="379" name="__module.encoder.layer.5.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
			<data causal="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="3" precision="FP32">
					<dim>-1</dim>
					<dim>1</dim>
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="FP32" names="521,attn_output.21">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="380" name="__module.encoder.layer.5.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
			<data element_type="i32" shape="4" offset="386627688" size="16" />
			<output>
				<port id="0" precision="I32">
					<dim>4</dim>
				</port>
			</output>
		</layer>
		<layer id="381" name="__module.encoder.layer.5.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>12</dim>
					<dim>-1</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I32">
					<dim>4</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="522,attn_output.23">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
			</output>
		</layer>
		<layer id="382" name="__module.encoder.layer.5.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
			<data output_type="i64" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="383" name="Constant_102851" type="Const" version="opset1">
			<data element_type="i64" shape="2" offset="386627704" size="16" />
			<output>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="384" name="Constant_102852" type="Const" version="opset1">
			<data element_type="i64" shape="" offset="384850436" size="8" />
			<output>
				<port id="0" precision="I64" />
			</output>
		</layer>
		<layer id="385" name="Gather_102853" type="Gather" version="opset8">
			<data batch_dims="0" />
			<input>
				<port id="0" precision="I64">
					<dim>3</dim>
				</port>
				<port id="1" precision="I64">
					<dim>2</dim>
				</port>
				<port id="2" precision="I64" />
			</input>
			<output>
				<port id="3" precision="I64">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="386" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
			<data axis="0" />
			<input>
				<port id="0" precision="I64">
					<dim>2</dim>
				</port>
				<port id="1" precision="I64">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="I64" names="523">
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="387" name="__module.encoder.layer.5.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
			<data special_zero="false" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>12</dim>
					<dim>32</dim>
				</port>
				<port id="1" precision="I64">
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="524">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="388" name="self.encoder.layer.5.attention.output.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="384, 384" offset="422117008" size="589824" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.5.attention.output.dense.weight">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="389" name="__module.encoder.layer.5.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>384</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="390" name="Constant_103739" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="422706832" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="391" name="__module.encoder.layer.5.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="530,input.23">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="392" name="__module.encoder.layer.5.attention.output/aten::add/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="532">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="393" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
			<data element_type="i32" shape="1" offset="384850452" size="4" />
			<output>
				<port id="0" precision="I32">
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="394" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
			<data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="I32">
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="395" name="Constant_103740" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="422708368" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="396" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="397" name="Constant_103741" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 384" offset="422709904" size="1536" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="398" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="536,input_tensor.11">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="399" name="self.encoder.layer.5.intermediate.dense.weight" type="Const" version="opset1">
			<data element_type="f32" shape="1536, 384" offset="422711440" size="2359296" />
			<output>
				<port id="0" precision="FP32" names="self.encoder.layer.5.intermediate.dense.weight">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</output>
		</layer>
		<layer id="400" name="__module.encoder.layer.5.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
			<data transpose_a="false" transpose_b="true" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>384</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1536</dim>
					<dim>384</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="401" name="Constant_103742" type="Const" version="opset1">
			<data element_type="f32" shape="1, 1, 1536" offset="425070736" size="6144" />
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="402" name="__module.encoder.layer.5.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
			<data auto_broadcast="numpy" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32" names="541">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
		</layer>
		<layer id="403" name="__module.encoder.layer.5.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
			<data approximation_mode="ERF" />
			<input>
				<port id="0" precision="FP32">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="FP32" names="542">
					<dim>-1</dim>
					<dim>-1</dim>
					<dim>1536</dim>
				</port>
			</output>
6211 </layer>
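	<!-- Annotation: approximation_mode="ERF" selects the exact GELU,
	     0.5 * x * (1 + erf(x / sqrt(2))), applied to the 1536-wide
	     intermediate activations of the feed-forward sublayer. -->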
6212 <layer id="404" name="self.encoder.layer.5.output.dense.weight" type="Const" version="opset1">
6213 <data element_type="f32" shape="384, 1536" offset="425076880" size="2359296" />
6214 <output>
6215 <port id="0" precision="FP32" names="self.encoder.layer.5.output.dense.weight">
6216 <dim>384</dim>
6217 <dim>1536</dim>
6218 </port>
6219 </output>
6220 </layer>
6221 <layer id="405" name="__module.encoder.layer.5.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
6222 <data transpose_a="false" transpose_b="true" />
6223 <input>
6224 <port id="0" precision="FP32">
6225 <dim>-1</dim>
6226 <dim>-1</dim>
6227 <dim>1536</dim>
6228 </port>
6229 <port id="1" precision="FP32">
6230 <dim>384</dim>
6231 <dim>1536</dim>
6232 </port>
6233 </input>
6234 <output>
6235 <port id="2" precision="FP32">
6236 <dim>-1</dim>
6237 <dim>-1</dim>
6238 <dim>384</dim>
6239 </port>
6240 </output>
6241 </layer>
6242 <layer id="406" name="Constant_103743" type="Const" version="opset1">
6243 <data element_type="f32" shape="1, 1, 384" offset="427436176" size="1536" />
6244 <output>
6245 <port id="0" precision="FP32">
6246 <dim>1</dim>
6247 <dim>1</dim>
6248 <dim>384</dim>
6249 </port>
6250 </output>
6251 </layer>
6252 <layer id="407" name="__module.encoder.layer.5.output.dense/aten::linear/Add" type="Add" version="opset1">
6253 <data auto_broadcast="numpy" />
6254 <input>
6255 <port id="0" precision="FP32">
6256 <dim>-1</dim>
6257 <dim>-1</dim>
6258 <dim>384</dim>
6259 </port>
6260 <port id="1" precision="FP32">
6261 <dim>1</dim>
6262 <dim>1</dim>
6263 <dim>384</dim>
6264 </port>
6265 </input>
6266 <output>
6267 <port id="2" precision="FP32" names="548,input.25">
6268 <dim>-1</dim>
6269 <dim>-1</dim>
6270 <dim>384</dim>
6271 </port>
6272 </output>
6273 </layer>
6274 <layer id="408" name="__module.encoder.layer.5.output/aten::add/Add" type="Add" version="opset1">
6275 <data auto_broadcast="numpy" />
6276 <input>
6277 <port id="0" precision="FP32">
6278 <dim>-1</dim>
6279 <dim>-1</dim>
6280 <dim>384</dim>
6281 </port>
6282 <port id="1" precision="FP32">
6283 <dim>-1</dim>
6284 <dim>-1</dim>
6285 <dim>384</dim>
6286 </port>
6287 </input>
6288 <output>
6289 <port id="2" precision="FP32" names="550">
6290 <dim>-1</dim>
6291 <dim>-1</dim>
6292 <dim>384</dim>
6293 </port>
6294 </output>
6295 </layer>
6296 <layer id="409" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
6297 <data element_type="i32" shape="1" offset="384850452" size="4" />
6298 <output>
6299 <port id="0" precision="I32">
6300 <dim>1</dim>
6301 </port>
6302 </output>
6303 </layer>
6304 <layer id="410" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
6305 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
6306 <input>
6307 <port id="0" precision="FP32">
6308 <dim>-1</dim>
6309 <dim>-1</dim>
6310 <dim>384</dim>
6311 </port>
6312 <port id="1" precision="I32">
6313 <dim>1</dim>
6314 </port>
6315 </input>
6316 <output>
6317 <port id="2" precision="FP32">
6318 <dim>-1</dim>
6319 <dim>-1</dim>
6320 <dim>384</dim>
6321 </port>
6322 </output>
6323 </layer>
6324 <layer id="411" name="Constant_103744" type="Const" version="opset1">
6325 <data element_type="f32" shape="1, 1, 384" offset="427437712" size="1536" />
6326 <output>
6327 <port id="0" precision="FP32">
6328 <dim>1</dim>
6329 <dim>1</dim>
6330 <dim>384</dim>
6331 </port>
6332 </output>
6333 </layer>
6334 <layer id="412" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
6335 <data auto_broadcast="numpy" />
6336 <input>
6337 <port id="0" precision="FP32">
6338 <dim>-1</dim>
6339 <dim>-1</dim>
6340 <dim>384</dim>
6341 </port>
6342 <port id="1" precision="FP32">
6343 <dim>1</dim>
6344 <dim>1</dim>
6345 <dim>384</dim>
6346 </port>
6347 </input>
6348 <output>
6349 <port id="2" precision="FP32">
6350 <dim>-1</dim>
6351 <dim>-1</dim>
6352 <dim>384</dim>
6353 </port>
6354 </output>
6355 </layer>
6356 <layer id="413" name="Constant_103745" type="Const" version="opset1">
6357 <data element_type="f32" shape="1, 1, 384" offset="427439248" size="1536" />
6358 <output>
6359 <port id="0" precision="FP32">
6360 <dim>1</dim>
6361 <dim>1</dim>
6362 <dim>384</dim>
6363 </port>
6364 </output>
6365 </layer>
6366 <layer id="414" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
6367 <data auto_broadcast="numpy" />
6368 <input>
6369 <port id="0" precision="FP32">
6370 <dim>-1</dim>
6371 <dim>-1</dim>
6372 <dim>384</dim>
6373 </port>
6374 <port id="1" precision="FP32">
6375 <dim>1</dim>
6376 <dim>1</dim>
6377 <dim>384</dim>
6378 </port>
6379 </input>
6380 <output>
6381 <port id="2" precision="FP32" names="554,hidden_states.37">
6382 <dim>-1</dim>
6383 <dim>-1</dim>
6384 <dim>384</dim>
6385 </port>
6386 </output>
6387 </layer>
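	<!-- Annotation: encoder.layer.6 begins here. Every encoder block in this IR
	     follows the same fixed pattern: Q/K/V projections as
	     MatMul(transpose_b="true") plus bias Add, Reshape to [batch, seq, 12, 32]
	     and Transpose to [batch, 12, seq, 32], ScaledDotProductAttention,
	     Transpose/Reshape back to [batch, seq, 384], attention output dense with
	     residual Add and LayerNorm (in MVN form), then the 1536-wide GELU
	     feed-forward with its own residual Add and LayerNorm. -->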
6388 <layer id="415" name="self.encoder.layer.6.attention.self.query.weight" type="Const" version="opset1">
6389 <data element_type="f32" shape="384, 384" offset="427440784" size="589824" />
6390 <output>
6391 <port id="0" precision="FP32" names="self.encoder.layer.6.attention.self.query.weight">
6392 <dim>384</dim>
6393 <dim>384</dim>
6394 </port>
6395 </output>
6396 </layer>
6397 <layer id="416" name="__module.encoder.layer.6.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
6398 <data transpose_a="false" transpose_b="true" />
6399 <input>
6400 <port id="0" precision="FP32">
6401 <dim>-1</dim>
6402 <dim>-1</dim>
6403 <dim>384</dim>
6404 </port>
6405 <port id="1" precision="FP32">
6406 <dim>384</dim>
6407 <dim>384</dim>
6408 </port>
6409 </input>
6410 <output>
6411 <port id="2" precision="FP32">
6412 <dim>-1</dim>
6413 <dim>-1</dim>
6414 <dim>384</dim>
6415 </port>
6416 </output>
6417 </layer>
6418 <layer id="417" name="Constant_103746" type="Const" version="opset1">
6419 <data element_type="f32" shape="1, 1, 384" offset="428030608" size="1536" />
6420 <output>
6421 <port id="0" precision="FP32">
6422 <dim>1</dim>
6423 <dim>1</dim>
6424 <dim>384</dim>
6425 </port>
6426 </output>
6427 </layer>
6428 <layer id="418" name="__module.encoder.layer.6.attention.self.query/aten::linear/Add" type="Add" version="opset1">
6429 <data auto_broadcast="numpy" />
6430 <input>
6431 <port id="0" precision="FP32">
6432 <dim>-1</dim>
6433 <dim>-1</dim>
6434 <dim>384</dim>
6435 </port>
6436 <port id="1" precision="FP32">
6437 <dim>1</dim>
6438 <dim>1</dim>
6439 <dim>384</dim>
6440 </port>
6441 </input>
6442 <output>
6443 <port id="2" precision="FP32" names="567,x.73">
6444 <dim>-1</dim>
6445 <dim>-1</dim>
6446 <dim>384</dim>
6447 </port>
6448 </output>
6449 </layer>
6450 <layer id="419" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
6451 <data element_type="i64" shape="4" offset="385444888" size="32" />
6452 <output>
6453 <port id="0" precision="I64">
6454 <dim>4</dim>
6455 </port>
6456 </output>
6457 </layer>
6458 <layer id="420" name="__module.encoder.layer.6.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
6459 <data special_zero="true" />
6460 <input>
6461 <port id="0" precision="FP32">
6462 <dim>-1</dim>
6463 <dim>-1</dim>
6464 <dim>384</dim>
6465 </port>
6466 <port id="1" precision="I64">
6467 <dim>4</dim>
6468 </port>
6469 </input>
6470 <output>
6471 <port id="2" precision="FP32" names="571,x.75">
6472 <dim>-1</dim>
6473 <dim>-1</dim>
6474 <dim>12</dim>
6475 <dim>32</dim>
6476 </port>
6477 </output>
6478 </layer>
6479 <layer id="421" name="Constant_95923" type="Const" version="opset1">
6480 <data element_type="i64" shape="4" offset="385444920" size="32" />
6481 <output>
6482 <port id="0" precision="I64" names="572">
6483 <dim>4</dim>
6484 </port>
6485 </output>
6486 </layer>
6487 <layer id="422" name="__module.encoder.layer.6.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
6488 <input>
6489 <port id="0" precision="FP32">
6490 <dim>-1</dim>
6491 <dim>-1</dim>
6492 <dim>12</dim>
6493 <dim>32</dim>
6494 </port>
6495 <port id="1" precision="I64">
6496 <dim>4</dim>
6497 </port>
6498 </input>
6499 <output>
6500 <port id="2" precision="FP32" names="573">
6501 <dim>-1</dim>
6502 <dim>12</dim>
6503 <dim>-1</dim>
6504 <dim>32</dim>
6505 </port>
6506 </output>
6507 </layer>
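	<!-- Annotation: the Reshape/Transpose pair above splits the 384-dim hidden
	     state into 12 attention heads of size 32 (12 x 32 = 384); the same two
	     shape constants (offsets 385444888 and 385444920) are reused for the
	     Q, K and V paths of every layer. -->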
6508 <layer id="423" name="self.encoder.layer.6.attention.self.key.weight" type="Const" version="opset1">
6509 <data element_type="f32" shape="384, 384" offset="428032144" size="589824" />
6510 <output>
6511 <port id="0" precision="FP32" names="self.encoder.layer.6.attention.self.key.weight">
6512 <dim>384</dim>
6513 <dim>384</dim>
6514 </port>
6515 </output>
6516 </layer>
6517 <layer id="424" name="__module.encoder.layer.6.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
6518 <data transpose_a="false" transpose_b="true" />
6519 <input>
6520 <port id="0" precision="FP32">
6521 <dim>-1</dim>
6522 <dim>-1</dim>
6523 <dim>384</dim>
6524 </port>
6525 <port id="1" precision="FP32">
6526 <dim>384</dim>
6527 <dim>384</dim>
6528 </port>
6529 </input>
6530 <output>
6531 <port id="2" precision="FP32">
6532 <dim>-1</dim>
6533 <dim>-1</dim>
6534 <dim>384</dim>
6535 </port>
6536 </output>
6537 </layer>
6538 <layer id="425" name="Constant_103747" type="Const" version="opset1">
6539 <data element_type="f32" shape="1, 1, 384" offset="428621968" size="1536" />
6540 <output>
6541 <port id="0" precision="FP32">
6542 <dim>1</dim>
6543 <dim>1</dim>
6544 <dim>384</dim>
6545 </port>
6546 </output>
6547 </layer>
6548 <layer id="426" name="__module.encoder.layer.6.attention.self.key/aten::linear/Add" type="Add" version="opset1">
6549 <data auto_broadcast="numpy" />
6550 <input>
6551 <port id="0" precision="FP32">
6552 <dim>-1</dim>
6553 <dim>-1</dim>
6554 <dim>384</dim>
6555 </port>
6556 <port id="1" precision="FP32">
6557 <dim>1</dim>
6558 <dim>1</dim>
6559 <dim>384</dim>
6560 </port>
6561 </input>
6562 <output>
6563 <port id="2" precision="FP32" names="576,x.77">
6564 <dim>-1</dim>
6565 <dim>-1</dim>
6566 <dim>384</dim>
6567 </port>
6568 </output>
6569 </layer>
6570 <layer id="427" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
6571 <data element_type="i64" shape="4" offset="385444888" size="32" />
6572 <output>
6573 <port id="0" precision="I64">
6574 <dim>4</dim>
6575 </port>
6576 </output>
6577 </layer>
6578 <layer id="428" name="__module.encoder.layer.6.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
6579 <data special_zero="true" />
6580 <input>
6581 <port id="0" precision="FP32">
6582 <dim>-1</dim>
6583 <dim>-1</dim>
6584 <dim>384</dim>
6585 </port>
6586 <port id="1" precision="I64">
6587 <dim>4</dim>
6588 </port>
6589 </input>
6590 <output>
6591 <port id="2" precision="FP32" names="580,x.79">
6592 <dim>-1</dim>
6593 <dim>-1</dim>
6594 <dim>12</dim>
6595 <dim>32</dim>
6596 </port>
6597 </output>
6598 </layer>
6599 <layer id="429" name="Constant_95946" type="Const" version="opset1">
6600 <data element_type="i64" shape="4" offset="385444920" size="32" />
6601 <output>
6602 <port id="0" precision="I64" names="581">
6603 <dim>4</dim>
6604 </port>
6605 </output>
6606 </layer>
6607 <layer id="430" name="__module.encoder.layer.6.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
6608 <input>
6609 <port id="0" precision="FP32">
6610 <dim>-1</dim>
6611 <dim>-1</dim>
6612 <dim>12</dim>
6613 <dim>32</dim>
6614 </port>
6615 <port id="1" precision="I64">
6616 <dim>4</dim>
6617 </port>
6618 </input>
6619 <output>
6620 <port id="2" precision="FP32" names="582">
6621 <dim>-1</dim>
6622 <dim>12</dim>
6623 <dim>-1</dim>
6624 <dim>32</dim>
6625 </port>
6626 </output>
6627 </layer>
6628 <layer id="431" name="self.encoder.layer.6.attention.self.value.weight" type="Const" version="opset1">
6629 <data element_type="f32" shape="384, 384" offset="428623504" size="589824" />
6630 <output>
6631 <port id="0" precision="FP32" names="self.encoder.layer.6.attention.self.value.weight">
6632 <dim>384</dim>
6633 <dim>384</dim>
6634 </port>
6635 </output>
6636 </layer>
6637 <layer id="432" name="__module.encoder.layer.6.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
6638 <data transpose_a="false" transpose_b="true" />
6639 <input>
6640 <port id="0" precision="FP32">
6641 <dim>-1</dim>
6642 <dim>-1</dim>
6643 <dim>384</dim>
6644 </port>
6645 <port id="1" precision="FP32">
6646 <dim>384</dim>
6647 <dim>384</dim>
6648 </port>
6649 </input>
6650 <output>
6651 <port id="2" precision="FP32">
6652 <dim>-1</dim>
6653 <dim>-1</dim>
6654 <dim>384</dim>
6655 </port>
6656 </output>
6657 </layer>
6658 <layer id="433" name="Constant_103748" type="Const" version="opset1">
6659 <data element_type="f32" shape="1, 1, 384" offset="429213328" size="1536" />
6660 <output>
6661 <port id="0" precision="FP32">
6662 <dim>1</dim>
6663 <dim>1</dim>
6664 <dim>384</dim>
6665 </port>
6666 </output>
6667 </layer>
6668 <layer id="434" name="__module.encoder.layer.6.attention.self.value/aten::linear/Add" type="Add" version="opset1">
6669 <data auto_broadcast="numpy" />
6670 <input>
6671 <port id="0" precision="FP32">
6672 <dim>-1</dim>
6673 <dim>-1</dim>
6674 <dim>384</dim>
6675 </port>
6676 <port id="1" precision="FP32">
6677 <dim>1</dim>
6678 <dim>1</dim>
6679 <dim>384</dim>
6680 </port>
6681 </input>
6682 <output>
6683 <port id="2" precision="FP32" names="585,x.81">
6684 <dim>-1</dim>
6685 <dim>-1</dim>
6686 <dim>384</dim>
6687 </port>
6688 </output>
6689 </layer>
6690 <layer id="435" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
6691 <data element_type="i64" shape="4" offset="385444888" size="32" />
6692 <output>
6693 <port id="0" precision="I64">
6694 <dim>4</dim>
6695 </port>
6696 </output>
6697 </layer>
6698 <layer id="436" name="__module.encoder.layer.6.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
6699 <data special_zero="true" />
6700 <input>
6701 <port id="0" precision="FP32">
6702 <dim>-1</dim>
6703 <dim>-1</dim>
6704 <dim>384</dim>
6705 </port>
6706 <port id="1" precision="I64">
6707 <dim>4</dim>
6708 </port>
6709 </input>
6710 <output>
6711 <port id="2" precision="FP32" names="589,x.83">
6712 <dim>-1</dim>
6713 <dim>-1</dim>
6714 <dim>12</dim>
6715 <dim>32</dim>
6716 </port>
6717 </output>
6718 </layer>
6719 <layer id="437" name="Constant_95969" type="Const" version="opset1">
6720 <data element_type="i64" shape="4" offset="385444920" size="32" />
6721 <output>
6722 <port id="0" precision="I64" names="590">
6723 <dim>4</dim>
6724 </port>
6725 </output>
6726 </layer>
6727 <layer id="438" name="__module.encoder.layer.6.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
6728 <input>
6729 <port id="0" precision="FP32">
6730 <dim>-1</dim>
6731 <dim>-1</dim>
6732 <dim>12</dim>
6733 <dim>32</dim>
6734 </port>
6735 <port id="1" precision="I64">
6736 <dim>4</dim>
6737 </port>
6738 </input>
6739 <output>
6740 <port id="2" precision="FP32" names="591">
6741 <dim>-1</dim>
6742 <dim>12</dim>
6743 <dim>-1</dim>
6744 <dim>32</dim>
6745 </port>
6746 </output>
6747 </layer>
6748 <layer id="439" name="__module.encoder.layer.6.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
6749 <data causal="false" />
6750 <input>
6751 <port id="0" precision="FP32">
6752 <dim>-1</dim>
6753 <dim>12</dim>
6754 <dim>-1</dim>
6755 <dim>32</dim>
6756 </port>
6757 <port id="1" precision="FP32">
6758 <dim>-1</dim>
6759 <dim>12</dim>
6760 <dim>-1</dim>
6761 <dim>32</dim>
6762 </port>
6763 <port id="2" precision="FP32">
6764 <dim>-1</dim>
6765 <dim>12</dim>
6766 <dim>-1</dim>
6767 <dim>32</dim>
6768 </port>
6769 <port id="3" precision="FP32">
6770 <dim>-1</dim>
6771 <dim>1</dim>
6772 <dim>-1</dim>
6773 <dim>-1</dim>
6774 </port>
6775 </input>
6776 <output>
6777 <port id="4" precision="FP32" names="592,attn_output.25">
6778 <dim>-1</dim>
6779 <dim>12</dim>
6780 <dim>-1</dim>
6781 <dim>32</dim>
6782 </port>
6783 </output>
6784 </layer>
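	<!-- Annotation: ScaledDotProductAttention (opset13, causal="false") fuses
	     softmax(Q K^T / sqrt(32) + mask) V into one op; port 3 carries the
	     additive floating-point attention mask with broadcastable shape
	     [batch, 1, -1, -1] derived from the attention_mask input. -->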
6785 <layer id="440" name="__module.encoder.layer.6.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
6786 <data element_type="i32" shape="4" offset="386627688" size="16" />
6787 <output>
6788 <port id="0" precision="I32">
6789 <dim>4</dim>
6790 </port>
6791 </output>
6792 </layer>
6793 <layer id="441" name="__module.encoder.layer.6.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
6794 <input>
6795 <port id="0" precision="FP32">
6796 <dim>-1</dim>
6797 <dim>12</dim>
6798 <dim>-1</dim>
6799 <dim>32</dim>
6800 </port>
6801 <port id="1" precision="I32">
6802 <dim>4</dim>
6803 </port>
6804 </input>
6805 <output>
6806 <port id="2" precision="FP32" names="593,attn_output.27">
6807 <dim>-1</dim>
6808 <dim>-1</dim>
6809 <dim>12</dim>
6810 <dim>32</dim>
6811 </port>
6812 </output>
6813 </layer>
6814 <layer id="442" name="__module.encoder.layer.6.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
6815 <data output_type="i64" />
6816 <input>
6817 <port id="0" precision="FP32">
6818 <dim>-1</dim>
6819 <dim>-1</dim>
6820 <dim>384</dim>
6821 </port>
6822 </input>
6823 <output>
6824 <port id="1" precision="I64">
6825 <dim>3</dim>
6826 </port>
6827 </output>
6828 </layer>
6829 <layer id="443" name="Constant_102871" type="Const" version="opset1">
6830 <data element_type="i64" shape="2" offset="386627704" size="16" />
6831 <output>
6832 <port id="0" precision="I64">
6833 <dim>2</dim>
6834 </port>
6835 </output>
6836 </layer>
6837 <layer id="444" name="Constant_102872" type="Const" version="opset1">
6838 <data element_type="i64" shape="" offset="384850436" size="8" />
6839 <output>
6840 <port id="0" precision="I64" />
6841 </output>
6842 </layer>
6843 <layer id="445" name="Gather_102873" type="Gather" version="opset8">
6844 <data batch_dims="0" />
6845 <input>
6846 <port id="0" precision="I64">
6847 <dim>3</dim>
6848 </port>
6849 <port id="1" precision="I64">
6850 <dim>2</dim>
6851 </port>
6852 <port id="2" precision="I64" />
6853 </input>
6854 <output>
6855 <port id="3" precision="I64">
6856 <dim>2</dim>
6857 </port>
6858 </output>
6859 </layer>
6860 <layer id="446" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
6861 <data axis="0" />
6862 <input>
6863 <port id="0" precision="I64">
6864 <dim>2</dim>
6865 </port>
6866 <port id="1" precision="I64">
6867 <dim>1</dim>
6868 </port>
6869 </input>
6870 <output>
6871 <port id="2" precision="I64" names="594">
6872 <dim>3</dim>
6873 </port>
6874 </output>
6875 </layer>
6876 <layer id="447" name="__module.encoder.layer.6.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
6877 <data special_zero="false" />
6878 <input>
6879 <port id="0" precision="FP32">
6880 <dim>-1</dim>
6881 <dim>-1</dim>
6882 <dim>12</dim>
6883 <dim>32</dim>
6884 </port>
6885 <port id="1" precision="I64">
6886 <dim>3</dim>
6887 </port>
6888 </input>
6889 <output>
6890 <port id="2" precision="FP32" names="595">
6891 <dim>-1</dim>
6892 <dim>-1</dim>
6893 <dim>384</dim>
6894 </port>
6895 </output>
6896 </layer>
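	<!-- Annotation: layers 442 to 447 rebuild the flattened shape at runtime:
	     ShapeOf reads [batch, seq, 384] from the block input, Gather picks the
	     batch and sequence dims, Concat appends 384, and
	     Reshape(special_zero="false") folds the [batch, seq, 12, 32] attention
	     output back to [batch, seq, 384] for the output dense projection. -->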
6897 <layer id="448" name="self.encoder.layer.6.attention.output.dense.weight" type="Const" version="opset1">
6898 <data element_type="f32" shape="384, 384" offset="429214864" size="589824" />
6899 <output>
6900 <port id="0" precision="FP32" names="self.encoder.layer.6.attention.output.dense.weight">
6901 <dim>384</dim>
6902 <dim>384</dim>
6903 </port>
6904 </output>
6905 </layer>
6906 <layer id="449" name="__module.encoder.layer.6.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
6907 <data transpose_a="false" transpose_b="true" />
6908 <input>
6909 <port id="0" precision="FP32">
6910 <dim>-1</dim>
6911 <dim>-1</dim>
6912 <dim>384</dim>
6913 </port>
6914 <port id="1" precision="FP32">
6915 <dim>384</dim>
6916 <dim>384</dim>
6917 </port>
6918 </input>
6919 <output>
6920 <port id="2" precision="FP32">
6921 <dim>-1</dim>
6922 <dim>-1</dim>
6923 <dim>384</dim>
6924 </port>
6925 </output>
6926 </layer>
6927 <layer id="450" name="Constant_103749" type="Const" version="opset1">
6928 <data element_type="f32" shape="1, 1, 384" offset="429804688" size="1536" />
6929 <output>
6930 <port id="0" precision="FP32">
6931 <dim>1</dim>
6932 <dim>1</dim>
6933 <dim>384</dim>
6934 </port>
6935 </output>
6936 </layer>
6937 <layer id="451" name="__module.encoder.layer.6.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
6938 <data auto_broadcast="numpy" />
6939 <input>
6940 <port id="0" precision="FP32">
6941 <dim>-1</dim>
6942 <dim>-1</dim>
6943 <dim>384</dim>
6944 </port>
6945 <port id="1" precision="FP32">
6946 <dim>1</dim>
6947 <dim>1</dim>
6948 <dim>384</dim>
6949 </port>
6950 </input>
6951 <output>
6952 <port id="2" precision="FP32" names="601,input.27">
6953 <dim>-1</dim>
6954 <dim>-1</dim>
6955 <dim>384</dim>
6956 </port>
6957 </output>
6958 </layer>
6959 <layer id="452" name="__module.encoder.layer.6.attention.output/aten::add/Add" type="Add" version="opset1">
6960 <data auto_broadcast="numpy" />
6961 <input>
6962 <port id="0" precision="FP32">
6963 <dim>-1</dim>
6964 <dim>-1</dim>
6965 <dim>384</dim>
6966 </port>
6967 <port id="1" precision="FP32">
6968 <dim>-1</dim>
6969 <dim>-1</dim>
6970 <dim>384</dim>
6971 </port>
6972 </input>
6973 <output>
6974 <port id="2" precision="FP32" names="603">
6975 <dim>-1</dim>
6976 <dim>-1</dim>
6977 <dim>384</dim>
6978 </port>
6979 </output>
6980 </layer>
6981 <layer id="453" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
6982 <data element_type="i32" shape="1" offset="384850452" size="4" />
6983 <output>
6984 <port id="0" precision="I32">
6985 <dim>1</dim>
6986 </port>
6987 </output>
6988 </layer>
6989 <layer id="454" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
6990 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
6991 <input>
6992 <port id="0" precision="FP32">
6993 <dim>-1</dim>
6994 <dim>-1</dim>
6995 <dim>384</dim>
6996 </port>
6997 <port id="1" precision="I32">
6998 <dim>1</dim>
6999 </port>
7000 </input>
7001 <output>
7002 <port id="2" precision="FP32">
7003 <dim>-1</dim>
7004 <dim>-1</dim>
7005 <dim>384</dim>
7006 </port>
7007 </output>
7008 </layer>
7009 <layer id="455" name="Constant_103750" type="Const" version="opset1">
7010 <data element_type="f32" shape="1, 1, 384" offset="429806224" size="1536" />
7011 <output>
7012 <port id="0" precision="FP32">
7013 <dim>1</dim>
7014 <dim>1</dim>
7015 <dim>384</dim>
7016 </port>
7017 </output>
7018 </layer>
7019 <layer id="456" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
7020 <data auto_broadcast="numpy" />
7021 <input>
7022 <port id="0" precision="FP32">
7023 <dim>-1</dim>
7024 <dim>-1</dim>
7025 <dim>384</dim>
7026 </port>
7027 <port id="1" precision="FP32">
7028 <dim>1</dim>
7029 <dim>1</dim>
7030 <dim>384</dim>
7031 </port>
7032 </input>
7033 <output>
7034 <port id="2" precision="FP32">
7035 <dim>-1</dim>
7036 <dim>-1</dim>
7037 <dim>384</dim>
7038 </port>
7039 </output>
7040 </layer>
7041 <layer id="457" name="Constant_103751" type="Const" version="opset1">
7042 <data element_type="f32" shape="1, 1, 384" offset="429807760" size="1536" />
7043 <output>
7044 <port id="0" precision="FP32">
7045 <dim>1</dim>
7046 <dim>1</dim>
7047 <dim>384</dim>
7048 </port>
7049 </output>
7050 </layer>
7051 <layer id="458" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
7052 <data auto_broadcast="numpy" />
7053 <input>
7054 <port id="0" precision="FP32">
7055 <dim>-1</dim>
7056 <dim>-1</dim>
7057 <dim>384</dim>
7058 </port>
7059 <port id="1" precision="FP32">
7060 <dim>1</dim>
7061 <dim>1</dim>
7062 <dim>384</dim>
7063 </port>
7064 </input>
7065 <output>
7066 <port id="2" precision="FP32" names="607,input_tensor.13">
7067 <dim>-1</dim>
7068 <dim>-1</dim>
7069 <dim>384</dim>
7070 </port>
7071 </output>
7072 </layer>
7073 <layer id="459" name="self.encoder.layer.6.intermediate.dense.weight" type="Const" version="opset1">
7074 <data element_type="f32" shape="1536, 384" offset="429809296" size="2359296" />
7075 <output>
7076 <port id="0" precision="FP32" names="self.encoder.layer.6.intermediate.dense.weight">
7077 <dim>1536</dim>
7078 <dim>384</dim>
7079 </port>
7080 </output>
7081 </layer>
7082 <layer id="460" name="__module.encoder.layer.6.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
7083 <data transpose_a="false" transpose_b="true" />
7084 <input>
7085 <port id="0" precision="FP32">
7086 <dim>-1</dim>
7087 <dim>-1</dim>
7088 <dim>384</dim>
7089 </port>
7090 <port id="1" precision="FP32">
7091 <dim>1536</dim>
7092 <dim>384</dim>
7093 </port>
7094 </input>
7095 <output>
7096 <port id="2" precision="FP32">
7097 <dim>-1</dim>
7098 <dim>-1</dim>
7099 <dim>1536</dim>
7100 </port>
7101 </output>
7102 </layer>
7103 <layer id="461" name="Constant_103752" type="Const" version="opset1">
7104 <data element_type="f32" shape="1, 1, 1536" offset="432168592" size="6144" />
7105 <output>
7106 <port id="0" precision="FP32">
7107 <dim>1</dim>
7108 <dim>1</dim>
7109 <dim>1536</dim>
7110 </port>
7111 </output>
7112 </layer>
7113 <layer id="462" name="__module.encoder.layer.6.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
7114 <data auto_broadcast="numpy" />
7115 <input>
7116 <port id="0" precision="FP32">
7117 <dim>-1</dim>
7118 <dim>-1</dim>
7119 <dim>1536</dim>
7120 </port>
7121 <port id="1" precision="FP32">
7122 <dim>1</dim>
7123 <dim>1</dim>
7124 <dim>1536</dim>
7125 </port>
7126 </input>
7127 <output>
7128 <port id="2" precision="FP32" names="612">
7129 <dim>-1</dim>
7130 <dim>-1</dim>
7131 <dim>1536</dim>
7132 </port>
7133 </output>
7134 </layer>
7135 <layer id="463" name="__module.encoder.layer.6.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
7136 <data approximation_mode="ERF" />
7137 <input>
7138 <port id="0" precision="FP32">
7139 <dim>-1</dim>
7140 <dim>-1</dim>
7141 <dim>1536</dim>
7142 </port>
7143 </input>
7144 <output>
7145 <port id="1" precision="FP32" names="613">
7146 <dim>-1</dim>
7147 <dim>-1</dim>
7148 <dim>1536</dim>
7149 </port>
7150 </output>
7151 </layer>
7152 <layer id="464" name="self.encoder.layer.6.output.dense.weight" type="Const" version="opset1">
7153 <data element_type="f32" shape="384, 1536" offset="432174736" size="2359296" />
7154 <output>
7155 <port id="0" precision="FP32" names="self.encoder.layer.6.output.dense.weight">
7156 <dim>384</dim>
7157 <dim>1536</dim>
7158 </port>
7159 </output>
7160 </layer>
7161 <layer id="465" name="__module.encoder.layer.6.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
7162 <data transpose_a="false" transpose_b="true" />
7163 <input>
7164 <port id="0" precision="FP32">
7165 <dim>-1</dim>
7166 <dim>-1</dim>
7167 <dim>1536</dim>
7168 </port>
7169 <port id="1" precision="FP32">
7170 <dim>384</dim>
7171 <dim>1536</dim>
7172 </port>
7173 </input>
7174 <output>
7175 <port id="2" precision="FP32">
7176 <dim>-1</dim>
7177 <dim>-1</dim>
7178 <dim>384</dim>
7179 </port>
7180 </output>
7181 </layer>
7182 <layer id="466" name="Constant_103753" type="Const" version="opset1">
7183 <data element_type="f32" shape="1, 1, 384" offset="434534032" size="1536" />
7184 <output>
7185 <port id="0" precision="FP32">
7186 <dim>1</dim>
7187 <dim>1</dim>
7188 <dim>384</dim>
7189 </port>
7190 </output>
7191 </layer>
7192 <layer id="467" name="__module.encoder.layer.6.output.dense/aten::linear/Add" type="Add" version="opset1">
7193 <data auto_broadcast="numpy" />
7194 <input>
7195 <port id="0" precision="FP32">
7196 <dim>-1</dim>
7197 <dim>-1</dim>
7198 <dim>384</dim>
7199 </port>
7200 <port id="1" precision="FP32">
7201 <dim>1</dim>
7202 <dim>1</dim>
7203 <dim>384</dim>
7204 </port>
7205 </input>
7206 <output>
7207 <port id="2" precision="FP32" names="619,input.29">
7208 <dim>-1</dim>
7209 <dim>-1</dim>
7210 <dim>384</dim>
7211 </port>
7212 </output>
7213 </layer>
7214 <layer id="468" name="__module.encoder.layer.6.output/aten::add/Add" type="Add" version="opset1">
7215 <data auto_broadcast="numpy" />
7216 <input>
7217 <port id="0" precision="FP32">
7218 <dim>-1</dim>
7219 <dim>-1</dim>
7220 <dim>384</dim>
7221 </port>
7222 <port id="1" precision="FP32">
7223 <dim>-1</dim>
7224 <dim>-1</dim>
7225 <dim>384</dim>
7226 </port>
7227 </input>
7228 <output>
7229 <port id="2" precision="FP32" names="621">
7230 <dim>-1</dim>
7231 <dim>-1</dim>
7232 <dim>384</dim>
7233 </port>
7234 </output>
7235 </layer>
7236 <layer id="469" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
7237 <data element_type="i32" shape="1" offset="384850452" size="4" />
7238 <output>
7239 <port id="0" precision="I32">
7240 <dim>1</dim>
7241 </port>
7242 </output>
7243 </layer>
7244 <layer id="470" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
7245 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
7246 <input>
7247 <port id="0" precision="FP32">
7248 <dim>-1</dim>
7249 <dim>-1</dim>
7250 <dim>384</dim>
7251 </port>
7252 <port id="1" precision="I32">
7253 <dim>1</dim>
7254 </port>
7255 </input>
7256 <output>
7257 <port id="2" precision="FP32">
7258 <dim>-1</dim>
7259 <dim>-1</dim>
7260 <dim>384</dim>
7261 </port>
7262 </output>
7263 </layer>
7264 <layer id="471" name="Constant_103754" type="Const" version="opset1">
7265 <data element_type="f32" shape="1, 1, 384" offset="434535568" size="1536" />
7266 <output>
7267 <port id="0" precision="FP32">
7268 <dim>1</dim>
7269 <dim>1</dim>
7270 <dim>384</dim>
7271 </port>
7272 </output>
7273 </layer>
7274 <layer id="472" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
7275 <data auto_broadcast="numpy" />
7276 <input>
7277 <port id="0" precision="FP32">
7278 <dim>-1</dim>
7279 <dim>-1</dim>
7280 <dim>384</dim>
7281 </port>
7282 <port id="1" precision="FP32">
7283 <dim>1</dim>
7284 <dim>1</dim>
7285 <dim>384</dim>
7286 </port>
7287 </input>
7288 <output>
7289 <port id="2" precision="FP32">
7290 <dim>-1</dim>
7291 <dim>-1</dim>
7292 <dim>384</dim>
7293 </port>
7294 </output>
7295 </layer>
7296 <layer id="473" name="Constant_103755" type="Const" version="opset1">
7297 <data element_type="f32" shape="1, 1, 384" offset="434537104" size="1536" />
7298 <output>
7299 <port id="0" precision="FP32">
7300 <dim>1</dim>
7301 <dim>1</dim>
7302 <dim>384</dim>
7303 </port>
7304 </output>
7305 </layer>
7306 <layer id="474" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
7307 <data auto_broadcast="numpy" />
7308 <input>
7309 <port id="0" precision="FP32">
7310 <dim>-1</dim>
7311 <dim>-1</dim>
7312 <dim>384</dim>
7313 </port>
7314 <port id="1" precision="FP32">
7315 <dim>1</dim>
7316 <dim>1</dim>
7317 <dim>384</dim>
7318 </port>
7319 </input>
7320 <output>
7321 <port id="2" precision="FP32" names="625,hidden_states.43">
7322 <dim>-1</dim>
7323 <dim>-1</dim>
7324 <dim>384</dim>
7325 </port>
7326 </output>
7327 </layer>
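	<!-- Minimal sketch of running this IR with the standard OpenVINO Python API.
	     Assumptions (not stated in this file): openvino >= 2023.1 is installed,
	     the companion openvino_model.bin sits next to this XML, and the token
	     ids below are placeholders rather than output of a real tokenizer.

	     import numpy as np
	     import openvino as ov

	     core = ov.Core()
	     # read_model() picks up openvino_model.bin automatically
	     model = core.read_model("openvino/openvino_model.xml")
	     compiled = core.compile_model(model, "CPU")

	     ids = np.array([[0, 9, 2]], dtype=np.int64)   # placeholder token ids
	     feeds = {
	         "input_ids": ids,
	         "attention_mask": np.ones_like(ids),
	         "token_type_ids": np.zeros_like(ids),
	     }
	     hidden = compiled(feeds)[0]   # float32, shape [batch, seq, 384]
	-->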
7328 <layer id="475" name="self.encoder.layer.7.attention.self.query.weight" type="Const" version="opset1">
7329 <data element_type="f32" shape="384, 384" offset="434538640" size="589824" />
7330 <output>
7331 <port id="0" precision="FP32" names="self.encoder.layer.7.attention.self.query.weight">
7332 <dim>384</dim>
7333 <dim>384</dim>
7334 </port>
7335 </output>
7336 </layer>
7337 <layer id="476" name="__module.encoder.layer.7.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
7338 <data transpose_a="false" transpose_b="true" />
7339 <input>
7340 <port id="0" precision="FP32">
7341 <dim>-1</dim>
7342 <dim>-1</dim>
7343 <dim>384</dim>
7344 </port>
7345 <port id="1" precision="FP32">
7346 <dim>384</dim>
7347 <dim>384</dim>
7348 </port>
7349 </input>
7350 <output>
7351 <port id="2" precision="FP32">
7352 <dim>-1</dim>
7353 <dim>-1</dim>
7354 <dim>384</dim>
7355 </port>
7356 </output>
7357 </layer>
7358 <layer id="477" name="Constant_103756" type="Const" version="opset1">
7359 <data element_type="f32" shape="1, 1, 384" offset="435128464" size="1536" />
7360 <output>
7361 <port id="0" precision="FP32">
7362 <dim>1</dim>
7363 <dim>1</dim>
7364 <dim>384</dim>
7365 </port>
7366 </output>
7367 </layer>
7368 <layer id="478" name="__module.encoder.layer.7.attention.self.query/aten::linear/Add" type="Add" version="opset1">
7369 <data auto_broadcast="numpy" />
7370 <input>
7371 <port id="0" precision="FP32">
7372 <dim>-1</dim>
7373 <dim>-1</dim>
7374 <dim>384</dim>
7375 </port>
7376 <port id="1" precision="FP32">
7377 <dim>1</dim>
7378 <dim>1</dim>
7379 <dim>384</dim>
7380 </port>
7381 </input>
7382 <output>
7383 <port id="2" precision="FP32" names="638,x.85">
7384 <dim>-1</dim>
7385 <dim>-1</dim>
7386 <dim>384</dim>
7387 </port>
7388 </output>
7389 </layer>
7390 <layer id="479" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
7391 <data element_type="i64" shape="4" offset="385444888" size="32" />
7392 <output>
7393 <port id="0" precision="I64">
7394 <dim>4</dim>
7395 </port>
7396 </output>
7397 </layer>
7398 <layer id="480" name="__module.encoder.layer.7.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
7399 <data special_zero="true" />
7400 <input>
7401 <port id="0" precision="FP32">
7402 <dim>-1</dim>
7403 <dim>-1</dim>
7404 <dim>384</dim>
7405 </port>
7406 <port id="1" precision="I64">
7407 <dim>4</dim>
7408 </port>
7409 </input>
7410 <output>
7411 <port id="2" precision="FP32" names="642,x.87">
7412 <dim>-1</dim>
7413 <dim>-1</dim>
7414 <dim>12</dim>
7415 <dim>32</dim>
7416 </port>
7417 </output>
7418 </layer>
7419 <layer id="481" name="Constant_96149" type="Const" version="opset1">
7420 <data element_type="i64" shape="4" offset="385444920" size="32" />
7421 <output>
7422 <port id="0" precision="I64" names="643">
7423 <dim>4</dim>
7424 </port>
7425 </output>
7426 </layer>
7427 <layer id="482" name="__module.encoder.layer.7.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
7428 <input>
7429 <port id="0" precision="FP32">
7430 <dim>-1</dim>
7431 <dim>-1</dim>
7432 <dim>12</dim>
7433 <dim>32</dim>
7434 </port>
7435 <port id="1" precision="I64">
7436 <dim>4</dim>
7437 </port>
7438 </input>
7439 <output>
7440 <port id="2" precision="FP32" names="644">
7441 <dim>-1</dim>
7442 <dim>12</dim>
7443 <dim>-1</dim>
7444 <dim>32</dim>
7445 </port>
7446 </output>
7447 </layer>
7448 <layer id="483" name="self.encoder.layer.7.attention.self.key.weight" type="Const" version="opset1">
7449 <data element_type="f32" shape="384, 384" offset="435130000" size="589824" />
7450 <output>
7451 <port id="0" precision="FP32" names="self.encoder.layer.7.attention.self.key.weight">
7452 <dim>384</dim>
7453 <dim>384</dim>
7454 </port>
7455 </output>
7456 </layer>
7457 <layer id="484" name="__module.encoder.layer.7.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
7458 <data transpose_a="false" transpose_b="true" />
7459 <input>
7460 <port id="0" precision="FP32">
7461 <dim>-1</dim>
7462 <dim>-1</dim>
7463 <dim>384</dim>
7464 </port>
7465 <port id="1" precision="FP32">
7466 <dim>384</dim>
7467 <dim>384</dim>
7468 </port>
7469 </input>
7470 <output>
7471 <port id="2" precision="FP32">
7472 <dim>-1</dim>
7473 <dim>-1</dim>
7474 <dim>384</dim>
7475 </port>
7476 </output>
7477 </layer>
7478 <layer id="485" name="Constant_103757" type="Const" version="opset1">
7479 <data element_type="f32" shape="1, 1, 384" offset="435719824" size="1536" />
7480 <output>
7481 <port id="0" precision="FP32">
7482 <dim>1</dim>
7483 <dim>1</dim>
7484 <dim>384</dim>
7485 </port>
7486 </output>
7487 </layer>
7488 <layer id="486" name="__module.encoder.layer.7.attention.self.key/aten::linear/Add" type="Add" version="opset1">
7489 <data auto_broadcast="numpy" />
7490 <input>
7491 <port id="0" precision="FP32">
7492 <dim>-1</dim>
7493 <dim>-1</dim>
7494 <dim>384</dim>
7495 </port>
7496 <port id="1" precision="FP32">
7497 <dim>1</dim>
7498 <dim>1</dim>
7499 <dim>384</dim>
7500 </port>
7501 </input>
7502 <output>
7503 <port id="2" precision="FP32" names="647,x.89">
7504 <dim>-1</dim>
7505 <dim>-1</dim>
7506 <dim>384</dim>
7507 </port>
7508 </output>
7509 </layer>
7510 <layer id="487" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
7511 <data element_type="i64" shape="4" offset="385444888" size="32" />
7512 <output>
7513 <port id="0" precision="I64">
7514 <dim>4</dim>
7515 </port>
7516 </output>
7517 </layer>
7518 <layer id="488" name="__module.encoder.layer.7.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
7519 <data special_zero="true" />
7520 <input>
7521 <port id="0" precision="FP32">
7522 <dim>-1</dim>
7523 <dim>-1</dim>
7524 <dim>384</dim>
7525 </port>
7526 <port id="1" precision="I64">
7527 <dim>4</dim>
7528 </port>
7529 </input>
7530 <output>
7531 <port id="2" precision="FP32" names="651,x.91">
7532 <dim>-1</dim>
7533 <dim>-1</dim>
7534 <dim>12</dim>
7535 <dim>32</dim>
7536 </port>
7537 </output>
7538 </layer>
7539 <layer id="489" name="Constant_96172" type="Const" version="opset1">
7540 <data element_type="i64" shape="4" offset="385444920" size="32" />
7541 <output>
7542 <port id="0" precision="I64" names="652">
7543 <dim>4</dim>
7544 </port>
7545 </output>
7546 </layer>
7547 <layer id="490" name="__module.encoder.layer.7.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
7548 <input>
7549 <port id="0" precision="FP32">
7550 <dim>-1</dim>
7551 <dim>-1</dim>
7552 <dim>12</dim>
7553 <dim>32</dim>
7554 </port>
7555 <port id="1" precision="I64">
7556 <dim>4</dim>
7557 </port>
7558 </input>
7559 <output>
7560 <port id="2" precision="FP32" names="653">
7561 <dim>-1</dim>
7562 <dim>12</dim>
7563 <dim>-1</dim>
7564 <dim>32</dim>
7565 </port>
7566 </output>
7567 </layer>
7568 <layer id="491" name="self.encoder.layer.7.attention.self.value.weight" type="Const" version="opset1">
7569 <data element_type="f32" shape="384, 384" offset="435721360" size="589824" />
7570 <output>
7571 <port id="0" precision="FP32" names="self.encoder.layer.7.attention.self.value.weight">
7572 <dim>384</dim>
7573 <dim>384</dim>
7574 </port>
7575 </output>
7576 </layer>
7577 <layer id="492" name="__module.encoder.layer.7.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
7578 <data transpose_a="false" transpose_b="true" />
7579 <input>
7580 <port id="0" precision="FP32">
7581 <dim>-1</dim>
7582 <dim>-1</dim>
7583 <dim>384</dim>
7584 </port>
7585 <port id="1" precision="FP32">
7586 <dim>384</dim>
7587 <dim>384</dim>
7588 </port>
7589 </input>
7590 <output>
7591 <port id="2" precision="FP32">
7592 <dim>-1</dim>
7593 <dim>-1</dim>
7594 <dim>384</dim>
7595 </port>
7596 </output>
7597 </layer>
7598 <layer id="493" name="Constant_103758" type="Const" version="opset1">
7599 <data element_type="f32" shape="1, 1, 384" offset="436311184" size="1536" />
7600 <output>
7601 <port id="0" precision="FP32">
7602 <dim>1</dim>
7603 <dim>1</dim>
7604 <dim>384</dim>
7605 </port>
7606 </output>
7607 </layer>
7608 <layer id="494" name="__module.encoder.layer.7.attention.self.value/aten::linear/Add" type="Add" version="opset1">
7609 <data auto_broadcast="numpy" />
7610 <input>
7611 <port id="0" precision="FP32">
7612 <dim>-1</dim>
7613 <dim>-1</dim>
7614 <dim>384</dim>
7615 </port>
7616 <port id="1" precision="FP32">
7617 <dim>1</dim>
7618 <dim>1</dim>
7619 <dim>384</dim>
7620 </port>
7621 </input>
7622 <output>
7623 <port id="2" precision="FP32" names="656,x.93">
7624 <dim>-1</dim>
7625 <dim>-1</dim>
7626 <dim>384</dim>
7627 </port>
7628 </output>
7629 </layer>
7630 <layer id="495" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
7631 <data element_type="i64" shape="4" offset="385444888" size="32" />
7632 <output>
7633 <port id="0" precision="I64">
7634 <dim>4</dim>
7635 </port>
7636 </output>
7637 </layer>
7638 <layer id="496" name="__module.encoder.layer.7.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
7639 <data special_zero="true" />
7640 <input>
7641 <port id="0" precision="FP32">
7642 <dim>-1</dim>
7643 <dim>-1</dim>
7644 <dim>384</dim>
7645 </port>
7646 <port id="1" precision="I64">
7647 <dim>4</dim>
7648 </port>
7649 </input>
7650 <output>
7651 <port id="2" precision="FP32" names="660,x.95">
7652 <dim>-1</dim>
7653 <dim>-1</dim>
7654 <dim>12</dim>
7655 <dim>32</dim>
7656 </port>
7657 </output>
7658 </layer>
7659 <layer id="497" name="Constant_96195" type="Const" version="opset1">
7660 <data element_type="i64" shape="4" offset="385444920" size="32" />
7661 <output>
7662 <port id="0" precision="I64" names="661">
7663 <dim>4</dim>
7664 </port>
7665 </output>
7666 </layer>
7667 <layer id="498" name="__module.encoder.layer.7.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
7668 <input>
7669 <port id="0" precision="FP32">
7670 <dim>-1</dim>
7671 <dim>-1</dim>
7672 <dim>12</dim>
7673 <dim>32</dim>
7674 </port>
7675 <port id="1" precision="I64">
7676 <dim>4</dim>
7677 </port>
7678 </input>
7679 <output>
7680 <port id="2" precision="FP32" names="662">
7681 <dim>-1</dim>
7682 <dim>12</dim>
7683 <dim>-1</dim>
7684 <dim>32</dim>
7685 </port>
7686 </output>
7687 </layer>
7688 <layer id="499" name="__module.encoder.layer.7.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
7689 <data causal="false" />
7690 <input>
7691 <port id="0" precision="FP32">
7692 <dim>-1</dim>
7693 <dim>12</dim>
7694 <dim>-1</dim>
7695 <dim>32</dim>
7696 </port>
7697 <port id="1" precision="FP32">
7698 <dim>-1</dim>
7699 <dim>12</dim>
7700 <dim>-1</dim>
7701 <dim>32</dim>
7702 </port>
7703 <port id="2" precision="FP32">
7704 <dim>-1</dim>
7705 <dim>12</dim>
7706 <dim>-1</dim>
7707 <dim>32</dim>
7708 </port>
7709 <port id="3" precision="FP32">
7710 <dim>-1</dim>
7711 <dim>1</dim>
7712 <dim>-1</dim>
7713 <dim>-1</dim>
7714 </port>
7715 </input>
7716 <output>
7717 <port id="4" precision="FP32" names="663,attn_output.29">
7718 <dim>-1</dim>
7719 <dim>12</dim>
7720 <dim>-1</dim>
7721 <dim>32</dim>
7722 </port>
7723 </output>
7724 </layer>
7725 <layer id="500" name="__module.encoder.layer.7.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
7726 <data element_type="i32" shape="4" offset="386627688" size="16" />
7727 <output>
7728 <port id="0" precision="I32">
7729 <dim>4</dim>
7730 </port>
7731 </output>
7732 </layer>
7733 <layer id="501" name="__module.encoder.layer.7.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
7734 <input>
7735 <port id="0" precision="FP32">
7736 <dim>-1</dim>
7737 <dim>12</dim>
7738 <dim>-1</dim>
7739 <dim>32</dim>
7740 </port>
7741 <port id="1" precision="I32">
7742 <dim>4</dim>
7743 </port>
7744 </input>
7745 <output>
7746 <port id="2" precision="FP32" names="664,attn_output.31">
7747 <dim>-1</dim>
7748 <dim>-1</dim>
7749 <dim>12</dim>
7750 <dim>32</dim>
7751 </port>
7752 </output>
7753 </layer>
7754 <layer id="502" name="__module.encoder.layer.7.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
7755 <data output_type="i64" />
7756 <input>
7757 <port id="0" precision="FP32">
7758 <dim>-1</dim>
7759 <dim>-1</dim>
7760 <dim>384</dim>
7761 </port>
7762 </input>
7763 <output>
7764 <port id="1" precision="I64">
7765 <dim>3</dim>
7766 </port>
7767 </output>
7768 </layer>
7769 <layer id="503" name="Constant_102891" type="Const" version="opset1">
7770 <data element_type="i64" shape="2" offset="386627704" size="16" />
7771 <output>
7772 <port id="0" precision="I64">
7773 <dim>2</dim>
7774 </port>
7775 </output>
7776 </layer>
7777 <layer id="504" name="Constant_102892" type="Const" version="opset1">
7778 <data element_type="i64" shape="" offset="384850436" size="8" />
7779 <output>
7780 <port id="0" precision="I64" />
7781 </output>
7782 </layer>
7783 <layer id="505" name="Gather_102893" type="Gather" version="opset8">
7784 <data batch_dims="0" />
7785 <input>
7786 <port id="0" precision="I64">
7787 <dim>3</dim>
7788 </port>
7789 <port id="1" precision="I64">
7790 <dim>2</dim>
7791 </port>
7792 <port id="2" precision="I64" />
7793 </input>
7794 <output>
7795 <port id="3" precision="I64">
7796 <dim>2</dim>
7797 </port>
7798 </output>
7799 </layer>
7800 <layer id="506" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
7801 <data axis="0" />
7802 <input>
7803 <port id="0" precision="I64">
7804 <dim>2</dim>
7805 </port>
7806 <port id="1" precision="I64">
7807 <dim>1</dim>
7808 </port>
7809 </input>
7810 <output>
7811 <port id="2" precision="I64" names="665">
7812 <dim>3</dim>
7813 </port>
7814 </output>
7815 </layer>
7816 <layer id="507" name="__module.encoder.layer.7.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
7817 <data special_zero="false" />
7818 <input>
7819 <port id="0" precision="FP32">
7820 <dim>-1</dim>
7821 <dim>-1</dim>
7822 <dim>12</dim>
7823 <dim>32</dim>
7824 </port>
7825 <port id="1" precision="I64">
7826 <dim>3</dim>
7827 </port>
7828 </input>
7829 <output>
7830 <port id="2" precision="FP32" names="666">
7831 <dim>-1</dim>
7832 <dim>-1</dim>
7833 <dim>384</dim>
7834 </port>
7835 </output>
7836 </layer>
7837 <layer id="508" name="self.encoder.layer.7.attention.output.dense.weight" type="Const" version="opset1">
7838 <data element_type="f32" shape="384, 384" offset="436312720" size="589824" />
7839 <output>
7840 <port id="0" precision="FP32" names="self.encoder.layer.7.attention.output.dense.weight">
7841 <dim>384</dim>
7842 <dim>384</dim>
7843 </port>
7844 </output>
7845 </layer>
7846 <layer id="509" name="__module.encoder.layer.7.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
7847 <data transpose_a="false" transpose_b="true" />
7848 <input>
7849 <port id="0" precision="FP32">
7850 <dim>-1</dim>
7851 <dim>-1</dim>
7852 <dim>384</dim>
7853 </port>
7854 <port id="1" precision="FP32">
7855 <dim>384</dim>
7856 <dim>384</dim>
7857 </port>
7858 </input>
7859 <output>
7860 <port id="2" precision="FP32">
7861 <dim>-1</dim>
7862 <dim>-1</dim>
7863 <dim>384</dim>
7864 </port>
7865 </output>
7866 </layer>
7867 <layer id="510" name="Constant_103759" type="Const" version="opset1">
7868 <data element_type="f32" shape="1, 1, 384" offset="436902544" size="1536" />
7869 <output>
7870 <port id="0" precision="FP32">
7871 <dim>1</dim>
7872 <dim>1</dim>
7873 <dim>384</dim>
7874 </port>
7875 </output>
7876 </layer>
7877 <layer id="511" name="__module.encoder.layer.7.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
7878 <data auto_broadcast="numpy" />
7879 <input>
7880 <port id="0" precision="FP32">
7881 <dim>-1</dim>
7882 <dim>-1</dim>
7883 <dim>384</dim>
7884 </port>
7885 <port id="1" precision="FP32">
7886 <dim>1</dim>
7887 <dim>1</dim>
7888 <dim>384</dim>
7889 </port>
7890 </input>
7891 <output>
7892 <port id="2" precision="FP32" names="672,input.31">
7893 <dim>-1</dim>
7894 <dim>-1</dim>
7895 <dim>384</dim>
7896 </port>
7897 </output>
7898 </layer>
7899 <layer id="512" name="__module.encoder.layer.7.attention.output/aten::add/Add" type="Add" version="opset1">
7900 <data auto_broadcast="numpy" />
7901 <input>
7902 <port id="0" precision="FP32">
7903 <dim>-1</dim>
7904 <dim>-1</dim>
7905 <dim>384</dim>
7906 </port>
7907 <port id="1" precision="FP32">
7908 <dim>-1</dim>
7909 <dim>-1</dim>
7910 <dim>384</dim>
7911 </port>
7912 </input>
7913 <output>
7914 <port id="2" precision="FP32" names="674">
7915 <dim>-1</dim>
7916 <dim>-1</dim>
7917 <dim>384</dim>
7918 </port>
7919 </output>
7920 </layer>
7921 <layer id="513" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
7922 <data element_type="i32" shape="1" offset="384850452" size="4" />
7923 <output>
7924 <port id="0" precision="I32">
7925 <dim>1</dim>
7926 </port>
7927 </output>
7928 </layer>
7929 <layer id="514" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
7930 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
7931 <input>
7932 <port id="0" precision="FP32">
7933 <dim>-1</dim>
7934 <dim>-1</dim>
7935 <dim>384</dim>
7936 </port>
7937 <port id="1" precision="I32">
7938 <dim>1</dim>
7939 </port>
7940 </input>
7941 <output>
7942 <port id="2" precision="FP32">
7943 <dim>-1</dim>
7944 <dim>-1</dim>
7945 <dim>384</dim>
7946 </port>
7947 </output>
7948 </layer>
7949 <layer id="515" name="Constant_103760" type="Const" version="opset1">
7950 <data element_type="f32" shape="1, 1, 384" offset="436904080" size="1536" />
7951 <output>
7952 <port id="0" precision="FP32">
7953 <dim>1</dim>
7954 <dim>1</dim>
7955 <dim>384</dim>
7956 </port>
7957 </output>
7958 </layer>
7959 <layer id="516" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
7960 <data auto_broadcast="numpy" />
7961 <input>
7962 <port id="0" precision="FP32">
7963 <dim>-1</dim>
7964 <dim>-1</dim>
7965 <dim>384</dim>
7966 </port>
7967 <port id="1" precision="FP32">
7968 <dim>1</dim>
7969 <dim>1</dim>
7970 <dim>384</dim>
7971 </port>
7972 </input>
7973 <output>
7974 <port id="2" precision="FP32">
7975 <dim>-1</dim>
7976 <dim>-1</dim>
7977 <dim>384</dim>
7978 </port>
7979 </output>
7980 </layer>
7981 <layer id="517" name="Constant_103761" type="Const" version="opset1">
7982 <data element_type="f32" shape="1, 1, 384" offset="436905616" size="1536" />
7983 <output>
7984 <port id="0" precision="FP32">
7985 <dim>1</dim>
7986 <dim>1</dim>
7987 <dim>384</dim>
7988 </port>
7989 </output>
7990 </layer>
7991 <layer id="518" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
7992 <data auto_broadcast="numpy" />
7993 <input>
7994 <port id="0" precision="FP32">
7995 <dim>-1</dim>
7996 <dim>-1</dim>
7997 <dim>384</dim>
7998 </port>
7999 <port id="1" precision="FP32">
8000 <dim>1</dim>
8001 <dim>1</dim>
8002 <dim>384</dim>
8003 </port>
8004 </input>
8005 <output>
8006 <port id="2" precision="FP32" names="678,input_tensor.15">
8007 <dim>-1</dim>
8008 <dim>-1</dim>
8009 <dim>384</dim>
8010 </port>
8011 </output>
8012 </layer>
8013 <layer id="519" name="self.encoder.layer.7.intermediate.dense.weight" type="Const" version="opset1">
8014 <data element_type="f32" shape="1536, 384" offset="436907152" size="2359296" />
8015 <output>
8016 <port id="0" precision="FP32" names="self.encoder.layer.7.intermediate.dense.weight">
8017 <dim>1536</dim>
8018 <dim>384</dim>
8019 </port>
8020 </output>
8021 </layer>
8022 <layer id="520" name="__module.encoder.layer.7.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
8023 <data transpose_a="false" transpose_b="true" />
8024 <input>
8025 <port id="0" precision="FP32">
8026 <dim>-1</dim>
8027 <dim>-1</dim>
8028 <dim>384</dim>
8029 </port>
8030 <port id="1" precision="FP32">
8031 <dim>1536</dim>
8032 <dim>384</dim>
8033 </port>
8034 </input>
8035 <output>
8036 <port id="2" precision="FP32">
8037 <dim>-1</dim>
8038 <dim>-1</dim>
8039 <dim>1536</dim>
8040 </port>
8041 </output>
8042 </layer>
8043 <layer id="521" name="Constant_103762" type="Const" version="opset1">
8044 <data element_type="f32" shape="1, 1, 1536" offset="439266448" size="6144" />
8045 <output>
8046 <port id="0" precision="FP32">
8047 <dim>1</dim>
8048 <dim>1</dim>
8049 <dim>1536</dim>
8050 </port>
8051 </output>
8052 </layer>
8053 <layer id="522" name="__module.encoder.layer.7.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
8054 <data auto_broadcast="numpy" />
8055 <input>
8056 <port id="0" precision="FP32">
8057 <dim>-1</dim>
8058 <dim>-1</dim>
8059 <dim>1536</dim>
8060 </port>
8061 <port id="1" precision="FP32">
8062 <dim>1</dim>
8063 <dim>1</dim>
8064 <dim>1536</dim>
8065 </port>
8066 </input>
8067 <output>
8068 <port id="2" precision="FP32" names="683">
8069 <dim>-1</dim>
8070 <dim>-1</dim>
8071 <dim>1536</dim>
8072 </port>
8073 </output>
8074 </layer>
8075 <layer id="523" name="__module.encoder.layer.7.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
8076 <data approximation_mode="ERF" />
8077 <input>
8078 <port id="0" precision="FP32">
8079 <dim>-1</dim>
8080 <dim>-1</dim>
8081 <dim>1536</dim>
8082 </port>
8083 </input>
8084 <output>
8085 <port id="1" precision="FP32" names="684">
8086 <dim>-1</dim>
8087 <dim>-1</dim>
8088 <dim>1536</dim>
8089 </port>
8090 </output>
8091 </layer>
8092 <layer id="524" name="self.encoder.layer.7.output.dense.weight" type="Const" version="opset1">
8093 <data element_type="f32" shape="384, 1536" offset="439272592" size="2359296" />
8094 <output>
8095 <port id="0" precision="FP32" names="self.encoder.layer.7.output.dense.weight">
8096 <dim>384</dim>
8097 <dim>1536</dim>
8098 </port>
8099 </output>
8100 </layer>
8101 <layer id="525" name="__module.encoder.layer.7.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
8102 <data transpose_a="false" transpose_b="true" />
8103 <input>
8104 <port id="0" precision="FP32">
8105 <dim>-1</dim>
8106 <dim>-1</dim>
8107 <dim>1536</dim>
8108 </port>
8109 <port id="1" precision="FP32">
8110 <dim>384</dim>
8111 <dim>1536</dim>
8112 </port>
8113 </input>
8114 <output>
8115 <port id="2" precision="FP32">
8116 <dim>-1</dim>
8117 <dim>-1</dim>
8118 <dim>384</dim>
8119 </port>
8120 </output>
8121 </layer>
8122 <layer id="526" name="Constant_103763" type="Const" version="opset1">
8123 <data element_type="f32" shape="1, 1, 384" offset="441631888" size="1536" />
8124 <output>
8125 <port id="0" precision="FP32">
8126 <dim>1</dim>
8127 <dim>1</dim>
8128 <dim>384</dim>
8129 </port>
8130 </output>
8131 </layer>
8132 <layer id="527" name="__module.encoder.layer.7.output.dense/aten::linear/Add" type="Add" version="opset1">
8133 <data auto_broadcast="numpy" />
8134 <input>
8135 <port id="0" precision="FP32">
8136 <dim>-1</dim>
8137 <dim>-1</dim>
8138 <dim>384</dim>
8139 </port>
8140 <port id="1" precision="FP32">
8141 <dim>1</dim>
8142 <dim>1</dim>
8143 <dim>384</dim>
8144 </port>
8145 </input>
8146 <output>
8147 <port id="2" precision="FP32" names="690,input.33">
8148 <dim>-1</dim>
8149 <dim>-1</dim>
8150 <dim>384</dim>
8151 </port>
8152 </output>
8153 </layer>
8154 <layer id="528" name="__module.encoder.layer.7.output/aten::add/Add" type="Add" version="opset1">
8155 <data auto_broadcast="numpy" />
8156 <input>
8157 <port id="0" precision="FP32">
8158 <dim>-1</dim>
8159 <dim>-1</dim>
8160 <dim>384</dim>
8161 </port>
8162 <port id="1" precision="FP32">
8163 <dim>-1</dim>
8164 <dim>-1</dim>
8165 <dim>384</dim>
8166 </port>
8167 </input>
8168 <output>
8169 <port id="2" precision="FP32" names="692">
8170 <dim>-1</dim>
8171 <dim>-1</dim>
8172 <dim>384</dim>
8173 </port>
8174 </output>
8175 </layer>
8176 <layer id="529" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
8177 <data element_type="i32" shape="1" offset="384850452" size="4" />
8178 <output>
8179 <port id="0" precision="I32">
8180 <dim>1</dim>
8181 </port>
8182 </output>
8183 </layer>
8184 <layer id="530" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
8185 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
8186 <input>
8187 <port id="0" precision="FP32">
8188 <dim>-1</dim>
8189 <dim>-1</dim>
8190 <dim>384</dim>
8191 </port>
8192 <port id="1" precision="I32">
8193 <dim>1</dim>
8194 </port>
8195 </input>
8196 <output>
8197 <port id="2" precision="FP32">
8198 <dim>-1</dim>
8199 <dim>-1</dim>
8200 <dim>384</dim>
8201 </port>
8202 </output>
8203 </layer>
8204 <layer id="531" name="Constant_103764" type="Const" version="opset1">
8205 <data element_type="f32" shape="1, 1, 384" offset="441633424" size="1536" />
8206 <output>
8207 <port id="0" precision="FP32">
8208 <dim>1</dim>
8209 <dim>1</dim>
8210 <dim>384</dim>
8211 </port>
8212 </output>
8213 </layer>
8214 <layer id="532" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
8215 <data auto_broadcast="numpy" />
8216 <input>
8217 <port id="0" precision="FP32">
8218 <dim>-1</dim>
8219 <dim>-1</dim>
8220 <dim>384</dim>
8221 </port>
8222 <port id="1" precision="FP32">
8223 <dim>1</dim>
8224 <dim>1</dim>
8225 <dim>384</dim>
8226 </port>
8227 </input>
8228 <output>
8229 <port id="2" precision="FP32">
8230 <dim>-1</dim>
8231 <dim>-1</dim>
8232 <dim>384</dim>
8233 </port>
8234 </output>
8235 </layer>
8236 <layer id="533" name="Constant_103765" type="Const" version="opset1">
8237 <data element_type="f32" shape="1, 1, 384" offset="441634960" size="1536" />
8238 <output>
8239 <port id="0" precision="FP32">
8240 <dim>1</dim>
8241 <dim>1</dim>
8242 <dim>384</dim>
8243 </port>
8244 </output>
8245 </layer>
8246 <layer id="534" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
8247 <data auto_broadcast="numpy" />
8248 <input>
8249 <port id="0" precision="FP32">
8250 <dim>-1</dim>
8251 <dim>-1</dim>
8252 <dim>384</dim>
8253 </port>
8254 <port id="1" precision="FP32">
8255 <dim>1</dim>
8256 <dim>1</dim>
8257 <dim>384</dim>
8258 </port>
8259 </input>
8260 <output>
8261 <port id="2" precision="FP32" names="696,hidden_states.49">
8262 <dim>-1</dim>
8263 <dim>-1</dim>
8264 <dim>384</dim>
8265 </port>
8266 </output>
8267 </layer>
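<!-- Editorial annotation (not emitted by the converter): layers 535-594 below form
     encoder block 8, repeating the pattern used by every encoder block in this model:
     Q/K/V projections as MatMul(transpose_b=true) + bias Add (384 -> 384), a Reshape
     to [batch, seq, 12, 32] followed by a Transpose to [batch, 12, seq, 32]
     (12 heads x 32 dims each), ScaledDotProductAttention, an output projection with
     residual Add, LayerNorm decomposed as MVN (eps ~= 1e-12) * gamma + beta, and
     finally the 384 -> 1536 GELU(ERF) -> 384 feed-forward sub-block. -->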
8268 <layer id="535" name="self.encoder.layer.8.attention.self.query.weight" type="Const" version="opset1">
8269 <data element_type="f32" shape="384, 384" offset="441636496" size="589824" />
8270 <output>
8271 <port id="0" precision="FP32" names="self.encoder.layer.8.attention.self.query.weight">
8272 <dim>384</dim>
8273 <dim>384</dim>
8274 </port>
8275 </output>
8276 </layer>
8277 <layer id="536" name="__module.encoder.layer.8.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
8278 <data transpose_a="false" transpose_b="true" />
8279 <input>
8280 <port id="0" precision="FP32">
8281 <dim>-1</dim>
8282 <dim>-1</dim>
8283 <dim>384</dim>
8284 </port>
8285 <port id="1" precision="FP32">
8286 <dim>384</dim>
8287 <dim>384</dim>
8288 </port>
8289 </input>
8290 <output>
8291 <port id="2" precision="FP32">
8292 <dim>-1</dim>
8293 <dim>-1</dim>
8294 <dim>384</dim>
8295 </port>
8296 </output>
8297 </layer>
8298 <layer id="537" name="Constant_103766" type="Const" version="opset1">
8299 <data element_type="f32" shape="1, 1, 384" offset="442226320" size="1536" />
8300 <output>
8301 <port id="0" precision="FP32">
8302 <dim>1</dim>
8303 <dim>1</dim>
8304 <dim>384</dim>
8305 </port>
8306 </output>
8307 </layer>
8308 <layer id="538" name="__module.encoder.layer.8.attention.self.query/aten::linear/Add" type="Add" version="opset1">
8309 <data auto_broadcast="numpy" />
8310 <input>
8311 <port id="0" precision="FP32">
8312 <dim>-1</dim>
8313 <dim>-1</dim>
8314 <dim>384</dim>
8315 </port>
8316 <port id="1" precision="FP32">
8317 <dim>1</dim>
8318 <dim>1</dim>
8319 <dim>384</dim>
8320 </port>
8321 </input>
8322 <output>
8323 <port id="2" precision="FP32" names="709,x.97">
8324 <dim>-1</dim>
8325 <dim>-1</dim>
8326 <dim>384</dim>
8327 </port>
8328 </output>
8329 </layer>
8330 <layer id="539" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
8331 <data element_type="i64" shape="4" offset="385444888" size="32" />
8332 <output>
8333 <port id="0" precision="I64">
8334 <dim>4</dim>
8335 </port>
8336 </output>
8337 </layer>
8338 <layer id="540" name="__module.encoder.layer.8.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
8339 <data special_zero="true" />
8340 <input>
8341 <port id="0" precision="FP32">
8342 <dim>-1</dim>
8343 <dim>-1</dim>
8344 <dim>384</dim>
8345 </port>
8346 <port id="1" precision="I64">
8347 <dim>4</dim>
8348 </port>
8349 </input>
8350 <output>
8351 <port id="2" precision="FP32" names="713,x.99">
8352 <dim>-1</dim>
8353 <dim>-1</dim>
8354 <dim>12</dim>
8355 <dim>32</dim>
8356 </port>
8357 </output>
8358 </layer>
8359 <layer id="541" name="Constant_96375" type="Const" version="opset1">
8360 <data element_type="i64" shape="4" offset="385444920" size="32" />
8361 <output>
8362 <port id="0" precision="I64" names="714">
8363 <dim>4</dim>
8364 </port>
8365 </output>
8366 </layer>
8367 <layer id="542" name="__module.encoder.layer.8.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
8368 <input>
8369 <port id="0" precision="FP32">
8370 <dim>-1</dim>
8371 <dim>-1</dim>
8372 <dim>12</dim>
8373 <dim>32</dim>
8374 </port>
8375 <port id="1" precision="I64">
8376 <dim>4</dim>
8377 </port>
8378 </input>
8379 <output>
8380 <port id="2" precision="FP32" names="715">
8381 <dim>-1</dim>
8382 <dim>12</dim>
8383 <dim>-1</dim>
8384 <dim>32</dim>
8385 </port>
8386 </output>
8387 </layer>
8388 <layer id="543" name="self.encoder.layer.8.attention.self.key.weight" type="Const" version="opset1">
8389 <data element_type="f32" shape="384, 384" offset="442227856" size="589824" />
8390 <output>
8391 <port id="0" precision="FP32" names="self.encoder.layer.8.attention.self.key.weight">
8392 <dim>384</dim>
8393 <dim>384</dim>
8394 </port>
8395 </output>
8396 </layer>
8397 <layer id="544" name="__module.encoder.layer.8.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
8398 <data transpose_a="false" transpose_b="true" />
8399 <input>
8400 <port id="0" precision="FP32">
8401 <dim>-1</dim>
8402 <dim>-1</dim>
8403 <dim>384</dim>
8404 </port>
8405 <port id="1" precision="FP32">
8406 <dim>384</dim>
8407 <dim>384</dim>
8408 </port>
8409 </input>
8410 <output>
8411 <port id="2" precision="FP32">
8412 <dim>-1</dim>
8413 <dim>-1</dim>
8414 <dim>384</dim>
8415 </port>
8416 </output>
8417 </layer>
8418 <layer id="545" name="Constant_103767" type="Const" version="opset1">
8419 <data element_type="f32" shape="1, 1, 384" offset="442817680" size="1536" />
8420 <output>
8421 <port id="0" precision="FP32">
8422 <dim>1</dim>
8423 <dim>1</dim>
8424 <dim>384</dim>
8425 </port>
8426 </output>
8427 </layer>
8428 <layer id="546" name="__module.encoder.layer.8.attention.self.key/aten::linear/Add" type="Add" version="opset1">
8429 <data auto_broadcast="numpy" />
8430 <input>
8431 <port id="0" precision="FP32">
8432 <dim>-1</dim>
8433 <dim>-1</dim>
8434 <dim>384</dim>
8435 </port>
8436 <port id="1" precision="FP32">
8437 <dim>1</dim>
8438 <dim>1</dim>
8439 <dim>384</dim>
8440 </port>
8441 </input>
8442 <output>
8443 <port id="2" precision="FP32" names="718,x.101">
8444 <dim>-1</dim>
8445 <dim>-1</dim>
8446 <dim>384</dim>
8447 </port>
8448 </output>
8449 </layer>
8450 <layer id="547" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
8451 <data element_type="i64" shape="4" offset="385444888" size="32" />
8452 <output>
8453 <port id="0" precision="I64">
8454 <dim>4</dim>
8455 </port>
8456 </output>
8457 </layer>
8458 <layer id="548" name="__module.encoder.layer.8.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
8459 <data special_zero="true" />
8460 <input>
8461 <port id="0" precision="FP32">
8462 <dim>-1</dim>
8463 <dim>-1</dim>
8464 <dim>384</dim>
8465 </port>
8466 <port id="1" precision="I64">
8467 <dim>4</dim>
8468 </port>
8469 </input>
8470 <output>
8471 <port id="2" precision="FP32" names="722,x.103">
8472 <dim>-1</dim>
8473 <dim>-1</dim>
8474 <dim>12</dim>
8475 <dim>32</dim>
8476 </port>
8477 </output>
8478 </layer>
8479 <layer id="549" name="Constant_96398" type="Const" version="opset1">
8480 <data element_type="i64" shape="4" offset="385444920" size="32" />
8481 <output>
8482 <port id="0" precision="I64" names="723">
8483 <dim>4</dim>
8484 </port>
8485 </output>
8486 </layer>
8487 <layer id="550" name="__module.encoder.layer.8.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
8488 <input>
8489 <port id="0" precision="FP32">
8490 <dim>-1</dim>
8491 <dim>-1</dim>
8492 <dim>12</dim>
8493 <dim>32</dim>
8494 </port>
8495 <port id="1" precision="I64">
8496 <dim>4</dim>
8497 </port>
8498 </input>
8499 <output>
8500 <port id="2" precision="FP32" names="724">
8501 <dim>-1</dim>
8502 <dim>12</dim>
8503 <dim>-1</dim>
8504 <dim>32</dim>
8505 </port>
8506 </output>
8507 </layer>
8508 <layer id="551" name="self.encoder.layer.8.attention.self.value.weight" type="Const" version="opset1">
8509 <data element_type="f32" shape="384, 384" offset="442819216" size="589824" />
8510 <output>
8511 <port id="0" precision="FP32" names="self.encoder.layer.8.attention.self.value.weight">
8512 <dim>384</dim>
8513 <dim>384</dim>
8514 </port>
8515 </output>
8516 </layer>
8517 <layer id="552" name="__module.encoder.layer.8.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
8518 <data transpose_a="false" transpose_b="true" />
8519 <input>
8520 <port id="0" precision="FP32">
8521 <dim>-1</dim>
8522 <dim>-1</dim>
8523 <dim>384</dim>
8524 </port>
8525 <port id="1" precision="FP32">
8526 <dim>384</dim>
8527 <dim>384</dim>
8528 </port>
8529 </input>
8530 <output>
8531 <port id="2" precision="FP32">
8532 <dim>-1</dim>
8533 <dim>-1</dim>
8534 <dim>384</dim>
8535 </port>
8536 </output>
8537 </layer>
8538 <layer id="553" name="Constant_103768" type="Const" version="opset1">
8539 <data element_type="f32" shape="1, 1, 384" offset="443409040" size="1536" />
8540 <output>
8541 <port id="0" precision="FP32">
8542 <dim>1</dim>
8543 <dim>1</dim>
8544 <dim>384</dim>
8545 </port>
8546 </output>
8547 </layer>
8548 <layer id="554" name="__module.encoder.layer.8.attention.self.value/aten::linear/Add" type="Add" version="opset1">
8549 <data auto_broadcast="numpy" />
8550 <input>
8551 <port id="0" precision="FP32">
8552 <dim>-1</dim>
8553 <dim>-1</dim>
8554 <dim>384</dim>
8555 </port>
8556 <port id="1" precision="FP32">
8557 <dim>1</dim>
8558 <dim>1</dim>
8559 <dim>384</dim>
8560 </port>
8561 </input>
8562 <output>
8563 <port id="2" precision="FP32" names="727,x.105">
8564 <dim>-1</dim>
8565 <dim>-1</dim>
8566 <dim>384</dim>
8567 </port>
8568 </output>
8569 </layer>
8570 <layer id="555" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
8571 <data element_type="i64" shape="4" offset="385444888" size="32" />
8572 <output>
8573 <port id="0" precision="I64">
8574 <dim>4</dim>
8575 </port>
8576 </output>
8577 </layer>
8578 <layer id="556" name="__module.encoder.layer.8.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
8579 <data special_zero="true" />
8580 <input>
8581 <port id="0" precision="FP32">
8582 <dim>-1</dim>
8583 <dim>-1</dim>
8584 <dim>384</dim>
8585 </port>
8586 <port id="1" precision="I64">
8587 <dim>4</dim>
8588 </port>
8589 </input>
8590 <output>
8591 <port id="2" precision="FP32" names="731,x.107">
8592 <dim>-1</dim>
8593 <dim>-1</dim>
8594 <dim>12</dim>
8595 <dim>32</dim>
8596 </port>
8597 </output>
8598 </layer>
8599 <layer id="557" name="Constant_96421" type="Const" version="opset1">
8600 <data element_type="i64" shape="4" offset="385444920" size="32" />
8601 <output>
8602 <port id="0" precision="I64" names="732">
8603 <dim>4</dim>
8604 </port>
8605 </output>
8606 </layer>
8607 <layer id="558" name="__module.encoder.layer.8.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
8608 <input>
8609 <port id="0" precision="FP32">
8610 <dim>-1</dim>
8611 <dim>-1</dim>
8612 <dim>12</dim>
8613 <dim>32</dim>
8614 </port>
8615 <port id="1" precision="I64">
8616 <dim>4</dim>
8617 </port>
8618 </input>
8619 <output>
8620 <port id="2" precision="FP32" names="733">
8621 <dim>-1</dim>
8622 <dim>12</dim>
8623 <dim>-1</dim>
8624 <dim>32</dim>
8625 </port>
8626 </output>
8627 </layer>
8628 <layer id="559" name="__module.encoder.layer.8.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
8629 <data causal="false" />
8630 <input>
8631 <port id="0" precision="FP32">
8632 <dim>-1</dim>
8633 <dim>12</dim>
8634 <dim>-1</dim>
8635 <dim>32</dim>
8636 </port>
8637 <port id="1" precision="FP32">
8638 <dim>-1</dim>
8639 <dim>12</dim>
8640 <dim>-1</dim>
8641 <dim>32</dim>
8642 </port>
8643 <port id="2" precision="FP32">
8644 <dim>-1</dim>
8645 <dim>12</dim>
8646 <dim>-1</dim>
8647 <dim>32</dim>
8648 </port>
8649 <port id="3" precision="FP32">
8650 <dim>-1</dim>
8651 <dim>1</dim>
8652 <dim>-1</dim>
8653 <dim>-1</dim>
8654 </port>
8655 </input>
8656 <output>
8657 <port id="4" precision="FP32" names="734,attn_output.33">
8658 <dim>-1</dim>
8659 <dim>12</dim>
8660 <dim>-1</dim>
8661 <dim>32</dim>
8662 </port>
8663 </output>
8664 </layer>
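<!-- Editorial annotation: assuming OpenVINO's default SDPA scaling of
     1/sqrt(head_size), this node computes, per head,
     softmax(Q x K^T / sqrt(32) + attention_mask) x V, with the mask input
     broadcast from shape [batch, 1, -1, -1] across the 12 heads. -->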
8665 <layer id="560" name="__module.encoder.layer.8.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
8666 <data element_type="i32" shape="4" offset="386627688" size="16" />
8667 <output>
8668 <port id="0" precision="I32">
8669 <dim>4</dim>
8670 </port>
8671 </output>
8672 </layer>
8673 <layer id="561" name="__module.encoder.layer.8.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
8674 <input>
8675 <port id="0" precision="FP32">
8676 <dim>-1</dim>
8677 <dim>12</dim>
8678 <dim>-1</dim>
8679 <dim>32</dim>
8680 </port>
8681 <port id="1" precision="I32">
8682 <dim>4</dim>
8683 </port>
8684 </input>
8685 <output>
8686 <port id="2" precision="FP32" names="735,attn_output.35">
8687 <dim>-1</dim>
8688 <dim>-1</dim>
8689 <dim>12</dim>
8690 <dim>32</dim>
8691 </port>
8692 </output>
8693 </layer>
8694 <layer id="562" name="__module.encoder.layer.8.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
8695 <data output_type="i64" />
8696 <input>
8697 <port id="0" precision="FP32">
8698 <dim>-1</dim>
8699 <dim>-1</dim>
8700 <dim>384</dim>
8701 </port>
8702 </input>
8703 <output>
8704 <port id="1" precision="I64">
8705 <dim>3</dim>
8706 </port>
8707 </output>
8708 </layer>
8709 <layer id="563" name="Constant_102911" type="Const" version="opset1">
8710 <data element_type="i64" shape="2" offset="386627704" size="16" />
8711 <output>
8712 <port id="0" precision="I64">
8713 <dim>2</dim>
8714 </port>
8715 </output>
8716 </layer>
8717 <layer id="564" name="Constant_102912" type="Const" version="opset1">
8718 <data element_type="i64" shape="" offset="384850436" size="8" />
8719 <output>
8720 <port id="0" precision="I64" />
8721 </output>
8722 </layer>
8723 <layer id="565" name="Gather_102913" type="Gather" version="opset8">
8724 <data batch_dims="0" />
8725 <input>
8726 <port id="0" precision="I64">
8727 <dim>3</dim>
8728 </port>
8729 <port id="1" precision="I64">
8730 <dim>2</dim>
8731 </port>
8732 <port id="2" precision="I64" />
8733 </input>
8734 <output>
8735 <port id="3" precision="I64">
8736 <dim>2</dim>
8737 </port>
8738 </output>
8739 </layer>
8740 <layer id="566" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
8741 <data axis="0" />
8742 <input>
8743 <port id="0" precision="I64">
8744 <dim>2</dim>
8745 </port>
8746 <port id="1" precision="I64">
8747 <dim>1</dim>
8748 </port>
8749 </input>
8750 <output>
8751 <port id="2" precision="I64" names="736">
8752 <dim>3</dim>
8753 </port>
8754 </output>
8755 </layer>
8756 <layer id="567" name="__module.encoder.layer.8.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
8757 <data special_zero="false" />
8758 <input>
8759 <port id="0" precision="FP32">
8760 <dim>-1</dim>
8761 <dim>-1</dim>
8762 <dim>12</dim>
8763 <dim>32</dim>
8764 </port>
8765 <port id="1" precision="I64">
8766 <dim>3</dim>
8767 </port>
8768 </input>
8769 <output>
8770 <port id="2" precision="FP32" names="737">
8771 <dim>-1</dim>
8772 <dim>-1</dim>
8773 <dim>384</dim>
8774 </port>
8775 </output>
8776 </layer>
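<!-- Editorial annotation: layers 562-567 above rebuild the merged shape after
     attention: ShapeOf reads the [batch, seq, 384] reference tensor, Gather picks
     its two leading dims (batch and sequence length), Concat appends the constant
     384, and Reshape (special_zero=false) folds the 12 x 32 head layout back into
     a single 384-wide feature axis. -->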
8777 <layer id="568" name="self.encoder.layer.8.attention.output.dense.weight" type="Const" version="opset1">
8778 <data element_type="f32" shape="384, 384" offset="443410576" size="589824" />
8779 <output>
8780 <port id="0" precision="FP32" names="self.encoder.layer.8.attention.output.dense.weight">
8781 <dim>384</dim>
8782 <dim>384</dim>
8783 </port>
8784 </output>
8785 </layer>
8786 <layer id="569" name="__module.encoder.layer.8.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
8787 <data transpose_a="false" transpose_b="true" />
8788 <input>
8789 <port id="0" precision="FP32">
8790 <dim>-1</dim>
8791 <dim>-1</dim>
8792 <dim>384</dim>
8793 </port>
8794 <port id="1" precision="FP32">
8795 <dim>384</dim>
8796 <dim>384</dim>
8797 </port>
8798 </input>
8799 <output>
8800 <port id="2" precision="FP32">
8801 <dim>-1</dim>
8802 <dim>-1</dim>
8803 <dim>384</dim>
8804 </port>
8805 </output>
8806 </layer>
8807 <layer id="570" name="Constant_103769" type="Const" version="opset1">
8808 <data element_type="f32" shape="1, 1, 384" offset="444000400" size="1536" />
8809 <output>
8810 <port id="0" precision="FP32">
8811 <dim>1</dim>
8812 <dim>1</dim>
8813 <dim>384</dim>
8814 </port>
8815 </output>
8816 </layer>
8817 <layer id="571" name="__module.encoder.layer.8.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
8818 <data auto_broadcast="numpy" />
8819 <input>
8820 <port id="0" precision="FP32">
8821 <dim>-1</dim>
8822 <dim>-1</dim>
8823 <dim>384</dim>
8824 </port>
8825 <port id="1" precision="FP32">
8826 <dim>1</dim>
8827 <dim>1</dim>
8828 <dim>384</dim>
8829 </port>
8830 </input>
8831 <output>
8832 <port id="2" precision="FP32" names="743,input.35">
8833 <dim>-1</dim>
8834 <dim>-1</dim>
8835 <dim>384</dim>
8836 </port>
8837 </output>
8838 </layer>
8839 <layer id="572" name="__module.encoder.layer.8.attention.output/aten::add/Add" type="Add" version="opset1">
8840 <data auto_broadcast="numpy" />
8841 <input>
8842 <port id="0" precision="FP32">
8843 <dim>-1</dim>
8844 <dim>-1</dim>
8845 <dim>384</dim>
8846 </port>
8847 <port id="1" precision="FP32">
8848 <dim>-1</dim>
8849 <dim>-1</dim>
8850 <dim>384</dim>
8851 </port>
8852 </input>
8853 <output>
8854 <port id="2" precision="FP32" names="745">
8855 <dim>-1</dim>
8856 <dim>-1</dim>
8857 <dim>384</dim>
8858 </port>
8859 </output>
8860 </layer>
8861 <layer id="573" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
8862 <data element_type="i32" shape="1" offset="384850452" size="4" />
8863 <output>
8864 <port id="0" precision="I32">
8865 <dim>1</dim>
8866 </port>
8867 </output>
8868 </layer>
8869 <layer id="574" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
8870 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
8871 <input>
8872 <port id="0" precision="FP32">
8873 <dim>-1</dim>
8874 <dim>-1</dim>
8875 <dim>384</dim>
8876 </port>
8877 <port id="1" precision="I32">
8878 <dim>1</dim>
8879 </port>
8880 </input>
8881 <output>
8882 <port id="2" precision="FP32">
8883 <dim>-1</dim>
8884 <dim>-1</dim>
8885 <dim>384</dim>
8886 </port>
8887 </output>
8888 </layer>
8889 <layer id="575" name="Constant_103770" type="Const" version="opset1">
8890 <data element_type="f32" shape="1, 1, 384" offset="444001936" size="1536" />
8891 <output>
8892 <port id="0" precision="FP32">
8893 <dim>1</dim>
8894 <dim>1</dim>
8895 <dim>384</dim>
8896 </port>
8897 </output>
8898 </layer>
8899 <layer id="576" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
8900 <data auto_broadcast="numpy" />
8901 <input>
8902 <port id="0" precision="FP32">
8903 <dim>-1</dim>
8904 <dim>-1</dim>
8905 <dim>384</dim>
8906 </port>
8907 <port id="1" precision="FP32">
8908 <dim>1</dim>
8909 <dim>1</dim>
8910 <dim>384</dim>
8911 </port>
8912 </input>
8913 <output>
8914 <port id="2" precision="FP32">
8915 <dim>-1</dim>
8916 <dim>-1</dim>
8917 <dim>384</dim>
8918 </port>
8919 </output>
8920 </layer>
8921 <layer id="577" name="Constant_103771" type="Const" version="opset1">
8922 <data element_type="f32" shape="1, 1, 384" offset="444003472" size="1536" />
8923 <output>
8924 <port id="0" precision="FP32">
8925 <dim>1</dim>
8926 <dim>1</dim>
8927 <dim>384</dim>
8928 </port>
8929 </output>
8930 </layer>
8931 <layer id="578" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
8932 <data auto_broadcast="numpy" />
8933 <input>
8934 <port id="0" precision="FP32">
8935 <dim>-1</dim>
8936 <dim>-1</dim>
8937 <dim>384</dim>
8938 </port>
8939 <port id="1" precision="FP32">
8940 <dim>1</dim>
8941 <dim>1</dim>
8942 <dim>384</dim>
8943 </port>
8944 </input>
8945 <output>
8946 <port id="2" precision="FP32" names="749,input_tensor.17">
8947 <dim>-1</dim>
8948 <dim>-1</dim>
8949 <dim>384</dim>
8950 </port>
8951 </output>
8952 </layer>
8953 <layer id="579" name="self.encoder.layer.8.intermediate.dense.weight" type="Const" version="opset1">
8954 <data element_type="f32" shape="1536, 384" offset="444005008" size="2359296" />
8955 <output>
8956 <port id="0" precision="FP32" names="self.encoder.layer.8.intermediate.dense.weight">
8957 <dim>1536</dim>
8958 <dim>384</dim>
8959 </port>
8960 </output>
8961 </layer>
8962 <layer id="580" name="__module.encoder.layer.8.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
8963 <data transpose_a="false" transpose_b="true" />
8964 <input>
8965 <port id="0" precision="FP32">
8966 <dim>-1</dim>
8967 <dim>-1</dim>
8968 <dim>384</dim>
8969 </port>
8970 <port id="1" precision="FP32">
8971 <dim>1536</dim>
8972 <dim>384</dim>
8973 </port>
8974 </input>
8975 <output>
8976 <port id="2" precision="FP32">
8977 <dim>-1</dim>
8978 <dim>-1</dim>
8979 <dim>1536</dim>
8980 </port>
8981 </output>
8982 </layer>
8983 <layer id="581" name="Constant_103772" type="Const" version="opset1">
8984 <data element_type="f32" shape="1, 1, 1536" offset="446364304" size="6144" />
8985 <output>
8986 <port id="0" precision="FP32">
8987 <dim>1</dim>
8988 <dim>1</dim>
8989 <dim>1536</dim>
8990 </port>
8991 </output>
8992 </layer>
8993 <layer id="582" name="__module.encoder.layer.8.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
8994 <data auto_broadcast="numpy" />
8995 <input>
8996 <port id="0" precision="FP32">
8997 <dim>-1</dim>
8998 <dim>-1</dim>
8999 <dim>1536</dim>
9000 </port>
9001 <port id="1" precision="FP32">
9002 <dim>1</dim>
9003 <dim>1</dim>
9004 <dim>1536</dim>
9005 </port>
9006 </input>
9007 <output>
9008 <port id="2" precision="FP32" names="754">
9009 <dim>-1</dim>
9010 <dim>-1</dim>
9011 <dim>1536</dim>
9012 </port>
9013 </output>
9014 </layer>
9015 <layer id="583" name="__module.encoder.layer.8.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
9016 <data approximation_mode="ERF" />
9017 <input>
9018 <port id="0" precision="FP32">
9019 <dim>-1</dim>
9020 <dim>-1</dim>
9021 <dim>1536</dim>
9022 </port>
9023 </input>
9024 <output>
9025 <port id="1" precision="FP32" names="755">
9026 <dim>-1</dim>
9027 <dim>-1</dim>
9028 <dim>1536</dim>
9029 </port>
9030 </output>
9031 </layer>
9032 <layer id="584" name="self.encoder.layer.8.output.dense.weight" type="Const" version="opset1">
9033 <data element_type="f32" shape="384, 1536" offset="446370448" size="2359296" />
9034 <output>
9035 <port id="0" precision="FP32" names="self.encoder.layer.8.output.dense.weight">
9036 <dim>384</dim>
9037 <dim>1536</dim>
9038 </port>
9039 </output>
9040 </layer>
9041 <layer id="585" name="__module.encoder.layer.8.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
9042 <data transpose_a="false" transpose_b="true" />
9043 <input>
9044 <port id="0" precision="FP32">
9045 <dim>-1</dim>
9046 <dim>-1</dim>
9047 <dim>1536</dim>
9048 </port>
9049 <port id="1" precision="FP32">
9050 <dim>384</dim>
9051 <dim>1536</dim>
9052 </port>
9053 </input>
9054 <output>
9055 <port id="2" precision="FP32">
9056 <dim>-1</dim>
9057 <dim>-1</dim>
9058 <dim>384</dim>
9059 </port>
9060 </output>
9061 </layer>
9062 <layer id="586" name="Constant_103773" type="Const" version="opset1">
9063 <data element_type="f32" shape="1, 1, 384" offset="448729744" size="1536" />
9064 <output>
9065 <port id="0" precision="FP32">
9066 <dim>1</dim>
9067 <dim>1</dim>
9068 <dim>384</dim>
9069 </port>
9070 </output>
9071 </layer>
9072 <layer id="587" name="__module.encoder.layer.8.output.dense/aten::linear/Add" type="Add" version="opset1">
9073 <data auto_broadcast="numpy" />
9074 <input>
9075 <port id="0" precision="FP32">
9076 <dim>-1</dim>
9077 <dim>-1</dim>
9078 <dim>384</dim>
9079 </port>
9080 <port id="1" precision="FP32">
9081 <dim>1</dim>
9082 <dim>1</dim>
9083 <dim>384</dim>
9084 </port>
9085 </input>
9086 <output>
9087 <port id="2" precision="FP32" names="761,input.37">
9088 <dim>-1</dim>
9089 <dim>-1</dim>
9090 <dim>384</dim>
9091 </port>
9092 </output>
9093 </layer>
9094 <layer id="588" name="__module.encoder.layer.8.output/aten::add/Add" type="Add" version="opset1">
9095 <data auto_broadcast="numpy" />
9096 <input>
9097 <port id="0" precision="FP32">
9098 <dim>-1</dim>
9099 <dim>-1</dim>
9100 <dim>384</dim>
9101 </port>
9102 <port id="1" precision="FP32">
9103 <dim>-1</dim>
9104 <dim>-1</dim>
9105 <dim>384</dim>
9106 </port>
9107 </input>
9108 <output>
9109 <port id="2" precision="FP32" names="763">
9110 <dim>-1</dim>
9111 <dim>-1</dim>
9112 <dim>384</dim>
9113 </port>
9114 </output>
9115 </layer>
9116 <layer id="589" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
9117 <data element_type="i32" shape="1" offset="384850452" size="4" />
9118 <output>
9119 <port id="0" precision="I32">
9120 <dim>1</dim>
9121 </port>
9122 </output>
9123 </layer>
9124 <layer id="590" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
9125 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
9126 <input>
9127 <port id="0" precision="FP32">
9128 <dim>-1</dim>
9129 <dim>-1</dim>
9130 <dim>384</dim>
9131 </port>
9132 <port id="1" precision="I32">
9133 <dim>1</dim>
9134 </port>
9135 </input>
9136 <output>
9137 <port id="2" precision="FP32">
9138 <dim>-1</dim>
9139 <dim>-1</dim>
9140 <dim>384</dim>
9141 </port>
9142 </output>
9143 </layer>
9144 <layer id="591" name="Constant_103774" type="Const" version="opset1">
9145 <data element_type="f32" shape="1, 1, 384" offset="448731280" size="1536" />
9146 <output>
9147 <port id="0" precision="FP32">
9148 <dim>1</dim>
9149 <dim>1</dim>
9150 <dim>384</dim>
9151 </port>
9152 </output>
9153 </layer>
9154 <layer id="592" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
9155 <data auto_broadcast="numpy" />
9156 <input>
9157 <port id="0" precision="FP32">
9158 <dim>-1</dim>
9159 <dim>-1</dim>
9160 <dim>384</dim>
9161 </port>
9162 <port id="1" precision="FP32">
9163 <dim>1</dim>
9164 <dim>1</dim>
9165 <dim>384</dim>
9166 </port>
9167 </input>
9168 <output>
9169 <port id="2" precision="FP32">
9170 <dim>-1</dim>
9171 <dim>-1</dim>
9172 <dim>384</dim>
9173 </port>
9174 </output>
9175 </layer>
9176 <layer id="593" name="Constant_103775" type="Const" version="opset1">
9177 <data element_type="f32" shape="1, 1, 384" offset="448732816" size="1536" />
9178 <output>
9179 <port id="0" precision="FP32">
9180 <dim>1</dim>
9181 <dim>1</dim>
9182 <dim>384</dim>
9183 </port>
9184 </output>
9185 </layer>
9186 <layer id="594" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
9187 <data auto_broadcast="numpy" />
9188 <input>
9189 <port id="0" precision="FP32">
9190 <dim>-1</dim>
9191 <dim>-1</dim>
9192 <dim>384</dim>
9193 </port>
9194 <port id="1" precision="FP32">
9195 <dim>1</dim>
9196 <dim>1</dim>
9197 <dim>384</dim>
9198 </port>
9199 </input>
9200 <output>
9201 <port id="2" precision="FP32" names="767,hidden_states.55">
9202 <dim>-1</dim>
9203 <dim>-1</dim>
9204 <dim>384</dim>
9205 </port>
9206 </output>
9207 </layer>
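<!-- Editorial annotation: layers 595-654 below are encoder block 9; the graph
     structure is identical to block 8, only the Const weight offsets
     (448734352 onward) differ. -->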
9208 <layer id="595" name="self.encoder.layer.9.attention.self.query.weight" type="Const" version="opset1">
9209 <data element_type="f32" shape="384, 384" offset="448734352" size="589824" />
9210 <output>
9211 <port id="0" precision="FP32" names="self.encoder.layer.9.attention.self.query.weight">
9212 <dim>384</dim>
9213 <dim>384</dim>
9214 </port>
9215 </output>
9216 </layer>
9217 <layer id="596" name="__module.encoder.layer.9.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
9218 <data transpose_a="false" transpose_b="true" />
9219 <input>
9220 <port id="0" precision="FP32">
9221 <dim>-1</dim>
9222 <dim>-1</dim>
9223 <dim>384</dim>
9224 </port>
9225 <port id="1" precision="FP32">
9226 <dim>384</dim>
9227 <dim>384</dim>
9228 </port>
9229 </input>
9230 <output>
9231 <port id="2" precision="FP32">
9232 <dim>-1</dim>
9233 <dim>-1</dim>
9234 <dim>384</dim>
9235 </port>
9236 </output>
9237 </layer>
9238 <layer id="597" name="Constant_103776" type="Const" version="opset1">
9239 <data element_type="f32" shape="1, 1, 384" offset="449324176" size="1536" />
9240 <output>
9241 <port id="0" precision="FP32">
9242 <dim>1</dim>
9243 <dim>1</dim>
9244 <dim>384</dim>
9245 </port>
9246 </output>
9247 </layer>
9248 <layer id="598" name="__module.encoder.layer.9.attention.self.query/aten::linear/Add" type="Add" version="opset1">
9249 <data auto_broadcast="numpy" />
9250 <input>
9251 <port id="0" precision="FP32">
9252 <dim>-1</dim>
9253 <dim>-1</dim>
9254 <dim>384</dim>
9255 </port>
9256 <port id="1" precision="FP32">
9257 <dim>1</dim>
9258 <dim>1</dim>
9259 <dim>384</dim>
9260 </port>
9261 </input>
9262 <output>
9263 <port id="2" precision="FP32" names="780,x.109">
9264 <dim>-1</dim>
9265 <dim>-1</dim>
9266 <dim>384</dim>
9267 </port>
9268 </output>
9269 </layer>
9270 <layer id="599" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
9271 <data element_type="i64" shape="4" offset="385444888" size="32" />
9272 <output>
9273 <port id="0" precision="I64">
9274 <dim>4</dim>
9275 </port>
9276 </output>
9277 </layer>
9278 <layer id="600" name="__module.encoder.layer.9.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
9279 <data special_zero="true" />
9280 <input>
9281 <port id="0" precision="FP32">
9282 <dim>-1</dim>
9283 <dim>-1</dim>
9284 <dim>384</dim>
9285 </port>
9286 <port id="1" precision="I64">
9287 <dim>4</dim>
9288 </port>
9289 </input>
9290 <output>
9291 <port id="2" precision="FP32" names="784,x.111">
9292 <dim>-1</dim>
9293 <dim>-1</dim>
9294 <dim>12</dim>
9295 <dim>32</dim>
9296 </port>
9297 </output>
9298 </layer>
9299 <layer id="601" name="Constant_96601" type="Const" version="opset1">
9300 <data element_type="i64" shape="4" offset="385444920" size="32" />
9301 <output>
9302 <port id="0" precision="I64" names="785">
9303 <dim>4</dim>
9304 </port>
9305 </output>
9306 </layer>
9307 <layer id="602" name="__module.encoder.layer.9.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
9308 <input>
9309 <port id="0" precision="FP32">
9310 <dim>-1</dim>
9311 <dim>-1</dim>
9312 <dim>12</dim>
9313 <dim>32</dim>
9314 </port>
9315 <port id="1" precision="I64">
9316 <dim>4</dim>
9317 </port>
9318 </input>
9319 <output>
9320 <port id="2" precision="FP32" names="786">
9321 <dim>-1</dim>
9322 <dim>12</dim>
9323 <dim>-1</dim>
9324 <dim>32</dim>
9325 </port>
9326 </output>
9327 </layer>
9328 <layer id="603" name="self.encoder.layer.9.attention.self.key.weight" type="Const" version="opset1">
9329 <data element_type="f32" shape="384, 384" offset="449325712" size="589824" />
9330 <output>
9331 <port id="0" precision="FP32" names="self.encoder.layer.9.attention.self.key.weight">
9332 <dim>384</dim>
9333 <dim>384</dim>
9334 </port>
9335 </output>
9336 </layer>
9337 <layer id="604" name="__module.encoder.layer.9.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
9338 <data transpose_a="false" transpose_b="true" />
9339 <input>
9340 <port id="0" precision="FP32">
9341 <dim>-1</dim>
9342 <dim>-1</dim>
9343 <dim>384</dim>
9344 </port>
9345 <port id="1" precision="FP32">
9346 <dim>384</dim>
9347 <dim>384</dim>
9348 </port>
9349 </input>
9350 <output>
9351 <port id="2" precision="FP32">
9352 <dim>-1</dim>
9353 <dim>-1</dim>
9354 <dim>384</dim>
9355 </port>
9356 </output>
9357 </layer>
9358 <layer id="605" name="Constant_103777" type="Const" version="opset1">
9359 <data element_type="f32" shape="1, 1, 384" offset="449915536" size="1536" />
9360 <output>
9361 <port id="0" precision="FP32">
9362 <dim>1</dim>
9363 <dim>1</dim>
9364 <dim>384</dim>
9365 </port>
9366 </output>
9367 </layer>
9368 <layer id="606" name="__module.encoder.layer.9.attention.self.key/aten::linear/Add" type="Add" version="opset1">
9369 <data auto_broadcast="numpy" />
9370 <input>
9371 <port id="0" precision="FP32">
9372 <dim>-1</dim>
9373 <dim>-1</dim>
9374 <dim>384</dim>
9375 </port>
9376 <port id="1" precision="FP32">
9377 <dim>1</dim>
9378 <dim>1</dim>
9379 <dim>384</dim>
9380 </port>
9381 </input>
9382 <output>
9383 <port id="2" precision="FP32" names="789,x.113">
9384 <dim>-1</dim>
9385 <dim>-1</dim>
9386 <dim>384</dim>
9387 </port>
9388 </output>
9389 </layer>
9390 <layer id="607" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
9391 <data element_type="i64" shape="4" offset="385444888" size="32" />
9392 <output>
9393 <port id="0" precision="I64">
9394 <dim>4</dim>
9395 </port>
9396 </output>
9397 </layer>
9398 <layer id="608" name="__module.encoder.layer.9.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
9399 <data special_zero="true" />
9400 <input>
9401 <port id="0" precision="FP32">
9402 <dim>-1</dim>
9403 <dim>-1</dim>
9404 <dim>384</dim>
9405 </port>
9406 <port id="1" precision="I64">
9407 <dim>4</dim>
9408 </port>
9409 </input>
9410 <output>
9411 <port id="2" precision="FP32" names="793,x.115">
9412 <dim>-1</dim>
9413 <dim>-1</dim>
9414 <dim>12</dim>
9415 <dim>32</dim>
9416 </port>
9417 </output>
9418 </layer>
9419 <layer id="609" name="Constant_96624" type="Const" version="opset1">
9420 <data element_type="i64" shape="4" offset="385444920" size="32" />
9421 <output>
9422 <port id="0" precision="I64" names="794">
9423 <dim>4</dim>
9424 </port>
9425 </output>
9426 </layer>
9427 <layer id="610" name="__module.encoder.layer.9.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
9428 <input>
9429 <port id="0" precision="FP32">
9430 <dim>-1</dim>
9431 <dim>-1</dim>
9432 <dim>12</dim>
9433 <dim>32</dim>
9434 </port>
9435 <port id="1" precision="I64">
9436 <dim>4</dim>
9437 </port>
9438 </input>
9439 <output>
9440 <port id="2" precision="FP32" names="795">
9441 <dim>-1</dim>
9442 <dim>12</dim>
9443 <dim>-1</dim>
9444 <dim>32</dim>
9445 </port>
9446 </output>
9447 </layer>
9448 <layer id="611" name="self.encoder.layer.9.attention.self.value.weight" type="Const" version="opset1">
9449 <data element_type="f32" shape="384, 384" offset="449917072" size="589824" />
9450 <output>
9451 <port id="0" precision="FP32" names="self.encoder.layer.9.attention.self.value.weight">
9452 <dim>384</dim>
9453 <dim>384</dim>
9454 </port>
9455 </output>
9456 </layer>
9457 <layer id="612" name="__module.encoder.layer.9.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
9458 <data transpose_a="false" transpose_b="true" />
9459 <input>
9460 <port id="0" precision="FP32">
9461 <dim>-1</dim>
9462 <dim>-1</dim>
9463 <dim>384</dim>
9464 </port>
9465 <port id="1" precision="FP32">
9466 <dim>384</dim>
9467 <dim>384</dim>
9468 </port>
9469 </input>
9470 <output>
9471 <port id="2" precision="FP32">
9472 <dim>-1</dim>
9473 <dim>-1</dim>
9474 <dim>384</dim>
9475 </port>
9476 </output>
9477 </layer>
9478 <layer id="613" name="Constant_103778" type="Const" version="opset1">
9479 <data element_type="f32" shape="1, 1, 384" offset="450506896" size="1536" />
9480 <output>
9481 <port id="0" precision="FP32">
9482 <dim>1</dim>
9483 <dim>1</dim>
9484 <dim>384</dim>
9485 </port>
9486 </output>
9487 </layer>
9488 <layer id="614" name="__module.encoder.layer.9.attention.self.value/aten::linear/Add" type="Add" version="opset1">
9489 <data auto_broadcast="numpy" />
9490 <input>
9491 <port id="0" precision="FP32">
9492 <dim>-1</dim>
9493 <dim>-1</dim>
9494 <dim>384</dim>
9495 </port>
9496 <port id="1" precision="FP32">
9497 <dim>1</dim>
9498 <dim>1</dim>
9499 <dim>384</dim>
9500 </port>
9501 </input>
9502 <output>
9503 <port id="2" precision="FP32" names="798,x.117">
9504 <dim>-1</dim>
9505 <dim>-1</dim>
9506 <dim>384</dim>
9507 </port>
9508 </output>
9509 </layer>
9510 <layer id="615" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
9511 <data element_type="i64" shape="4" offset="385444888" size="32" />
9512 <output>
9513 <port id="0" precision="I64">
9514 <dim>4</dim>
9515 </port>
9516 </output>
9517 </layer>
9518 <layer id="616" name="__module.encoder.layer.9.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
9519 <data special_zero="true" />
9520 <input>
9521 <port id="0" precision="FP32">
9522 <dim>-1</dim>
9523 <dim>-1</dim>
9524 <dim>384</dim>
9525 </port>
9526 <port id="1" precision="I64">
9527 <dim>4</dim>
9528 </port>
9529 </input>
9530 <output>
9531 <port id="2" precision="FP32" names="802,x.119">
9532 <dim>-1</dim>
9533 <dim>-1</dim>
9534 <dim>12</dim>
9535 <dim>32</dim>
9536 </port>
9537 </output>
9538 </layer>
9539 <layer id="617" name="Constant_96647" type="Const" version="opset1">
9540 <data element_type="i64" shape="4" offset="385444920" size="32" />
9541 <output>
9542 <port id="0" precision="I64" names="803">
9543 <dim>4</dim>
9544 </port>
9545 </output>
9546 </layer>
9547 <layer id="618" name="__module.encoder.layer.9.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
9548 <input>
9549 <port id="0" precision="FP32">
9550 <dim>-1</dim>
9551 <dim>-1</dim>
9552 <dim>12</dim>
9553 <dim>32</dim>
9554 </port>
9555 <port id="1" precision="I64">
9556 <dim>4</dim>
9557 </port>
9558 </input>
9559 <output>
9560 <port id="2" precision="FP32" names="804">
9561 <dim>-1</dim>
9562 <dim>12</dim>
9563 <dim>-1</dim>
9564 <dim>32</dim>
9565 </port>
9566 </output>
9567 </layer>
9568 <layer id="619" name="__module.encoder.layer.9.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
9569 <data causal="false" />
9570 <input>
9571 <port id="0" precision="FP32">
9572 <dim>-1</dim>
9573 <dim>12</dim>
9574 <dim>-1</dim>
9575 <dim>32</dim>
9576 </port>
9577 <port id="1" precision="FP32">
9578 <dim>-1</dim>
9579 <dim>12</dim>
9580 <dim>-1</dim>
9581 <dim>32</dim>
9582 </port>
9583 <port id="2" precision="FP32">
9584 <dim>-1</dim>
9585 <dim>12</dim>
9586 <dim>-1</dim>
9587 <dim>32</dim>
9588 </port>
9589 <port id="3" precision="FP32">
9590 <dim>-1</dim>
9591 <dim>1</dim>
9592 <dim>-1</dim>
9593 <dim>-1</dim>
9594 </port>
9595 </input>
9596 <output>
9597 <port id="4" precision="FP32" names="805,attn_output.37">
9598 <dim>-1</dim>
9599 <dim>12</dim>
9600 <dim>-1</dim>
9601 <dim>32</dim>
9602 </port>
9603 </output>
9604 </layer>
9605 <layer id="620" name="__module.encoder.layer.9.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
9606 <data element_type="i32" shape="4" offset="386627688" size="16" />
9607 <output>
9608 <port id="0" precision="I32">
9609 <dim>4</dim>
9610 </port>
9611 </output>
9612 </layer>
9613 <layer id="621" name="__module.encoder.layer.9.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
9614 <input>
9615 <port id="0" precision="FP32">
9616 <dim>-1</dim>
9617 <dim>12</dim>
9618 <dim>-1</dim>
9619 <dim>32</dim>
9620 </port>
9621 <port id="1" precision="I32">
9622 <dim>4</dim>
9623 </port>
9624 </input>
9625 <output>
9626 <port id="2" precision="FP32" names="806,attn_output.39">
9627 <dim>-1</dim>
9628 <dim>-1</dim>
9629 <dim>12</dim>
9630 <dim>32</dim>
9631 </port>
9632 </output>
9633 </layer>
9634 <layer id="622" name="__module.encoder.layer.9.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
9635 <data output_type="i64" />
9636 <input>
9637 <port id="0" precision="FP32">
9638 <dim>-1</dim>
9639 <dim>-1</dim>
9640 <dim>384</dim>
9641 </port>
9642 </input>
9643 <output>
9644 <port id="1" precision="I64">
9645 <dim>3</dim>
9646 </port>
9647 </output>
9648 </layer>
9649 <layer id="623" name="Constant_102931" type="Const" version="opset1">
9650 <data element_type="i64" shape="2" offset="386627704" size="16" />
9651 <output>
9652 <port id="0" precision="I64">
9653 <dim>2</dim>
9654 </port>
9655 </output>
9656 </layer>
9657 <layer id="624" name="Constant_102932" type="Const" version="opset1">
9658 <data element_type="i64" shape="" offset="384850436" size="8" />
9659 <output>
9660 <port id="0" precision="I64" />
9661 </output>
9662 </layer>
9663 <layer id="625" name="Gather_102933" type="Gather" version="opset8">
9664 <data batch_dims="0" />
9665 <input>
9666 <port id="0" precision="I64">
9667 <dim>3</dim>
9668 </port>
9669 <port id="1" precision="I64">
9670 <dim>2</dim>
9671 </port>
9672 <port id="2" precision="I64" />
9673 </input>
9674 <output>
9675 <port id="3" precision="I64">
9676 <dim>2</dim>
9677 </port>
9678 </output>
9679 </layer>
9680 <layer id="626" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
9681 <data axis="0" />
9682 <input>
9683 <port id="0" precision="I64">
9684 <dim>2</dim>
9685 </port>
9686 <port id="1" precision="I64">
9687 <dim>1</dim>
9688 </port>
9689 </input>
9690 <output>
9691 <port id="2" precision="I64" names="807">
9692 <dim>3</dim>
9693 </port>
9694 </output>
9695 </layer>
9696 <layer id="627" name="__module.encoder.layer.9.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
9697 <data special_zero="false" />
9698 <input>
9699 <port id="0" precision="FP32">
9700 <dim>-1</dim>
9701 <dim>-1</dim>
9702 <dim>12</dim>
9703 <dim>32</dim>
9704 </port>
9705 <port id="1" precision="I64">
9706 <dim>3</dim>
9707 </port>
9708 </input>
9709 <output>
9710 <port id="2" precision="FP32" names="808">
9711 <dim>-1</dim>
9712 <dim>-1</dim>
9713 <dim>384</dim>
9714 </port>
9715 </output>
9716 </layer>
9717 <layer id="628" name="self.encoder.layer.9.attention.output.dense.weight" type="Const" version="opset1">
9718 <data element_type="f32" shape="384, 384" offset="450508432" size="589824" />
9719 <output>
9720 <port id="0" precision="FP32" names="self.encoder.layer.9.attention.output.dense.weight">
9721 <dim>384</dim>
9722 <dim>384</dim>
9723 </port>
9724 </output>
9725 </layer>
9726 <layer id="629" name="__module.encoder.layer.9.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
9727 <data transpose_a="false" transpose_b="true" />
9728 <input>
9729 <port id="0" precision="FP32">
9730 <dim>-1</dim>
9731 <dim>-1</dim>
9732 <dim>384</dim>
9733 </port>
9734 <port id="1" precision="FP32">
9735 <dim>384</dim>
9736 <dim>384</dim>
9737 </port>
9738 </input>
9739 <output>
9740 <port id="2" precision="FP32">
9741 <dim>-1</dim>
9742 <dim>-1</dim>
9743 <dim>384</dim>
9744 </port>
9745 </output>
9746 </layer>
9747 <layer id="630" name="Constant_103779" type="Const" version="opset1">
9748 <data element_type="f32" shape="1, 1, 384" offset="451098256" size="1536" />
9749 <output>
9750 <port id="0" precision="FP32">
9751 <dim>1</dim>
9752 <dim>1</dim>
9753 <dim>384</dim>
9754 </port>
9755 </output>
9756 </layer>
9757 <layer id="631" name="__module.encoder.layer.9.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
9758 <data auto_broadcast="numpy" />
9759 <input>
9760 <port id="0" precision="FP32">
9761 <dim>-1</dim>
9762 <dim>-1</dim>
9763 <dim>384</dim>
9764 </port>
9765 <port id="1" precision="FP32">
9766 <dim>1</dim>
9767 <dim>1</dim>
9768 <dim>384</dim>
9769 </port>
9770 </input>
9771 <output>
9772 <port id="2" precision="FP32" names="814,input.39">
9773 <dim>-1</dim>
9774 <dim>-1</dim>
9775 <dim>384</dim>
9776 </port>
9777 </output>
9778 </layer>
9779 <layer id="632" name="__module.encoder.layer.9.attention.output/aten::add/Add" type="Add" version="opset1">
9780 <data auto_broadcast="numpy" />
9781 <input>
9782 <port id="0" precision="FP32">
9783 <dim>-1</dim>
9784 <dim>-1</dim>
9785 <dim>384</dim>
9786 </port>
9787 <port id="1" precision="FP32">
9788 <dim>-1</dim>
9789 <dim>-1</dim>
9790 <dim>384</dim>
9791 </port>
9792 </input>
9793 <output>
9794 <port id="2" precision="FP32" names="816">
9795 <dim>-1</dim>
9796 <dim>-1</dim>
9797 <dim>384</dim>
9798 </port>
9799 </output>
9800 </layer>
9801 <layer id="633" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
9802 <data element_type="i32" shape="1" offset="384850452" size="4" />
9803 <output>
9804 <port id="0" precision="I32">
9805 <dim>1</dim>
9806 </port>
9807 </output>
9808 </layer>
9809 <layer id="634" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
9810 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
9811 <input>
9812 <port id="0" precision="FP32">
9813 <dim>-1</dim>
9814 <dim>-1</dim>
9815 <dim>384</dim>
9816 </port>
9817 <port id="1" precision="I32">
9818 <dim>1</dim>
9819 </port>
9820 </input>
9821 <output>
9822 <port id="2" precision="FP32">
9823 <dim>-1</dim>
9824 <dim>-1</dim>
9825 <dim>384</dim>
9826 </port>
9827 </output>
9828 </layer>
9829 <layer id="635" name="Constant_103780" type="Const" version="opset1">
9830 <data element_type="f32" shape="1, 1, 384" offset="451099792" size="1536" />
9831 <output>
9832 <port id="0" precision="FP32">
9833 <dim>1</dim>
9834 <dim>1</dim>
9835 <dim>384</dim>
9836 </port>
9837 </output>
9838 </layer>
9839 <layer id="636" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
9840 <data auto_broadcast="numpy" />
9841 <input>
9842 <port id="0" precision="FP32">
9843 <dim>-1</dim>
9844 <dim>-1</dim>
9845 <dim>384</dim>
9846 </port>
9847 <port id="1" precision="FP32">
9848 <dim>1</dim>
9849 <dim>1</dim>
9850 <dim>384</dim>
9851 </port>
9852 </input>
9853 <output>
9854 <port id="2" precision="FP32">
9855 <dim>-1</dim>
9856 <dim>-1</dim>
9857 <dim>384</dim>
9858 </port>
9859 </output>
9860 </layer>
9861 <layer id="637" name="Constant_103781" type="Const" version="opset1">
9862 <data element_type="f32" shape="1, 1, 384" offset="451101328" size="1536" />
9863 <output>
9864 <port id="0" precision="FP32">
9865 <dim>1</dim>
9866 <dim>1</dim>
9867 <dim>384</dim>
9868 </port>
9869 </output>
9870 </layer>
9871 <layer id="638" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
9872 <data auto_broadcast="numpy" />
9873 <input>
9874 <port id="0" precision="FP32">
9875 <dim>-1</dim>
9876 <dim>-1</dim>
9877 <dim>384</dim>
9878 </port>
9879 <port id="1" precision="FP32">
9880 <dim>1</dim>
9881 <dim>1</dim>
9882 <dim>384</dim>
9883 </port>
9884 </input>
9885 <output>
9886 <port id="2" precision="FP32" names="820,input_tensor.19">
9887 <dim>-1</dim>
9888 <dim>-1</dim>
9889 <dim>384</dim>
9890 </port>
9891 </output>
9892 </layer>
9893 <layer id="639" name="self.encoder.layer.9.intermediate.dense.weight" type="Const" version="opset1">
9894 <data element_type="f32" shape="1536, 384" offset="451102864" size="2359296" />
9895 <output>
9896 <port id="0" precision="FP32" names="self.encoder.layer.9.intermediate.dense.weight">
9897 <dim>1536</dim>
9898 <dim>384</dim>
9899 </port>
9900 </output>
9901 </layer>
9902 <layer id="640" name="__module.encoder.layer.9.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
9903 <data transpose_a="false" transpose_b="true" />
9904 <input>
9905 <port id="0" precision="FP32">
9906 <dim>-1</dim>
9907 <dim>-1</dim>
9908 <dim>384</dim>
9909 </port>
9910 <port id="1" precision="FP32">
9911 <dim>1536</dim>
9912 <dim>384</dim>
9913 </port>
9914 </input>
9915 <output>
9916 <port id="2" precision="FP32">
9917 <dim>-1</dim>
9918 <dim>-1</dim>
9919 <dim>1536</dim>
9920 </port>
9921 </output>
9922 </layer>
9923 <layer id="641" name="Constant_103782" type="Const" version="opset1">
9924 <data element_type="f32" shape="1, 1, 1536" offset="453462160" size="6144" />
9925 <output>
9926 <port id="0" precision="FP32">
9927 <dim>1</dim>
9928 <dim>1</dim>
9929 <dim>1536</dim>
9930 </port>
9931 </output>
9932 </layer>
9933 <layer id="642" name="__module.encoder.layer.9.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
9934 <data auto_broadcast="numpy" />
9935 <input>
9936 <port id="0" precision="FP32">
9937 <dim>-1</dim>
9938 <dim>-1</dim>
9939 <dim>1536</dim>
9940 </port>
9941 <port id="1" precision="FP32">
9942 <dim>1</dim>
9943 <dim>1</dim>
9944 <dim>1536</dim>
9945 </port>
9946 </input>
9947 <output>
9948 <port id="2" precision="FP32" names="825">
9949 <dim>-1</dim>
9950 <dim>-1</dim>
9951 <dim>1536</dim>
9952 </port>
9953 </output>
9954 </layer>
9955 <layer id="643" name="__module.encoder.layer.9.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
9956 <data approximation_mode="ERF" />
9957 <input>
9958 <port id="0" precision="FP32">
9959 <dim>-1</dim>
9960 <dim>-1</dim>
9961 <dim>1536</dim>
9962 </port>
9963 </input>
9964 <output>
9965 <port id="1" precision="FP32" names="826">
9966 <dim>-1</dim>
9967 <dim>-1</dim>
9968 <dim>1536</dim>
9969 </port>
9970 </output>
9971 </layer>
9972 <layer id="644" name="self.encoder.layer.9.output.dense.weight" type="Const" version="opset1">
9973 <data element_type="f32" shape="384, 1536" offset="453468304" size="2359296" />
9974 <output>
9975 <port id="0" precision="FP32" names="self.encoder.layer.9.output.dense.weight">
9976 <dim>384</dim>
9977 <dim>1536</dim>
9978 </port>
9979 </output>
9980 </layer>
9981 <layer id="645" name="__module.encoder.layer.9.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
9982 <data transpose_a="false" transpose_b="true" />
9983 <input>
9984 <port id="0" precision="FP32">
9985 <dim>-1</dim>
9986 <dim>-1</dim>
9987 <dim>1536</dim>
9988 </port>
9989 <port id="1" precision="FP32">
9990 <dim>384</dim>
9991 <dim>1536</dim>
9992 </port>
9993 </input>
9994 <output>
9995 <port id="2" precision="FP32">
9996 <dim>-1</dim>
9997 <dim>-1</dim>
9998 <dim>384</dim>
9999 </port>
10000 </output>
10001 </layer>
10002 <layer id="646" name="Constant_103783" type="Const" version="opset1">
10003 <data element_type="f32" shape="1, 1, 384" offset="455827600" size="1536" />
10004 <output>
10005 <port id="0" precision="FP32">
10006 <dim>1</dim>
10007 <dim>1</dim>
10008 <dim>384</dim>
10009 </port>
10010 </output>
10011 </layer>
10012 <layer id="647" name="__module.encoder.layer.9.output.dense/aten::linear/Add" type="Add" version="opset1">
10013 <data auto_broadcast="numpy" />
10014 <input>
10015 <port id="0" precision="FP32">
10016 <dim>-1</dim>
10017 <dim>-1</dim>
10018 <dim>384</dim>
10019 </port>
10020 <port id="1" precision="FP32">
10021 <dim>1</dim>
10022 <dim>1</dim>
10023 <dim>384</dim>
10024 </port>
10025 </input>
10026 <output>
10027 <port id="2" precision="FP32" names="832,input.41">
10028 <dim>-1</dim>
10029 <dim>-1</dim>
10030 <dim>384</dim>
10031 </port>
10032 </output>
10033 </layer>
10034 <layer id="648" name="__module.encoder.layer.9.output/aten::add/Add" type="Add" version="opset1">
10035 <data auto_broadcast="numpy" />
10036 <input>
10037 <port id="0" precision="FP32">
10038 <dim>-1</dim>
10039 <dim>-1</dim>
10040 <dim>384</dim>
10041 </port>
10042 <port id="1" precision="FP32">
10043 <dim>-1</dim>
10044 <dim>-1</dim>
10045 <dim>384</dim>
10046 </port>
10047 </input>
10048 <output>
10049 <port id="2" precision="FP32" names="834">
10050 <dim>-1</dim>
10051 <dim>-1</dim>
10052 <dim>384</dim>
10053 </port>
10054 </output>
10055 </layer>
10056 <layer id="649" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
10057 <data element_type="i32" shape="1" offset="384850452" size="4" />
10058 <output>
10059 <port id="0" precision="I32">
10060 <dim>1</dim>
10061 </port>
10062 </output>
10063 </layer>
10064 <layer id="650" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
10065 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
10066 <input>
10067 <port id="0" precision="FP32">
10068 <dim>-1</dim>
10069 <dim>-1</dim>
10070 <dim>384</dim>
10071 </port>
10072 <port id="1" precision="I32">
10073 <dim>1</dim>
10074 </port>
10075 </input>
10076 <output>
10077 <port id="2" precision="FP32">
10078 <dim>-1</dim>
10079 <dim>-1</dim>
10080 <dim>384</dim>
10081 </port>
10082 </output>
10083 </layer>
10084 <layer id="651" name="Constant_103784" type="Const" version="opset1">
10085 <data element_type="f32" shape="1, 1, 384" offset="455829136" size="1536" />
10086 <output>
10087 <port id="0" precision="FP32">
10088 <dim>1</dim>
10089 <dim>1</dim>
10090 <dim>384</dim>
10091 </port>
10092 </output>
10093 </layer>
10094 <layer id="652" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
10095 <data auto_broadcast="numpy" />
10096 <input>
10097 <port id="0" precision="FP32">
10098 <dim>-1</dim>
10099 <dim>-1</dim>
10100 <dim>384</dim>
10101 </port>
10102 <port id="1" precision="FP32">
10103 <dim>1</dim>
10104 <dim>1</dim>
10105 <dim>384</dim>
10106 </port>
10107 </input>
10108 <output>
10109 <port id="2" precision="FP32">
10110 <dim>-1</dim>
10111 <dim>-1</dim>
10112 <dim>384</dim>
10113 </port>
10114 </output>
10115 </layer>
10116 <layer id="653" name="Constant_103785" type="Const" version="opset1">
10117 <data element_type="f32" shape="1, 1, 384" offset="455830672" size="1536" />
10118 <output>
10119 <port id="0" precision="FP32">
10120 <dim>1</dim>
10121 <dim>1</dim>
10122 <dim>384</dim>
10123 </port>
10124 </output>
10125 </layer>
10126 <layer id="654" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
10127 <data auto_broadcast="numpy" />
10128 <input>
10129 <port id="0" precision="FP32">
10130 <dim>-1</dim>
10131 <dim>-1</dim>
10132 <dim>384</dim>
10133 </port>
10134 <port id="1" precision="FP32">
10135 <dim>1</dim>
10136 <dim>1</dim>
10137 <dim>384</dim>
10138 </port>
10139 </input>
10140 <output>
10141 <port id="2" precision="FP32" names="838,hidden_states.61">
10142 <dim>-1</dim>
10143 <dim>-1</dim>
10144 <dim>384</dim>
10145 </port>
10146 </output>
10147 </layer>
10148 <layer id="655" name="self.encoder.layer.10.attention.self.query.weight" type="Const" version="opset1">
10149 <data element_type="f32" shape="384, 384" offset="455832208" size="589824" />
10150 <output>
10151 <port id="0" precision="FP32" names="self.encoder.layer.10.attention.self.query.weight">
10152 <dim>384</dim>
10153 <dim>384</dim>
10154 </port>
10155 </output>
10156 </layer>
10157 <layer id="656" name="__module.encoder.layer.10.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
10158 <data transpose_a="false" transpose_b="true" />
10159 <input>
10160 <port id="0" precision="FP32">
10161 <dim>-1</dim>
10162 <dim>-1</dim>
10163 <dim>384</dim>
10164 </port>
10165 <port id="1" precision="FP32">
10166 <dim>384</dim>
10167 <dim>384</dim>
10168 </port>
10169 </input>
10170 <output>
10171 <port id="2" precision="FP32">
10172 <dim>-1</dim>
10173 <dim>-1</dim>
10174 <dim>384</dim>
10175 </port>
10176 </output>
10177 </layer>
10178 <layer id="657" name="Constant_103786" type="Const" version="opset1">
10179 <data element_type="f32" shape="1, 1, 384" offset="456422032" size="1536" />
10180 <output>
10181 <port id="0" precision="FP32">
10182 <dim>1</dim>
10183 <dim>1</dim>
10184 <dim>384</dim>
10185 </port>
10186 </output>
10187 </layer>
10188 <layer id="658" name="__module.encoder.layer.10.attention.self.query/aten::linear/Add" type="Add" version="opset1">
10189 <data auto_broadcast="numpy" />
10190 <input>
10191 <port id="0" precision="FP32">
10192 <dim>-1</dim>
10193 <dim>-1</dim>
10194 <dim>384</dim>
10195 </port>
10196 <port id="1" precision="FP32">
10197 <dim>1</dim>
10198 <dim>1</dim>
10199 <dim>384</dim>
10200 </port>
10201 </input>
10202 <output>
10203 <port id="2" precision="FP32" names="851,x.121">
10204 <dim>-1</dim>
10205 <dim>-1</dim>
10206 <dim>384</dim>
10207 </port>
10208 </output>
10209 </layer>
10210 <layer id="659" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
10211 <data element_type="i64" shape="4" offset="385444888" size="32" />
10212 <output>
10213 <port id="0" precision="I64">
10214 <dim>4</dim>
10215 </port>
10216 </output>
10217 </layer>
10218 <layer id="660" name="__module.encoder.layer.10.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
10219 <data special_zero="true" />
10220 <input>
10221 <port id="0" precision="FP32">
10222 <dim>-1</dim>
10223 <dim>-1</dim>
10224 <dim>384</dim>
10225 </port>
10226 <port id="1" precision="I64">
10227 <dim>4</dim>
10228 </port>
10229 </input>
10230 <output>
10231 <port id="2" precision="FP32" names="855,x.123">
10232 <dim>-1</dim>
10233 <dim>-1</dim>
10234 <dim>12</dim>
10235 <dim>32</dim>
10236 </port>
10237 </output>
10238 </layer>
10239 <layer id="661" name="Constant_96827" type="Const" version="opset1">
10240 <data element_type="i64" shape="4" offset="385444920" size="32" />
10241 <output>
10242 <port id="0" precision="I64" names="856">
10243 <dim>4</dim>
10244 </port>
10245 </output>
10246 </layer>
10247 <layer id="662" name="__module.encoder.layer.10.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
10248 <input>
10249 <port id="0" precision="FP32">
10250 <dim>-1</dim>
10251 <dim>-1</dim>
10252 <dim>12</dim>
10253 <dim>32</dim>
10254 </port>
10255 <port id="1" precision="I64">
10256 <dim>4</dim>
10257 </port>
10258 </input>
10259 <output>
10260 <port id="2" precision="FP32" names="857">
10261 <dim>-1</dim>
10262 <dim>12</dim>
10263 <dim>-1</dim>
10264 <dim>32</dim>
10265 </port>
10266 </output>
10267 </layer>
10268 <layer id="663" name="self.encoder.layer.10.attention.self.key.weight" type="Const" version="opset1">
10269 <data element_type="f32" shape="384, 384" offset="456423568" size="589824" />
10270 <output>
10271 <port id="0" precision="FP32" names="self.encoder.layer.10.attention.self.key.weight">
10272 <dim>384</dim>
10273 <dim>384</dim>
10274 </port>
10275 </output>
10276 </layer>
10277 <layer id="664" name="__module.encoder.layer.10.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
10278 <data transpose_a="false" transpose_b="true" />
10279 <input>
10280 <port id="0" precision="FP32">
10281 <dim>-1</dim>
10282 <dim>-1</dim>
10283 <dim>384</dim>
10284 </port>
10285 <port id="1" precision="FP32">
10286 <dim>384</dim>
10287 <dim>384</dim>
10288 </port>
10289 </input>
10290 <output>
10291 <port id="2" precision="FP32">
10292 <dim>-1</dim>
10293 <dim>-1</dim>
10294 <dim>384</dim>
10295 </port>
10296 </output>
10297 </layer>
10298 <layer id="665" name="Constant_103787" type="Const" version="opset1">
10299 <data element_type="f32" shape="1, 1, 384" offset="457013392" size="1536" />
10300 <output>
10301 <port id="0" precision="FP32">
10302 <dim>1</dim>
10303 <dim>1</dim>
10304 <dim>384</dim>
10305 </port>
10306 </output>
10307 </layer>
10308 <layer id="666" name="__module.encoder.layer.10.attention.self.key/aten::linear/Add" type="Add" version="opset1">
10309 <data auto_broadcast="numpy" />
10310 <input>
10311 <port id="0" precision="FP32">
10312 <dim>-1</dim>
10313 <dim>-1</dim>
10314 <dim>384</dim>
10315 </port>
10316 <port id="1" precision="FP32">
10317 <dim>1</dim>
10318 <dim>1</dim>
10319 <dim>384</dim>
10320 </port>
10321 </input>
10322 <output>
10323 <port id="2" precision="FP32" names="860,x.125">
10324 <dim>-1</dim>
10325 <dim>-1</dim>
10326 <dim>384</dim>
10327 </port>
10328 </output>
10329 </layer>
10330 <layer id="667" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
10331 <data element_type="i64" shape="4" offset="385444888" size="32" />
10332 <output>
10333 <port id="0" precision="I64">
10334 <dim>4</dim>
10335 </port>
10336 </output>
10337 </layer>
10338 <layer id="668" name="__module.encoder.layer.10.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
10339 <data special_zero="true" />
10340 <input>
10341 <port id="0" precision="FP32">
10342 <dim>-1</dim>
10343 <dim>-1</dim>
10344 <dim>384</dim>
10345 </port>
10346 <port id="1" precision="I64">
10347 <dim>4</dim>
10348 </port>
10349 </input>
10350 <output>
10351 <port id="2" precision="FP32" names="864,x.127">
10352 <dim>-1</dim>
10353 <dim>-1</dim>
10354 <dim>12</dim>
10355 <dim>32</dim>
10356 </port>
10357 </output>
10358 </layer>
10359 <layer id="669" name="Constant_96850" type="Const" version="opset1">
10360 <data element_type="i64" shape="4" offset="385444920" size="32" />
10361 <output>
10362 <port id="0" precision="I64" names="865">
10363 <dim>4</dim>
10364 </port>
10365 </output>
10366 </layer>
10367 <layer id="670" name="__module.encoder.layer.10.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
10368 <input>
10369 <port id="0" precision="FP32">
10370 <dim>-1</dim>
10371 <dim>-1</dim>
10372 <dim>12</dim>
10373 <dim>32</dim>
10374 </port>
10375 <port id="1" precision="I64">
10376 <dim>4</dim>
10377 </port>
10378 </input>
10379 <output>
10380 <port id="2" precision="FP32" names="866">
10381 <dim>-1</dim>
10382 <dim>12</dim>
10383 <dim>-1</dim>
10384 <dim>32</dim>
10385 </port>
10386 </output>
10387 </layer>
10388 <layer id="671" name="self.encoder.layer.10.attention.self.value.weight" type="Const" version="opset1">
10389 <data element_type="f32" shape="384, 384" offset="457014928" size="589824" />
10390 <output>
10391 <port id="0" precision="FP32" names="self.encoder.layer.10.attention.self.value.weight">
10392 <dim>384</dim>
10393 <dim>384</dim>
10394 </port>
10395 </output>
10396 </layer>
10397 <layer id="672" name="__module.encoder.layer.10.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
10398 <data transpose_a="false" transpose_b="true" />
10399 <input>
10400 <port id="0" precision="FP32">
10401 <dim>-1</dim>
10402 <dim>-1</dim>
10403 <dim>384</dim>
10404 </port>
10405 <port id="1" precision="FP32">
10406 <dim>384</dim>
10407 <dim>384</dim>
10408 </port>
10409 </input>
10410 <output>
10411 <port id="2" precision="FP32">
10412 <dim>-1</dim>
10413 <dim>-1</dim>
10414 <dim>384</dim>
10415 </port>
10416 </output>
10417 </layer>
10418 <layer id="673" name="Constant_103788" type="Const" version="opset1">
10419 <data element_type="f32" shape="1, 1, 384" offset="457604752" size="1536" />
10420 <output>
10421 <port id="0" precision="FP32">
10422 <dim>1</dim>
10423 <dim>1</dim>
10424 <dim>384</dim>
10425 </port>
10426 </output>
10427 </layer>
10428 <layer id="674" name="__module.encoder.layer.10.attention.self.value/aten::linear/Add" type="Add" version="opset1">
10429 <data auto_broadcast="numpy" />
10430 <input>
10431 <port id="0" precision="FP32">
10432 <dim>-1</dim>
10433 <dim>-1</dim>
10434 <dim>384</dim>
10435 </port>
10436 <port id="1" precision="FP32">
10437 <dim>1</dim>
10438 <dim>1</dim>
10439 <dim>384</dim>
10440 </port>
10441 </input>
10442 <output>
10443 <port id="2" precision="FP32" names="869,x.129">
10444 <dim>-1</dim>
10445 <dim>-1</dim>
10446 <dim>384</dim>
10447 </port>
10448 </output>
10449 </layer>
10450 <layer id="675" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
10451 <data element_type="i64" shape="4" offset="385444888" size="32" />
10452 <output>
10453 <port id="0" precision="I64">
10454 <dim>4</dim>
10455 </port>
10456 </output>
10457 </layer>
10458 <layer id="676" name="__module.encoder.layer.10.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
10459 <data special_zero="true" />
10460 <input>
10461 <port id="0" precision="FP32">
10462 <dim>-1</dim>
10463 <dim>-1</dim>
10464 <dim>384</dim>
10465 </port>
10466 <port id="1" precision="I64">
10467 <dim>4</dim>
10468 </port>
10469 </input>
10470 <output>
10471 <port id="2" precision="FP32" names="873,x.131">
10472 <dim>-1</dim>
10473 <dim>-1</dim>
10474 <dim>12</dim>
10475 <dim>32</dim>
10476 </port>
10477 </output>
10478 </layer>
10479 <layer id="677" name="Constant_96873" type="Const" version="opset1">
10480 <data element_type="i64" shape="4" offset="385444920" size="32" />
10481 <output>
10482 <port id="0" precision="I64" names="874">
10483 <dim>4</dim>
10484 </port>
10485 </output>
10486 </layer>
10487 <layer id="678" name="__module.encoder.layer.10.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
10488 <input>
10489 <port id="0" precision="FP32">
10490 <dim>-1</dim>
10491 <dim>-1</dim>
10492 <dim>12</dim>
10493 <dim>32</dim>
10494 </port>
10495 <port id="1" precision="I64">
10496 <dim>4</dim>
10497 </port>
10498 </input>
10499 <output>
10500 <port id="2" precision="FP32" names="875">
10501 <dim>-1</dim>
10502 <dim>12</dim>
10503 <dim>-1</dim>
10504 <dim>32</dim>
10505 </port>
10506 </output>
10507 </layer>
10508 <layer id="679" name="__module.encoder.layer.10.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
10509 <data causal="false" />
10510 <input>
10511 <port id="0" precision="FP32">
10512 <dim>-1</dim>
10513 <dim>12</dim>
10514 <dim>-1</dim>
10515 <dim>32</dim>
10516 </port>
10517 <port id="1" precision="FP32">
10518 <dim>-1</dim>
10519 <dim>12</dim>
10520 <dim>-1</dim>
10521 <dim>32</dim>
10522 </port>
10523 <port id="2" precision="FP32">
10524 <dim>-1</dim>
10525 <dim>12</dim>
10526 <dim>-1</dim>
10527 <dim>32</dim>
10528 </port>
10529 <port id="3" precision="FP32">
10530 <dim>-1</dim>
10531 <dim>1</dim>
10532 <dim>-1</dim>
10533 <dim>-1</dim>
10534 </port>
10535 </input>
10536 <output>
10537 <port id="4" precision="FP32" names="876,attn_output.41">
10538 <dim>-1</dim>
10539 <dim>12</dim>
10540 <dim>-1</dim>
10541 <dim>32</dim>
10542 </port>
10543 </output>
10544 </layer>
10545 <layer id="680" name="__module.encoder.layer.10.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
10546 <data element_type="i32" shape="4" offset="386627688" size="16" />
10547 <output>
10548 <port id="0" precision="I32">
10549 <dim>4</dim>
10550 </port>
10551 </output>
10552 </layer>
10553 <layer id="681" name="__module.encoder.layer.10.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
10554 <input>
10555 <port id="0" precision="FP32">
10556 <dim>-1</dim>
10557 <dim>12</dim>
10558 <dim>-1</dim>
10559 <dim>32</dim>
10560 </port>
10561 <port id="1" precision="I32">
10562 <dim>4</dim>
10563 </port>
10564 </input>
10565 <output>
10566 <port id="2" precision="FP32" names="877,attn_output.43">
10567 <dim>-1</dim>
10568 <dim>-1</dim>
10569 <dim>12</dim>
10570 <dim>32</dim>
10571 </port>
10572 </output>
10573 </layer>
10574 <layer id="682" name="__module.encoder.layer.10.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
10575 <data output_type="i64" />
10576 <input>
10577 <port id="0" precision="FP32">
10578 <dim>-1</dim>
10579 <dim>-1</dim>
10580 <dim>384</dim>
10581 </port>
10582 </input>
10583 <output>
10584 <port id="1" precision="I64">
10585 <dim>3</dim>
10586 </port>
10587 </output>
10588 </layer>
10589 <layer id="683" name="Constant_102951" type="Const" version="opset1">
10590 <data element_type="i64" shape="2" offset="386627704" size="16" />
10591 <output>
10592 <port id="0" precision="I64">
10593 <dim>2</dim>
10594 </port>
10595 </output>
10596 </layer>
10597 <layer id="684" name="Constant_102952" type="Const" version="opset1">
10598 <data element_type="i64" shape="" offset="384850436" size="8" />
10599 <output>
10600 <port id="0" precision="I64" />
10601 </output>
10602 </layer>
10603 <layer id="685" name="Gather_102953" type="Gather" version="opset8">
10604 <data batch_dims="0" />
10605 <input>
10606 <port id="0" precision="I64">
10607 <dim>3</dim>
10608 </port>
10609 <port id="1" precision="I64">
10610 <dim>2</dim>
10611 </port>
10612 <port id="2" precision="I64" />
10613 </input>
10614 <output>
10615 <port id="3" precision="I64">
10616 <dim>2</dim>
10617 </port>
10618 </output>
10619 </layer>
10620 <layer id="686" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
10621 <data axis="0" />
10622 <input>
10623 <port id="0" precision="I64">
10624 <dim>2</dim>
10625 </port>
10626 <port id="1" precision="I64">
10627 <dim>1</dim>
10628 </port>
10629 </input>
10630 <output>
10631 <port id="2" precision="I64" names="878">
10632 <dim>3</dim>
10633 </port>
10634 </output>
10635 </layer>
10636 <layer id="687" name="__module.encoder.layer.10.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
10637 <data special_zero="false" />
10638 <input>
10639 <port id="0" precision="FP32">
10640 <dim>-1</dim>
10641 <dim>-1</dim>
10642 <dim>12</dim>
10643 <dim>32</dim>
10644 </port>
10645 <port id="1" precision="I64">
10646 <dim>3</dim>
10647 </port>
10648 </input>
10649 <output>
10650 <port id="2" precision="FP32" names="879">
10651 <dim>-1</dim>
10652 <dim>-1</dim>
10653 <dim>384</dim>
10654 </port>
10655 </output>
10656 </layer>
10657 <layer id="688" name="self.encoder.layer.10.attention.output.dense.weight" type="Const" version="opset1">
10658 <data element_type="f32" shape="384, 384" offset="457606288" size="589824" />
10659 <output>
10660 <port id="0" precision="FP32" names="self.encoder.layer.10.attention.output.dense.weight">
10661 <dim>384</dim>
10662 <dim>384</dim>
10663 </port>
10664 </output>
10665 </layer>
10666 <layer id="689" name="__module.encoder.layer.10.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
10667 <data transpose_a="false" transpose_b="true" />
10668 <input>
10669 <port id="0" precision="FP32">
10670 <dim>-1</dim>
10671 <dim>-1</dim>
10672 <dim>384</dim>
10673 </port>
10674 <port id="1" precision="FP32">
10675 <dim>384</dim>
10676 <dim>384</dim>
10677 </port>
10678 </input>
10679 <output>
10680 <port id="2" precision="FP32">
10681 <dim>-1</dim>
10682 <dim>-1</dim>
10683 <dim>384</dim>
10684 </port>
10685 </output>
10686 </layer>
10687 <layer id="690" name="Constant_103789" type="Const" version="opset1">
10688 <data element_type="f32" shape="1, 1, 384" offset="458196112" size="1536" />
10689 <output>
10690 <port id="0" precision="FP32">
10691 <dim>1</dim>
10692 <dim>1</dim>
10693 <dim>384</dim>
10694 </port>
10695 </output>
10696 </layer>
10697 <layer id="691" name="__module.encoder.layer.10.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
10698 <data auto_broadcast="numpy" />
10699 <input>
10700 <port id="0" precision="FP32">
10701 <dim>-1</dim>
10702 <dim>-1</dim>
10703 <dim>384</dim>
10704 </port>
10705 <port id="1" precision="FP32">
10706 <dim>1</dim>
10707 <dim>1</dim>
10708 <dim>384</dim>
10709 </port>
10710 </input>
10711 <output>
10712 <port id="2" precision="FP32" names="885,input.43">
10713 <dim>-1</dim>
10714 <dim>-1</dim>
10715 <dim>384</dim>
10716 </port>
10717 </output>
10718 </layer>
10719 <layer id="692" name="__module.encoder.layer.10.attention.output/aten::add/Add" type="Add" version="opset1">
10720 <data auto_broadcast="numpy" />
10721 <input>
10722 <port id="0" precision="FP32">
10723 <dim>-1</dim>
10724 <dim>-1</dim>
10725 <dim>384</dim>
10726 </port>
10727 <port id="1" precision="FP32">
10728 <dim>-1</dim>
10729 <dim>-1</dim>
10730 <dim>384</dim>
10731 </port>
10732 </input>
10733 <output>
10734 <port id="2" precision="FP32" names="887">
10735 <dim>-1</dim>
10736 <dim>-1</dim>
10737 <dim>384</dim>
10738 </port>
10739 </output>
10740 </layer>
10741 <layer id="693" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
10742 <data element_type="i32" shape="1" offset="384850452" size="4" />
10743 <output>
10744 <port id="0" precision="I32">
10745 <dim>1</dim>
10746 </port>
10747 </output>
10748 </layer>
10749 <layer id="694" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
10750 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
10751 <input>
10752 <port id="0" precision="FP32">
10753 <dim>-1</dim>
10754 <dim>-1</dim>
10755 <dim>384</dim>
10756 </port>
10757 <port id="1" precision="I32">
10758 <dim>1</dim>
10759 </port>
10760 </input>
10761 <output>
10762 <port id="2" precision="FP32">
10763 <dim>-1</dim>
10764 <dim>-1</dim>
10765 <dim>384</dim>
10766 </port>
10767 </output>
10768 </layer>
10769 <layer id="695" name="Constant_103790" type="Const" version="opset1">
10770 <data element_type="f32" shape="1, 1, 384" offset="458197648" size="1536" />
10771 <output>
10772 <port id="0" precision="FP32">
10773 <dim>1</dim>
10774 <dim>1</dim>
10775 <dim>384</dim>
10776 </port>
10777 </output>
10778 </layer>
10779 <layer id="696" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
10780 <data auto_broadcast="numpy" />
10781 <input>
10782 <port id="0" precision="FP32">
10783 <dim>-1</dim>
10784 <dim>-1</dim>
10785 <dim>384</dim>
10786 </port>
10787 <port id="1" precision="FP32">
10788 <dim>1</dim>
10789 <dim>1</dim>
10790 <dim>384</dim>
10791 </port>
10792 </input>
10793 <output>
10794 <port id="2" precision="FP32">
10795 <dim>-1</dim>
10796 <dim>-1</dim>
10797 <dim>384</dim>
10798 </port>
10799 </output>
10800 </layer>
10801 <layer id="697" name="Constant_103791" type="Const" version="opset1">
10802 <data element_type="f32" shape="1, 1, 384" offset="458199184" size="1536" />
10803 <output>
10804 <port id="0" precision="FP32">
10805 <dim>1</dim>
10806 <dim>1</dim>
10807 <dim>384</dim>
10808 </port>
10809 </output>
10810 </layer>
10811 <layer id="698" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
10812 <data auto_broadcast="numpy" />
10813 <input>
10814 <port id="0" precision="FP32">
10815 <dim>-1</dim>
10816 <dim>-1</dim>
10817 <dim>384</dim>
10818 </port>
10819 <port id="1" precision="FP32">
10820 <dim>1</dim>
10821 <dim>1</dim>
10822 <dim>384</dim>
10823 </port>
10824 </input>
10825 <output>
10826 <port id="2" precision="FP32" names="891,input_tensor.21">
10827 <dim>-1</dim>
10828 <dim>-1</dim>
10829 <dim>384</dim>
10830 </port>
10831 </output>
10832 </layer>
10833 <layer id="699" name="self.encoder.layer.10.intermediate.dense.weight" type="Const" version="opset1">
10834 <data element_type="f32" shape="1536, 384" offset="458200720" size="2359296" />
10835 <output>
10836 <port id="0" precision="FP32" names="self.encoder.layer.10.intermediate.dense.weight">
10837 <dim>1536</dim>
10838 <dim>384</dim>
10839 </port>
10840 </output>
10841 </layer>
10842 <layer id="700" name="__module.encoder.layer.10.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
10843 <data transpose_a="false" transpose_b="true" />
10844 <input>
10845 <port id="0" precision="FP32">
10846 <dim>-1</dim>
10847 <dim>-1</dim>
10848 <dim>384</dim>
10849 </port>
10850 <port id="1" precision="FP32">
10851 <dim>1536</dim>
10852 <dim>384</dim>
10853 </port>
10854 </input>
10855 <output>
10856 <port id="2" precision="FP32">
10857 <dim>-1</dim>
10858 <dim>-1</dim>
10859 <dim>1536</dim>
10860 </port>
10861 </output>
10862 </layer>
10863 <layer id="701" name="Constant_103792" type="Const" version="opset1">
10864 <data element_type="f32" shape="1, 1, 1536" offset="460560016" size="6144" />
10865 <output>
10866 <port id="0" precision="FP32">
10867 <dim>1</dim>
10868 <dim>1</dim>
10869 <dim>1536</dim>
10870 </port>
10871 </output>
10872 </layer>
10873 <layer id="702" name="__module.encoder.layer.10.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
10874 <data auto_broadcast="numpy" />
10875 <input>
10876 <port id="0" precision="FP32">
10877 <dim>-1</dim>
10878 <dim>-1</dim>
10879 <dim>1536</dim>
10880 </port>
10881 <port id="1" precision="FP32">
10882 <dim>1</dim>
10883 <dim>1</dim>
10884 <dim>1536</dim>
10885 </port>
10886 </input>
10887 <output>
10888 <port id="2" precision="FP32" names="896">
10889 <dim>-1</dim>
10890 <dim>-1</dim>
10891 <dim>1536</dim>
10892 </port>
10893 </output>
10894 </layer>
10895 <layer id="703" name="__module.encoder.layer.10.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
10896 <data approximation_mode="ERF" />
10897 <input>
10898 <port id="0" precision="FP32">
10899 <dim>-1</dim>
10900 <dim>-1</dim>
10901 <dim>1536</dim>
10902 </port>
10903 </input>
10904 <output>
10905 <port id="1" precision="FP32" names="897">
10906 <dim>-1</dim>
10907 <dim>-1</dim>
10908 <dim>1536</dim>
10909 </port>
10910 </output>
10911 </layer>
10912 <layer id="704" name="self.encoder.layer.10.output.dense.weight" type="Const" version="opset1">
10913 <data element_type="f32" shape="384, 1536" offset="460566160" size="2359296" />
10914 <output>
10915 <port id="0" precision="FP32" names="self.encoder.layer.10.output.dense.weight">
10916 <dim>384</dim>
10917 <dim>1536</dim>
10918 </port>
10919 </output>
10920 </layer>
10921 <layer id="705" name="__module.encoder.layer.10.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
10922 <data transpose_a="false" transpose_b="true" />
10923 <input>
10924 <port id="0" precision="FP32">
10925 <dim>-1</dim>
10926 <dim>-1</dim>
10927 <dim>1536</dim>
10928 </port>
10929 <port id="1" precision="FP32">
10930 <dim>384</dim>
10931 <dim>1536</dim>
10932 </port>
10933 </input>
10934 <output>
10935 <port id="2" precision="FP32">
10936 <dim>-1</dim>
10937 <dim>-1</dim>
10938 <dim>384</dim>
10939 </port>
10940 </output>
10941 </layer>
10942 <layer id="706" name="Constant_103793" type="Const" version="opset1">
10943 <data element_type="f32" shape="1, 1, 384" offset="462925456" size="1536" />
10944 <output>
10945 <port id="0" precision="FP32">
10946 <dim>1</dim>
10947 <dim>1</dim>
10948 <dim>384</dim>
10949 </port>
10950 </output>
10951 </layer>
10952 <layer id="707" name="__module.encoder.layer.10.output.dense/aten::linear/Add" type="Add" version="opset1">
10953 <data auto_broadcast="numpy" />
10954 <input>
10955 <port id="0" precision="FP32">
10956 <dim>-1</dim>
10957 <dim>-1</dim>
10958 <dim>384</dim>
10959 </port>
10960 <port id="1" precision="FP32">
10961 <dim>1</dim>
10962 <dim>1</dim>
10963 <dim>384</dim>
10964 </port>
10965 </input>
10966 <output>
10967 <port id="2" precision="FP32" names="903,input.45">
10968 <dim>-1</dim>
10969 <dim>-1</dim>
10970 <dim>384</dim>
10971 </port>
10972 </output>
10973 </layer>
10974 <layer id="708" name="__module.encoder.layer.10.output/aten::add/Add" type="Add" version="opset1">
10975 <data auto_broadcast="numpy" />
10976 <input>
10977 <port id="0" precision="FP32">
10978 <dim>-1</dim>
10979 <dim>-1</dim>
10980 <dim>384</dim>
10981 </port>
10982 <port id="1" precision="FP32">
10983 <dim>-1</dim>
10984 <dim>-1</dim>
10985 <dim>384</dim>
10986 </port>
10987 </input>
10988 <output>
10989 <port id="2" precision="FP32" names="905">
10990 <dim>-1</dim>
10991 <dim>-1</dim>
10992 <dim>384</dim>
10993 </port>
10994 </output>
10995 </layer>
10996 <layer id="709" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
10997 <data element_type="i32" shape="1" offset="384850452" size="4" />
10998 <output>
10999 <port id="0" precision="I32">
11000 <dim>1</dim>
11001 </port>
11002 </output>
11003 </layer>
11004 <layer id="710" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
11005 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
11006 <input>
11007 <port id="0" precision="FP32">
11008 <dim>-1</dim>
11009 <dim>-1</dim>
11010 <dim>384</dim>
11011 </port>
11012 <port id="1" precision="I32">
11013 <dim>1</dim>
11014 </port>
11015 </input>
11016 <output>
11017 <port id="2" precision="FP32">
11018 <dim>-1</dim>
11019 <dim>-1</dim>
11020 <dim>384</dim>
11021 </port>
11022 </output>
11023 </layer>
11024 <layer id="711" name="Constant_103794" type="Const" version="opset1">
11025 <data element_type="f32" shape="1, 1, 384" offset="462926992" size="1536" />
11026 <output>
11027 <port id="0" precision="FP32">
11028 <dim>1</dim>
11029 <dim>1</dim>
11030 <dim>384</dim>
11031 </port>
11032 </output>
11033 </layer>
11034 <layer id="712" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
11035 <data auto_broadcast="numpy" />
11036 <input>
11037 <port id="0" precision="FP32">
11038 <dim>-1</dim>
11039 <dim>-1</dim>
11040 <dim>384</dim>
11041 </port>
11042 <port id="1" precision="FP32">
11043 <dim>1</dim>
11044 <dim>1</dim>
11045 <dim>384</dim>
11046 </port>
11047 </input>
11048 <output>
11049 <port id="2" precision="FP32">
11050 <dim>-1</dim>
11051 <dim>-1</dim>
11052 <dim>384</dim>
11053 </port>
11054 </output>
11055 </layer>
11056 <layer id="713" name="Constant_103795" type="Const" version="opset1">
11057 <data element_type="f32" shape="1, 1, 384" offset="462928528" size="1536" />
11058 <output>
11059 <port id="0" precision="FP32">
11060 <dim>1</dim>
11061 <dim>1</dim>
11062 <dim>384</dim>
11063 </port>
11064 </output>
11065 </layer>
11066 <layer id="714" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
11067 <data auto_broadcast="numpy" />
11068 <input>
11069 <port id="0" precision="FP32">
11070 <dim>-1</dim>
11071 <dim>-1</dim>
11072 <dim>384</dim>
11073 </port>
11074 <port id="1" precision="FP32">
11075 <dim>1</dim>
11076 <dim>1</dim>
11077 <dim>384</dim>
11078 </port>
11079 </input>
11080 <output>
11081 <port id="2" precision="FP32" names="909,hidden_states.67">
11082 <dim>-1</dim>
11083 <dim>-1</dim>
11084 <dim>384</dim>
11085 </port>
11086 </output>
11087 </layer>
11088 <layer id="715" name="self.encoder.layer.11.attention.self.query.weight" type="Const" version="opset1">
11089 <data element_type="f32" shape="384, 384" offset="462930064" size="589824" />
11090 <output>
11091 <port id="0" precision="FP32" names="self.encoder.layer.11.attention.self.query.weight">
11092 <dim>384</dim>
11093 <dim>384</dim>
11094 </port>
11095 </output>
11096 </layer>
11097 <layer id="716" name="__module.encoder.layer.11.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
11098 <data transpose_a="false" transpose_b="true" />
11099 <input>
11100 <port id="0" precision="FP32">
11101 <dim>-1</dim>
11102 <dim>-1</dim>
11103 <dim>384</dim>
11104 </port>
11105 <port id="1" precision="FP32">
11106 <dim>384</dim>
11107 <dim>384</dim>
11108 </port>
11109 </input>
11110 <output>
11111 <port id="2" precision="FP32">
11112 <dim>-1</dim>
11113 <dim>-1</dim>
11114 <dim>384</dim>
11115 </port>
11116 </output>
11117 </layer>
11118 <layer id="717" name="Constant_103796" type="Const" version="opset1">
11119 <data element_type="f32" shape="1, 1, 384" offset="463519888" size="1536" />
11120 <output>
11121 <port id="0" precision="FP32">
11122 <dim>1</dim>
11123 <dim>1</dim>
11124 <dim>384</dim>
11125 </port>
11126 </output>
11127 </layer>
11128 <layer id="718" name="__module.encoder.layer.11.attention.self.query/aten::linear/Add" type="Add" version="opset1">
11129 <data auto_broadcast="numpy" />
11130 <input>
11131 <port id="0" precision="FP32">
11132 <dim>-1</dim>
11133 <dim>-1</dim>
11134 <dim>384</dim>
11135 </port>
11136 <port id="1" precision="FP32">
11137 <dim>1</dim>
11138 <dim>1</dim>
11139 <dim>384</dim>
11140 </port>
11141 </input>
11142 <output>
11143 <port id="2" precision="FP32" names="922,x.133">
11144 <dim>-1</dim>
11145 <dim>-1</dim>
11146 <dim>384</dim>
11147 </port>
11148 </output>
11149 </layer>
11150 <layer id="719" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
11151 <data element_type="i64" shape="4" offset="385444888" size="32" />
11152 <output>
11153 <port id="0" precision="I64">
11154 <dim>4</dim>
11155 </port>
11156 </output>
11157 </layer>
11158 <layer id="720" name="__module.encoder.layer.11.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
11159 <data special_zero="true" />
11160 <input>
11161 <port id="0" precision="FP32">
11162 <dim>-1</dim>
11163 <dim>-1</dim>
11164 <dim>384</dim>
11165 </port>
11166 <port id="1" precision="I64">
11167 <dim>4</dim>
11168 </port>
11169 </input>
11170 <output>
11171 <port id="2" precision="FP32" names="926,x.135">
11172 <dim>-1</dim>
11173 <dim>-1</dim>
11174 <dim>12</dim>
11175 <dim>32</dim>
11176 </port>
11177 </output>
11178 </layer>
11179 <layer id="721" name="Constant_97053" type="Const" version="opset1">
11180 <data element_type="i64" shape="4" offset="385444920" size="32" />
11181 <output>
11182 <port id="0" precision="I64" names="927">
11183 <dim>4</dim>
11184 </port>
11185 </output>
11186 </layer>
11187 <layer id="722" name="__module.encoder.layer.11.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
11188 <input>
11189 <port id="0" precision="FP32">
11190 <dim>-1</dim>
11191 <dim>-1</dim>
11192 <dim>12</dim>
11193 <dim>32</dim>
11194 </port>
11195 <port id="1" precision="I64">
11196 <dim>4</dim>
11197 </port>
11198 </input>
11199 <output>
11200 <port id="2" precision="FP32" names="928">
11201 <dim>-1</dim>
11202 <dim>12</dim>
11203 <dim>-1</dim>
11204 <dim>32</dim>
11205 </port>
11206 </output>
11207 </layer>
11208 <layer id="723" name="self.encoder.layer.11.attention.self.key.weight" type="Const" version="opset1">
11209 <data element_type="f32" shape="384, 384" offset="463521424" size="589824" />
11210 <output>
11211 <port id="0" precision="FP32" names="self.encoder.layer.11.attention.self.key.weight">
11212 <dim>384</dim>
11213 <dim>384</dim>
11214 </port>
11215 </output>
11216 </layer>
11217 <layer id="724" name="__module.encoder.layer.11.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
11218 <data transpose_a="false" transpose_b="true" />
11219 <input>
11220 <port id="0" precision="FP32">
11221 <dim>-1</dim>
11222 <dim>-1</dim>
11223 <dim>384</dim>
11224 </port>
11225 <port id="1" precision="FP32">
11226 <dim>384</dim>
11227 <dim>384</dim>
11228 </port>
11229 </input>
11230 <output>
11231 <port id="2" precision="FP32">
11232 <dim>-1</dim>
11233 <dim>-1</dim>
11234 <dim>384</dim>
11235 </port>
11236 </output>
11237 </layer>
11238 <layer id="725" name="Constant_103797" type="Const" version="opset1">
11239 <data element_type="f32" shape="1, 1, 384" offset="464111248" size="1536" />
11240 <output>
11241 <port id="0" precision="FP32">
11242 <dim>1</dim>
11243 <dim>1</dim>
11244 <dim>384</dim>
11245 </port>
11246 </output>
11247 </layer>
11248 <layer id="726" name="__module.encoder.layer.11.attention.self.key/aten::linear/Add" type="Add" version="opset1">
11249 <data auto_broadcast="numpy" />
11250 <input>
11251 <port id="0" precision="FP32">
11252 <dim>-1</dim>
11253 <dim>-1</dim>
11254 <dim>384</dim>
11255 </port>
11256 <port id="1" precision="FP32">
11257 <dim>1</dim>
11258 <dim>1</dim>
11259 <dim>384</dim>
11260 </port>
11261 </input>
11262 <output>
11263 <port id="2" precision="FP32" names="931,x.137">
11264 <dim>-1</dim>
11265 <dim>-1</dim>
11266 <dim>384</dim>
11267 </port>
11268 </output>
11269 </layer>
11270 <layer id="727" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
11271 <data element_type="i64" shape="4" offset="385444888" size="32" />
11272 <output>
11273 <port id="0" precision="I64">
11274 <dim>4</dim>
11275 </port>
11276 </output>
11277 </layer>
11278 <layer id="728" name="__module.encoder.layer.11.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
11279 <data special_zero="true" />
11280 <input>
11281 <port id="0" precision="FP32">
11282 <dim>-1</dim>
11283 <dim>-1</dim>
11284 <dim>384</dim>
11285 </port>
11286 <port id="1" precision="I64">
11287 <dim>4</dim>
11288 </port>
11289 </input>
11290 <output>
11291 <port id="2" precision="FP32" names="935,x.139">
11292 <dim>-1</dim>
11293 <dim>-1</dim>
11294 <dim>12</dim>
11295 <dim>32</dim>
11296 </port>
11297 </output>
11298 </layer>
11299 <layer id="729" name="Constant_97076" type="Const" version="opset1">
11300 <data element_type="i64" shape="4" offset="385444920" size="32" />
11301 <output>
11302 <port id="0" precision="I64" names="936">
11303 <dim>4</dim>
11304 </port>
11305 </output>
11306 </layer>
11307 <layer id="730" name="__module.encoder.layer.11.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
11308 <input>
11309 <port id="0" precision="FP32">
11310 <dim>-1</dim>
11311 <dim>-1</dim>
11312 <dim>12</dim>
11313 <dim>32</dim>
11314 </port>
11315 <port id="1" precision="I64">
11316 <dim>4</dim>
11317 </port>
11318 </input>
11319 <output>
11320 <port id="2" precision="FP32" names="937">
11321 <dim>-1</dim>
11322 <dim>12</dim>
11323 <dim>-1</dim>
11324 <dim>32</dim>
11325 </port>
11326 </output>
11327 </layer>
11328 <layer id="731" name="self.encoder.layer.11.attention.self.value.weight" type="Const" version="opset1">
11329 <data element_type="f32" shape="384, 384" offset="464112784" size="589824" />
11330 <output>
11331 <port id="0" precision="FP32" names="self.encoder.layer.11.attention.self.value.weight">
11332 <dim>384</dim>
11333 <dim>384</dim>
11334 </port>
11335 </output>
11336 </layer>
11337 <layer id="732" name="__module.encoder.layer.11.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
11338 <data transpose_a="false" transpose_b="true" />
11339 <input>
11340 <port id="0" precision="FP32">
11341 <dim>-1</dim>
11342 <dim>-1</dim>
11343 <dim>384</dim>
11344 </port>
11345 <port id="1" precision="FP32">
11346 <dim>384</dim>
11347 <dim>384</dim>
11348 </port>
11349 </input>
11350 <output>
11351 <port id="2" precision="FP32">
11352 <dim>-1</dim>
11353 <dim>-1</dim>
11354 <dim>384</dim>
11355 </port>
11356 </output>
11357 </layer>
11358 <layer id="733" name="Constant_103798" type="Const" version="opset1">
11359 <data element_type="f32" shape="1, 1, 384" offset="464702608" size="1536" />
11360 <output>
11361 <port id="0" precision="FP32">
11362 <dim>1</dim>
11363 <dim>1</dim>
11364 <dim>384</dim>
11365 </port>
11366 </output>
11367 </layer>
11368 <layer id="734" name="__module.encoder.layer.11.attention.self.value/aten::linear/Add" type="Add" version="opset1">
11369 <data auto_broadcast="numpy" />
11370 <input>
11371 <port id="0" precision="FP32">
11372 <dim>-1</dim>
11373 <dim>-1</dim>
11374 <dim>384</dim>
11375 </port>
11376 <port id="1" precision="FP32">
11377 <dim>1</dim>
11378 <dim>1</dim>
11379 <dim>384</dim>
11380 </port>
11381 </input>
11382 <output>
11383 <port id="2" precision="FP32" names="940,x.141">
11384 <dim>-1</dim>
11385 <dim>-1</dim>
11386 <dim>384</dim>
11387 </port>
11388 </output>
11389 </layer>
11390 <layer id="735" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
11391 <data element_type="i64" shape="4" offset="385444888" size="32" />
11392 <output>
11393 <port id="0" precision="I64">
11394 <dim>4</dim>
11395 </port>
11396 </output>
11397 </layer>
11398 <layer id="736" name="__module.encoder.layer.11.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
11399 <data special_zero="true" />
11400 <input>
11401 <port id="0" precision="FP32">
11402 <dim>-1</dim>
11403 <dim>-1</dim>
11404 <dim>384</dim>
11405 </port>
11406 <port id="1" precision="I64">
11407 <dim>4</dim>
11408 </port>
11409 </input>
11410 <output>
11411 <port id="2" precision="FP32" names="944,x">
11412 <dim>-1</dim>
11413 <dim>-1</dim>
11414 <dim>12</dim>
11415 <dim>32</dim>
11416 </port>
11417 </output>
11418 </layer>
11419 <layer id="737" name="Constant_97099" type="Const" version="opset1">
11420 <data element_type="i64" shape="4" offset="385444920" size="32" />
11421 <output>
11422 <port id="0" precision="I64" names="945">
11423 <dim>4</dim>
11424 </port>
11425 </output>
11426 </layer>
11427 <layer id="738" name="__module.encoder.layer.11.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
11428 <input>
11429 <port id="0" precision="FP32">
11430 <dim>-1</dim>
11431 <dim>-1</dim>
11432 <dim>12</dim>
11433 <dim>32</dim>
11434 </port>
11435 <port id="1" precision="I64">
11436 <dim>4</dim>
11437 </port>
11438 </input>
11439 <output>
11440 <port id="2" precision="FP32" names="946">
11441 <dim>-1</dim>
11442 <dim>12</dim>
11443 <dim>-1</dim>
11444 <dim>32</dim>
11445 </port>
11446 </output>
11447 </layer>
11448 <layer id="739" name="__module.encoder.layer.11.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
11449 <data causal="false" />
11450 <input>
11451 <port id="0" precision="FP32">
11452 <dim>-1</dim>
11453 <dim>12</dim>
11454 <dim>-1</dim>
11455 <dim>32</dim>
11456 </port>
11457 <port id="1" precision="FP32">
11458 <dim>-1</dim>
11459 <dim>12</dim>
11460 <dim>-1</dim>
11461 <dim>32</dim>
11462 </port>
11463 <port id="2" precision="FP32">
11464 <dim>-1</dim>
11465 <dim>12</dim>
11466 <dim>-1</dim>
11467 <dim>32</dim>
11468 </port>
11469 <port id="3" precision="FP32">
11470 <dim>-1</dim>
11471 <dim>1</dim>
11472 <dim>-1</dim>
11473 <dim>-1</dim>
11474 </port>
11475 </input>
11476 <output>
11477 <port id="4" precision="FP32" names="947,attn_output.45">
11478 <dim>-1</dim>
11479 <dim>12</dim>
11480 <dim>-1</dim>
11481 <dim>32</dim>
11482 </port>
11483 </output>
11484 </layer>
11485 <layer id="740" name="__module.encoder.layer.11.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
11486 <data element_type="i32" shape="4" offset="386627688" size="16" />
11487 <output>
11488 <port id="0" precision="I32">
11489 <dim>4</dim>
11490 </port>
11491 </output>
11492 </layer>
11493 <layer id="741" name="__module.encoder.layer.11.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
11494 <input>
11495 <port id="0" precision="FP32">
11496 <dim>-1</dim>
11497 <dim>12</dim>
11498 <dim>-1</dim>
11499 <dim>32</dim>
11500 </port>
11501 <port id="1" precision="I32">
11502 <dim>4</dim>
11503 </port>
11504 </input>
11505 <output>
11506 <port id="2" precision="FP32" names="948,attn_output">
11507 <dim>-1</dim>
11508 <dim>-1</dim>
11509 <dim>12</dim>
11510 <dim>32</dim>
11511 </port>
11512 </output>
11513 </layer>
11514 <layer id="742" name="__module.encoder.layer.11.attention.self/aten::size/ShapeOf_6" type="ShapeOf" version="opset3">
11515 <data output_type="i64" />
11516 <input>
11517 <port id="0" precision="FP32">
11518 <dim>-1</dim>
11519 <dim>-1</dim>
11520 <dim>384</dim>
11521 </port>
11522 </input>
11523 <output>
11524 <port id="1" precision="I64">
11525 <dim>3</dim>
11526 </port>
11527 </output>
11528 </layer>
11529 <layer id="743" name="Constant_102971" type="Const" version="opset1">
11530 <data element_type="i64" shape="2" offset="386627704" size="16" />
11531 <output>
11532 <port id="0" precision="I64">
11533 <dim>2</dim>
11534 </port>
11535 </output>
11536 </layer>
11537 <layer id="744" name="Constant_102972" type="Const" version="opset1">
11538 <data element_type="i64" shape="" offset="384850436" size="8" />
11539 <output>
11540 <port id="0" precision="I64" />
11541 </output>
11542 </layer>
11543 <layer id="745" name="Gather_102973" type="Gather" version="opset8">
11544 <data batch_dims="0" />
11545 <input>
11546 <port id="0" precision="I64">
11547 <dim>3</dim>
11548 </port>
11549 <port id="1" precision="I64">
11550 <dim>2</dim>
11551 </port>
11552 <port id="2" precision="I64" />
11553 </input>
11554 <output>
11555 <port id="3" precision="I64">
11556 <dim>2</dim>
11557 </port>
11558 </output>
11559 </layer>
11560 <layer id="746" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat_3" type="Concat" version="opset1">
11561 <data axis="0" />
11562 <input>
11563 <port id="0" precision="I64">
11564 <dim>2</dim>
11565 </port>
11566 <port id="1" precision="I64">
11567 <dim>1</dim>
11568 </port>
11569 </input>
11570 <output>
11571 <port id="2" precision="I64" names="949">
11572 <dim>3</dim>
11573 </port>
11574 </output>
11575 </layer>
11576 <layer id="747" name="__module.encoder.layer.11.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
11577 <data special_zero="false" />
11578 <input>
11579 <port id="0" precision="FP32">
11580 <dim>-1</dim>
11581 <dim>-1</dim>
11582 <dim>12</dim>
11583 <dim>32</dim>
11584 </port>
11585 <port id="1" precision="I64">
11586 <dim>3</dim>
11587 </port>
11588 </input>
11589 <output>
11590 <port id="2" precision="FP32" names="950">
11591 <dim>-1</dim>
11592 <dim>-1</dim>
11593 <dim>384</dim>
11594 </port>
11595 </output>
11596 </layer>
11597 <layer id="748" name="self.encoder.layer.11.attention.output.dense.weight" type="Const" version="opset1">
11598 <data element_type="f32" shape="384, 384" offset="464704144" size="589824" />
11599 <output>
11600 <port id="0" precision="FP32" names="self.encoder.layer.11.attention.output.dense.weight">
11601 <dim>384</dim>
11602 <dim>384</dim>
11603 </port>
11604 </output>
11605 </layer>
11606 <layer id="749" name="__module.encoder.layer.11.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
11607 <data transpose_a="false" transpose_b="true" />
11608 <input>
11609 <port id="0" precision="FP32">
11610 <dim>-1</dim>
11611 <dim>-1</dim>
11612 <dim>384</dim>
11613 </port>
11614 <port id="1" precision="FP32">
11615 <dim>384</dim>
11616 <dim>384</dim>
11617 </port>
11618 </input>
11619 <output>
11620 <port id="2" precision="FP32">
11621 <dim>-1</dim>
11622 <dim>-1</dim>
11623 <dim>384</dim>
11624 </port>
11625 </output>
11626 </layer>
11627 <layer id="750" name="Constant_103799" type="Const" version="opset1">
11628 <data element_type="f32" shape="1, 1, 384" offset="465293968" size="1536" />
11629 <output>
11630 <port id="0" precision="FP32">
11631 <dim>1</dim>
11632 <dim>1</dim>
11633 <dim>384</dim>
11634 </port>
11635 </output>
11636 </layer>
11637 <layer id="751" name="__module.encoder.layer.11.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
11638 <data auto_broadcast="numpy" />
11639 <input>
11640 <port id="0" precision="FP32">
11641 <dim>-1</dim>
11642 <dim>-1</dim>
11643 <dim>384</dim>
11644 </port>
11645 <port id="1" precision="FP32">
11646 <dim>1</dim>
11647 <dim>1</dim>
11648 <dim>384</dim>
11649 </port>
11650 </input>
11651 <output>
11652 <port id="2" precision="FP32" names="956,input.47">
11653 <dim>-1</dim>
11654 <dim>-1</dim>
11655 <dim>384</dim>
11656 </port>
11657 </output>
11658 </layer>
11659 <layer id="752" name="__module.encoder.layer.11.attention.output/aten::add/Add" type="Add" version="opset1">
11660 <data auto_broadcast="numpy" />
11661 <input>
11662 <port id="0" precision="FP32">
11663 <dim>-1</dim>
11664 <dim>-1</dim>
11665 <dim>384</dim>
11666 </port>
11667 <port id="1" precision="FP32">
11668 <dim>-1</dim>
11669 <dim>-1</dim>
11670 <dim>384</dim>
11671 </port>
11672 </input>
11673 <output>
11674 <port id="2" precision="FP32" names="958">
11675 <dim>-1</dim>
11676 <dim>-1</dim>
11677 <dim>384</dim>
11678 </port>
11679 </output>
11680 </layer>
11681 <layer id="753" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
11682 <data element_type="i32" shape="1" offset="384850452" size="4" />
11683 <output>
11684 <port id="0" precision="I32">
11685 <dim>1</dim>
11686 </port>
11687 </output>
11688 </layer>
11689 <layer id="754" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
11690 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
11691 <input>
11692 <port id="0" precision="FP32">
11693 <dim>-1</dim>
11694 <dim>-1</dim>
11695 <dim>384</dim>
11696 </port>
11697 <port id="1" precision="I32">
11698 <dim>1</dim>
11699 </port>
11700 </input>
11701 <output>
11702 <port id="2" precision="FP32">
11703 <dim>-1</dim>
11704 <dim>-1</dim>
11705 <dim>384</dim>
11706 </port>
11707 </output>
11708 </layer>
11709 <layer id="755" name="Constant_103800" type="Const" version="opset1">
11710 <data element_type="f32" shape="1, 1, 384" offset="465295504" size="1536" />
11711 <output>
11712 <port id="0" precision="FP32">
11713 <dim>1</dim>
11714 <dim>1</dim>
11715 <dim>384</dim>
11716 </port>
11717 </output>
11718 </layer>
11719 <layer id="756" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
11720 <data auto_broadcast="numpy" />
11721 <input>
11722 <port id="0" precision="FP32">
11723 <dim>-1</dim>
11724 <dim>-1</dim>
11725 <dim>384</dim>
11726 </port>
11727 <port id="1" precision="FP32">
11728 <dim>1</dim>
11729 <dim>1</dim>
11730 <dim>384</dim>
11731 </port>
11732 </input>
11733 <output>
11734 <port id="2" precision="FP32">
11735 <dim>-1</dim>
11736 <dim>-1</dim>
11737 <dim>384</dim>
11738 </port>
11739 </output>
11740 </layer>
11741 <layer id="757" name="Constant_103801" type="Const" version="opset1">
11742 <data element_type="f32" shape="1, 1, 384" offset="465297040" size="1536" />
11743 <output>
11744 <port id="0" precision="FP32">
11745 <dim>1</dim>
11746 <dim>1</dim>
11747 <dim>384</dim>
11748 </port>
11749 </output>
11750 </layer>
11751 <layer id="758" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
11752 <data auto_broadcast="numpy" />
11753 <input>
11754 <port id="0" precision="FP32">
11755 <dim>-1</dim>
11756 <dim>-1</dim>
11757 <dim>384</dim>
11758 </port>
11759 <port id="1" precision="FP32">
11760 <dim>1</dim>
11761 <dim>1</dim>
11762 <dim>384</dim>
11763 </port>
11764 </input>
11765 <output>
11766 <port id="2" precision="FP32" names="962,input_tensor">
11767 <dim>-1</dim>
11768 <dim>-1</dim>
11769 <dim>384</dim>
11770 </port>
11771 </output>
11772 </layer>
11773 <layer id="759" name="self.encoder.layer.11.intermediate.dense.weight" type="Const" version="opset1">
11774 <data element_type="f32" shape="1536, 384" offset="465298576" size="2359296" />
11775 <output>
11776 <port id="0" precision="FP32" names="self.encoder.layer.11.intermediate.dense.weight">
11777 <dim>1536</dim>
11778 <dim>384</dim>
11779 </port>
11780 </output>
11781 </layer>
11782 <layer id="760" name="__module.encoder.layer.11.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
11783 <data transpose_a="false" transpose_b="true" />
11784 <input>
11785 <port id="0" precision="FP32">
11786 <dim>-1</dim>
11787 <dim>-1</dim>
11788 <dim>384</dim>
11789 </port>
11790 <port id="1" precision="FP32">
11791 <dim>1536</dim>
11792 <dim>384</dim>
11793 </port>
11794 </input>
11795 <output>
11796 <port id="2" precision="FP32">
11797 <dim>-1</dim>
11798 <dim>-1</dim>
11799 <dim>1536</dim>
11800 </port>
11801 </output>
11802 </layer>
11803 <layer id="761" name="Constant_103802" type="Const" version="opset1">
11804 <data element_type="f32" shape="1, 1, 1536" offset="467657872" size="6144" />
11805 <output>
11806 <port id="0" precision="FP32">
11807 <dim>1</dim>
11808 <dim>1</dim>
11809 <dim>1536</dim>
11810 </port>
11811 </output>
11812 </layer>
11813 <layer id="762" name="__module.encoder.layer.11.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
11814 <data auto_broadcast="numpy" />
11815 <input>
11816 <port id="0" precision="FP32">
11817 <dim>-1</dim>
11818 <dim>-1</dim>
11819 <dim>1536</dim>
11820 </port>
11821 <port id="1" precision="FP32">
11822 <dim>1</dim>
11823 <dim>1</dim>
11824 <dim>1536</dim>
11825 </port>
11826 </input>
11827 <output>
11828 <port id="2" precision="FP32" names="967">
11829 <dim>-1</dim>
11830 <dim>-1</dim>
11831 <dim>1536</dim>
11832 </port>
11833 </output>
11834 </layer>
11835 <layer id="763" name="__module.encoder.layer.11.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
11836 <data approximation_mode="ERF" />
11837 <input>
11838 <port id="0" precision="FP32">
11839 <dim>-1</dim>
11840 <dim>-1</dim>
11841 <dim>1536</dim>
11842 </port>
11843 </input>
11844 <output>
11845 <port id="1" precision="FP32" names="968">
11846 <dim>-1</dim>
11847 <dim>-1</dim>
11848 <dim>1536</dim>
11849 </port>
11850 </output>
11851 </layer>
11852 <layer id="764" name="self.encoder.layer.11.output.dense.weight" type="Const" version="opset1">
11853 <data element_type="f32" shape="384, 1536" offset="467664016" size="2359296" />
11854 <output>
11855 <port id="0" precision="FP32" names="self.encoder.layer.11.output.dense.weight">
11856 <dim>384</dim>
11857 <dim>1536</dim>
11858 </port>
11859 </output>
11860 </layer>
11861 <layer id="765" name="__module.encoder.layer.11.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
11862 <data transpose_a="false" transpose_b="true" />
11863 <input>
11864 <port id="0" precision="FP32">
11865 <dim>-1</dim>
11866 <dim>-1</dim>
11867 <dim>1536</dim>
11868 </port>
11869 <port id="1" precision="FP32">
11870 <dim>384</dim>
11871 <dim>1536</dim>
11872 </port>
11873 </input>
11874 <output>
11875 <port id="2" precision="FP32">
11876 <dim>-1</dim>
11877 <dim>-1</dim>
11878 <dim>384</dim>
11879 </port>
11880 </output>
11881 </layer>
11882 <layer id="766" name="Constant_103803" type="Const" version="opset1">
11883 <data element_type="f32" shape="1, 1, 384" offset="470023312" size="1536" />
11884 <output>
11885 <port id="0" precision="FP32">
11886 <dim>1</dim>
11887 <dim>1</dim>
11888 <dim>384</dim>
11889 </port>
11890 </output>
11891 </layer>
11892 <layer id="767" name="__module.encoder.layer.11.output.dense/aten::linear/Add" type="Add" version="opset1">
11893 <data auto_broadcast="numpy" />
11894 <input>
11895 <port id="0" precision="FP32">
11896 <dim>-1</dim>
11897 <dim>-1</dim>
11898 <dim>384</dim>
11899 </port>
11900 <port id="1" precision="FP32">
11901 <dim>1</dim>
11902 <dim>1</dim>
11903 <dim>384</dim>
11904 </port>
11905 </input>
11906 <output>
11907 <port id="2" precision="FP32" names="974,input">
11908 <dim>-1</dim>
11909 <dim>-1</dim>
11910 <dim>384</dim>
11911 </port>
11912 </output>
11913 </layer>
11914 <layer id="768" name="__module.encoder.layer.11.output/aten::add/Add" type="Add" version="opset1">
11915 <data auto_broadcast="numpy" />
11916 <input>
11917 <port id="0" precision="FP32">
11918 <dim>-1</dim>
11919 <dim>-1</dim>
11920 <dim>384</dim>
11921 </port>
11922 <port id="1" precision="FP32">
11923 <dim>-1</dim>
11924 <dim>-1</dim>
11925 <dim>384</dim>
11926 </port>
11927 </input>
11928 <output>
11929 <port id="2" precision="FP32" names="976">
11930 <dim>-1</dim>
11931 <dim>-1</dim>
11932 <dim>384</dim>
11933 </port>
11934 </output>
11935 </layer>
11936 <layer id="769" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
11937 <data element_type="i32" shape="1" offset="384850452" size="4" />
11938 <output>
11939 <port id="0" precision="I32">
11940 <dim>1</dim>
11941 </port>
11942 </output>
11943 </layer>
11944 <layer id="770" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
11945 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
11946 <input>
11947 <port id="0" precision="FP32">
11948 <dim>-1</dim>
11949 <dim>-1</dim>
11950 <dim>384</dim>
11951 </port>
11952 <port id="1" precision="I32">
11953 <dim>1</dim>
11954 </port>
11955 </input>
11956 <output>
11957 <port id="2" precision="FP32">
11958 <dim>-1</dim>
11959 <dim>-1</dim>
11960 <dim>384</dim>
11961 </port>
11962 </output>
11963 </layer>
11964 <layer id="771" name="Constant_103804" type="Const" version="opset1">
11965 <data element_type="f32" shape="1, 1, 384" offset="470024848" size="1536" />
11966 <output>
11967 <port id="0" precision="FP32">
11968 <dim>1</dim>
11969 <dim>1</dim>
11970 <dim>384</dim>
11971 </port>
11972 </output>
11973 </layer>
11974 <layer id="772" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
11975 <data auto_broadcast="numpy" />
11976 <input>
11977 <port id="0" precision="FP32">
11978 <dim>-1</dim>
11979 <dim>-1</dim>
11980 <dim>384</dim>
11981 </port>
11982 <port id="1" precision="FP32">
11983 <dim>1</dim>
11984 <dim>1</dim>
11985 <dim>384</dim>
11986 </port>
11987 </input>
11988 <output>
11989 <port id="2" precision="FP32">
11990 <dim>-1</dim>
11991 <dim>-1</dim>
11992 <dim>384</dim>
11993 </port>
11994 </output>
11995 </layer>
11996 <layer id="773" name="Constant_103805" type="Const" version="opset1">
11997 <data element_type="f32" shape="1, 1, 384" offset="470026384" size="1536" />
11998 <output>
11999 <port id="0" precision="FP32">
12000 <dim>1</dim>
12001 <dim>1</dim>
12002 <dim>384</dim>
12003 </port>
12004 </output>
12005 </layer>
12006 <layer id="774" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
12007 <data auto_broadcast="numpy" />
12008 <input>
12009 <port id="0" precision="FP32">
12010 <dim>-1</dim>
12011 <dim>-1</dim>
12012 <dim>384</dim>
12013 </port>
12014 <port id="1" precision="FP32">
12015 <dim>1</dim>
12016 <dim>1</dim>
12017 <dim>384</dim>
12018 </port>
12019 </input>
12020 <output>
12021 <port id="2" precision="FP32" names="last_hidden_state">
12022 <dim>-1</dim>
12023 <dim>-1</dim>
12024 <dim>384</dim>
12025 </port>
12026 </output>
12027 </layer>
12028 <layer id="775" name="Result_98991" type="Result" version="opset1">
12029 <input>
12030 <port id="0" precision="FP32">
12031 <dim>-1</dim>
12032 <dim>-1</dim>
12033 <dim>384</dim>
12034 </port>
12035 </input>
12036 </layer>
12037 </layers>
	<edges>
		<edge from-layer="0" from-port="0" to-layer="8" to-port="0" />
		<edge from-layer="1" from-port="0" to-layer="58" to-port="0" />
		<edge from-layer="1" from-port="0" to-layer="61" to-port="0" />
		<edge from-layer="2" from-port="0" to-layer="15" to-port="0" />
		<edge from-layer="2" from-port="0" to-layer="4" to-port="0" />
		<edge from-layer="3" from-port="0" to-layer="6" to-port="0" />
		<edge from-layer="4" from-port="1" to-layer="6" to-port="1" />
		<edge from-layer="5" from-port="0" to-layer="6" to-port="2" />
		<edge from-layer="6" from-port="3" to-layer="11" to-port="0" />
		<edge from-layer="7" from-port="0" to-layer="10" to-port="0" />
		<edge from-layer="8" from-port="1" to-layer="10" to-port="1" />
		<edge from-layer="9" from-port="0" to-layer="10" to-port="2" />
		<edge from-layer="10" from-port="3" to-layer="11" to-port="1" />
		<edge from-layer="11" from-port="2" to-layer="25" to-port="0" />
		<edge from-layer="12" from-port="0" to-layer="24" to-port="0" />
		<edge from-layer="13" from-port="0" to-layer="21" to-port="0" />
		<edge from-layer="14" from-port="0" to-layer="21" to-port="1" />
		<edge from-layer="15" from-port="1" to-layer="18" to-port="0" />
		<edge from-layer="16" from-port="0" to-layer="18" to-port="1" />
		<edge from-layer="17" from-port="0" to-layer="18" to-port="2" />
		<edge from-layer="18" from-port="3" to-layer="21" to-port="2" />
		<edge from-layer="18" from-port="3" to-layer="69" to-port="2" />
		<edge from-layer="19" from-port="0" to-layer="21" to-port="3" />
		<edge from-layer="20" from-port="0" to-layer="21" to-port="4" />
		<edge from-layer="21" from-port="5" to-layer="22" to-port="0" />
		<edge from-layer="22" from-port="1" to-layer="24" to-port="1" />
		<edge from-layer="23" from-port="0" to-layer="24" to-port="2" />
		<edge from-layer="24" from-port="3" to-layer="25" to-port="1" />
		<edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
		<edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
		<edge from-layer="27" from-port="2" to-layer="29" to-port="0" />
		<edge from-layer="28" from-port="0" to-layer="29" to-port="1" />
		<edge from-layer="29" from-port="2" to-layer="31" to-port="0" />
		<edge from-layer="30" from-port="0" to-layer="31" to-port="1" />
		<edge from-layer="31" from-port="2" to-layer="41" to-port="0" />
		<edge from-layer="31" from-port="2" to-layer="92" to-port="1" />
		<edge from-layer="31" from-port="2" to-layer="49" to-port="0" />
		<edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
		<edge from-layer="31" from-port="2" to-layer="81" to-port="0" />
		<edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
		<edge from-layer="33" from-port="2" to-layer="35" to-port="0" />
		<edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
		<edge from-layer="35" from-port="2" to-layer="37" to-port="0" />
		<edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
		<edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
		<edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
		<edge from-layer="39" from-port="2" to-layer="78" to-port="0" />
		<edge from-layer="40" from-port="0" to-layer="41" to-port="1" />
		<edge from-layer="41" from-port="2" to-layer="43" to-port="0" />
		<edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
		<edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
		<edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
		<edge from-layer="45" from-port="2" to-layer="47" to-port="0" />
		<edge from-layer="46" from-port="0" to-layer="47" to-port="1" />
		<edge from-layer="47" from-port="2" to-layer="78" to-port="1" />
		<edge from-layer="48" from-port="0" to-layer="49" to-port="1" />
		<edge from-layer="49" from-port="2" to-layer="51" to-port="0" />
		<edge from-layer="50" from-port="0" to-layer="51" to-port="1" />
		<edge from-layer="51" from-port="2" to-layer="53" to-port="0" />
		<edge from-layer="52" from-port="0" to-layer="53" to-port="1" />
		<edge from-layer="53" from-port="2" to-layer="55" to-port="0" />
		<edge from-layer="54" from-port="0" to-layer="55" to-port="1" />
		<edge from-layer="55" from-port="2" to-layer="78" to-port="2" />
		<edge from-layer="56" from-port="0" to-layer="74" to-port="0" />
		<edge from-layer="57" from-port="0" to-layer="58" to-port="1" />
		<edge from-layer="58" from-port="2" to-layer="60" to-port="0" />
		<edge from-layer="59" from-port="0" to-layer="60" to-port="1" />
		<edge from-layer="60" from-port="2" to-layer="70" to-port="0" />
		<edge from-layer="61" from-port="1" to-layer="68" to-port="0" />
		<edge from-layer="61" from-port="1" to-layer="64" to-port="0" />
		<edge from-layer="62" from-port="0" to-layer="64" to-port="1" />
		<edge from-layer="63" from-port="0" to-layer="64" to-port="2" />
		<edge from-layer="64" from-port="3" to-layer="69" to-port="0" />
		<edge from-layer="65" from-port="0" to-layer="69" to-port="1" />
		<edge from-layer="66" from-port="0" to-layer="68" to-port="1" />
		<edge from-layer="67" from-port="0" to-layer="68" to-port="2" />
		<edge from-layer="68" from-port="3" to-layer="69" to-port="3" />
		<edge from-layer="69" from-port="4" to-layer="70" to-port="1" />
		<edge from-layer="70" from-port="2" to-layer="71" to-port="0" />
		<edge from-layer="71" from-port="1" to-layer="73" to-port="0" />
		<edge from-layer="72" from-port="0" to-layer="73" to-port="1" />
		<edge from-layer="73" from-port="2" to-layer="74" to-port="1" />
		<edge from-layer="74" from-port="2" to-layer="75" to-port="0" />
		<edge from-layer="74" from-port="2" to-layer="77" to-port="2" />
		<edge from-layer="75" from-port="1" to-layer="77" to-port="0" />
		<edge from-layer="76" from-port="0" to-layer="77" to-port="1" />
		<edge from-layer="77" from-port="3" to-layer="78" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="559" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="139" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="499" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="199" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="439" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="379" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="319" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="259" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="739" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="619" to-port="3" />
		<edge from-layer="77" from-port="3" to-layer="679" to-port="3" />
		<edge from-layer="78" from-port="4" to-layer="80" to-port="0" />
		<edge from-layer="79" from-port="0" to-layer="80" to-port="1" />
		<edge from-layer="80" from-port="2" to-layer="87" to-port="0" />
		<edge from-layer="81" from-port="1" to-layer="84" to-port="0" />
		<edge from-layer="82" from-port="0" to-layer="84" to-port="1" />
		<edge from-layer="83" from-port="0" to-layer="84" to-port="2" />
		<edge from-layer="84" from-port="3" to-layer="86" to-port="0" />
		<edge from-layer="85" from-port="0" to-layer="86" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="686" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="146" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="626" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="566" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="506" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="446" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="386" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="326" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="746" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="266" to-port="1" />
		<edge from-layer="85" from-port="0" to-layer="206" to-port="1" />
		<edge from-layer="86" from-port="2" to-layer="87" to-port="1" />
		<edge from-layer="87" from-port="2" to-layer="89" to-port="0" />
		<edge from-layer="88" from-port="0" to-layer="89" to-port="1" />
		<edge from-layer="89" from-port="2" to-layer="91" to-port="0" />
		<edge from-layer="90" from-port="0" to-layer="91" to-port="1" />
		<edge from-layer="91" from-port="2" to-layer="92" to-port="0" />
		<edge from-layer="92" from-port="2" to-layer="94" to-port="0" />
		<edge from-layer="93" from-port="0" to-layer="94" to-port="1" />
		<edge from-layer="94" from-port="2" to-layer="96" to-port="0" />
		<edge from-layer="95" from-port="0" to-layer="96" to-port="1" />
		<edge from-layer="96" from-port="2" to-layer="98" to-port="0" />
		<edge from-layer="97" from-port="0" to-layer="98" to-port="1" />
		<edge from-layer="98" from-port="2" to-layer="100" to-port="0" />
		<edge from-layer="98" from-port="2" to-layer="108" to-port="1" />
		<edge from-layer="99" from-port="0" to-layer="100" to-port="1" />
		<edge from-layer="100" from-port="2" to-layer="102" to-port="0" />
		<edge from-layer="101" from-port="0" to-layer="102" to-port="1" />
		<edge from-layer="102" from-port="2" to-layer="103" to-port="0" />
		<edge from-layer="103" from-port="1" to-layer="105" to-port="0" />
		<edge from-layer="104" from-port="0" to-layer="105" to-port="1" />
		<edge from-layer="105" from-port="2" to-layer="107" to-port="0" />
		<edge from-layer="106" from-port="0" to-layer="107" to-port="1" />
		<edge from-layer="107" from-port="2" to-layer="108" to-port="0" />
		<edge from-layer="108" from-port="2" to-layer="110" to-port="0" />
		<edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
		<edge from-layer="110" from-port="2" to-layer="112" to-port="0" />
		<edge from-layer="111" from-port="0" to-layer="112" to-port="1" />
		<edge from-layer="112" from-port="2" to-layer="114" to-port="0" />
		<edge from-layer="113" from-port="0" to-layer="114" to-port="1" />
		<edge from-layer="114" from-port="2" to-layer="116" to-port="0" />
		<edge from-layer="114" from-port="2" to-layer="142" to-port="0" />
		<edge from-layer="114" from-port="2" to-layer="132" to-port="0" />
		<edge from-layer="114" from-port="2" to-layer="152" to-port="1" />
		<edge from-layer="114" from-port="2" to-layer="124" to-port="0" />
		<edge from-layer="115" from-port="0" to-layer="116" to-port="1" />
		<edge from-layer="116" from-port="2" to-layer="118" to-port="0" />
		<edge from-layer="117" from-port="0" to-layer="118" to-port="1" />
		<edge from-layer="118" from-port="2" to-layer="120" to-port="0" />
		<edge from-layer="119" from-port="0" to-layer="120" to-port="1" />
		<edge from-layer="120" from-port="2" to-layer="122" to-port="0" />
		<edge from-layer="121" from-port="0" to-layer="122" to-port="1" />
		<edge from-layer="122" from-port="2" to-layer="139" to-port="0" />
12198 <edge from-layer="123" from-port="0" to-layer="124" to-port="1" />
12199 <edge from-layer="124" from-port="2" to-layer="126" to-port="0" />
12200 <edge from-layer="125" from-port="0" to-layer="126" to-port="1" />
12201 <edge from-layer="126" from-port="2" to-layer="128" to-port="0" />
12202 <edge from-layer="127" from-port="0" to-layer="128" to-port="1" />
12203 <edge from-layer="128" from-port="2" to-layer="130" to-port="0" />
12204 <edge from-layer="129" from-port="0" to-layer="130" to-port="1" />
12205 <edge from-layer="130" from-port="2" to-layer="139" to-port="1" />
12206 <edge from-layer="131" from-port="0" to-layer="132" to-port="1" />
12207 <edge from-layer="132" from-port="2" to-layer="134" to-port="0" />
12208 <edge from-layer="133" from-port="0" to-layer="134" to-port="1" />
12209 <edge from-layer="134" from-port="2" to-layer="136" to-port="0" />
12210 <edge from-layer="135" from-port="0" to-layer="136" to-port="1" />
12211 <edge from-layer="136" from-port="2" to-layer="138" to-port="0" />
12212 <edge from-layer="137" from-port="0" to-layer="138" to-port="1" />
12213 <edge from-layer="138" from-port="2" to-layer="139" to-port="2" />
12214 <edge from-layer="139" from-port="4" to-layer="141" to-port="0" />
12215 <edge from-layer="140" from-port="0" to-layer="141" to-port="1" />
12216 <edge from-layer="141" from-port="2" to-layer="147" to-port="0" />
12217 <edge from-layer="142" from-port="1" to-layer="145" to-port="0" />
12218 <edge from-layer="143" from-port="0" to-layer="145" to-port="1" />
12219 <edge from-layer="144" from-port="0" to-layer="145" to-port="2" />
12220 <edge from-layer="145" from-port="3" to-layer="146" to-port="0" />
12221 <edge from-layer="146" from-port="2" to-layer="147" to-port="1" />
12222 <edge from-layer="147" from-port="2" to-layer="149" to-port="0" />
12223 <edge from-layer="148" from-port="0" to-layer="149" to-port="1" />
12224 <edge from-layer="149" from-port="2" to-layer="151" to-port="0" />
12225 <edge from-layer="150" from-port="0" to-layer="151" to-port="1" />
12226 <edge from-layer="151" from-port="2" to-layer="152" to-port="0" />
12227 <edge from-layer="152" from-port="2" to-layer="154" to-port="0" />
12228 <edge from-layer="153" from-port="0" to-layer="154" to-port="1" />
12229 <edge from-layer="154" from-port="2" to-layer="156" to-port="0" />
12230 <edge from-layer="155" from-port="0" to-layer="156" to-port="1" />
12231 <edge from-layer="156" from-port="2" to-layer="158" to-port="0" />
12232 <edge from-layer="157" from-port="0" to-layer="158" to-port="1" />
12233 <edge from-layer="158" from-port="2" to-layer="168" to-port="1" />
12234 <edge from-layer="158" from-port="2" to-layer="160" to-port="0" />
12235 <edge from-layer="159" from-port="0" to-layer="160" to-port="1" />
12236 <edge from-layer="160" from-port="2" to-layer="162" to-port="0" />
12237 <edge from-layer="161" from-port="0" to-layer="162" to-port="1" />
12238 <edge from-layer="162" from-port="2" to-layer="163" to-port="0" />
12239 <edge from-layer="163" from-port="1" to-layer="165" to-port="0" />
12240 <edge from-layer="164" from-port="0" to-layer="165" to-port="1" />
12241 <edge from-layer="165" from-port="2" to-layer="167" to-port="0" />
12242 <edge from-layer="166" from-port="0" to-layer="167" to-port="1" />
12243 <edge from-layer="167" from-port="2" to-layer="168" to-port="0" />
12244 <edge from-layer="168" from-port="2" to-layer="170" to-port="0" />
12245 <edge from-layer="169" from-port="0" to-layer="170" to-port="1" />
12246 <edge from-layer="170" from-port="2" to-layer="172" to-port="0" />
12247 <edge from-layer="171" from-port="0" to-layer="172" to-port="1" />
12248 <edge from-layer="172" from-port="2" to-layer="174" to-port="0" />
12249 <edge from-layer="173" from-port="0" to-layer="174" to-port="1" />
12250 <edge from-layer="174" from-port="2" to-layer="184" to-port="0" />
12251 <edge from-layer="174" from-port="2" to-layer="176" to-port="0" />
12252 <edge from-layer="174" from-port="2" to-layer="192" to-port="0" />
12253 <edge from-layer="174" from-port="2" to-layer="212" to-port="1" />
12254 <edge from-layer="174" from-port="2" to-layer="202" to-port="0" />
12255 <edge from-layer="175" from-port="0" to-layer="176" to-port="1" />
12256 <edge from-layer="176" from-port="2" to-layer="178" to-port="0" />
12257 <edge from-layer="177" from-port="0" to-layer="178" to-port="1" />
12258 <edge from-layer="178" from-port="2" to-layer="180" to-port="0" />
12259 <edge from-layer="179" from-port="0" to-layer="180" to-port="1" />
12260 <edge from-layer="180" from-port="2" to-layer="182" to-port="0" />
12261 <edge from-layer="181" from-port="0" to-layer="182" to-port="1" />
12262 <edge from-layer="182" from-port="2" to-layer="199" to-port="0" />
12263 <edge from-layer="183" from-port="0" to-layer="184" to-port="1" />
12264 <edge from-layer="184" from-port="2" to-layer="186" to-port="0" />
12265 <edge from-layer="185" from-port="0" to-layer="186" to-port="1" />
12266 <edge from-layer="186" from-port="2" to-layer="188" to-port="0" />
12267 <edge from-layer="187" from-port="0" to-layer="188" to-port="1" />
12268 <edge from-layer="188" from-port="2" to-layer="190" to-port="0" />
12269 <edge from-layer="189" from-port="0" to-layer="190" to-port="1" />
12270 <edge from-layer="190" from-port="2" to-layer="199" to-port="1" />
12271 <edge from-layer="191" from-port="0" to-layer="192" to-port="1" />
12272 <edge from-layer="192" from-port="2" to-layer="194" to-port="0" />
12273 <edge from-layer="193" from-port="0" to-layer="194" to-port="1" />
12274 <edge from-layer="194" from-port="2" to-layer="196" to-port="0" />
12275 <edge from-layer="195" from-port="0" to-layer="196" to-port="1" />
12276 <edge from-layer="196" from-port="2" to-layer="198" to-port="0" />
12277 <edge from-layer="197" from-port="0" to-layer="198" to-port="1" />
12278 <edge from-layer="198" from-port="2" to-layer="199" to-port="2" />
12279 <edge from-layer="199" from-port="4" to-layer="201" to-port="0" />
12280 <edge from-layer="200" from-port="0" to-layer="201" to-port="1" />
12281 <edge from-layer="201" from-port="2" to-layer="207" to-port="0" />
12282 <edge from-layer="202" from-port="1" to-layer="205" to-port="0" />
12283 <edge from-layer="203" from-port="0" to-layer="205" to-port="1" />
12284 <edge from-layer="204" from-port="0" to-layer="205" to-port="2" />
12285 <edge from-layer="205" from-port="3" to-layer="206" to-port="0" />
12286 <edge from-layer="206" from-port="2" to-layer="207" to-port="1" />
12287 <edge from-layer="207" from-port="2" to-layer="209" to-port="0" />
12288 <edge from-layer="208" from-port="0" to-layer="209" to-port="1" />
12289 <edge from-layer="209" from-port="2" to-layer="211" to-port="0" />
12290 <edge from-layer="210" from-port="0" to-layer="211" to-port="1" />
12291 <edge from-layer="211" from-port="2" to-layer="212" to-port="0" />
12292 <edge from-layer="212" from-port="2" to-layer="214" to-port="0" />
12293 <edge from-layer="213" from-port="0" to-layer="214" to-port="1" />
12294 <edge from-layer="214" from-port="2" to-layer="216" to-port="0" />
12295 <edge from-layer="215" from-port="0" to-layer="216" to-port="1" />
12296 <edge from-layer="216" from-port="2" to-layer="218" to-port="0" />
12297 <edge from-layer="217" from-port="0" to-layer="218" to-port="1" />
12298 <edge from-layer="218" from-port="2" to-layer="228" to-port="1" />
12299 <edge from-layer="218" from-port="2" to-layer="220" to-port="0" />
12300 <edge from-layer="219" from-port="0" to-layer="220" to-port="1" />
12301 <edge from-layer="220" from-port="2" to-layer="222" to-port="0" />
12302 <edge from-layer="221" from-port="0" to-layer="222" to-port="1" />
12303 <edge from-layer="222" from-port="2" to-layer="223" to-port="0" />
12304 <edge from-layer="223" from-port="1" to-layer="225" to-port="0" />
12305 <edge from-layer="224" from-port="0" to-layer="225" to-port="1" />
12306 <edge from-layer="225" from-port="2" to-layer="227" to-port="0" />
12307 <edge from-layer="226" from-port="0" to-layer="227" to-port="1" />
12308 <edge from-layer="227" from-port="2" to-layer="228" to-port="0" />
12309 <edge from-layer="228" from-port="2" to-layer="230" to-port="0" />
12310 <edge from-layer="229" from-port="0" to-layer="230" to-port="1" />
12311 <edge from-layer="230" from-port="2" to-layer="232" to-port="0" />
12312 <edge from-layer="231" from-port="0" to-layer="232" to-port="1" />
12313 <edge from-layer="232" from-port="2" to-layer="234" to-port="0" />
12314 <edge from-layer="233" from-port="0" to-layer="234" to-port="1" />
12315 <edge from-layer="234" from-port="2" to-layer="236" to-port="0" />
12316 <edge from-layer="234" from-port="2" to-layer="272" to-port="1" />
12317 <edge from-layer="234" from-port="2" to-layer="252" to-port="0" />
12318 <edge from-layer="234" from-port="2" to-layer="244" to-port="0" />
12319 <edge from-layer="234" from-port="2" to-layer="262" to-port="0" />
12320 <edge from-layer="235" from-port="0" to-layer="236" to-port="1" />
12321 <edge from-layer="236" from-port="2" to-layer="238" to-port="0" />
12322 <edge from-layer="237" from-port="0" to-layer="238" to-port="1" />
12323 <edge from-layer="238" from-port="2" to-layer="240" to-port="0" />
12324 <edge from-layer="239" from-port="0" to-layer="240" to-port="1" />
12325 <edge from-layer="240" from-port="2" to-layer="242" to-port="0" />
12326 <edge from-layer="241" from-port="0" to-layer="242" to-port="1" />
12327 <edge from-layer="242" from-port="2" to-layer="259" to-port="0" />
12328 <edge from-layer="243" from-port="0" to-layer="244" to-port="1" />
12329 <edge from-layer="244" from-port="2" to-layer="246" to-port="0" />
12330 <edge from-layer="245" from-port="0" to-layer="246" to-port="1" />
12331 <edge from-layer="246" from-port="2" to-layer="248" to-port="0" />
12332 <edge from-layer="247" from-port="0" to-layer="248" to-port="1" />
12333 <edge from-layer="248" from-port="2" to-layer="250" to-port="0" />
12334 <edge from-layer="249" from-port="0" to-layer="250" to-port="1" />
12335 <edge from-layer="250" from-port="2" to-layer="259" to-port="1" />
12336 <edge from-layer="251" from-port="0" to-layer="252" to-port="1" />
12337 <edge from-layer="252" from-port="2" to-layer="254" to-port="0" />
12338 <edge from-layer="253" from-port="0" to-layer="254" to-port="1" />
12339 <edge from-layer="254" from-port="2" to-layer="256" to-port="0" />
12340 <edge from-layer="255" from-port="0" to-layer="256" to-port="1" />
12341 <edge from-layer="256" from-port="2" to-layer="258" to-port="0" />
12342 <edge from-layer="257" from-port="0" to-layer="258" to-port="1" />
12343 <edge from-layer="258" from-port="2" to-layer="259" to-port="2" />
12344 <edge from-layer="259" from-port="4" to-layer="261" to-port="0" />
12345 <edge from-layer="260" from-port="0" to-layer="261" to-port="1" />
12346 <edge from-layer="261" from-port="2" to-layer="267" to-port="0" />
12347 <edge from-layer="262" from-port="1" to-layer="265" to-port="0" />
12348 <edge from-layer="263" from-port="0" to-layer="265" to-port="1" />
12349 <edge from-layer="264" from-port="0" to-layer="265" to-port="2" />
12350 <edge from-layer="265" from-port="3" to-layer="266" to-port="0" />
12351 <edge from-layer="266" from-port="2" to-layer="267" to-port="1" />
12352 <edge from-layer="267" from-port="2" to-layer="269" to-port="0" />
12353 <edge from-layer="268" from-port="0" to-layer="269" to-port="1" />
12354 <edge from-layer="269" from-port="2" to-layer="271" to-port="0" />
12355 <edge from-layer="270" from-port="0" to-layer="271" to-port="1" />
12356 <edge from-layer="271" from-port="2" to-layer="272" to-port="0" />
12357 <edge from-layer="272" from-port="2" to-layer="274" to-port="0" />
12358 <edge from-layer="273" from-port="0" to-layer="274" to-port="1" />
12359 <edge from-layer="274" from-port="2" to-layer="276" to-port="0" />
12360 <edge from-layer="275" from-port="0" to-layer="276" to-port="1" />
12361 <edge from-layer="276" from-port="2" to-layer="278" to-port="0" />
12362 <edge from-layer="277" from-port="0" to-layer="278" to-port="1" />
12363 <edge from-layer="278" from-port="2" to-layer="280" to-port="0" />
12364 <edge from-layer="278" from-port="2" to-layer="288" to-port="1" />
12365 <edge from-layer="279" from-port="0" to-layer="280" to-port="1" />
12366 <edge from-layer="280" from-port="2" to-layer="282" to-port="0" />
12367 <edge from-layer="281" from-port="0" to-layer="282" to-port="1" />
12368 <edge from-layer="282" from-port="2" to-layer="283" to-port="0" />
12369 <edge from-layer="283" from-port="1" to-layer="285" to-port="0" />
12370 <edge from-layer="284" from-port="0" to-layer="285" to-port="1" />
12371 <edge from-layer="285" from-port="2" to-layer="287" to-port="0" />
12372 <edge from-layer="286" from-port="0" to-layer="287" to-port="1" />
12373 <edge from-layer="287" from-port="2" to-layer="288" to-port="0" />
12374 <edge from-layer="288" from-port="2" to-layer="290" to-port="0" />
12375 <edge from-layer="289" from-port="0" to-layer="290" to-port="1" />
12376 <edge from-layer="290" from-port="2" to-layer="292" to-port="0" />
12377 <edge from-layer="291" from-port="0" to-layer="292" to-port="1" />
12378 <edge from-layer="292" from-port="2" to-layer="294" to-port="0" />
12379 <edge from-layer="293" from-port="0" to-layer="294" to-port="1" />
12380 <edge from-layer="294" from-port="2" to-layer="296" to-port="0" />
12381 <edge from-layer="294" from-port="2" to-layer="312" to-port="0" />
12382 <edge from-layer="294" from-port="2" to-layer="322" to-port="0" />
12383 <edge from-layer="294" from-port="2" to-layer="332" to-port="1" />
12384 <edge from-layer="294" from-port="2" to-layer="304" to-port="0" />
12385 <edge from-layer="295" from-port="0" to-layer="296" to-port="1" />
12386 <edge from-layer="296" from-port="2" to-layer="298" to-port="0" />
12387 <edge from-layer="297" from-port="0" to-layer="298" to-port="1" />
12388 <edge from-layer="298" from-port="2" to-layer="300" to-port="0" />
12389 <edge from-layer="299" from-port="0" to-layer="300" to-port="1" />
12390 <edge from-layer="300" from-port="2" to-layer="302" to-port="0" />
12391 <edge from-layer="301" from-port="0" to-layer="302" to-port="1" />
12392 <edge from-layer="302" from-port="2" to-layer="319" to-port="0" />
12393 <edge from-layer="303" from-port="0" to-layer="304" to-port="1" />
12394 <edge from-layer="304" from-port="2" to-layer="306" to-port="0" />
12395 <edge from-layer="305" from-port="0" to-layer="306" to-port="1" />
12396 <edge from-layer="306" from-port="2" to-layer="308" to-port="0" />
12397 <edge from-layer="307" from-port="0" to-layer="308" to-port="1" />
12398 <edge from-layer="308" from-port="2" to-layer="310" to-port="0" />
12399 <edge from-layer="309" from-port="0" to-layer="310" to-port="1" />
12400 <edge from-layer="310" from-port="2" to-layer="319" to-port="1" />
12401 <edge from-layer="311" from-port="0" to-layer="312" to-port="1" />
12402 <edge from-layer="312" from-port="2" to-layer="314" to-port="0" />
12403 <edge from-layer="313" from-port="0" to-layer="314" to-port="1" />
12404 <edge from-layer="314" from-port="2" to-layer="316" to-port="0" />
12405 <edge from-layer="315" from-port="0" to-layer="316" to-port="1" />
12406 <edge from-layer="316" from-port="2" to-layer="318" to-port="0" />
12407 <edge from-layer="317" from-port="0" to-layer="318" to-port="1" />
12408 <edge from-layer="318" from-port="2" to-layer="319" to-port="2" />
12409 <edge from-layer="319" from-port="4" to-layer="321" to-port="0" />
12410 <edge from-layer="320" from-port="0" to-layer="321" to-port="1" />
12411 <edge from-layer="321" from-port="2" to-layer="327" to-port="0" />
12412 <edge from-layer="322" from-port="1" to-layer="325" to-port="0" />
12413 <edge from-layer="323" from-port="0" to-layer="325" to-port="1" />
12414 <edge from-layer="324" from-port="0" to-layer="325" to-port="2" />
12415 <edge from-layer="325" from-port="3" to-layer="326" to-port="0" />
12416 <edge from-layer="326" from-port="2" to-layer="327" to-port="1" />
12417 <edge from-layer="327" from-port="2" to-layer="329" to-port="0" />
12418 <edge from-layer="328" from-port="0" to-layer="329" to-port="1" />
12419 <edge from-layer="329" from-port="2" to-layer="331" to-port="0" />
12420 <edge from-layer="330" from-port="0" to-layer="331" to-port="1" />
12421 <edge from-layer="331" from-port="2" to-layer="332" to-port="0" />
12422 <edge from-layer="332" from-port="2" to-layer="334" to-port="0" />
12423 <edge from-layer="333" from-port="0" to-layer="334" to-port="1" />
12424 <edge from-layer="334" from-port="2" to-layer="336" to-port="0" />
12425 <edge from-layer="335" from-port="0" to-layer="336" to-port="1" />
12426 <edge from-layer="336" from-port="2" to-layer="338" to-port="0" />
12427 <edge from-layer="337" from-port="0" to-layer="338" to-port="1" />
12428 <edge from-layer="338" from-port="2" to-layer="340" to-port="0" />
12429 <edge from-layer="338" from-port="2" to-layer="348" to-port="1" />
12430 <edge from-layer="339" from-port="0" to-layer="340" to-port="1" />
12431 <edge from-layer="340" from-port="2" to-layer="342" to-port="0" />
12432 <edge from-layer="341" from-port="0" to-layer="342" to-port="1" />
12433 <edge from-layer="342" from-port="2" to-layer="343" to-port="0" />
12434 <edge from-layer="343" from-port="1" to-layer="345" to-port="0" />
12435 <edge from-layer="344" from-port="0" to-layer="345" to-port="1" />
12436 <edge from-layer="345" from-port="2" to-layer="347" to-port="0" />
12437 <edge from-layer="346" from-port="0" to-layer="347" to-port="1" />
12438 <edge from-layer="347" from-port="2" to-layer="348" to-port="0" />
12439 <edge from-layer="348" from-port="2" to-layer="350" to-port="0" />
12440 <edge from-layer="349" from-port="0" to-layer="350" to-port="1" />
12441 <edge from-layer="350" from-port="2" to-layer="352" to-port="0" />
12442 <edge from-layer="351" from-port="0" to-layer="352" to-port="1" />
12443 <edge from-layer="352" from-port="2" to-layer="354" to-port="0" />
12444 <edge from-layer="353" from-port="0" to-layer="354" to-port="1" />
12445 <edge from-layer="354" from-port="2" to-layer="364" to-port="0" />
12446 <edge from-layer="354" from-port="2" to-layer="356" to-port="0" />
12447 <edge from-layer="354" from-port="2" to-layer="382" to-port="0" />
12448 <edge from-layer="354" from-port="2" to-layer="392" to-port="1" />
12449 <edge from-layer="354" from-port="2" to-layer="372" to-port="0" />
12450 <edge from-layer="355" from-port="0" to-layer="356" to-port="1" />
12451 <edge from-layer="356" from-port="2" to-layer="358" to-port="0" />
12452 <edge from-layer="357" from-port="0" to-layer="358" to-port="1" />
12453 <edge from-layer="358" from-port="2" to-layer="360" to-port="0" />
12454 <edge from-layer="359" from-port="0" to-layer="360" to-port="1" />
12455 <edge from-layer="360" from-port="2" to-layer="362" to-port="0" />
12456 <edge from-layer="361" from-port="0" to-layer="362" to-port="1" />
12457 <edge from-layer="362" from-port="2" to-layer="379" to-port="0" />
12458 <edge from-layer="363" from-port="0" to-layer="364" to-port="1" />
12459 <edge from-layer="364" from-port="2" to-layer="366" to-port="0" />
12460 <edge from-layer="365" from-port="0" to-layer="366" to-port="1" />
12461 <edge from-layer="366" from-port="2" to-layer="368" to-port="0" />
12462 <edge from-layer="367" from-port="0" to-layer="368" to-port="1" />
12463 <edge from-layer="368" from-port="2" to-layer="370" to-port="0" />
12464 <edge from-layer="369" from-port="0" to-layer="370" to-port="1" />
12465 <edge from-layer="370" from-port="2" to-layer="379" to-port="1" />
12466 <edge from-layer="371" from-port="0" to-layer="372" to-port="1" />
12467 <edge from-layer="372" from-port="2" to-layer="374" to-port="0" />
12468 <edge from-layer="373" from-port="0" to-layer="374" to-port="1" />
12469 <edge from-layer="374" from-port="2" to-layer="376" to-port="0" />
12470 <edge from-layer="375" from-port="0" to-layer="376" to-port="1" />
12471 <edge from-layer="376" from-port="2" to-layer="378" to-port="0" />
12472 <edge from-layer="377" from-port="0" to-layer="378" to-port="1" />
12473 <edge from-layer="378" from-port="2" to-layer="379" to-port="2" />
12474 <edge from-layer="379" from-port="4" to-layer="381" to-port="0" />
12475 <edge from-layer="380" from-port="0" to-layer="381" to-port="1" />
12476 <edge from-layer="381" from-port="2" to-layer="387" to-port="0" />
12477 <edge from-layer="382" from-port="1" to-layer="385" to-port="0" />
12478 <edge from-layer="383" from-port="0" to-layer="385" to-port="1" />
12479 <edge from-layer="384" from-port="0" to-layer="385" to-port="2" />
12480 <edge from-layer="385" from-port="3" to-layer="386" to-port="0" />
12481 <edge from-layer="386" from-port="2" to-layer="387" to-port="1" />
12482 <edge from-layer="387" from-port="2" to-layer="389" to-port="0" />
12483 <edge from-layer="388" from-port="0" to-layer="389" to-port="1" />
12484 <edge from-layer="389" from-port="2" to-layer="391" to-port="0" />
12485 <edge from-layer="390" from-port="0" to-layer="391" to-port="1" />
12486 <edge from-layer="391" from-port="2" to-layer="392" to-port="0" />
12487 <edge from-layer="392" from-port="2" to-layer="394" to-port="0" />
12488 <edge from-layer="393" from-port="0" to-layer="394" to-port="1" />
12489 <edge from-layer="394" from-port="2" to-layer="396" to-port="0" />
12490 <edge from-layer="395" from-port="0" to-layer="396" to-port="1" />
12491 <edge from-layer="396" from-port="2" to-layer="398" to-port="0" />
12492 <edge from-layer="397" from-port="0" to-layer="398" to-port="1" />
12493 <edge from-layer="398" from-port="2" to-layer="408" to-port="1" />
12494 <edge from-layer="398" from-port="2" to-layer="400" to-port="0" />
12495 <edge from-layer="399" from-port="0" to-layer="400" to-port="1" />
12496 <edge from-layer="400" from-port="2" to-layer="402" to-port="0" />
12497 <edge from-layer="401" from-port="0" to-layer="402" to-port="1" />
12498 <edge from-layer="402" from-port="2" to-layer="403" to-port="0" />
12499 <edge from-layer="403" from-port="1" to-layer="405" to-port="0" />
12500 <edge from-layer="404" from-port="0" to-layer="405" to-port="1" />
12501 <edge from-layer="405" from-port="2" to-layer="407" to-port="0" />
12502 <edge from-layer="406" from-port="0" to-layer="407" to-port="1" />
12503 <edge from-layer="407" from-port="2" to-layer="408" to-port="0" />
12504 <edge from-layer="408" from-port="2" to-layer="410" to-port="0" />
12505 <edge from-layer="409" from-port="0" to-layer="410" to-port="1" />
12506 <edge from-layer="410" from-port="2" to-layer="412" to-port="0" />
12507 <edge from-layer="411" from-port="0" to-layer="412" to-port="1" />
12508 <edge from-layer="412" from-port="2" to-layer="414" to-port="0" />
12509 <edge from-layer="413" from-port="0" to-layer="414" to-port="1" />
12510 <edge from-layer="414" from-port="2" to-layer="416" to-port="0" />
12511 <edge from-layer="414" from-port="2" to-layer="424" to-port="0" />
12512 <edge from-layer="414" from-port="2" to-layer="452" to-port="1" />
12513 <edge from-layer="414" from-port="2" to-layer="442" to-port="0" />
12514 <edge from-layer="414" from-port="2" to-layer="432" to-port="0" />
12515 <edge from-layer="415" from-port="0" to-layer="416" to-port="1" />
12516 <edge from-layer="416" from-port="2" to-layer="418" to-port="0" />
12517 <edge from-layer="417" from-port="0" to-layer="418" to-port="1" />
12518 <edge from-layer="418" from-port="2" to-layer="420" to-port="0" />
12519 <edge from-layer="419" from-port="0" to-layer="420" to-port="1" />
12520 <edge from-layer="420" from-port="2" to-layer="422" to-port="0" />
12521 <edge from-layer="421" from-port="0" to-layer="422" to-port="1" />
12522 <edge from-layer="422" from-port="2" to-layer="439" to-port="0" />
12523 <edge from-layer="423" from-port="0" to-layer="424" to-port="1" />
12524 <edge from-layer="424" from-port="2" to-layer="426" to-port="0" />
12525 <edge from-layer="425" from-port="0" to-layer="426" to-port="1" />
12526 <edge from-layer="426" from-port="2" to-layer="428" to-port="0" />
12527 <edge from-layer="427" from-port="0" to-layer="428" to-port="1" />
12528 <edge from-layer="428" from-port="2" to-layer="430" to-port="0" />
12529 <edge from-layer="429" from-port="0" to-layer="430" to-port="1" />
12530 <edge from-layer="430" from-port="2" to-layer="439" to-port="1" />
12531 <edge from-layer="431" from-port="0" to-layer="432" to-port="1" />
12532 <edge from-layer="432" from-port="2" to-layer="434" to-port="0" />
12533 <edge from-layer="433" from-port="0" to-layer="434" to-port="1" />
12534 <edge from-layer="434" from-port="2" to-layer="436" to-port="0" />
12535 <edge from-layer="435" from-port="0" to-layer="436" to-port="1" />
12536 <edge from-layer="436" from-port="2" to-layer="438" to-port="0" />
12537 <edge from-layer="437" from-port="0" to-layer="438" to-port="1" />
12538 <edge from-layer="438" from-port="2" to-layer="439" to-port="2" />
12539 <edge from-layer="439" from-port="4" to-layer="441" to-port="0" />
12540 <edge from-layer="440" from-port="0" to-layer="441" to-port="1" />
12541 <edge from-layer="441" from-port="2" to-layer="447" to-port="0" />
12542 <edge from-layer="442" from-port="1" to-layer="445" to-port="0" />
12543 <edge from-layer="443" from-port="0" to-layer="445" to-port="1" />
12544 <edge from-layer="444" from-port="0" to-layer="445" to-port="2" />
12545 <edge from-layer="445" from-port="3" to-layer="446" to-port="0" />
12546 <edge from-layer="446" from-port="2" to-layer="447" to-port="1" />
12547 <edge from-layer="447" from-port="2" to-layer="449" to-port="0" />
12548 <edge from-layer="448" from-port="0" to-layer="449" to-port="1" />
12549 <edge from-layer="449" from-port="2" to-layer="451" to-port="0" />
12550 <edge from-layer="450" from-port="0" to-layer="451" to-port="1" />
12551 <edge from-layer="451" from-port="2" to-layer="452" to-port="0" />
12552 <edge from-layer="452" from-port="2" to-layer="454" to-port="0" />
12553 <edge from-layer="453" from-port="0" to-layer="454" to-port="1" />
12554 <edge from-layer="454" from-port="2" to-layer="456" to-port="0" />
12555 <edge from-layer="455" from-port="0" to-layer="456" to-port="1" />
12556 <edge from-layer="456" from-port="2" to-layer="458" to-port="0" />
12557 <edge from-layer="457" from-port="0" to-layer="458" to-port="1" />
12558 <edge from-layer="458" from-port="2" to-layer="468" to-port="1" />
12559 <edge from-layer="458" from-port="2" to-layer="460" to-port="0" />
12560 <edge from-layer="459" from-port="0" to-layer="460" to-port="1" />
12561 <edge from-layer="460" from-port="2" to-layer="462" to-port="0" />
12562 <edge from-layer="461" from-port="0" to-layer="462" to-port="1" />
12563 <edge from-layer="462" from-port="2" to-layer="463" to-port="0" />
12564 <edge from-layer="463" from-port="1" to-layer="465" to-port="0" />
12565 <edge from-layer="464" from-port="0" to-layer="465" to-port="1" />
12566 <edge from-layer="465" from-port="2" to-layer="467" to-port="0" />
12567 <edge from-layer="466" from-port="0" to-layer="467" to-port="1" />
12568 <edge from-layer="467" from-port="2" to-layer="468" to-port="0" />
12569 <edge from-layer="468" from-port="2" to-layer="470" to-port="0" />
12570 <edge from-layer="469" from-port="0" to-layer="470" to-port="1" />
12571 <edge from-layer="470" from-port="2" to-layer="472" to-port="0" />
12572 <edge from-layer="471" from-port="0" to-layer="472" to-port="1" />
12573 <edge from-layer="472" from-port="2" to-layer="474" to-port="0" />
12574 <edge from-layer="473" from-port="0" to-layer="474" to-port="1" />
12575 <edge from-layer="474" from-port="2" to-layer="484" to-port="0" />
12576 <edge from-layer="474" from-port="2" to-layer="492" to-port="0" />
12577 <edge from-layer="474" from-port="2" to-layer="512" to-port="1" />
12578 <edge from-layer="474" from-port="2" to-layer="502" to-port="0" />
12579 <edge from-layer="474" from-port="2" to-layer="476" to-port="0" />
12580 <edge from-layer="475" from-port="0" to-layer="476" to-port="1" />
12581 <edge from-layer="476" from-port="2" to-layer="478" to-port="0" />
12582 <edge from-layer="477" from-port="0" to-layer="478" to-port="1" />
12583 <edge from-layer="478" from-port="2" to-layer="480" to-port="0" />
12584 <edge from-layer="479" from-port="0" to-layer="480" to-port="1" />
12585 <edge from-layer="480" from-port="2" to-layer="482" to-port="0" />
12586 <edge from-layer="481" from-port="0" to-layer="482" to-port="1" />
12587 <edge from-layer="482" from-port="2" to-layer="499" to-port="0" />
12588 <edge from-layer="483" from-port="0" to-layer="484" to-port="1" />
12589 <edge from-layer="484" from-port="2" to-layer="486" to-port="0" />
12590 <edge from-layer="485" from-port="0" to-layer="486" to-port="1" />
12591 <edge from-layer="486" from-port="2" to-layer="488" to-port="0" />
12592 <edge from-layer="487" from-port="0" to-layer="488" to-port="1" />
12593 <edge from-layer="488" from-port="2" to-layer="490" to-port="0" />
12594 <edge from-layer="489" from-port="0" to-layer="490" to-port="1" />
12595 <edge from-layer="490" from-port="2" to-layer="499" to-port="1" />
12596 <edge from-layer="491" from-port="0" to-layer="492" to-port="1" />
12597 <edge from-layer="492" from-port="2" to-layer="494" to-port="0" />
12598 <edge from-layer="493" from-port="0" to-layer="494" to-port="1" />
12599 <edge from-layer="494" from-port="2" to-layer="496" to-port="0" />
12600 <edge from-layer="495" from-port="0" to-layer="496" to-port="1" />
12601 <edge from-layer="496" from-port="2" to-layer="498" to-port="0" />
12602 <edge from-layer="497" from-port="0" to-layer="498" to-port="1" />
12603 <edge from-layer="498" from-port="2" to-layer="499" to-port="2" />
12604 <edge from-layer="499" from-port="4" to-layer="501" to-port="0" />
12605 <edge from-layer="500" from-port="0" to-layer="501" to-port="1" />
12606 <edge from-layer="501" from-port="2" to-layer="507" to-port="0" />
12607 <edge from-layer="502" from-port="1" to-layer="505" to-port="0" />
12608 <edge from-layer="503" from-port="0" to-layer="505" to-port="1" />
12609 <edge from-layer="504" from-port="0" to-layer="505" to-port="2" />
12610 <edge from-layer="505" from-port="3" to-layer="506" to-port="0" />
12611 <edge from-layer="506" from-port="2" to-layer="507" to-port="1" />
12612 <edge from-layer="507" from-port="2" to-layer="509" to-port="0" />
12613 <edge from-layer="508" from-port="0" to-layer="509" to-port="1" />
12614 <edge from-layer="509" from-port="2" to-layer="511" to-port="0" />
12615 <edge from-layer="510" from-port="0" to-layer="511" to-port="1" />
12616 <edge from-layer="511" from-port="2" to-layer="512" to-port="0" />
12617 <edge from-layer="512" from-port="2" to-layer="514" to-port="0" />
12618 <edge from-layer="513" from-port="0" to-layer="514" to-port="1" />
12619 <edge from-layer="514" from-port="2" to-layer="516" to-port="0" />
12620 <edge from-layer="515" from-port="0" to-layer="516" to-port="1" />
12621 <edge from-layer="516" from-port="2" to-layer="518" to-port="0" />
12622 <edge from-layer="517" from-port="0" to-layer="518" to-port="1" />
12623 <edge from-layer="518" from-port="2" to-layer="520" to-port="0" />
12624 <edge from-layer="518" from-port="2" to-layer="528" to-port="1" />
12625 <edge from-layer="519" from-port="0" to-layer="520" to-port="1" />
12626 <edge from-layer="520" from-port="2" to-layer="522" to-port="0" />
12627 <edge from-layer="521" from-port="0" to-layer="522" to-port="1" />
12628 <edge from-layer="522" from-port="2" to-layer="523" to-port="0" />
12629 <edge from-layer="523" from-port="1" to-layer="525" to-port="0" />
12630 <edge from-layer="524" from-port="0" to-layer="525" to-port="1" />
12631 <edge from-layer="525" from-port="2" to-layer="527" to-port="0" />
12632 <edge from-layer="526" from-port="0" to-layer="527" to-port="1" />
12633 <edge from-layer="527" from-port="2" to-layer="528" to-port="0" />
12634 <edge from-layer="528" from-port="2" to-layer="530" to-port="0" />
12635 <edge from-layer="529" from-port="0" to-layer="530" to-port="1" />
12636 <edge from-layer="530" from-port="2" to-layer="532" to-port="0" />
12637 <edge from-layer="531" from-port="0" to-layer="532" to-port="1" />
12638 <edge from-layer="532" from-port="2" to-layer="534" to-port="0" />
12639 <edge from-layer="533" from-port="0" to-layer="534" to-port="1" />
12640 <edge from-layer="534" from-port="2" to-layer="536" to-port="0" />
12641 <edge from-layer="534" from-port="2" to-layer="572" to-port="1" />
12642 <edge from-layer="534" from-port="2" to-layer="544" to-port="0" />
12643 <edge from-layer="534" from-port="2" to-layer="562" to-port="0" />
12644 <edge from-layer="534" from-port="2" to-layer="552" to-port="0" />
12645 <edge from-layer="535" from-port="0" to-layer="536" to-port="1" />
12646 <edge from-layer="536" from-port="2" to-layer="538" to-port="0" />
12647 <edge from-layer="537" from-port="0" to-layer="538" to-port="1" />
12648 <edge from-layer="538" from-port="2" to-layer="540" to-port="0" />
12649 <edge from-layer="539" from-port="0" to-layer="540" to-port="1" />
12650 <edge from-layer="540" from-port="2" to-layer="542" to-port="0" />
12651 <edge from-layer="541" from-port="0" to-layer="542" to-port="1" />
12652 <edge from-layer="542" from-port="2" to-layer="559" to-port="0" />
12653 <edge from-layer="543" from-port="0" to-layer="544" to-port="1" />
12654 <edge from-layer="544" from-port="2" to-layer="546" to-port="0" />
12655 <edge from-layer="545" from-port="0" to-layer="546" to-port="1" />
12656 <edge from-layer="546" from-port="2" to-layer="548" to-port="0" />
12657 <edge from-layer="547" from-port="0" to-layer="548" to-port="1" />
12658 <edge from-layer="548" from-port="2" to-layer="550" to-port="0" />
12659 <edge from-layer="549" from-port="0" to-layer="550" to-port="1" />
12660 <edge from-layer="550" from-port="2" to-layer="559" to-port="1" />
12661 <edge from-layer="551" from-port="0" to-layer="552" to-port="1" />
12662 <edge from-layer="552" from-port="2" to-layer="554" to-port="0" />
12663 <edge from-layer="553" from-port="0" to-layer="554" to-port="1" />
12664 <edge from-layer="554" from-port="2" to-layer="556" to-port="0" />
12665 <edge from-layer="555" from-port="0" to-layer="556" to-port="1" />
12666 <edge from-layer="556" from-port="2" to-layer="558" to-port="0" />
12667 <edge from-layer="557" from-port="0" to-layer="558" to-port="1" />
12668 <edge from-layer="558" from-port="2" to-layer="559" to-port="2" />
12669 <edge from-layer="559" from-port="4" to-layer="561" to-port="0" />
12670 <edge from-layer="560" from-port="0" to-layer="561" to-port="1" />
12671 <edge from-layer="561" from-port="2" to-layer="567" to-port="0" />
12672 <edge from-layer="562" from-port="1" to-layer="565" to-port="0" />
12673 <edge from-layer="563" from-port="0" to-layer="565" to-port="1" />
12674 <edge from-layer="564" from-port="0" to-layer="565" to-port="2" />
12675 <edge from-layer="565" from-port="3" to-layer="566" to-port="0" />
12676 <edge from-layer="566" from-port="2" to-layer="567" to-port="1" />
12677 <edge from-layer="567" from-port="2" to-layer="569" to-port="0" />
12678 <edge from-layer="568" from-port="0" to-layer="569" to-port="1" />
12679 <edge from-layer="569" from-port="2" to-layer="571" to-port="0" />
12680 <edge from-layer="570" from-port="0" to-layer="571" to-port="1" />
12681 <edge from-layer="571" from-port="2" to-layer="572" to-port="0" />
12682 <edge from-layer="572" from-port="2" to-layer="574" to-port="0" />
12683 <edge from-layer="573" from-port="0" to-layer="574" to-port="1" />
12684 <edge from-layer="574" from-port="2" to-layer="576" to-port="0" />
12685 <edge from-layer="575" from-port="0" to-layer="576" to-port="1" />
12686 <edge from-layer="576" from-port="2" to-layer="578" to-port="0" />
12687 <edge from-layer="577" from-port="0" to-layer="578" to-port="1" />
12688 <edge from-layer="578" from-port="2" to-layer="580" to-port="0" />
12689 <edge from-layer="578" from-port="2" to-layer="588" to-port="1" />
12690 <edge from-layer="579" from-port="0" to-layer="580" to-port="1" />
12691 <edge from-layer="580" from-port="2" to-layer="582" to-port="0" />
12692 <edge from-layer="581" from-port="0" to-layer="582" to-port="1" />
12693 <edge from-layer="582" from-port="2" to-layer="583" to-port="0" />
12694 <edge from-layer="583" from-port="1" to-layer="585" to-port="0" />
12695 <edge from-layer="584" from-port="0" to-layer="585" to-port="1" />
12696 <edge from-layer="585" from-port="2" to-layer="587" to-port="0" />
12697 <edge from-layer="586" from-port="0" to-layer="587" to-port="1" />
12698 <edge from-layer="587" from-port="2" to-layer="588" to-port="0" />
12699 <edge from-layer="588" from-port="2" to-layer="590" to-port="0" />
12700 <edge from-layer="589" from-port="0" to-layer="590" to-port="1" />
12701 <edge from-layer="590" from-port="2" to-layer="592" to-port="0" />
12702 <edge from-layer="591" from-port="0" to-layer="592" to-port="1" />
12703 <edge from-layer="592" from-port="2" to-layer="594" to-port="0" />
12704 <edge from-layer="593" from-port="0" to-layer="594" to-port="1" />
12705 <edge from-layer="594" from-port="2" to-layer="596" to-port="0" />
12706 <edge from-layer="594" from-port="2" to-layer="632" to-port="1" />
12707 <edge from-layer="594" from-port="2" to-layer="604" to-port="0" />
12708 <edge from-layer="594" from-port="2" to-layer="622" to-port="0" />
12709 <edge from-layer="594" from-port="2" to-layer="612" to-port="0" />
12710 <edge from-layer="595" from-port="0" to-layer="596" to-port="1" />
12711 <edge from-layer="596" from-port="2" to-layer="598" to-port="0" />
12712 <edge from-layer="597" from-port="0" to-layer="598" to-port="1" />
12713 <edge from-layer="598" from-port="2" to-layer="600" to-port="0" />
12714 <edge from-layer="599" from-port="0" to-layer="600" to-port="1" />
12715 <edge from-layer="600" from-port="2" to-layer="602" to-port="0" />
12716 <edge from-layer="601" from-port="0" to-layer="602" to-port="1" />
12717 <edge from-layer="602" from-port="2" to-layer="619" to-port="0" />
12718 <edge from-layer="603" from-port="0" to-layer="604" to-port="1" />
12719 <edge from-layer="604" from-port="2" to-layer="606" to-port="0" />
12720 <edge from-layer="605" from-port="0" to-layer="606" to-port="1" />
12721 <edge from-layer="606" from-port="2" to-layer="608" to-port="0" />
12722 <edge from-layer="607" from-port="0" to-layer="608" to-port="1" />
12723 <edge from-layer="608" from-port="2" to-layer="610" to-port="0" />
12724 <edge from-layer="609" from-port="0" to-layer="610" to-port="1" />
12725 <edge from-layer="610" from-port="2" to-layer="619" to-port="1" />
12726 <edge from-layer="611" from-port="0" to-layer="612" to-port="1" />
12727 <edge from-layer="612" from-port="2" to-layer="614" to-port="0" />
12728 <edge from-layer="613" from-port="0" to-layer="614" to-port="1" />
12729 <edge from-layer="614" from-port="2" to-layer="616" to-port="0" />
12730 <edge from-layer="615" from-port="0" to-layer="616" to-port="1" />
12731 <edge from-layer="616" from-port="2" to-layer="618" to-port="0" />
12732 <edge from-layer="617" from-port="0" to-layer="618" to-port="1" />
12733 <edge from-layer="618" from-port="2" to-layer="619" to-port="2" />
12734 <edge from-layer="619" from-port="4" to-layer="621" to-port="0" />
12735 <edge from-layer="620" from-port="0" to-layer="621" to-port="1" />
12736 <edge from-layer="621" from-port="2" to-layer="627" to-port="0" />
12737 <edge from-layer="622" from-port="1" to-layer="625" to-port="0" />
12738 <edge from-layer="623" from-port="0" to-layer="625" to-port="1" />
12739 <edge from-layer="624" from-port="0" to-layer="625" to-port="2" />
12740 <edge from-layer="625" from-port="3" to-layer="626" to-port="0" />
12741 <edge from-layer="626" from-port="2" to-layer="627" to-port="1" />
12742 <edge from-layer="627" from-port="2" to-layer="629" to-port="0" />
12743 <edge from-layer="628" from-port="0" to-layer="629" to-port="1" />
12744 <edge from-layer="629" from-port="2" to-layer="631" to-port="0" />
12745 <edge from-layer="630" from-port="0" to-layer="631" to-port="1" />
12746 <edge from-layer="631" from-port="2" to-layer="632" to-port="0" />
12747 <edge from-layer="632" from-port="2" to-layer="634" to-port="0" />
12748 <edge from-layer="633" from-port="0" to-layer="634" to-port="1" />
12749 <edge from-layer="634" from-port="2" to-layer="636" to-port="0" />
12750 <edge from-layer="635" from-port="0" to-layer="636" to-port="1" />
12751 <edge from-layer="636" from-port="2" to-layer="638" to-port="0" />
12752 <edge from-layer="637" from-port="0" to-layer="638" to-port="1" />
12753 <edge from-layer="638" from-port="2" to-layer="640" to-port="0" />
12754 <edge from-layer="638" from-port="2" to-layer="648" to-port="1" />
12755 <edge from-layer="639" from-port="0" to-layer="640" to-port="1" />
12756 <edge from-layer="640" from-port="2" to-layer="642" to-port="0" />
12757 <edge from-layer="641" from-port="0" to-layer="642" to-port="1" />
12758 <edge from-layer="642" from-port="2" to-layer="643" to-port="0" />
12759 <edge from-layer="643" from-port="1" to-layer="645" to-port="0" />
12760 <edge from-layer="644" from-port="0" to-layer="645" to-port="1" />
12761 <edge from-layer="645" from-port="2" to-layer="647" to-port="0" />
12762 <edge from-layer="646" from-port="0" to-layer="647" to-port="1" />
12763 <edge from-layer="647" from-port="2" to-layer="648" to-port="0" />
12764 <edge from-layer="648" from-port="2" to-layer="650" to-port="0" />
12765 <edge from-layer="649" from-port="0" to-layer="650" to-port="1" />
12766 <edge from-layer="650" from-port="2" to-layer="652" to-port="0" />
12767 <edge from-layer="651" from-port="0" to-layer="652" to-port="1" />
12768 <edge from-layer="652" from-port="2" to-layer="654" to-port="0" />
12769 <edge from-layer="653" from-port="0" to-layer="654" to-port="1" />
12770 <edge from-layer="654" from-port="2" to-layer="664" to-port="0" />
12771 <edge from-layer="654" from-port="2" to-layer="672" to-port="0" />
12772 <edge from-layer="654" from-port="2" to-layer="682" to-port="0" />
12773 <edge from-layer="654" from-port="2" to-layer="692" to-port="1" />
12774 <edge from-layer="654" from-port="2" to-layer="656" to-port="0" />
12775 <edge from-layer="655" from-port="0" to-layer="656" to-port="1" />
12776 <edge from-layer="656" from-port="2" to-layer="658" to-port="0" />
12777 <edge from-layer="657" from-port="0" to-layer="658" to-port="1" />
12778 <edge from-layer="658" from-port="2" to-layer="660" to-port="0" />
12779 <edge from-layer="659" from-port="0" to-layer="660" to-port="1" />
12780 <edge from-layer="660" from-port="2" to-layer="662" to-port="0" />
12781 <edge from-layer="661" from-port="0" to-layer="662" to-port="1" />
12782 <edge from-layer="662" from-port="2" to-layer="679" to-port="0" />
12783 <edge from-layer="663" from-port="0" to-layer="664" to-port="1" />
12784 <edge from-layer="664" from-port="2" to-layer="666" to-port="0" />
12785 <edge from-layer="665" from-port="0" to-layer="666" to-port="1" />
12786 <edge from-layer="666" from-port="2" to-layer="668" to-port="0" />
12787 <edge from-layer="667" from-port="0" to-layer="668" to-port="1" />
12788 <edge from-layer="668" from-port="2" to-layer="670" to-port="0" />
12789 <edge from-layer="669" from-port="0" to-layer="670" to-port="1" />
12790 <edge from-layer="670" from-port="2" to-layer="679" to-port="1" />
12791 <edge from-layer="671" from-port="0" to-layer="672" to-port="1" />
12792 <edge from-layer="672" from-port="2" to-layer="674" to-port="0" />
12793 <edge from-layer="673" from-port="0" to-layer="674" to-port="1" />
12794 <edge from-layer="674" from-port="2" to-layer="676" to-port="0" />
12795 <edge from-layer="675" from-port="0" to-layer="676" to-port="1" />
12796 <edge from-layer="676" from-port="2" to-layer="678" to-port="0" />
12797 <edge from-layer="677" from-port="0" to-layer="678" to-port="1" />
12798 <edge from-layer="678" from-port="2" to-layer="679" to-port="2" />
12799 <edge from-layer="679" from-port="4" to-layer="681" to-port="0" />
12800 <edge from-layer="680" from-port="0" to-layer="681" to-port="1" />
12801 <edge from-layer="681" from-port="2" to-layer="687" to-port="0" />
12802 <edge from-layer="682" from-port="1" to-layer="685" to-port="0" />
12803 <edge from-layer="683" from-port="0" to-layer="685" to-port="1" />
12804 <edge from-layer="684" from-port="0" to-layer="685" to-port="2" />
12805 <edge from-layer="685" from-port="3" to-layer="686" to-port="0" />
12806 <edge from-layer="686" from-port="2" to-layer="687" to-port="1" />
12807 <edge from-layer="687" from-port="2" to-layer="689" to-port="0" />
12808 <edge from-layer="688" from-port="0" to-layer="689" to-port="1" />
12809 <edge from-layer="689" from-port="2" to-layer="691" to-port="0" />
12810 <edge from-layer="690" from-port="0" to-layer="691" to-port="1" />
12811 <edge from-layer="691" from-port="2" to-layer="692" to-port="0" />
12812 <edge from-layer="692" from-port="2" to-layer="694" to-port="0" />
12813 <edge from-layer="693" from-port="0" to-layer="694" to-port="1" />
12814 <edge from-layer="694" from-port="2" to-layer="696" to-port="0" />
12815 <edge from-layer="695" from-port="0" to-layer="696" to-port="1" />
12816 <edge from-layer="696" from-port="2" to-layer="698" to-port="0" />
12817 <edge from-layer="697" from-port="0" to-layer="698" to-port="1" />
12818 <edge from-layer="698" from-port="2" to-layer="700" to-port="0" />
12819 <edge from-layer="698" from-port="2" to-layer="708" to-port="1" />
12820 <edge from-layer="699" from-port="0" to-layer="700" to-port="1" />
12821 <edge from-layer="700" from-port="2" to-layer="702" to-port="0" />
12822 <edge from-layer="701" from-port="0" to-layer="702" to-port="1" />
12823 <edge from-layer="702" from-port="2" to-layer="703" to-port="0" />
12824 <edge from-layer="703" from-port="1" to-layer="705" to-port="0" />
12825 <edge from-layer="704" from-port="0" to-layer="705" to-port="1" />
12826 <edge from-layer="705" from-port="2" to-layer="707" to-port="0" />
12827 <edge from-layer="706" from-port="0" to-layer="707" to-port="1" />
12828 <edge from-layer="707" from-port="2" to-layer="708" to-port="0" />
12829 <edge from-layer="708" from-port="2" to-layer="710" to-port="0" />
12830 <edge from-layer="709" from-port="0" to-layer="710" to-port="1" />
12831 <edge from-layer="710" from-port="2" to-layer="712" to-port="0" />
12832 <edge from-layer="711" from-port="0" to-layer="712" to-port="1" />
12833 <edge from-layer="712" from-port="2" to-layer="714" to-port="0" />
12834 <edge from-layer="713" from-port="0" to-layer="714" to-port="1" />
12835 <edge from-layer="714" from-port="2" to-layer="716" to-port="0" />
12836 <edge from-layer="714" from-port="2" to-layer="742" to-port="0" />
12837 <edge from-layer="714" from-port="2" to-layer="732" to-port="0" />
12838 <edge from-layer="714" from-port="2" to-layer="724" to-port="0" />
12839 <edge from-layer="714" from-port="2" to-layer="752" to-port="1" />
12840 <edge from-layer="715" from-port="0" to-layer="716" to-port="1" />
12841 <edge from-layer="716" from-port="2" to-layer="718" to-port="0" />
12842 <edge from-layer="717" from-port="0" to-layer="718" to-port="1" />
12843 <edge from-layer="718" from-port="2" to-layer="720" to-port="0" />
12844 <edge from-layer="719" from-port="0" to-layer="720" to-port="1" />
12845 <edge from-layer="720" from-port="2" to-layer="722" to-port="0" />
12846 <edge from-layer="721" from-port="0" to-layer="722" to-port="1" />
12847 <edge from-layer="722" from-port="2" to-layer="739" to-port="0" />
12848 <edge from-layer="723" from-port="0" to-layer="724" to-port="1" />
12849 <edge from-layer="724" from-port="2" to-layer="726" to-port="0" />
12850 <edge from-layer="725" from-port="0" to-layer="726" to-port="1" />
12851 <edge from-layer="726" from-port="2" to-layer="728" to-port="0" />
12852 <edge from-layer="727" from-port="0" to-layer="728" to-port="1" />
12853 <edge from-layer="728" from-port="2" to-layer="730" to-port="0" />
12854 <edge from-layer="729" from-port="0" to-layer="730" to-port="1" />
12855 <edge from-layer="730" from-port="2" to-layer="739" to-port="1" />
12856 <edge from-layer="731" from-port="0" to-layer="732" to-port="1" />
12857 <edge from-layer="732" from-port="2" to-layer="734" to-port="0" />
12858 <edge from-layer="733" from-port="0" to-layer="734" to-port="1" />
12859 <edge from-layer="734" from-port="2" to-layer="736" to-port="0" />
12860 <edge from-layer="735" from-port="0" to-layer="736" to-port="1" />
12861 <edge from-layer="736" from-port="2" to-layer="738" to-port="0" />
12862 <edge from-layer="737" from-port="0" to-layer="738" to-port="1" />
12863 <edge from-layer="738" from-port="2" to-layer="739" to-port="2" />
12864 <edge from-layer="739" from-port="4" to-layer="741" to-port="0" />
12865 <edge from-layer="740" from-port="0" to-layer="741" to-port="1" />
12866 <edge from-layer="741" from-port="2" to-layer="747" to-port="0" />
12867 <edge from-layer="742" from-port="1" to-layer="745" to-port="0" />
12868 <edge from-layer="743" from-port="0" to-layer="745" to-port="1" />
12869 <edge from-layer="744" from-port="0" to-layer="745" to-port="2" />
12870 <edge from-layer="745" from-port="3" to-layer="746" to-port="0" />
12871 <edge from-layer="746" from-port="2" to-layer="747" to-port="1" />
12872 <edge from-layer="747" from-port="2" to-layer="749" to-port="0" />
12873 <edge from-layer="748" from-port="0" to-layer="749" to-port="1" />
12874 <edge from-layer="749" from-port="2" to-layer="751" to-port="0" />
12875 <edge from-layer="750" from-port="0" to-layer="751" to-port="1" />
12876 <edge from-layer="751" from-port="2" to-layer="752" to-port="0" />
12877 <edge from-layer="752" from-port="2" to-layer="754" to-port="0" />
12878 <edge from-layer="753" from-port="0" to-layer="754" to-port="1" />
12879 <edge from-layer="754" from-port="2" to-layer="756" to-port="0" />
12880 <edge from-layer="755" from-port="0" to-layer="756" to-port="1" />
12881 <edge from-layer="756" from-port="2" to-layer="758" to-port="0" />
12882 <edge from-layer="757" from-port="0" to-layer="758" to-port="1" />
12883 <edge from-layer="758" from-port="2" to-layer="760" to-port="0" />
12884 <edge from-layer="758" from-port="2" to-layer="768" to-port="1" />
12885 <edge from-layer="759" from-port="0" to-layer="760" to-port="1" />
12886 <edge from-layer="760" from-port="2" to-layer="762" to-port="0" />
12887 <edge from-layer="761" from-port="0" to-layer="762" to-port="1" />
12888 <edge from-layer="762" from-port="2" to-layer="763" to-port="0" />
12889 <edge from-layer="763" from-port="1" to-layer="765" to-port="0" />
12890 <edge from-layer="764" from-port="0" to-layer="765" to-port="1" />
12891 <edge from-layer="765" from-port="2" to-layer="767" to-port="0" />
12892 <edge from-layer="766" from-port="0" to-layer="767" to-port="1" />
12893 <edge from-layer="767" from-port="2" to-layer="768" to-port="0" />
12894 <edge from-layer="768" from-port="2" to-layer="770" to-port="0" />
12895 <edge from-layer="769" from-port="0" to-layer="770" to-port="1" />
12896 <edge from-layer="770" from-port="2" to-layer="772" to-port="0" />
12897 <edge from-layer="771" from-port="0" to-layer="772" to-port="1" />
12898 <edge from-layer="772" from-port="2" to-layer="774" to-port="0" />
12899 <edge from-layer="773" from-port="0" to-layer="774" to-port="1" />
12900 <edge from-layer="774" from-port="2" to-layer="775" to-port="0" />
12901 </edges>
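	<!-- Descriptive comment, not emitted by the OpenVINO serializer: each <edge> above wires
	     one layer's output port to another layer's input port (from-layer/from-port into
	     to-layer/to-port), so the <edges> list is the dataflow graph over the <layers>
	     declared earlier, and the <rt_info> block below records the toolchain that produced
	     this IR. A quick way to inspect the graph outside of OpenVINO, sketched with the
	     Python stdlib only (the file name is this file's own):

	         import xml.etree.ElementTree as ET

	         root = ET.parse("openvino_model.xml").getroot()   # the <net> element
	         edges = [(e.get("from-layer"), e.get("from-port"),
	                   e.get("to-layer"), e.get("to-port"))
	                  for e in root.find("edges")]              # every <edge> child
	         print(len(edges), "edges; first:", edges[0])
	-->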
	<rt_info>
		<Runtime_version value="2024.4.1-16618-643f23d1318-releases/2024/4" />
		<conversion_parameters>
			<framework value="pytorch" />
			<is_python_object value="True" />
		</conversion_parameters>
		<optimum>
			<optimum_intel_version value="1.20.0.dev0+b31524c" />
			<optimum_version value="1.23.0" />
			<pytorch_version value="2.5.0.dev20240807+cu121" />
			<transformers_version value="4.43.4" />
		</optimum>
	</rt_info>
</net>
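<!-- A minimal usage sketch, not part of the generated IR: loading and running this model with
     the OpenVINO Python runtime. Assumptions: a recent `openvino` package (the <rt_info> above
     records runtime 2024.4), weights in the sibling openvino_model.bin that read_model resolves
     by file-name convention, and illustrative batch/sequence sizes (both dims are dynamic):

         import numpy as np
         import openvino as ov

         core = ov.Core()
         model = core.read_model("openvino_model.xml")
         compiled = core.compile_model(model, "CPU")

         batch, seq_len = 1, 8
         result = compiled({
             "input_ids": np.ones((batch, seq_len), dtype=np.int64),
             "attention_mask": np.ones((batch, seq_len), dtype=np.int64),
             "token_type_ids": np.zeros((batch, seq_len), dtype=np.int64),
         })
         print(next(iter(result.values())).shape)   # hidden states; last dim is 384
-->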