openvino/openvino_model.xml
1 <?xml version="1.0"?>
2 <net name="Model6796" version="11">
3 <layers>
4 <layer id="2" name="input_ids" type="Parameter" version="opset1">
5 <data shape="?,?" element_type="i64" />
6 <output>
7 <port id="0" precision="I64" names="input_ids">
8 <dim>-1</dim>
9 <dim>-1</dim>
10 </port>
11 </output>
12 </layer>
13 <layer id="1" name="attention_mask" type="Parameter" version="opset1">
14 <data shape="?,?" element_type="i64" />
15 <output>
16 <port id="0" precision="I64" names="attention_mask">
17 <dim>-1</dim>
18 <dim>-1</dim>
19 </port>
20 </output>
21 </layer>
22 <layer id="0" name="token_type_ids" type="Parameter" version="opset1">
23 <data shape="?,?" element_type="i64" />
24 <output>
25 <port id="0" precision="I64" names="token_type_ids">
26 <dim>-1</dim>
27 <dim>-1</dim>
28 </port>
29 </output>
30 </layer>
31 <layer id="3" name="self.embeddings.word_embeddings.weight" type="Const" version="opset1">
32 <data element_type="f32" shape="250037, 384" offset="0" size="384056832" />
33 <output>
34 <port id="0" precision="FP32" names="self.embeddings.word_embeddings.weight">
35 <dim>250037</dim>
36 <dim>384</dim>
37 </port>
38 </output>
39 </layer>
40 <layer id="4" name="__module.embeddings.word_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
41 <data destination_type="i32" />
42 <input>
43 <port id="0" precision="I64">
44 <dim>-1</dim>
45 <dim>-1</dim>
46 </port>
47 </input>
48 <output>
49 <port id="1" precision="I32">
50 <dim>-1</dim>
51 <dim>-1</dim>
52 </port>
53 </output>
54 </layer>
55 <layer id="5" name="__module.embeddings.word_embeddings/aten::embedding/Constant" type="Const" version="opset1">
56 <data element_type="i32" shape="" offset="384056832" size="4" />
57 <output>
58 <port id="0" precision="I32" />
59 </output>
60 </layer>
61 <layer id="6" name="__module.embeddings.word_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
62 <data batch_dims="0" />
63 <input>
64 <port id="0" precision="FP32">
65 <dim>250037</dim>
66 <dim>384</dim>
67 </port>
68 <port id="1" precision="I32">
69 <dim>-1</dim>
70 <dim>-1</dim>
71 </port>
72 <port id="2" precision="I32" />
73 </input>
74 <output>
75 <port id="3" precision="FP32" names="79,inputs_embeds">
76 <dim>-1</dim>
77 <dim>-1</dim>
78 <dim>384</dim>
79 </port>
80 </output>
81 </layer>
82 <layer id="7" name="self.embeddings.token_type_embeddings.weight" type="Const" version="opset1">
83 <data element_type="f32" shape="2, 384" offset="384056836" size="3072" />
84 <output>
85 <port id="0" precision="FP32" names="self.embeddings.token_type_embeddings.weight">
86 <dim>2</dim>
87 <dim>384</dim>
88 </port>
89 </output>
90 </layer>
91 <layer id="8" name="__module.embeddings.token_type_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
92 <data destination_type="i32" />
93 <input>
94 <port id="0" precision="I64">
95 <dim>-1</dim>
96 <dim>-1</dim>
97 </port>
98 </input>
99 <output>
100 <port id="1" precision="I32">
101 <dim>-1</dim>
102 <dim>-1</dim>
103 </port>
104 </output>
105 </layer>
106 <layer id="9" name="__module.embeddings.token_type_embeddings/aten::embedding/Constant" type="Const" version="opset1">
107 <data element_type="i32" shape="" offset="384056832" size="4" />
108 <output>
109 <port id="0" precision="I32" />
110 </output>
111 </layer>
112 <layer id="10" name="__module.embeddings.token_type_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
113 <data batch_dims="0" />
114 <input>
115 <port id="0" precision="FP32">
116 <dim>2</dim>
117 <dim>384</dim>
118 </port>
119 <port id="1" precision="I32">
120 <dim>-1</dim>
121 <dim>-1</dim>
122 </port>
123 <port id="2" precision="I32" />
124 </input>
125 <output>
126 <port id="3" precision="FP32" names="81,token_type_embeddings.1">
127 <dim>-1</dim>
128 <dim>-1</dim>
129 <dim>384</dim>
130 </port>
131 </output>
132 </layer>
133 <layer id="11" name="__module.embeddings/aten::add/Add" type="Add" version="opset1">
134 <data auto_broadcast="numpy" />
135 <input>
136 <port id="0" precision="FP32">
137 <dim>-1</dim>
138 <dim>-1</dim>
139 <dim>384</dim>
140 </port>
141 <port id="1" precision="FP32">
142 <dim>-1</dim>
143 <dim>-1</dim>
144 <dim>384</dim>
145 </port>
146 </input>
147 <output>
148 <port id="2" precision="FP32" names="82_1">
149 <dim>-1</dim>
150 <dim>-1</dim>
151 <dim>384</dim>
152 </port>
153 </output>
154 </layer>
155 <layer id="12" name="self.embeddings.position_embeddings.weight" type="Const" version="opset1">
156 <data element_type="f32" shape="512, 384" offset="384059908" size="786432" />
157 <output>
158 <port id="0" precision="FP32" names="self.embeddings.position_embeddings.weight">
159 <dim>512</dim>
160 <dim>384</dim>
161 </port>
162 </output>
163 </layer>
164 <layer id="13" name="__module.embeddings/aten::slice/Slice" type="Const" version="opset1">
165 <data element_type="i64" shape="1, 512" offset="384846340" size="4096" />
166 <output>
167 <port id="0" precision="I64" names="76">
168 <dim>1</dim>
169 <dim>512</dim>
170 </port>
171 </output>
172 </layer>
173 <layer id="14" name="__module.embeddings/aten::slice/Reshape" type="Const" version="opset1">
174 <data element_type="i64" shape="1" offset="384850436" size="8" />
175 <output>
176 <port id="0" precision="I64">
177 <dim>1</dim>
178 </port>
179 </output>
180 </layer>
181 <layer id="15" name="ShapeOf_6107595" type="ShapeOf" version="opset3">
182 <data output_type="i64" />
183 <input>
184 <port id="0" precision="I64">
185 <dim>-1</dim>
186 <dim>-1</dim>
187 </port>
188 </input>
189 <output>
190 <port id="1" precision="I64">
191 <dim>2</dim>
192 </port>
193 </output>
194 </layer>
195 <layer id="16" name="Constant_6107723" type="Const" version="opset1">
196 <data element_type="i64" shape="1" offset="384850444" size="8" />
197 <output>
198 <port id="0" precision="I64">
199 <dim>1</dim>
200 </port>
201 </output>
202 </layer>
203 <layer id="17" name="Constant_6107597" type="Const" version="opset1">
204 <data element_type="i64" shape="" offset="384850436" size="8" />
205 <output>
206 <port id="0" precision="I64" />
207 </output>
208 </layer>
209 <layer id="18" name="Gather_6107598" type="Gather" version="opset8">
210 <data batch_dims="0" />
211 <input>
212 <port id="0" precision="I64">
213 <dim>2</dim>
214 </port>
215 <port id="1" precision="I64">
216 <dim>1</dim>
217 </port>
218 <port id="2" precision="I64" />
219 </input>
220 <output>
221 <port id="3" precision="I64" names="10,17,19,72,74,75,8">
222 <dim>1</dim>
223 </port>
224 </output>
225 </layer>
226 <layer id="19" name="__module.embeddings/aten::slice/Reshape_2" type="Const" version="opset1">
227 <data element_type="i64" shape="1" offset="384850444" size="8" />
228 <output>
229 <port id="0" precision="I64">
230 <dim>1</dim>
231 </port>
232 </output>
233 </layer>
234 <layer id="20" name="__module.embeddings/aten::slice/Reshape_3" type="Const" version="opset1">
235 <data element_type="i64" shape="1" offset="384850444" size="8" />
236 <output>
237 <port id="0" precision="I64">
238 <dim>1</dim>
239 </port>
240 </output>
241 </layer>
242 <layer id="21" name="__module.embeddings/aten::slice/Slice_1" type="Slice" version="opset8">
243 <input>
244 <port id="0" precision="I64">
245 <dim>1</dim>
246 <dim>512</dim>
247 </port>
248 <port id="1" precision="I64">
249 <dim>1</dim>
250 </port>
251 <port id="2" precision="I64">
252 <dim>1</dim>
253 </port>
254 <port id="3" precision="I64">
255 <dim>1</dim>
256 </port>
257 <port id="4" precision="I64">
258 <dim>1</dim>
259 </port>
260 </input>
261 <output>
262 <port id="5" precision="I64" names="77">
263 <dim>1</dim>
264 <dim>-1</dim>
265 </port>
266 </output>
267 </layer>
268 <layer id="22" name="__module.embeddings.position_embeddings/aten::embedding/Convert" type="Convert" version="opset1">
269 <data destination_type="i32" />
270 <input>
271 <port id="0" precision="I64">
272 <dim>1</dim>
273 <dim>-1</dim>
274 </port>
275 </input>
276 <output>
277 <port id="1" precision="I32">
278 <dim>1</dim>
279 <dim>-1</dim>
280 </port>
281 </output>
282 </layer>
283 <layer id="23" name="__module.embeddings.position_embeddings/aten::embedding/Constant" type="Const" version="opset1">
284 <data element_type="i32" shape="" offset="384056832" size="4" />
285 <output>
286 <port id="0" precision="I32" />
287 </output>
288 </layer>
289 <layer id="24" name="__module.embeddings.position_embeddings/aten::embedding/Gather" type="Gather" version="opset8">
290 <data batch_dims="0" />
291 <input>
292 <port id="0" precision="FP32">
293 <dim>512</dim>
294 <dim>384</dim>
295 </port>
296 <port id="1" precision="I32">
297 <dim>1</dim>
298 <dim>-1</dim>
299 </port>
300 <port id="2" precision="I32" />
301 </input>
302 <output>
303 <port id="3" precision="FP32" names="84,position_embeddings.1">
304 <dim>1</dim>
305 <dim>-1</dim>
306 <dim>384</dim>
307 </port>
308 </output>
309 </layer>
310 <layer id="25" name="__module.embeddings/aten::add_/Add" type="Add" version="opset1">
311 <data auto_broadcast="numpy" />
312 <input>
313 <port id="0" precision="FP32">
314 <dim>-1</dim>
315 <dim>-1</dim>
316 <dim>384</dim>
317 </port>
318 <port id="1" precision="FP32">
319 <dim>1</dim>
320 <dim>-1</dim>
321 <dim>384</dim>
322 </port>
323 </input>
324 <output>
325 <port id="2" precision="FP32" names="82,embeddings.1">
326 <dim>-1</dim>
327 <dim>-1</dim>
328 <dim>384</dim>
329 </port>
330 </output>
331 </layer>
332 <layer id="26" name="__module.embeddings.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
333 <data element_type="i32" shape="1" offset="384850452" size="4" />
334 <output>
335 <port id="0" precision="I32">
336 <dim>1</dim>
337 </port>
338 </output>
339 </layer>
340 <layer id="27" name="__module.embeddings.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
341 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
342 <input>
343 <port id="0" precision="FP32">
344 <dim>-1</dim>
345 <dim>-1</dim>
346 <dim>384</dim>
347 </port>
348 <port id="1" precision="I32">
349 <dim>1</dim>
350 </port>
351 </input>
352 <output>
353 <port id="2" precision="FP32">
354 <dim>-1</dim>
355 <dim>-1</dim>
356 <dim>384</dim>
357 </port>
358 </output>
359 </layer>
360 <layer id="28" name="Constant_6107368" type="Const" version="opset1">
361 <data element_type="f32" shape="1, 1, 384" offset="384850456" size="1536" />
362 <output>
363 <port id="0" precision="FP32">
364 <dim>1</dim>
365 <dim>1</dim>
366 <dim>384</dim>
367 </port>
368 </output>
369 </layer>
370 <layer id="29" name="__module.embeddings.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
371 <data auto_broadcast="numpy" />
372 <input>
373 <port id="0" precision="FP32">
374 <dim>-1</dim>
375 <dim>-1</dim>
376 <dim>384</dim>
377 </port>
378 <port id="1" precision="FP32">
379 <dim>1</dim>
380 <dim>1</dim>
381 <dim>384</dim>
382 </port>
383 </input>
384 <output>
385 <port id="2" precision="FP32">
386 <dim>-1</dim>
387 <dim>-1</dim>
388 <dim>384</dim>
389 </port>
390 </output>
391 </layer>
392 <layer id="30" name="Constant_6107369" type="Const" version="opset1">
393 <data element_type="f32" shape="1, 1, 384" offset="384851992" size="1536" />
394 <output>
395 <port id="0" precision="FP32">
396 <dim>1</dim>
397 <dim>1</dim>
398 <dim>384</dim>
399 </port>
400 </output>
401 </layer>
402 <layer id="31" name="__module.embeddings.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
403 <data auto_broadcast="numpy" />
404 <input>
405 <port id="0" precision="FP32">
406 <dim>-1</dim>
407 <dim>-1</dim>
408 <dim>384</dim>
409 </port>
410 <port id="1" precision="FP32">
411 <dim>1</dim>
412 <dim>1</dim>
413 <dim>384</dim>
414 </port>
415 </input>
416 <output>
417 <port id="2" precision="FP32" names="89,input.1">
418 <dim>-1</dim>
419 <dim>-1</dim>
420 <dim>384</dim>
421 </port>
422 </output>
423 </layer>
424 <layer id="32" name="self.encoder.layer.0.attention.self.query.weight" type="Const" version="opset1">
425 <data element_type="f32" shape="384, 384" offset="384853528" size="589824" />
426 <output>
427 <port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.query.weight">
428 <dim>384</dim>
429 <dim>384</dim>
430 </port>
431 </output>
432 </layer>
433 <layer id="33" name="__module.encoder.layer.0.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
434 <data transpose_a="false" transpose_b="true" />
435 <input>
436 <port id="0" precision="FP32">
437 <dim>-1</dim>
438 <dim>-1</dim>
439 <dim>384</dim>
440 </port>
441 <port id="1" precision="FP32">
442 <dim>384</dim>
443 <dim>384</dim>
444 </port>
445 </input>
446 <output>
447 <port id="2" precision="FP32">
448 <dim>-1</dim>
449 <dim>-1</dim>
450 <dim>384</dim>
451 </port>
452 </output>
453 </layer>
454 <layer id="34" name="Constant_6107370" type="Const" version="opset1">
455 <data element_type="f32" shape="1, 1, 384" offset="385443352" size="1536" />
456 <output>
457 <port id="0" precision="FP32">
458 <dim>1</dim>
459 <dim>1</dim>
460 <dim>384</dim>
461 </port>
462 </output>
463 </layer>
464 <layer id="35" name="__module.encoder.layer.0.attention.self.query/aten::linear/Add" type="Add" version="opset1">
465 <data auto_broadcast="numpy" />
466 <input>
467 <port id="0" precision="FP32">
468 <dim>-1</dim>
469 <dim>-1</dim>
470 <dim>384</dim>
471 </port>
472 <port id="1" precision="FP32">
473 <dim>1</dim>
474 <dim>1</dim>
475 <dim>384</dim>
476 </port>
477 </input>
478 <output>
479 <port id="2" precision="FP32" names="141,x.1">
480 <dim>-1</dim>
481 <dim>-1</dim>
482 <dim>384</dim>
483 </port>
484 </output>
485 </layer>
486 <layer id="36" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
487 <data element_type="i64" shape="4" offset="385444888" size="32" />
488 <output>
489 <port id="0" precision="I64">
490 <dim>4</dim>
491 </port>
492 </output>
493 </layer>
494 <layer id="37" name="__module.encoder.layer.0.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
495 <data special_zero="true" />
496 <input>
497 <port id="0" precision="FP32">
498 <dim>-1</dim>
499 <dim>-1</dim>
500 <dim>384</dim>
501 </port>
502 <port id="1" precision="I64">
503 <dim>4</dim>
504 </port>
505 </input>
506 <output>
507 <port id="2" precision="FP32" names="145,x.3">
508 <dim>-1</dim>
509 <dim>-1</dim>
510 <dim>12</dim>
511 <dim>32</dim>
512 </port>
513 </output>
514 </layer>
515 <layer id="38" name="Constant_6098247" type="Const" version="opset1">
516 <data element_type="i64" shape="4" offset="385444920" size="32" />
517 <output>
518 <port id="0" precision="I64" names="146">
519 <dim>4</dim>
520 </port>
521 </output>
522 </layer>
523 <layer id="39" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
524 <input>
525 <port id="0" precision="FP32">
526 <dim>-1</dim>
527 <dim>-1</dim>
528 <dim>12</dim>
529 <dim>32</dim>
530 </port>
531 <port id="1" precision="I64">
532 <dim>4</dim>
533 </port>
534 </input>
535 <output>
536 <port id="2" precision="FP32" names="147">
537 <dim>-1</dim>
538 <dim>12</dim>
539 <dim>-1</dim>
540 <dim>32</dim>
541 </port>
542 </output>
543 </layer>
544 <layer id="40" name="self.encoder.layer.0.attention.self.key.weight" type="Const" version="opset1">
545 <data element_type="f32" shape="384, 384" offset="385444952" size="589824" />
546 <output>
547 <port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.key.weight">
548 <dim>384</dim>
549 <dim>384</dim>
550 </port>
551 </output>
552 </layer>
553 <layer id="41" name="__module.encoder.layer.0.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
554 <data transpose_a="false" transpose_b="true" />
555 <input>
556 <port id="0" precision="FP32">
557 <dim>-1</dim>
558 <dim>-1</dim>
559 <dim>384</dim>
560 </port>
561 <port id="1" precision="FP32">
562 <dim>384</dim>
563 <dim>384</dim>
564 </port>
565 </input>
566 <output>
567 <port id="2" precision="FP32">
568 <dim>-1</dim>
569 <dim>-1</dim>
570 <dim>384</dim>
571 </port>
572 </output>
573 </layer>
574 <layer id="42" name="Constant_6107371" type="Const" version="opset1">
575 <data element_type="f32" shape="1, 1, 384" offset="386034776" size="1536" />
576 <output>
577 <port id="0" precision="FP32">
578 <dim>1</dim>
579 <dim>1</dim>
580 <dim>384</dim>
581 </port>
582 </output>
583 </layer>
584 <layer id="43" name="__module.encoder.layer.0.attention.self.key/aten::linear/Add" type="Add" version="opset1">
585 <data auto_broadcast="numpy" />
586 <input>
587 <port id="0" precision="FP32">
588 <dim>-1</dim>
589 <dim>-1</dim>
590 <dim>384</dim>
591 </port>
592 <port id="1" precision="FP32">
593 <dim>1</dim>
594 <dim>1</dim>
595 <dim>384</dim>
596 </port>
597 </input>
598 <output>
599 <port id="2" precision="FP32" names="150,x.5">
600 <dim>-1</dim>
601 <dim>-1</dim>
602 <dim>384</dim>
603 </port>
604 </output>
605 </layer>
606 <layer id="44" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
607 <data element_type="i64" shape="4" offset="385444888" size="32" />
608 <output>
609 <port id="0" precision="I64">
610 <dim>4</dim>
611 </port>
612 </output>
613 </layer>
614 <layer id="45" name="__module.encoder.layer.0.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
615 <data special_zero="true" />
616 <input>
617 <port id="0" precision="FP32">
618 <dim>-1</dim>
619 <dim>-1</dim>
620 <dim>384</dim>
621 </port>
622 <port id="1" precision="I64">
623 <dim>4</dim>
624 </port>
625 </input>
626 <output>
627 <port id="2" precision="FP32" names="154,x.7">
628 <dim>-1</dim>
629 <dim>-1</dim>
630 <dim>12</dim>
631 <dim>32</dim>
632 </port>
633 </output>
634 </layer>
635 <layer id="46" name="Constant_6098272" type="Const" version="opset1">
636 <data element_type="i64" shape="4" offset="385444920" size="32" />
637 <output>
638 <port id="0" precision="I64" names="155">
639 <dim>4</dim>
640 </port>
641 </output>
642 </layer>
643 <layer id="47" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
644 <input>
645 <port id="0" precision="FP32">
646 <dim>-1</dim>
647 <dim>-1</dim>
648 <dim>12</dim>
649 <dim>32</dim>
650 </port>
651 <port id="1" precision="I64">
652 <dim>4</dim>
653 </port>
654 </input>
655 <output>
656 <port id="2" precision="FP32" names="156">
657 <dim>-1</dim>
658 <dim>12</dim>
659 <dim>-1</dim>
660 <dim>32</dim>
661 </port>
662 </output>
663 </layer>
664 <layer id="48" name="self.encoder.layer.0.attention.self.value.weight" type="Const" version="opset1">
665 <data element_type="f32" shape="384, 384" offset="386036312" size="589824" />
666 <output>
667 <port id="0" precision="FP32" names="self.encoder.layer.0.attention.self.value.weight">
668 <dim>384</dim>
669 <dim>384</dim>
670 </port>
671 </output>
672 </layer>
673 <layer id="49" name="__module.encoder.layer.0.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
674 <data transpose_a="false" transpose_b="true" />
675 <input>
676 <port id="0" precision="FP32">
677 <dim>-1</dim>
678 <dim>-1</dim>
679 <dim>384</dim>
680 </port>
681 <port id="1" precision="FP32">
682 <dim>384</dim>
683 <dim>384</dim>
684 </port>
685 </input>
686 <output>
687 <port id="2" precision="FP32">
688 <dim>-1</dim>
689 <dim>-1</dim>
690 <dim>384</dim>
691 </port>
692 </output>
693 </layer>
694 <layer id="50" name="Constant_6107372" type="Const" version="opset1">
695 <data element_type="f32" shape="1, 1, 384" offset="386626136" size="1536" />
696 <output>
697 <port id="0" precision="FP32">
698 <dim>1</dim>
699 <dim>1</dim>
700 <dim>384</dim>
701 </port>
702 </output>
703 </layer>
704 <layer id="51" name="__module.encoder.layer.0.attention.self.value/aten::linear/Add" type="Add" version="opset1">
705 <data auto_broadcast="numpy" />
706 <input>
707 <port id="0" precision="FP32">
708 <dim>-1</dim>
709 <dim>-1</dim>
710 <dim>384</dim>
711 </port>
712 <port id="1" precision="FP32">
713 <dim>1</dim>
714 <dim>1</dim>
715 <dim>384</dim>
716 </port>
717 </input>
718 <output>
719 <port id="2" precision="FP32" names="159,x.9">
720 <dim>-1</dim>
721 <dim>-1</dim>
722 <dim>384</dim>
723 </port>
724 </output>
725 </layer>
726 <layer id="52" name="__module.encoder.layer.0.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
727 <data element_type="i64" shape="4" offset="385444888" size="32" />
728 <output>
729 <port id="0" precision="I64">
730 <dim>4</dim>
731 </port>
732 </output>
733 </layer>
734 <layer id="53" name="__module.encoder.layer.0.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
735 <data special_zero="true" />
736 <input>
737 <port id="0" precision="FP32">
738 <dim>-1</dim>
739 <dim>-1</dim>
740 <dim>384</dim>
741 </port>
742 <port id="1" precision="I64">
743 <dim>4</dim>
744 </port>
745 </input>
746 <output>
747 <port id="2" precision="FP32" names="163,x.11">
748 <dim>-1</dim>
749 <dim>-1</dim>
750 <dim>12</dim>
751 <dim>32</dim>
752 </port>
753 </output>
754 </layer>
755 <layer id="54" name="Constant_6098297" type="Const" version="opset1">
756 <data element_type="i64" shape="4" offset="385444920" size="32" />
757 <output>
758 <port id="0" precision="I64" names="164">
759 <dim>4</dim>
760 </port>
761 </output>
762 </layer>
763 <layer id="55" name="__module.encoder.layer.0.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
764 <input>
765 <port id="0" precision="FP32">
766 <dim>-1</dim>
767 <dim>-1</dim>
768 <dim>12</dim>
769 <dim>32</dim>
770 </port>
771 <port id="1" precision="I64">
772 <dim>4</dim>
773 </port>
774 </input>
775 <output>
776 <port id="2" precision="FP32" names="165">
777 <dim>-1</dim>
778 <dim>12</dim>
779 <dim>-1</dim>
780 <dim>32</dim>
781 </port>
782 </output>
783 </layer>
784 <layer id="56" name="Constant_6107374" type="Const" version="opset1">
785 <data element_type="f32" shape="1, 1, 1, 1" offset="386627672" size="4" />
786 <output>
787 <port id="0" precision="FP32">
788 <dim>1</dim>
789 <dim>1</dim>
790 <dim>1</dim>
791 <dim>1</dim>
792 </port>
793 </output>
794 </layer>
795 <layer id="57" name="25" type="Const" version="opset1">
796 <data element_type="i64" shape="" offset="384850444" size="8" />
797 <output>
798 <port id="0" precision="I64" names="25" />
799 </output>
800 </layer>
801 <layer id="58" name="aten::unsqueeze/Unsqueeze" type="Unsqueeze" version="opset1">
802 <input>
803 <port id="0" precision="I64">
804 <dim>-1</dim>
805 <dim>-1</dim>
806 </port>
807 <port id="1" precision="I64" />
808 </input>
809 <output>
810 <port id="2" precision="I64" names="26">
811 <dim>-1</dim>
812 <dim>1</dim>
813 <dim>-1</dim>
814 </port>
815 </output>
816 </layer>
817 <layer id="59" name="27" type="Const" version="opset1">
818 <data element_type="i64" shape="" offset="386627676" size="8" />
819 <output>
820 <port id="0" precision="I64" names="27" />
821 </output>
822 </layer>
823 <layer id="60" name="aten::unsqueeze/Unsqueeze_1" type="Unsqueeze" version="opset1">
824 <input>
825 <port id="0" precision="I64">
826 <dim>-1</dim>
827 <dim>1</dim>
828 <dim>-1</dim>
829 </port>
830 <port id="1" precision="I64" />
831 </input>
832 <output>
833 <port id="2" precision="I64" names="28,33">
834 <dim>-1</dim>
835 <dim>1</dim>
836 <dim>1</dim>
837 <dim>-1</dim>
838 </port>
839 </output>
840 </layer>
841 <layer id="61" name="Constant_6107726" type="Const" version="opset1">
842 <data element_type="i64" shape="1" offset="384850436" size="8" />
843 <output>
844 <port id="0" precision="I64">
845 <dim>1</dim>
846 </port>
847 </output>
848 </layer>
849 <layer id="62" name="Constant_6107605" type="Const" version="opset1">
850 <data element_type="i64" shape="" offset="384850436" size="8" />
851 <output>
852 <port id="0" precision="I64" />
853 </output>
854 </layer>
855 <layer id="63" name="Gather_6107606" type="Gather" version="opset8">
856 <data batch_dims="0" />
857 <input>
858 <port id="0" precision="I64">
859 <dim>2</dim>
860 </port>
861 <port id="1" precision="I64">
862 <dim>1</dim>
863 </port>
864 <port id="2" precision="I64" />
865 </input>
866 <output>
867 <port id="3" precision="I64" names="13,15">
868 <dim>1</dim>
869 </port>
870 </output>
871 </layer>
872 <layer id="64" name="Constant_6105992" type="Const" version="opset1">
873 <data element_type="i64" shape="1" offset="384850444" size="8" />
874 <output>
875 <port id="0" precision="I64">
876 <dim>1</dim>
877 </port>
878 </output>
879 </layer>
880 <layer id="65" name="Constant_6107728" type="Const" version="opset1">
881 <data element_type="i64" shape="2" offset="386627684" size="16" />
882 <output>
883 <port id="0" precision="I64">
884 <dim>2</dim>
885 </port>
886 </output>
887 </layer>
888 <layer id="66" name="Constant_6107729" type="Const" version="opset1">
889 <data element_type="i64" shape="" offset="384850436" size="8" />
890 <output>
891 <port id="0" precision="I64" />
892 </output>
893 </layer>
894 <layer id="67" name="Gather_6107730" type="Gather" version="opset8">
895 <data batch_dims="0" />
896 <input>
897 <port id="0" precision="I64">
898 <dim>2</dim>
899 </port>
900 <port id="1" precision="I64">
901 <dim>2</dim>
902 </port>
903 <port id="2" precision="I64" />
904 </input>
905 <output>
906 <port id="3" precision="I64">
907 <dim>2</dim>
908 </port>
909 </output>
910 </layer>
911 <layer id="68" name="prim::ListConstruct/Concat" type="Concat" version="opset1">
912 <data axis="0" />
913 <input>
914 <port id="0" precision="I64">
915 <dim>1</dim>
916 </port>
917 <port id="1" precision="I64">
918 <dim>1</dim>
919 </port>
920 <port id="2" precision="I64">
921 <dim>2</dim>
922 </port>
923 </input>
924 <output>
925 <port id="3" precision="I64" names="35">
926 <dim>4</dim>
927 </port>
928 </output>
929 </layer>
930 <layer id="69" name="aten::expand/Broadcast" type="Broadcast" version="opset3">
931 <data mode="bidirectional" />
932 <input>
933 <port id="0" precision="I64">
934 <dim>-1</dim>
935 <dim>1</dim>
936 <dim>1</dim>
937 <dim>-1</dim>
938 </port>
939 <port id="1" precision="I64">
940 <dim>4</dim>
941 </port>
942 </input>
943 <output>
944 <port id="2" precision="I64" names="37">
945 <dim>-1</dim>
946 <dim>1</dim>
947 <dim>-1</dim>
948 <dim>-1</dim>
949 </port>
950 </output>
951 </layer>
952 <layer id="70" name="aten::to/Convert" type="Convert" version="opset1">
953 <data destination_type="f32" />
954 <input>
955 <port id="0" precision="I64">
956 <dim>-1</dim>
957 <dim>1</dim>
958 <dim>-1</dim>
959 <dim>-1</dim>
960 </port>
961 </input>
962 <output>
963 <port id="1" precision="FP32" names="42">
964 <dim>-1</dim>
965 <dim>1</dim>
966 <dim>-1</dim>
967 <dim>-1</dim>
968 </port>
969 </output>
970 </layer>
971 <layer id="71" name="Constant_6107373" type="Const" version="opset1">
972 <data element_type="f32" shape="1, 1, 1, 1" offset="386627672" size="4" />
973 <output>
974 <port id="0" precision="FP32">
975 <dim>1</dim>
976 <dim>1</dim>
977 <dim>1</dim>
978 <dim>1</dim>
979 </port>
980 </output>
981 </layer>
982 <layer id="72" name="aten::rsub/Multiply" type="Multiply" version="opset1">
983 <data auto_broadcast="numpy" />
984 <input>
985 <port id="0" precision="FP32">
986 <dim>-1</dim>
987 <dim>1</dim>
988 <dim>-1</dim>
989 <dim>-1</dim>
990 </port>
991 <port id="1" precision="FP32">
992 <dim>1</dim>
993 <dim>1</dim>
994 <dim>1</dim>
995 <dim>1</dim>
996 </port>
997 </input>
998 <output>
999 <port id="2" precision="FP32">
1000 <dim>-1</dim>
1001 <dim>1</dim>
1002 <dim>-1</dim>
1003 <dim>-1</dim>
1004 </port>
1005 </output>
1006 </layer>
1007 <layer id="73" name="aten::rsub/Subtract" type="Subtract" version="opset1">
1008 <data auto_broadcast="numpy" />
1009 <input>
1010 <port id="0" precision="FP32">
1011 <dim>1</dim>
1012 <dim>1</dim>
1013 <dim>1</dim>
1014 <dim>1</dim>
1015 </port>
1016 <port id="1" precision="FP32">
1017 <dim>-1</dim>
1018 <dim>1</dim>
1019 <dim>-1</dim>
1020 <dim>-1</dim>
1021 </port>
1022 </input>
1023 <output>
1024 <port id="2" precision="FP32" names="45,inverted_mask">
1025 <dim>-1</dim>
1026 <dim>1</dim>
1027 <dim>-1</dim>
1028 <dim>-1</dim>
1029 </port>
1030 </output>
1031 </layer>
1032 <layer id="74" name="aten::to/Convert_1" type="Convert" version="opset1">
1033 <data destination_type="boolean" />
1034 <input>
1035 <port id="0" precision="FP32">
1036 <dim>-1</dim>
1037 <dim>1</dim>
1038 <dim>-1</dim>
1039 <dim>-1</dim>
1040 </port>
1041 </input>
1042 <output>
1043 <port id="1" precision="BOOL" names="50">
1044 <dim>-1</dim>
1045 <dim>1</dim>
1046 <dim>-1</dim>
1047 <dim>-1</dim>
1048 </port>
1049 </output>
1050 </layer>
1051 <layer id="75" name="aten::masked_fill/ConvertLike" type="Const" version="opset1">
1052 <data element_type="f32" shape="" offset="386627700" size="4" />
1053 <output>
1054 <port id="0" precision="FP32" />
1055 </output>
1056 </layer>
1057 <layer id="76" name="aten::masked_fill/Select" type="Select" version="opset1">
1058 <data auto_broadcast="numpy" />
1059 <input>
1060 <port id="0" precision="BOOL">
1061 <dim>-1</dim>
1062 <dim>1</dim>
1063 <dim>-1</dim>
1064 <dim>-1</dim>
1065 </port>
1066 <port id="1" precision="FP32" />
1067 <port id="2" precision="FP32">
1068 <dim>-1</dim>
1069 <dim>1</dim>
1070 <dim>-1</dim>
1071 <dim>-1</dim>
1072 </port>
1073 </input>
1074 <output>
1075 <port id="3" precision="FP32" names="52">
1076 <dim>-1</dim>
1077 <dim>1</dim>
1078 <dim>-1</dim>
1079 <dim>-1</dim>
1080 </port>
1081 </output>
1082 </layer>
1083 <layer id="77" name="__module.encoder.layer.0.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
1084 <data causal="false" />
1085 <input>
1086 <port id="0" precision="FP32">
1087 <dim>-1</dim>
1088 <dim>12</dim>
1089 <dim>-1</dim>
1090 <dim>32</dim>
1091 </port>
1092 <port id="1" precision="FP32">
1093 <dim>-1</dim>
1094 <dim>12</dim>
1095 <dim>-1</dim>
1096 <dim>32</dim>
1097 </port>
1098 <port id="2" precision="FP32">
1099 <dim>-1</dim>
1100 <dim>12</dim>
1101 <dim>-1</dim>
1102 <dim>32</dim>
1103 </port>
1104 <port id="3" precision="FP32">
1105 <dim>-1</dim>
1106 <dim>1</dim>
1107 <dim>-1</dim>
1108 <dim>-1</dim>
1109 </port>
1110 </input>
1111 <output>
1112 <port id="4" precision="FP32" names="166,attn_output.1">
1113 <dim>-1</dim>
1114 <dim>12</dim>
1115 <dim>-1</dim>
1116 <dim>32</dim>
1117 </port>
1118 </output>
1119 </layer>
1120 <layer id="78" name="__module.encoder.layer.0.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
1121 <data element_type="i32" shape="4" offset="386627704" size="16" />
1122 <output>
1123 <port id="0" precision="I32">
1124 <dim>4</dim>
1125 </port>
1126 </output>
1127 </layer>
1128 <layer id="79" name="__module.encoder.layer.0.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
1129 <input>
1130 <port id="0" precision="FP32">
1131 <dim>-1</dim>
1132 <dim>12</dim>
1133 <dim>-1</dim>
1134 <dim>32</dim>
1135 </port>
1136 <port id="1" precision="I32">
1137 <dim>4</dim>
1138 </port>
1139 </input>
1140 <output>
1141 <port id="2" precision="FP32" names="167,attn_output.3">
1142 <dim>-1</dim>
1143 <dim>-1</dim>
1144 <dim>12</dim>
1145 <dim>32</dim>
1146 </port>
1147 </output>
1148 </layer>
1149 <layer id="80" name="Constant_6107614" type="Const" version="opset1">
1150 <data element_type="i64" shape="3" offset="386627720" size="24" />
1151 <output>
1152 <port id="0" precision="I64">
1153 <dim>3</dim>
1154 </port>
1155 </output>
1156 </layer>
1157 <layer id="81" name="__module.encoder.layer.0.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
1158 <data special_zero="true" />
1159 <input>
1160 <port id="0" precision="FP32">
1161 <dim>-1</dim>
1162 <dim>-1</dim>
1163 <dim>12</dim>
1164 <dim>32</dim>
1165 </port>
1166 <port id="1" precision="I64">
1167 <dim>3</dim>
1168 </port>
1169 </input>
1170 <output>
1171 <port id="2" precision="FP32" names="169">
1172 <dim>-1</dim>
1173 <dim>-1</dim>
1174 <dim>384</dim>
1175 </port>
1176 </output>
1177 </layer>
1178 <layer id="82" name="self.encoder.layer.0.attention.output.dense.weight" type="Const" version="opset1">
1179 <data element_type="f32" shape="384, 384" offset="386627744" size="589824" />
1180 <output>
1181 <port id="0" precision="FP32" names="self.encoder.layer.0.attention.output.dense.weight">
1182 <dim>384</dim>
1183 <dim>384</dim>
1184 </port>
1185 </output>
1186 </layer>
1187 <layer id="83" name="__module.encoder.layer.0.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
1188 <data transpose_a="false" transpose_b="true" />
1189 <input>
1190 <port id="0" precision="FP32">
1191 <dim>-1</dim>
1192 <dim>-1</dim>
1193 <dim>384</dim>
1194 </port>
1195 <port id="1" precision="FP32">
1196 <dim>384</dim>
1197 <dim>384</dim>
1198 </port>
1199 </input>
1200 <output>
1201 <port id="2" precision="FP32">
1202 <dim>-1</dim>
1203 <dim>-1</dim>
1204 <dim>384</dim>
1205 </port>
1206 </output>
1207 </layer>
1208 <layer id="84" name="Constant_6107375" type="Const" version="opset1">
1209 <data element_type="f32" shape="1, 1, 384" offset="387217568" size="1536" />
1210 <output>
1211 <port id="0" precision="FP32">
1212 <dim>1</dim>
1213 <dim>1</dim>
1214 <dim>384</dim>
1215 </port>
1216 </output>
1217 </layer>
1218 <layer id="85" name="__module.encoder.layer.0.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
1219 <data auto_broadcast="numpy" />
1220 <input>
1221 <port id="0" precision="FP32">
1222 <dim>-1</dim>
1223 <dim>-1</dim>
1224 <dim>384</dim>
1225 </port>
1226 <port id="1" precision="FP32">
1227 <dim>1</dim>
1228 <dim>1</dim>
1229 <dim>384</dim>
1230 </port>
1231 </input>
1232 <output>
1233 <port id="2" precision="FP32" names="175,input.3">
1234 <dim>-1</dim>
1235 <dim>-1</dim>
1236 <dim>384</dim>
1237 </port>
1238 </output>
1239 </layer>
1240 <layer id="86" name="__module.encoder.layer.0.attention.output/aten::add/Add" type="Add" version="opset1">
1241 <data auto_broadcast="numpy" />
1242 <input>
1243 <port id="0" precision="FP32">
1244 <dim>-1</dim>
1245 <dim>-1</dim>
1246 <dim>384</dim>
1247 </port>
1248 <port id="1" precision="FP32">
1249 <dim>-1</dim>
1250 <dim>-1</dim>
1251 <dim>384</dim>
1252 </port>
1253 </input>
1254 <output>
1255 <port id="2" precision="FP32" names="177">
1256 <dim>-1</dim>
1257 <dim>-1</dim>
1258 <dim>384</dim>
1259 </port>
1260 </output>
1261 </layer>
1262 <layer id="87" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
1263 <data element_type="i32" shape="1" offset="384850452" size="4" />
1264 <output>
1265 <port id="0" precision="I32">
1266 <dim>1</dim>
1267 </port>
1268 </output>
1269 </layer>
1270 <layer id="88" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
1271 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
1272 <input>
1273 <port id="0" precision="FP32">
1274 <dim>-1</dim>
1275 <dim>-1</dim>
1276 <dim>384</dim>
1277 </port>
1278 <port id="1" precision="I32">
1279 <dim>1</dim>
1280 </port>
1281 </input>
1282 <output>
1283 <port id="2" precision="FP32">
1284 <dim>-1</dim>
1285 <dim>-1</dim>
1286 <dim>384</dim>
1287 </port>
1288 </output>
1289 </layer>
1290 <layer id="89" name="Constant_6107376" type="Const" version="opset1">
1291 <data element_type="f32" shape="1, 1, 384" offset="387219104" size="1536" />
1292 <output>
1293 <port id="0" precision="FP32">
1294 <dim>1</dim>
1295 <dim>1</dim>
1296 <dim>384</dim>
1297 </port>
1298 </output>
1299 </layer>
1300 <layer id="90" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
1301 <data auto_broadcast="numpy" />
1302 <input>
1303 <port id="0" precision="FP32">
1304 <dim>-1</dim>
1305 <dim>-1</dim>
1306 <dim>384</dim>
1307 </port>
1308 <port id="1" precision="FP32">
1309 <dim>1</dim>
1310 <dim>1</dim>
1311 <dim>384</dim>
1312 </port>
1313 </input>
1314 <output>
1315 <port id="2" precision="FP32">
1316 <dim>-1</dim>
1317 <dim>-1</dim>
1318 <dim>384</dim>
1319 </port>
1320 </output>
1321 </layer>
1322 <layer id="91" name="Constant_6107377" type="Const" version="opset1">
1323 <data element_type="f32" shape="1, 1, 384" offset="387220640" size="1536" />
1324 <output>
1325 <port id="0" precision="FP32">
1326 <dim>1</dim>
1327 <dim>1</dim>
1328 <dim>384</dim>
1329 </port>
1330 </output>
1331 </layer>
1332 <layer id="92" name="__module.encoder.layer.0.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
1333 <data auto_broadcast="numpy" />
1334 <input>
1335 <port id="0" precision="FP32">
1336 <dim>-1</dim>
1337 <dim>-1</dim>
1338 <dim>384</dim>
1339 </port>
1340 <port id="1" precision="FP32">
1341 <dim>1</dim>
1342 <dim>1</dim>
1343 <dim>384</dim>
1344 </port>
1345 </input>
1346 <output>
1347 <port id="2" precision="FP32" names="181,input_tensor.1">
1348 <dim>-1</dim>
1349 <dim>-1</dim>
1350 <dim>384</dim>
1351 </port>
1352 </output>
1353 </layer>
1354 <layer id="93" name="self.encoder.layer.0.intermediate.dense.weight" type="Const" version="opset1">
1355 <data element_type="f32" shape="1536, 384" offset="387222176" size="2359296" />
1356 <output>
1357 <port id="0" precision="FP32" names="self.encoder.layer.0.intermediate.dense.weight">
1358 <dim>1536</dim>
1359 <dim>384</dim>
1360 </port>
1361 </output>
1362 </layer>
1363 <layer id="94" name="__module.encoder.layer.0.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
1364 <data transpose_a="false" transpose_b="true" />
1365 <input>
1366 <port id="0" precision="FP32">
1367 <dim>-1</dim>
1368 <dim>-1</dim>
1369 <dim>384</dim>
1370 </port>
1371 <port id="1" precision="FP32">
1372 <dim>1536</dim>
1373 <dim>384</dim>
1374 </port>
1375 </input>
1376 <output>
1377 <port id="2" precision="FP32">
1378 <dim>-1</dim>
1379 <dim>-1</dim>
1380 <dim>1536</dim>
1381 </port>
1382 </output>
1383 </layer>
1384 <layer id="95" name="Constant_6107378" type="Const" version="opset1">
1385 <data element_type="f32" shape="1, 1, 1536" offset="389581472" size="6144" />
1386 <output>
1387 <port id="0" precision="FP32">
1388 <dim>1</dim>
1389 <dim>1</dim>
1390 <dim>1536</dim>
1391 </port>
1392 </output>
1393 </layer>
1394 <layer id="96" name="__module.encoder.layer.0.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
1395 <data auto_broadcast="numpy" />
1396 <input>
1397 <port id="0" precision="FP32">
1398 <dim>-1</dim>
1399 <dim>-1</dim>
1400 <dim>1536</dim>
1401 </port>
1402 <port id="1" precision="FP32">
1403 <dim>1</dim>
1404 <dim>1</dim>
1405 <dim>1536</dim>
1406 </port>
1407 </input>
1408 <output>
1409 <port id="2" precision="FP32" names="186">
1410 <dim>-1</dim>
1411 <dim>-1</dim>
1412 <dim>1536</dim>
1413 </port>
1414 </output>
1415 </layer>
1416 <layer id="97" name="__module.encoder.layer.0.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
1417 <data approximation_mode="ERF" />
1418 <input>
1419 <port id="0" precision="FP32">
1420 <dim>-1</dim>
1421 <dim>-1</dim>
1422 <dim>1536</dim>
1423 </port>
1424 </input>
1425 <output>
1426 <port id="1" precision="FP32" names="187">
1427 <dim>-1</dim>
1428 <dim>-1</dim>
1429 <dim>1536</dim>
1430 </port>
1431 </output>
1432 </layer>
1433 <layer id="98" name="self.encoder.layer.0.output.dense.weight" type="Const" version="opset1">
1434 <data element_type="f32" shape="384, 1536" offset="389587616" size="2359296" />
1435 <output>
1436 <port id="0" precision="FP32" names="self.encoder.layer.0.output.dense.weight">
1437 <dim>384</dim>
1438 <dim>1536</dim>
1439 </port>
1440 </output>
1441 </layer>
1442 <layer id="99" name="__module.encoder.layer.0.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
1443 <data transpose_a="false" transpose_b="true" />
1444 <input>
1445 <port id="0" precision="FP32">
1446 <dim>-1</dim>
1447 <dim>-1</dim>
1448 <dim>1536</dim>
1449 </port>
1450 <port id="1" precision="FP32">
1451 <dim>384</dim>
1452 <dim>1536</dim>
1453 </port>
1454 </input>
1455 <output>
1456 <port id="2" precision="FP32">
1457 <dim>-1</dim>
1458 <dim>-1</dim>
1459 <dim>384</dim>
1460 </port>
1461 </output>
1462 </layer>
1463 <layer id="100" name="Constant_6107379" type="Const" version="opset1">
1464 <data element_type="f32" shape="1, 1, 384" offset="391946912" size="1536" />
1465 <output>
1466 <port id="0" precision="FP32">
1467 <dim>1</dim>
1468 <dim>1</dim>
1469 <dim>384</dim>
1470 </port>
1471 </output>
1472 </layer>
1473 <layer id="101" name="__module.encoder.layer.0.output.dense/aten::linear/Add" type="Add" version="opset1">
1474 <data auto_broadcast="numpy" />
1475 <input>
1476 <port id="0" precision="FP32">
1477 <dim>-1</dim>
1478 <dim>-1</dim>
1479 <dim>384</dim>
1480 </port>
1481 <port id="1" precision="FP32">
1482 <dim>1</dim>
1483 <dim>1</dim>
1484 <dim>384</dim>
1485 </port>
1486 </input>
1487 <output>
1488 <port id="2" precision="FP32" names="193,input.5">
1489 <dim>-1</dim>
1490 <dim>-1</dim>
1491 <dim>384</dim>
1492 </port>
1493 </output>
1494 </layer>
1495 <layer id="102" name="__module.encoder.layer.0.output/aten::add/Add" type="Add" version="opset1">
1496 <data auto_broadcast="numpy" />
1497 <input>
1498 <port id="0" precision="FP32">
1499 <dim>-1</dim>
1500 <dim>-1</dim>
1501 <dim>384</dim>
1502 </port>
1503 <port id="1" precision="FP32">
1504 <dim>-1</dim>
1505 <dim>-1</dim>
1506 <dim>384</dim>
1507 </port>
1508 </input>
1509 <output>
1510 <port id="2" precision="FP32" names="195">
1511 <dim>-1</dim>
1512 <dim>-1</dim>
1513 <dim>384</dim>
1514 </port>
1515 </output>
1516 </layer>
1517 <layer id="103" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
1518 <data element_type="i32" shape="1" offset="384850452" size="4" />
1519 <output>
1520 <port id="0" precision="I32">
1521 <dim>1</dim>
1522 </port>
1523 </output>
1524 </layer>
1525 <layer id="104" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
1526 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
1527 <input>
1528 <port id="0" precision="FP32">
1529 <dim>-1</dim>
1530 <dim>-1</dim>
1531 <dim>384</dim>
1532 </port>
1533 <port id="1" precision="I32">
1534 <dim>1</dim>
1535 </port>
1536 </input>
1537 <output>
1538 <port id="2" precision="FP32">
1539 <dim>-1</dim>
1540 <dim>-1</dim>
1541 <dim>384</dim>
1542 </port>
1543 </output>
1544 </layer>
1545 <layer id="105" name="Constant_6107380" type="Const" version="opset1">
1546 <data element_type="f32" shape="1, 1, 384" offset="391948448" size="1536" />
1547 <output>
1548 <port id="0" precision="FP32">
1549 <dim>1</dim>
1550 <dim>1</dim>
1551 <dim>384</dim>
1552 </port>
1553 </output>
1554 </layer>
1555 <layer id="106" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
1556 <data auto_broadcast="numpy" />
1557 <input>
1558 <port id="0" precision="FP32">
1559 <dim>-1</dim>
1560 <dim>-1</dim>
1561 <dim>384</dim>
1562 </port>
1563 <port id="1" precision="FP32">
1564 <dim>1</dim>
1565 <dim>1</dim>
1566 <dim>384</dim>
1567 </port>
1568 </input>
1569 <output>
1570 <port id="2" precision="FP32">
1571 <dim>-1</dim>
1572 <dim>-1</dim>
1573 <dim>384</dim>
1574 </port>
1575 </output>
1576 </layer>
1577 <layer id="107" name="Constant_6107381" type="Const" version="opset1">
1578 <data element_type="f32" shape="1, 1, 384" offset="391949984" size="1536" />
1579 <output>
1580 <port id="0" precision="FP32">
1581 <dim>1</dim>
1582 <dim>1</dim>
1583 <dim>384</dim>
1584 </port>
1585 </output>
1586 </layer>
1587 <layer id="108" name="__module.encoder.layer.0.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
1588 <data auto_broadcast="numpy" />
1589 <input>
1590 <port id="0" precision="FP32">
1591 <dim>-1</dim>
1592 <dim>-1</dim>
1593 <dim>384</dim>
1594 </port>
1595 <port id="1" precision="FP32">
1596 <dim>1</dim>
1597 <dim>1</dim>
1598 <dim>384</dim>
1599 </port>
1600 </input>
1601 <output>
1602 <port id="2" precision="FP32" names="199,hidden_states.7">
1603 <dim>-1</dim>
1604 <dim>-1</dim>
1605 <dim>384</dim>
1606 </port>
1607 </output>
1608 </layer>
1609 <layer id="109" name="self.encoder.layer.1.attention.self.query.weight" type="Const" version="opset1">
1610 <data element_type="f32" shape="384, 384" offset="391951520" size="589824" />
1611 <output>
1612 <port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.query.weight">
1613 <dim>384</dim>
1614 <dim>384</dim>
1615 </port>
1616 </output>
1617 </layer>
1618 <layer id="110" name="__module.encoder.layer.1.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
1619 <data transpose_a="false" transpose_b="true" />
1620 <input>
1621 <port id="0" precision="FP32">
1622 <dim>-1</dim>
1623 <dim>-1</dim>
1624 <dim>384</dim>
1625 </port>
1626 <port id="1" precision="FP32">
1627 <dim>384</dim>
1628 <dim>384</dim>
1629 </port>
1630 </input>
1631 <output>
1632 <port id="2" precision="FP32">
1633 <dim>-1</dim>
1634 <dim>-1</dim>
1635 <dim>384</dim>
1636 </port>
1637 </output>
1638 </layer>
1639 <layer id="111" name="Constant_6107382" type="Const" version="opset1">
1640 <data element_type="f32" shape="1, 1, 384" offset="392541344" size="1536" />
1641 <output>
1642 <port id="0" precision="FP32">
1643 <dim>1</dim>
1644 <dim>1</dim>
1645 <dim>384</dim>
1646 </port>
1647 </output>
1648 </layer>
1649 <layer id="112" name="__module.encoder.layer.1.attention.self.query/aten::linear/Add" type="Add" version="opset1">
1650 <data auto_broadcast="numpy" />
1651 <input>
1652 <port id="0" precision="FP32">
1653 <dim>-1</dim>
1654 <dim>-1</dim>
1655 <dim>384</dim>
1656 </port>
1657 <port id="1" precision="FP32">
1658 <dim>1</dim>
1659 <dim>1</dim>
1660 <dim>384</dim>
1661 </port>
1662 </input>
1663 <output>
1664 <port id="2" precision="FP32" names="212,x.13">
1665 <dim>-1</dim>
1666 <dim>-1</dim>
1667 <dim>384</dim>
1668 </port>
1669 </output>
1670 </layer>
1671 <layer id="113" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
1672 <data element_type="i64" shape="4" offset="385444888" size="32" />
1673 <output>
1674 <port id="0" precision="I64">
1675 <dim>4</dim>
1676 </port>
1677 </output>
1678 </layer>
1679 <layer id="114" name="__module.encoder.layer.1.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
1680 <data special_zero="true" />
1681 <input>
1682 <port id="0" precision="FP32">
1683 <dim>-1</dim>
1684 <dim>-1</dim>
1685 <dim>384</dim>
1686 </port>
1687 <port id="1" precision="I64">
1688 <dim>4</dim>
1689 </port>
1690 </input>
1691 <output>
1692 <port id="2" precision="FP32" names="216,x.15">
1693 <dim>-1</dim>
1694 <dim>-1</dim>
1695 <dim>12</dim>
1696 <dim>32</dim>
1697 </port>
1698 </output>
1699 </layer>
1700 <layer id="115" name="Constant_6098479" type="Const" version="opset1">
1701 <data element_type="i64" shape="4" offset="385444920" size="32" />
1702 <output>
1703 <port id="0" precision="I64" names="217">
1704 <dim>4</dim>
1705 </port>
1706 </output>
1707 </layer>
1708 <layer id="116" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
1709 <input>
1710 <port id="0" precision="FP32">
1711 <dim>-1</dim>
1712 <dim>-1</dim>
1713 <dim>12</dim>
1714 <dim>32</dim>
1715 </port>
1716 <port id="1" precision="I64">
1717 <dim>4</dim>
1718 </port>
1719 </input>
1720 <output>
1721 <port id="2" precision="FP32" names="218">
1722 <dim>-1</dim>
1723 <dim>12</dim>
1724 <dim>-1</dim>
1725 <dim>32</dim>
1726 </port>
1727 </output>
1728 </layer>
1729 <layer id="117" name="self.encoder.layer.1.attention.self.key.weight" type="Const" version="opset1">
1730 <data element_type="f32" shape="384, 384" offset="392542880" size="589824" />
1731 <output>
1732 <port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.key.weight">
1733 <dim>384</dim>
1734 <dim>384</dim>
1735 </port>
1736 </output>
1737 </layer>
1738 <layer id="118" name="__module.encoder.layer.1.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
1739 <data transpose_a="false" transpose_b="true" />
1740 <input>
1741 <port id="0" precision="FP32">
1742 <dim>-1</dim>
1743 <dim>-1</dim>
1744 <dim>384</dim>
1745 </port>
1746 <port id="1" precision="FP32">
1747 <dim>384</dim>
1748 <dim>384</dim>
1749 </port>
1750 </input>
1751 <output>
1752 <port id="2" precision="FP32">
1753 <dim>-1</dim>
1754 <dim>-1</dim>
1755 <dim>384</dim>
1756 </port>
1757 </output>
1758 </layer>
1759 <layer id="119" name="Constant_6107383" type="Const" version="opset1">
1760 <data element_type="f32" shape="1, 1, 384" offset="393132704" size="1536" />
1761 <output>
1762 <port id="0" precision="FP32">
1763 <dim>1</dim>
1764 <dim>1</dim>
1765 <dim>384</dim>
1766 </port>
1767 </output>
1768 </layer>
1769 <layer id="120" name="__module.encoder.layer.1.attention.self.key/aten::linear/Add" type="Add" version="opset1">
1770 <data auto_broadcast="numpy" />
1771 <input>
1772 <port id="0" precision="FP32">
1773 <dim>-1</dim>
1774 <dim>-1</dim>
1775 <dim>384</dim>
1776 </port>
1777 <port id="1" precision="FP32">
1778 <dim>1</dim>
1779 <dim>1</dim>
1780 <dim>384</dim>
1781 </port>
1782 </input>
1783 <output>
1784 <port id="2" precision="FP32" names="221,x.17">
1785 <dim>-1</dim>
1786 <dim>-1</dim>
1787 <dim>384</dim>
1788 </port>
1789 </output>
1790 </layer>
1791 <layer id="121" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
1792 <data element_type="i64" shape="4" offset="385444888" size="32" />
1793 <output>
1794 <port id="0" precision="I64">
1795 <dim>4</dim>
1796 </port>
1797 </output>
1798 </layer>
1799 <layer id="122" name="__module.encoder.layer.1.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
1800 <data special_zero="true" />
1801 <input>
1802 <port id="0" precision="FP32">
1803 <dim>-1</dim>
1804 <dim>-1</dim>
1805 <dim>384</dim>
1806 </port>
1807 <port id="1" precision="I64">
1808 <dim>4</dim>
1809 </port>
1810 </input>
1811 <output>
1812 <port id="2" precision="FP32" names="225,x.19">
1813 <dim>-1</dim>
1814 <dim>-1</dim>
1815 <dim>12</dim>
1816 <dim>32</dim>
1817 </port>
1818 </output>
1819 </layer>
1820 <layer id="123" name="Constant_6098502" type="Const" version="opset1">
1821 <data element_type="i64" shape="4" offset="385444920" size="32" />
1822 <output>
1823 <port id="0" precision="I64" names="226">
1824 <dim>4</dim>
1825 </port>
1826 </output>
1827 </layer>
1828 <layer id="124" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
1829 <input>
1830 <port id="0" precision="FP32">
1831 <dim>-1</dim>
1832 <dim>-1</dim>
1833 <dim>12</dim>
1834 <dim>32</dim>
1835 </port>
1836 <port id="1" precision="I64">
1837 <dim>4</dim>
1838 </port>
1839 </input>
1840 <output>
1841 <port id="2" precision="FP32" names="227">
1842 <dim>-1</dim>
1843 <dim>12</dim>
1844 <dim>-1</dim>
1845 <dim>32</dim>
1846 </port>
1847 </output>
1848 </layer>
1849 <layer id="125" name="self.encoder.layer.1.attention.self.value.weight" type="Const" version="opset1">
1850 <data element_type="f32" shape="384, 384" offset="393134240" size="589824" />
1851 <output>
1852 <port id="0" precision="FP32" names="self.encoder.layer.1.attention.self.value.weight">
1853 <dim>384</dim>
1854 <dim>384</dim>
1855 </port>
1856 </output>
1857 </layer>
1858 <layer id="126" name="__module.encoder.layer.1.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
1859 <data transpose_a="false" transpose_b="true" />
1860 <input>
1861 <port id="0" precision="FP32">
1862 <dim>-1</dim>
1863 <dim>-1</dim>
1864 <dim>384</dim>
1865 </port>
1866 <port id="1" precision="FP32">
1867 <dim>384</dim>
1868 <dim>384</dim>
1869 </port>
1870 </input>
1871 <output>
1872 <port id="2" precision="FP32">
1873 <dim>-1</dim>
1874 <dim>-1</dim>
1875 <dim>384</dim>
1876 </port>
1877 </output>
1878 </layer>
1879 <layer id="127" name="Constant_6107384" type="Const" version="opset1">
1880 <data element_type="f32" shape="1, 1, 384" offset="393724064" size="1536" />
1881 <output>
1882 <port id="0" precision="FP32">
1883 <dim>1</dim>
1884 <dim>1</dim>
1885 <dim>384</dim>
1886 </port>
1887 </output>
1888 </layer>
1889 <layer id="128" name="__module.encoder.layer.1.attention.self.value/aten::linear/Add" type="Add" version="opset1">
1890 <data auto_broadcast="numpy" />
1891 <input>
1892 <port id="0" precision="FP32">
1893 <dim>-1</dim>
1894 <dim>-1</dim>
1895 <dim>384</dim>
1896 </port>
1897 <port id="1" precision="FP32">
1898 <dim>1</dim>
1899 <dim>1</dim>
1900 <dim>384</dim>
1901 </port>
1902 </input>
1903 <output>
1904 <port id="2" precision="FP32" names="230,x.21">
1905 <dim>-1</dim>
1906 <dim>-1</dim>
1907 <dim>384</dim>
1908 </port>
1909 </output>
1910 </layer>
1911 <layer id="129" name="__module.encoder.layer.1.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
1912 <data element_type="i64" shape="4" offset="385444888" size="32" />
1913 <output>
1914 <port id="0" precision="I64">
1915 <dim>4</dim>
1916 </port>
1917 </output>
1918 </layer>
1919 <layer id="130" name="__module.encoder.layer.1.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
1920 <data special_zero="true" />
1921 <input>
1922 <port id="0" precision="FP32">
1923 <dim>-1</dim>
1924 <dim>-1</dim>
1925 <dim>384</dim>
1926 </port>
1927 <port id="1" precision="I64">
1928 <dim>4</dim>
1929 </port>
1930 </input>
1931 <output>
1932 <port id="2" precision="FP32" names="234,x.23">
1933 <dim>-1</dim>
1934 <dim>-1</dim>
1935 <dim>12</dim>
1936 <dim>32</dim>
1937 </port>
1938 </output>
1939 </layer>
1940 <layer id="131" name="Constant_6098525" type="Const" version="opset1">
1941 <data element_type="i64" shape="4" offset="385444920" size="32" />
1942 <output>
1943 <port id="0" precision="I64" names="235">
1944 <dim>4</dim>
1945 </port>
1946 </output>
1947 </layer>
1948 <layer id="132" name="__module.encoder.layer.1.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
1949 <input>
1950 <port id="0" precision="FP32">
1951 <dim>-1</dim>
1952 <dim>-1</dim>
1953 <dim>12</dim>
1954 <dim>32</dim>
1955 </port>
1956 <port id="1" precision="I64">
1957 <dim>4</dim>
1958 </port>
1959 </input>
1960 <output>
1961 <port id="2" precision="FP32" names="236">
1962 <dim>-1</dim>
1963 <dim>12</dim>
1964 <dim>-1</dim>
1965 <dim>32</dim>
1966 </port>
1967 </output>
1968 </layer>
1969 <layer id="133" name="__module.encoder.layer.1.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
1970 <data causal="false" />
1971 <input>
1972 <port id="0" precision="FP32">
1973 <dim>-1</dim>
1974 <dim>12</dim>
1975 <dim>-1</dim>
1976 <dim>32</dim>
1977 </port>
1978 <port id="1" precision="FP32">
1979 <dim>-1</dim>
1980 <dim>12</dim>
1981 <dim>-1</dim>
1982 <dim>32</dim>
1983 </port>
1984 <port id="2" precision="FP32">
1985 <dim>-1</dim>
1986 <dim>12</dim>
1987 <dim>-1</dim>
1988 <dim>32</dim>
1989 </port>
1990 <port id="3" precision="FP32">
1991 <dim>-1</dim>
1992 <dim>1</dim>
1993 <dim>-1</dim>
1994 <dim>-1</dim>
1995 </port>
1996 </input>
1997 <output>
1998 <port id="4" precision="FP32" names="237,attn_output.5">
1999 <dim>-1</dim>
2000 <dim>12</dim>
2001 <dim>-1</dim>
2002 <dim>32</dim>
2003 </port>
2004 </output>
2005 </layer>
2006 <layer id="134" name="__module.encoder.layer.1.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
2007 <data element_type="i32" shape="4" offset="386627704" size="16" />
2008 <output>
2009 <port id="0" precision="I32">
2010 <dim>4</dim>
2011 </port>
2012 </output>
2013 </layer>
2014 <layer id="135" name="__module.encoder.layer.1.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
2015 <input>
2016 <port id="0" precision="FP32">
2017 <dim>-1</dim>
2018 <dim>12</dim>
2019 <dim>-1</dim>
2020 <dim>32</dim>
2021 </port>
2022 <port id="1" precision="I32">
2023 <dim>4</dim>
2024 </port>
2025 </input>
2026 <output>
2027 <port id="2" precision="FP32" names="238,attn_output.7">
2028 <dim>-1</dim>
2029 <dim>-1</dim>
2030 <dim>12</dim>
2031 <dim>32</dim>
2032 </port>
2033 </output>
2034 </layer>
2035 <layer id="136" name="Constant_6107615" type="Const" version="opset1">
2036 <data element_type="i64" shape="3" offset="386627720" size="24" />
2037 <output>
2038 <port id="0" precision="I64">
2039 <dim>3</dim>
2040 </port>
2041 </output>
2042 </layer>
2043 <layer id="137" name="__module.encoder.layer.1.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
2044 <data special_zero="true" />
2045 <input>
2046 <port id="0" precision="FP32">
2047 <dim>-1</dim>
2048 <dim>-1</dim>
2049 <dim>12</dim>
2050 <dim>32</dim>
2051 </port>
2052 <port id="1" precision="I64">
2053 <dim>3</dim>
2054 </port>
2055 </input>
2056 <output>
2057 <port id="2" precision="FP32" names="240">
2058 <dim>-1</dim>
2059 <dim>-1</dim>
2060 <dim>384</dim>
2061 </port>
2062 </output>
2063 </layer>
2064 <layer id="138" name="self.encoder.layer.1.attention.output.dense.weight" type="Const" version="opset1">
2065 <data element_type="f32" shape="384, 384" offset="393725600" size="589824" />
2066 <output>
2067 <port id="0" precision="FP32" names="self.encoder.layer.1.attention.output.dense.weight">
2068 <dim>384</dim>
2069 <dim>384</dim>
2070 </port>
2071 </output>
2072 </layer>
2073 <layer id="139" name="__module.encoder.layer.1.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2074 <data transpose_a="false" transpose_b="true" />
2075 <input>
2076 <port id="0" precision="FP32">
2077 <dim>-1</dim>
2078 <dim>-1</dim>
2079 <dim>384</dim>
2080 </port>
2081 <port id="1" precision="FP32">
2082 <dim>384</dim>
2083 <dim>384</dim>
2084 </port>
2085 </input>
2086 <output>
2087 <port id="2" precision="FP32">
2088 <dim>-1</dim>
2089 <dim>-1</dim>
2090 <dim>384</dim>
2091 </port>
2092 </output>
2093 </layer>
2094 <layer id="140" name="Constant_6107385" type="Const" version="opset1">
2095 <data element_type="f32" shape="1, 1, 384" offset="394315424" size="1536" />
2096 <output>
2097 <port id="0" precision="FP32">
2098 <dim>1</dim>
2099 <dim>1</dim>
2100 <dim>384</dim>
2101 </port>
2102 </output>
2103 </layer>
2104 <layer id="141" name="__module.encoder.layer.1.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
2105 <data auto_broadcast="numpy" />
2106 <input>
2107 <port id="0" precision="FP32">
2108 <dim>-1</dim>
2109 <dim>-1</dim>
2110 <dim>384</dim>
2111 </port>
2112 <port id="1" precision="FP32">
2113 <dim>1</dim>
2114 <dim>1</dim>
2115 <dim>384</dim>
2116 </port>
2117 </input>
2118 <output>
2119 <port id="2" precision="FP32" names="246,input.7">
2120 <dim>-1</dim>
2121 <dim>-1</dim>
2122 <dim>384</dim>
2123 </port>
2124 </output>
2125 </layer>
2126 <layer id="142" name="__module.encoder.layer.1.attention.output/aten::add/Add" type="Add" version="opset1">
2127 <data auto_broadcast="numpy" />
2128 <input>
2129 <port id="0" precision="FP32">
2130 <dim>-1</dim>
2131 <dim>-1</dim>
2132 <dim>384</dim>
2133 </port>
2134 <port id="1" precision="FP32">
2135 <dim>-1</dim>
2136 <dim>-1</dim>
2137 <dim>384</dim>
2138 </port>
2139 </input>
2140 <output>
2141 <port id="2" precision="FP32" names="248">
2142 <dim>-1</dim>
2143 <dim>-1</dim>
2144 <dim>384</dim>
2145 </port>
2146 </output>
2147 </layer>
2148 <layer id="143" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
2149 <data element_type="i32" shape="1" offset="384850452" size="4" />
2150 <output>
2151 <port id="0" precision="I32">
2152 <dim>1</dim>
2153 </port>
2154 </output>
2155 </layer>
2156 <layer id="144" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
2157 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
2158 <input>
2159 <port id="0" precision="FP32">
2160 <dim>-1</dim>
2161 <dim>-1</dim>
2162 <dim>384</dim>
2163 </port>
2164 <port id="1" precision="I32">
2165 <dim>1</dim>
2166 </port>
2167 </input>
2168 <output>
2169 <port id="2" precision="FP32">
2170 <dim>-1</dim>
2171 <dim>-1</dim>
2172 <dim>384</dim>
2173 </port>
2174 </output>
2175 </layer>
2176 <layer id="145" name="Constant_6107386" type="Const" version="opset1">
2177 <data element_type="f32" shape="1, 1, 384" offset="394316960" size="1536" />
2178 <output>
2179 <port id="0" precision="FP32">
2180 <dim>1</dim>
2181 <dim>1</dim>
2182 <dim>384</dim>
2183 </port>
2184 </output>
2185 </layer>
2186 <layer id="146" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
2187 <data auto_broadcast="numpy" />
2188 <input>
2189 <port id="0" precision="FP32">
2190 <dim>-1</dim>
2191 <dim>-1</dim>
2192 <dim>384</dim>
2193 </port>
2194 <port id="1" precision="FP32">
2195 <dim>1</dim>
2196 <dim>1</dim>
2197 <dim>384</dim>
2198 </port>
2199 </input>
2200 <output>
2201 <port id="2" precision="FP32">
2202 <dim>-1</dim>
2203 <dim>-1</dim>
2204 <dim>384</dim>
2205 </port>
2206 </output>
2207 </layer>
2208 <layer id="147" name="Constant_6107387" type="Const" version="opset1">
2209 <data element_type="f32" shape="1, 1, 384" offset="394318496" size="1536" />
2210 <output>
2211 <port id="0" precision="FP32">
2212 <dim>1</dim>
2213 <dim>1</dim>
2214 <dim>384</dim>
2215 </port>
2216 </output>
2217 </layer>
2218 <layer id="148" name="__module.encoder.layer.1.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
2219 <data auto_broadcast="numpy" />
2220 <input>
2221 <port id="0" precision="FP32">
2222 <dim>-1</dim>
2223 <dim>-1</dim>
2224 <dim>384</dim>
2225 </port>
2226 <port id="1" precision="FP32">
2227 <dim>1</dim>
2228 <dim>1</dim>
2229 <dim>384</dim>
2230 </port>
2231 </input>
2232 <output>
2233 <port id="2" precision="FP32" names="252,input_tensor.3">
2234 <dim>-1</dim>
2235 <dim>-1</dim>
2236 <dim>384</dim>
2237 </port>
2238 </output>
2239 </layer>
2240 <layer id="149" name="self.encoder.layer.1.intermediate.dense.weight" type="Const" version="opset1">
2241 <data element_type="f32" shape="1536, 384" offset="394320032" size="2359296" />
2242 <output>
2243 <port id="0" precision="FP32" names="self.encoder.layer.1.intermediate.dense.weight">
2244 <dim>1536</dim>
2245 <dim>384</dim>
2246 </port>
2247 </output>
2248 </layer>
2249 <layer id="150" name="__module.encoder.layer.1.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2250 <data transpose_a="false" transpose_b="true" />
2251 <input>
2252 <port id="0" precision="FP32">
2253 <dim>-1</dim>
2254 <dim>-1</dim>
2255 <dim>384</dim>
2256 </port>
2257 <port id="1" precision="FP32">
2258 <dim>1536</dim>
2259 <dim>384</dim>
2260 </port>
2261 </input>
2262 <output>
2263 <port id="2" precision="FP32">
2264 <dim>-1</dim>
2265 <dim>-1</dim>
2266 <dim>1536</dim>
2267 </port>
2268 </output>
2269 </layer>
2270 <layer id="151" name="Constant_6107388" type="Const" version="opset1">
2271 <data element_type="f32" shape="1, 1, 1536" offset="396679328" size="6144" />
2272 <output>
2273 <port id="0" precision="FP32">
2274 <dim>1</dim>
2275 <dim>1</dim>
2276 <dim>1536</dim>
2277 </port>
2278 </output>
2279 </layer>
2280 <layer id="152" name="__module.encoder.layer.1.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
2281 <data auto_broadcast="numpy" />
2282 <input>
2283 <port id="0" precision="FP32">
2284 <dim>-1</dim>
2285 <dim>-1</dim>
2286 <dim>1536</dim>
2287 </port>
2288 <port id="1" precision="FP32">
2289 <dim>1</dim>
2290 <dim>1</dim>
2291 <dim>1536</dim>
2292 </port>
2293 </input>
2294 <output>
2295 <port id="2" precision="FP32" names="257">
2296 <dim>-1</dim>
2297 <dim>-1</dim>
2298 <dim>1536</dim>
2299 </port>
2300 </output>
2301 </layer>
2302 <layer id="153" name="__module.encoder.layer.1.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
2303 <data approximation_mode="ERF" />
2304 <input>
2305 <port id="0" precision="FP32">
2306 <dim>-1</dim>
2307 <dim>-1</dim>
2308 <dim>1536</dim>
2309 </port>
2310 </input>
2311 <output>
2312 <port id="1" precision="FP32" names="258">
2313 <dim>-1</dim>
2314 <dim>-1</dim>
2315 <dim>1536</dim>
2316 </port>
2317 </output>
2318 </layer>
2319 <layer id="154" name="self.encoder.layer.1.output.dense.weight" type="Const" version="opset1">
2320 <data element_type="f32" shape="384, 1536" offset="396685472" size="2359296" />
2321 <output>
2322 <port id="0" precision="FP32" names="self.encoder.layer.1.output.dense.weight">
2323 <dim>384</dim>
2324 <dim>1536</dim>
2325 </port>
2326 </output>
2327 </layer>
2328 <layer id="155" name="__module.encoder.layer.1.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2329 <data transpose_a="false" transpose_b="true" />
2330 <input>
2331 <port id="0" precision="FP32">
2332 <dim>-1</dim>
2333 <dim>-1</dim>
2334 <dim>1536</dim>
2335 </port>
2336 <port id="1" precision="FP32">
2337 <dim>384</dim>
2338 <dim>1536</dim>
2339 </port>
2340 </input>
2341 <output>
2342 <port id="2" precision="FP32">
2343 <dim>-1</dim>
2344 <dim>-1</dim>
2345 <dim>384</dim>
2346 </port>
2347 </output>
2348 </layer>
2349 <layer id="156" name="Constant_6107389" type="Const" version="opset1">
2350 <data element_type="f32" shape="1, 1, 384" offset="399044768" size="1536" />
2351 <output>
2352 <port id="0" precision="FP32">
2353 <dim>1</dim>
2354 <dim>1</dim>
2355 <dim>384</dim>
2356 </port>
2357 </output>
2358 </layer>
2359 <layer id="157" name="__module.encoder.layer.1.output.dense/aten::linear/Add" type="Add" version="opset1">
2360 <data auto_broadcast="numpy" />
2361 <input>
2362 <port id="0" precision="FP32">
2363 <dim>-1</dim>
2364 <dim>-1</dim>
2365 <dim>384</dim>
2366 </port>
2367 <port id="1" precision="FP32">
2368 <dim>1</dim>
2369 <dim>1</dim>
2370 <dim>384</dim>
2371 </port>
2372 </input>
2373 <output>
2374 <port id="2" precision="FP32" names="264,input.9">
2375 <dim>-1</dim>
2376 <dim>-1</dim>
2377 <dim>384</dim>
2378 </port>
2379 </output>
2380 </layer>
2381 <layer id="158" name="__module.encoder.layer.1.output/aten::add/Add" type="Add" version="opset1">
2382 <data auto_broadcast="numpy" />
2383 <input>
2384 <port id="0" precision="FP32">
2385 <dim>-1</dim>
2386 <dim>-1</dim>
2387 <dim>384</dim>
2388 </port>
2389 <port id="1" precision="FP32">
2390 <dim>-1</dim>
2391 <dim>-1</dim>
2392 <dim>384</dim>
2393 </port>
2394 </input>
2395 <output>
2396 <port id="2" precision="FP32" names="266">
2397 <dim>-1</dim>
2398 <dim>-1</dim>
2399 <dim>384</dim>
2400 </port>
2401 </output>
2402 </layer>
2403 <layer id="159" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
2404 <data element_type="i32" shape="1" offset="384850452" size="4" />
2405 <output>
2406 <port id="0" precision="I32">
2407 <dim>1</dim>
2408 </port>
2409 </output>
2410 </layer>
2411 <layer id="160" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
2412 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
2413 <input>
2414 <port id="0" precision="FP32">
2415 <dim>-1</dim>
2416 <dim>-1</dim>
2417 <dim>384</dim>
2418 </port>
2419 <port id="1" precision="I32">
2420 <dim>1</dim>
2421 </port>
2422 </input>
2423 <output>
2424 <port id="2" precision="FP32">
2425 <dim>-1</dim>
2426 <dim>-1</dim>
2427 <dim>384</dim>
2428 </port>
2429 </output>
2430 </layer>
2431 <layer id="161" name="Constant_6107390" type="Const" version="opset1">
2432 <data element_type="f32" shape="1, 1, 384" offset="399046304" size="1536" />
2433 <output>
2434 <port id="0" precision="FP32">
2435 <dim>1</dim>
2436 <dim>1</dim>
2437 <dim>384</dim>
2438 </port>
2439 </output>
2440 </layer>
2441 <layer id="162" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
2442 <data auto_broadcast="numpy" />
2443 <input>
2444 <port id="0" precision="FP32">
2445 <dim>-1</dim>
2446 <dim>-1</dim>
2447 <dim>384</dim>
2448 </port>
2449 <port id="1" precision="FP32">
2450 <dim>1</dim>
2451 <dim>1</dim>
2452 <dim>384</dim>
2453 </port>
2454 </input>
2455 <output>
2456 <port id="2" precision="FP32">
2457 <dim>-1</dim>
2458 <dim>-1</dim>
2459 <dim>384</dim>
2460 </port>
2461 </output>
2462 </layer>
2463 <layer id="163" name="Constant_6107391" type="Const" version="opset1">
2464 <data element_type="f32" shape="1, 1, 384" offset="399047840" size="1536" />
2465 <output>
2466 <port id="0" precision="FP32">
2467 <dim>1</dim>
2468 <dim>1</dim>
2469 <dim>384</dim>
2470 </port>
2471 </output>
2472 </layer>
2473 <layer id="164" name="__module.encoder.layer.1.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
2474 <data auto_broadcast="numpy" />
2475 <input>
2476 <port id="0" precision="FP32">
2477 <dim>-1</dim>
2478 <dim>-1</dim>
2479 <dim>384</dim>
2480 </port>
2481 <port id="1" precision="FP32">
2482 <dim>1</dim>
2483 <dim>1</dim>
2484 <dim>384</dim>
2485 </port>
2486 </input>
2487 <output>
2488 <port id="2" precision="FP32" names="270,hidden_states.13">
2489 <dim>-1</dim>
2490 <dim>-1</dim>
2491 <dim>384</dim>
2492 </port>
2493 </output>
2494 </layer>
2495 <layer id="165" name="self.encoder.layer.2.attention.self.query.weight" type="Const" version="opset1">
2496 <data element_type="f32" shape="384, 384" offset="399049376" size="589824" />
2497 <output>
2498 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.query.weight">
2499 <dim>384</dim>
2500 <dim>384</dim>
2501 </port>
2502 </output>
2503 </layer>
2504 <layer id="166" name="__module.encoder.layer.2.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
2505 <data transpose_a="false" transpose_b="true" />
2506 <input>
2507 <port id="0" precision="FP32">
2508 <dim>-1</dim>
2509 <dim>-1</dim>
2510 <dim>384</dim>
2511 </port>
2512 <port id="1" precision="FP32">
2513 <dim>384</dim>
2514 <dim>384</dim>
2515 </port>
2516 </input>
2517 <output>
2518 <port id="2" precision="FP32">
2519 <dim>-1</dim>
2520 <dim>-1</dim>
2521 <dim>384</dim>
2522 </port>
2523 </output>
2524 </layer>
2525 <layer id="167" name="Constant_6107392" type="Const" version="opset1">
2526 <data element_type="f32" shape="1, 1, 384" offset="399639200" size="1536" />
2527 <output>
2528 <port id="0" precision="FP32">
2529 <dim>1</dim>
2530 <dim>1</dim>
2531 <dim>384</dim>
2532 </port>
2533 </output>
2534 </layer>
2535 <layer id="168" name="__module.encoder.layer.2.attention.self.query/aten::linear/Add" type="Add" version="opset1">
2536 <data auto_broadcast="numpy" />
2537 <input>
2538 <port id="0" precision="FP32">
2539 <dim>-1</dim>
2540 <dim>-1</dim>
2541 <dim>384</dim>
2542 </port>
2543 <port id="1" precision="FP32">
2544 <dim>1</dim>
2545 <dim>1</dim>
2546 <dim>384</dim>
2547 </port>
2548 </input>
2549 <output>
2550 <port id="2" precision="FP32" names="283,x.25">
2551 <dim>-1</dim>
2552 <dim>-1</dim>
2553 <dim>384</dim>
2554 </port>
2555 </output>
2556 </layer>
2557 <layer id="169" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
2558 <data element_type="i64" shape="4" offset="385444888" size="32" />
2559 <output>
2560 <port id="0" precision="I64">
2561 <dim>4</dim>
2562 </port>
2563 </output>
2564 </layer>
2565 <layer id="170" name="__module.encoder.layer.2.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
2566 <data special_zero="true" />
2567 <input>
2568 <port id="0" precision="FP32">
2569 <dim>-1</dim>
2570 <dim>-1</dim>
2571 <dim>384</dim>
2572 </port>
2573 <port id="1" precision="I64">
2574 <dim>4</dim>
2575 </port>
2576 </input>
2577 <output>
2578 <port id="2" precision="FP32" names="287,x.27">
2579 <dim>-1</dim>
2580 <dim>-1</dim>
2581 <dim>12</dim>
2582 <dim>32</dim>
2583 </port>
2584 </output>
2585 </layer>
2586 <layer id="171" name="Constant_6098705" type="Const" version="opset1">
2587 <data element_type="i64" shape="4" offset="385444920" size="32" />
2588 <output>
2589 <port id="0" precision="I64" names="288">
2590 <dim>4</dim>
2591 </port>
2592 </output>
2593 </layer>
2594 <layer id="172" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
2595 <input>
2596 <port id="0" precision="FP32">
2597 <dim>-1</dim>
2598 <dim>-1</dim>
2599 <dim>12</dim>
2600 <dim>32</dim>
2601 </port>
2602 <port id="1" precision="I64">
2603 <dim>4</dim>
2604 </port>
2605 </input>
2606 <output>
2607 <port id="2" precision="FP32" names="289">
2608 <dim>-1</dim>
2609 <dim>12</dim>
2610 <dim>-1</dim>
2611 <dim>32</dim>
2612 </port>
2613 </output>
2614 </layer>
2615 <layer id="173" name="self.encoder.layer.2.attention.self.key.weight" type="Const" version="opset1">
2616 <data element_type="f32" shape="384, 384" offset="399640736" size="589824" />
2617 <output>
2618 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.key.weight">
2619 <dim>384</dim>
2620 <dim>384</dim>
2621 </port>
2622 </output>
2623 </layer>
2624 <layer id="174" name="__module.encoder.layer.2.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
2625 <data transpose_a="false" transpose_b="true" />
2626 <input>
2627 <port id="0" precision="FP32">
2628 <dim>-1</dim>
2629 <dim>-1</dim>
2630 <dim>384</dim>
2631 </port>
2632 <port id="1" precision="FP32">
2633 <dim>384</dim>
2634 <dim>384</dim>
2635 </port>
2636 </input>
2637 <output>
2638 <port id="2" precision="FP32">
2639 <dim>-1</dim>
2640 <dim>-1</dim>
2641 <dim>384</dim>
2642 </port>
2643 </output>
2644 </layer>
2645 <layer id="175" name="Constant_6107393" type="Const" version="opset1">
2646 <data element_type="f32" shape="1, 1, 384" offset="400230560" size="1536" />
2647 <output>
2648 <port id="0" precision="FP32">
2649 <dim>1</dim>
2650 <dim>1</dim>
2651 <dim>384</dim>
2652 </port>
2653 </output>
2654 </layer>
2655 <layer id="176" name="__module.encoder.layer.2.attention.self.key/aten::linear/Add" type="Add" version="opset1">
2656 <data auto_broadcast="numpy" />
2657 <input>
2658 <port id="0" precision="FP32">
2659 <dim>-1</dim>
2660 <dim>-1</dim>
2661 <dim>384</dim>
2662 </port>
2663 <port id="1" precision="FP32">
2664 <dim>1</dim>
2665 <dim>1</dim>
2666 <dim>384</dim>
2667 </port>
2668 </input>
2669 <output>
2670 <port id="2" precision="FP32" names="292,x.29">
2671 <dim>-1</dim>
2672 <dim>-1</dim>
2673 <dim>384</dim>
2674 </port>
2675 </output>
2676 </layer>
2677 <layer id="177" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
2678 <data element_type="i64" shape="4" offset="385444888" size="32" />
2679 <output>
2680 <port id="0" precision="I64">
2681 <dim>4</dim>
2682 </port>
2683 </output>
2684 </layer>
2685 <layer id="178" name="__module.encoder.layer.2.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
2686 <data special_zero="true" />
2687 <input>
2688 <port id="0" precision="FP32">
2689 <dim>-1</dim>
2690 <dim>-1</dim>
2691 <dim>384</dim>
2692 </port>
2693 <port id="1" precision="I64">
2694 <dim>4</dim>
2695 </port>
2696 </input>
2697 <output>
2698 <port id="2" precision="FP32" names="296,x.31">
2699 <dim>-1</dim>
2700 <dim>-1</dim>
2701 <dim>12</dim>
2702 <dim>32</dim>
2703 </port>
2704 </output>
2705 </layer>
2706 <layer id="179" name="Constant_6098728" type="Const" version="opset1">
2707 <data element_type="i64" shape="4" offset="385444920" size="32" />
2708 <output>
2709 <port id="0" precision="I64" names="297">
2710 <dim>4</dim>
2711 </port>
2712 </output>
2713 </layer>
2714 <layer id="180" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
2715 <input>
2716 <port id="0" precision="FP32">
2717 <dim>-1</dim>
2718 <dim>-1</dim>
2719 <dim>12</dim>
2720 <dim>32</dim>
2721 </port>
2722 <port id="1" precision="I64">
2723 <dim>4</dim>
2724 </port>
2725 </input>
2726 <output>
2727 <port id="2" precision="FP32" names="298">
2728 <dim>-1</dim>
2729 <dim>12</dim>
2730 <dim>-1</dim>
2731 <dim>32</dim>
2732 </port>
2733 </output>
2734 </layer>
2735 <layer id="181" name="self.encoder.layer.2.attention.self.value.weight" type="Const" version="opset1">
2736 <data element_type="f32" shape="384, 384" offset="400232096" size="589824" />
2737 <output>
2738 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.self.value.weight">
2739 <dim>384</dim>
2740 <dim>384</dim>
2741 </port>
2742 </output>
2743 </layer>
2744 <layer id="182" name="__module.encoder.layer.2.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
2745 <data transpose_a="false" transpose_b="true" />
2746 <input>
2747 <port id="0" precision="FP32">
2748 <dim>-1</dim>
2749 <dim>-1</dim>
2750 <dim>384</dim>
2751 </port>
2752 <port id="1" precision="FP32">
2753 <dim>384</dim>
2754 <dim>384</dim>
2755 </port>
2756 </input>
2757 <output>
2758 <port id="2" precision="FP32">
2759 <dim>-1</dim>
2760 <dim>-1</dim>
2761 <dim>384</dim>
2762 </port>
2763 </output>
2764 </layer>
2765 <layer id="183" name="Constant_6107394" type="Const" version="opset1">
2766 <data element_type="f32" shape="1, 1, 384" offset="400821920" size="1536" />
2767 <output>
2768 <port id="0" precision="FP32">
2769 <dim>1</dim>
2770 <dim>1</dim>
2771 <dim>384</dim>
2772 </port>
2773 </output>
2774 </layer>
2775 <layer id="184" name="__module.encoder.layer.2.attention.self.value/aten::linear/Add" type="Add" version="opset1">
2776 <data auto_broadcast="numpy" />
2777 <input>
2778 <port id="0" precision="FP32">
2779 <dim>-1</dim>
2780 <dim>-1</dim>
2781 <dim>384</dim>
2782 </port>
2783 <port id="1" precision="FP32">
2784 <dim>1</dim>
2785 <dim>1</dim>
2786 <dim>384</dim>
2787 </port>
2788 </input>
2789 <output>
2790 <port id="2" precision="FP32" names="301,x.33">
2791 <dim>-1</dim>
2792 <dim>-1</dim>
2793 <dim>384</dim>
2794 </port>
2795 </output>
2796 </layer>
2797 <layer id="185" name="__module.encoder.layer.2.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
2798 <data element_type="i64" shape="4" offset="385444888" size="32" />
2799 <output>
2800 <port id="0" precision="I64">
2801 <dim>4</dim>
2802 </port>
2803 </output>
2804 </layer>
2805 <layer id="186" name="__module.encoder.layer.2.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
2806 <data special_zero="true" />
2807 <input>
2808 <port id="0" precision="FP32">
2809 <dim>-1</dim>
2810 <dim>-1</dim>
2811 <dim>384</dim>
2812 </port>
2813 <port id="1" precision="I64">
2814 <dim>4</dim>
2815 </port>
2816 </input>
2817 <output>
2818 <port id="2" precision="FP32" names="305,x.35">
2819 <dim>-1</dim>
2820 <dim>-1</dim>
2821 <dim>12</dim>
2822 <dim>32</dim>
2823 </port>
2824 </output>
2825 </layer>
2826 <layer id="187" name="Constant_6098751" type="Const" version="opset1">
2827 <data element_type="i64" shape="4" offset="385444920" size="32" />
2828 <output>
2829 <port id="0" precision="I64" names="306">
2830 <dim>4</dim>
2831 </port>
2832 </output>
2833 </layer>
2834 <layer id="188" name="__module.encoder.layer.2.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
2835 <input>
2836 <port id="0" precision="FP32">
2837 <dim>-1</dim>
2838 <dim>-1</dim>
2839 <dim>12</dim>
2840 <dim>32</dim>
2841 </port>
2842 <port id="1" precision="I64">
2843 <dim>4</dim>
2844 </port>
2845 </input>
2846 <output>
2847 <port id="2" precision="FP32" names="307">
2848 <dim>-1</dim>
2849 <dim>12</dim>
2850 <dim>-1</dim>
2851 <dim>32</dim>
2852 </port>
2853 </output>
2854 </layer>
2855 <layer id="189" name="__module.encoder.layer.2.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
2856 <data causal="false" />
2857 <input>
2858 <port id="0" precision="FP32">
2859 <dim>-1</dim>
2860 <dim>12</dim>
2861 <dim>-1</dim>
2862 <dim>32</dim>
2863 </port>
2864 <port id="1" precision="FP32">
2865 <dim>-1</dim>
2866 <dim>12</dim>
2867 <dim>-1</dim>
2868 <dim>32</dim>
2869 </port>
2870 <port id="2" precision="FP32">
2871 <dim>-1</dim>
2872 <dim>12</dim>
2873 <dim>-1</dim>
2874 <dim>32</dim>
2875 </port>
2876 <port id="3" precision="FP32">
2877 <dim>-1</dim>
2878 <dim>1</dim>
2879 <dim>-1</dim>
2880 <dim>-1</dim>
2881 </port>
2882 </input>
2883 <output>
2884 <port id="4" precision="FP32" names="308,attn_output.9">
2885 <dim>-1</dim>
2886 <dim>12</dim>
2887 <dim>-1</dim>
2888 <dim>32</dim>
2889 </port>
2890 </output>
2891 </layer>
2892 <layer id="190" name="__module.encoder.layer.2.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
2893 <data element_type="i32" shape="4" offset="386627704" size="16" />
2894 <output>
2895 <port id="0" precision="I32">
2896 <dim>4</dim>
2897 </port>
2898 </output>
2899 </layer>
2900 <layer id="191" name="__module.encoder.layer.2.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
2901 <input>
2902 <port id="0" precision="FP32">
2903 <dim>-1</dim>
2904 <dim>12</dim>
2905 <dim>-1</dim>
2906 <dim>32</dim>
2907 </port>
2908 <port id="1" precision="I32">
2909 <dim>4</dim>
2910 </port>
2911 </input>
2912 <output>
2913 <port id="2" precision="FP32" names="309,attn_output.11">
2914 <dim>-1</dim>
2915 <dim>-1</dim>
2916 <dim>12</dim>
2917 <dim>32</dim>
2918 </port>
2919 </output>
2920 </layer>
2921 <layer id="192" name="Constant_6107616" type="Const" version="opset1">
2922 <data element_type="i64" shape="3" offset="386627720" size="24" />
2923 <output>
2924 <port id="0" precision="I64">
2925 <dim>3</dim>
2926 </port>
2927 </output>
2928 </layer>
2929 <layer id="193" name="__module.encoder.layer.2.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
2930 <data special_zero="true" />
2931 <input>
2932 <port id="0" precision="FP32">
2933 <dim>-1</dim>
2934 <dim>-1</dim>
2935 <dim>12</dim>
2936 <dim>32</dim>
2937 </port>
2938 <port id="1" precision="I64">
2939 <dim>3</dim>
2940 </port>
2941 </input>
2942 <output>
2943 <port id="2" precision="FP32" names="311">
2944 <dim>-1</dim>
2945 <dim>-1</dim>
2946 <dim>384</dim>
2947 </port>
2948 </output>
2949 </layer>
2950 <layer id="194" name="self.encoder.layer.2.attention.output.dense.weight" type="Const" version="opset1">
2951 <data element_type="f32" shape="384, 384" offset="400823456" size="589824" />
2952 <output>
2953 <port id="0" precision="FP32" names="self.encoder.layer.2.attention.output.dense.weight">
2954 <dim>384</dim>
2955 <dim>384</dim>
2956 </port>
2957 </output>
2958 </layer>
2959 <layer id="195" name="__module.encoder.layer.2.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
2960 <data transpose_a="false" transpose_b="true" />
2961 <input>
2962 <port id="0" precision="FP32">
2963 <dim>-1</dim>
2964 <dim>-1</dim>
2965 <dim>384</dim>
2966 </port>
2967 <port id="1" precision="FP32">
2968 <dim>384</dim>
2969 <dim>384</dim>
2970 </port>
2971 </input>
2972 <output>
2973 <port id="2" precision="FP32">
2974 <dim>-1</dim>
2975 <dim>-1</dim>
2976 <dim>384</dim>
2977 </port>
2978 </output>
2979 </layer>
2980 <layer id="196" name="Constant_6107395" type="Const" version="opset1">
2981 <data element_type="f32" shape="1, 1, 384" offset="401413280" size="1536" />
2982 <output>
2983 <port id="0" precision="FP32">
2984 <dim>1</dim>
2985 <dim>1</dim>
2986 <dim>384</dim>
2987 </port>
2988 </output>
2989 </layer>
2990 <layer id="197" name="__module.encoder.layer.2.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
2991 <data auto_broadcast="numpy" />
2992 <input>
2993 <port id="0" precision="FP32">
2994 <dim>-1</dim>
2995 <dim>-1</dim>
2996 <dim>384</dim>
2997 </port>
2998 <port id="1" precision="FP32">
2999 <dim>1</dim>
3000 <dim>1</dim>
3001 <dim>384</dim>
3002 </port>
3003 </input>
3004 <output>
3005 <port id="2" precision="FP32" names="317,input.11">
3006 <dim>-1</dim>
3007 <dim>-1</dim>
3008 <dim>384</dim>
3009 </port>
3010 </output>
3011 </layer>
3012 <layer id="198" name="__module.encoder.layer.2.attention.output/aten::add/Add" type="Add" version="opset1">
3013 <data auto_broadcast="numpy" />
3014 <input>
3015 <port id="0" precision="FP32">
3016 <dim>-1</dim>
3017 <dim>-1</dim>
3018 <dim>384</dim>
3019 </port>
3020 <port id="1" precision="FP32">
3021 <dim>-1</dim>
3022 <dim>-1</dim>
3023 <dim>384</dim>
3024 </port>
3025 </input>
3026 <output>
3027 <port id="2" precision="FP32" names="319">
3028 <dim>-1</dim>
3029 <dim>-1</dim>
3030 <dim>384</dim>
3031 </port>
3032 </output>
3033 </layer>
3034 <layer id="199" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
3035 <data element_type="i32" shape="1" offset="384850452" size="4" />
3036 <output>
3037 <port id="0" precision="I32">
3038 <dim>1</dim>
3039 </port>
3040 </output>
3041 </layer>
3042 <layer id="200" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
3043 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
3044 <input>
3045 <port id="0" precision="FP32">
3046 <dim>-1</dim>
3047 <dim>-1</dim>
3048 <dim>384</dim>
3049 </port>
3050 <port id="1" precision="I32">
3051 <dim>1</dim>
3052 </port>
3053 </input>
3054 <output>
3055 <port id="2" precision="FP32">
3056 <dim>-1</dim>
3057 <dim>-1</dim>
3058 <dim>384</dim>
3059 </port>
3060 </output>
3061 </layer>
3062 <layer id="201" name="Constant_6107396" type="Const" version="opset1">
3063 <data element_type="f32" shape="1, 1, 384" offset="401414816" size="1536" />
3064 <output>
3065 <port id="0" precision="FP32">
3066 <dim>1</dim>
3067 <dim>1</dim>
3068 <dim>384</dim>
3069 </port>
3070 </output>
3071 </layer>
3072 <layer id="202" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
3073 <data auto_broadcast="numpy" />
3074 <input>
3075 <port id="0" precision="FP32">
3076 <dim>-1</dim>
3077 <dim>-1</dim>
3078 <dim>384</dim>
3079 </port>
3080 <port id="1" precision="FP32">
3081 <dim>1</dim>
3082 <dim>1</dim>
3083 <dim>384</dim>
3084 </port>
3085 </input>
3086 <output>
3087 <port id="2" precision="FP32">
3088 <dim>-1</dim>
3089 <dim>-1</dim>
3090 <dim>384</dim>
3091 </port>
3092 </output>
3093 </layer>
3094 <layer id="203" name="Constant_6107397" type="Const" version="opset1">
3095 <data element_type="f32" shape="1, 1, 384" offset="401416352" size="1536" />
3096 <output>
3097 <port id="0" precision="FP32">
3098 <dim>1</dim>
3099 <dim>1</dim>
3100 <dim>384</dim>
3101 </port>
3102 </output>
3103 </layer>
3104 <layer id="204" name="__module.encoder.layer.2.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
3105 <data auto_broadcast="numpy" />
3106 <input>
3107 <port id="0" precision="FP32">
3108 <dim>-1</dim>
3109 <dim>-1</dim>
3110 <dim>384</dim>
3111 </port>
3112 <port id="1" precision="FP32">
3113 <dim>1</dim>
3114 <dim>1</dim>
3115 <dim>384</dim>
3116 </port>
3117 </input>
3118 <output>
3119 <port id="2" precision="FP32" names="323,input_tensor.5">
3120 <dim>-1</dim>
3121 <dim>-1</dim>
3122 <dim>384</dim>
3123 </port>
3124 </output>
3125 </layer>
3126 <layer id="205" name="self.encoder.layer.2.intermediate.dense.weight" type="Const" version="opset1">
3127 <data element_type="f32" shape="1536, 384" offset="401417888" size="2359296" />
3128 <output>
3129 <port id="0" precision="FP32" names="self.encoder.layer.2.intermediate.dense.weight">
3130 <dim>1536</dim>
3131 <dim>384</dim>
3132 </port>
3133 </output>
3134 </layer>
3135 <layer id="206" name="__module.encoder.layer.2.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
3136 <data transpose_a="false" transpose_b="true" />
3137 <input>
3138 <port id="0" precision="FP32">
3139 <dim>-1</dim>
3140 <dim>-1</dim>
3141 <dim>384</dim>
3142 </port>
3143 <port id="1" precision="FP32">
3144 <dim>1536</dim>
3145 <dim>384</dim>
3146 </port>
3147 </input>
3148 <output>
3149 <port id="2" precision="FP32">
3150 <dim>-1</dim>
3151 <dim>-1</dim>
3152 <dim>1536</dim>
3153 </port>
3154 </output>
3155 </layer>
3156 <layer id="207" name="Constant_6107398" type="Const" version="opset1">
3157 <data element_type="f32" shape="1, 1, 1536" offset="403777184" size="6144" />
3158 <output>
3159 <port id="0" precision="FP32">
3160 <dim>1</dim>
3161 <dim>1</dim>
3162 <dim>1536</dim>
3163 </port>
3164 </output>
3165 </layer>
3166 <layer id="208" name="__module.encoder.layer.2.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
3167 <data auto_broadcast="numpy" />
3168 <input>
3169 <port id="0" precision="FP32">
3170 <dim>-1</dim>
3171 <dim>-1</dim>
3172 <dim>1536</dim>
3173 </port>
3174 <port id="1" precision="FP32">
3175 <dim>1</dim>
3176 <dim>1</dim>
3177 <dim>1536</dim>
3178 </port>
3179 </input>
3180 <output>
3181 <port id="2" precision="FP32" names="328">
3182 <dim>-1</dim>
3183 <dim>-1</dim>
3184 <dim>1536</dim>
3185 </port>
3186 </output>
3187 </layer>
3188 <layer id="209" name="__module.encoder.layer.2.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
3189 <data approximation_mode="ERF" />
3190 <input>
3191 <port id="0" precision="FP32">
3192 <dim>-1</dim>
3193 <dim>-1</dim>
3194 <dim>1536</dim>
3195 </port>
3196 </input>
3197 <output>
3198 <port id="1" precision="FP32" names="329">
3199 <dim>-1</dim>
3200 <dim>-1</dim>
3201 <dim>1536</dim>
3202 </port>
3203 </output>
3204 </layer>
3205 <layer id="210" name="self.encoder.layer.2.output.dense.weight" type="Const" version="opset1">
3206 <data element_type="f32" shape="384, 1536" offset="403783328" size="2359296" />
3207 <output>
3208 <port id="0" precision="FP32" names="self.encoder.layer.2.output.dense.weight">
3209 <dim>384</dim>
3210 <dim>1536</dim>
3211 </port>
3212 </output>
3213 </layer>
3214 <layer id="211" name="__module.encoder.layer.2.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
3215 <data transpose_a="false" transpose_b="true" />
3216 <input>
3217 <port id="0" precision="FP32">
3218 <dim>-1</dim>
3219 <dim>-1</dim>
3220 <dim>1536</dim>
3221 </port>
3222 <port id="1" precision="FP32">
3223 <dim>384</dim>
3224 <dim>1536</dim>
3225 </port>
3226 </input>
3227 <output>
3228 <port id="2" precision="FP32">
3229 <dim>-1</dim>
3230 <dim>-1</dim>
3231 <dim>384</dim>
3232 </port>
3233 </output>
3234 </layer>
3235 <layer id="212" name="Constant_6107399" type="Const" version="opset1">
3236 <data element_type="f32" shape="1, 1, 384" offset="406142624" size="1536" />
3237 <output>
3238 <port id="0" precision="FP32">
3239 <dim>1</dim>
3240 <dim>1</dim>
3241 <dim>384</dim>
3242 </port>
3243 </output>
3244 </layer>
3245 <layer id="213" name="__module.encoder.layer.2.output.dense/aten::linear/Add" type="Add" version="opset1">
3246 <data auto_broadcast="numpy" />
3247 <input>
3248 <port id="0" precision="FP32">
3249 <dim>-1</dim>
3250 <dim>-1</dim>
3251 <dim>384</dim>
3252 </port>
3253 <port id="1" precision="FP32">
3254 <dim>1</dim>
3255 <dim>1</dim>
3256 <dim>384</dim>
3257 </port>
3258 </input>
3259 <output>
3260 <port id="2" precision="FP32" names="335,input.13">
3261 <dim>-1</dim>
3262 <dim>-1</dim>
3263 <dim>384</dim>
3264 </port>
3265 </output>
3266 </layer>
3267 <layer id="214" name="__module.encoder.layer.2.output/aten::add/Add" type="Add" version="opset1">
3268 <data auto_broadcast="numpy" />
3269 <input>
3270 <port id="0" precision="FP32">
3271 <dim>-1</dim>
3272 <dim>-1</dim>
3273 <dim>384</dim>
3274 </port>
3275 <port id="1" precision="FP32">
3276 <dim>-1</dim>
3277 <dim>-1</dim>
3278 <dim>384</dim>
3279 </port>
3280 </input>
3281 <output>
3282 <port id="2" precision="FP32" names="337">
3283 <dim>-1</dim>
3284 <dim>-1</dim>
3285 <dim>384</dim>
3286 </port>
3287 </output>
3288 </layer>
3289 <layer id="215" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
3290 <data element_type="i32" shape="1" offset="384850452" size="4" />
3291 <output>
3292 <port id="0" precision="I32">
3293 <dim>1</dim>
3294 </port>
3295 </output>
3296 </layer>
3297 <layer id="216" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
3298 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
3299 <input>
3300 <port id="0" precision="FP32">
3301 <dim>-1</dim>
3302 <dim>-1</dim>
3303 <dim>384</dim>
3304 </port>
3305 <port id="1" precision="I32">
3306 <dim>1</dim>
3307 </port>
3308 </input>
3309 <output>
3310 <port id="2" precision="FP32">
3311 <dim>-1</dim>
3312 <dim>-1</dim>
3313 <dim>384</dim>
3314 </port>
3315 </output>
3316 </layer>
3317 <layer id="217" name="Constant_6107400" type="Const" version="opset1">
3318 <data element_type="f32" shape="1, 1, 384" offset="406144160" size="1536" />
3319 <output>
3320 <port id="0" precision="FP32">
3321 <dim>1</dim>
3322 <dim>1</dim>
3323 <dim>384</dim>
3324 </port>
3325 </output>
3326 </layer>
3327 <layer id="218" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
3328 <data auto_broadcast="numpy" />
3329 <input>
3330 <port id="0" precision="FP32">
3331 <dim>-1</dim>
3332 <dim>-1</dim>
3333 <dim>384</dim>
3334 </port>
3335 <port id="1" precision="FP32">
3336 <dim>1</dim>
3337 <dim>1</dim>
3338 <dim>384</dim>
3339 </port>
3340 </input>
3341 <output>
3342 <port id="2" precision="FP32">
3343 <dim>-1</dim>
3344 <dim>-1</dim>
3345 <dim>384</dim>
3346 </port>
3347 </output>
3348 </layer>
3349 <layer id="219" name="Constant_6107401" type="Const" version="opset1">
3350 <data element_type="f32" shape="1, 1, 384" offset="406145696" size="1536" />
3351 <output>
3352 <port id="0" precision="FP32">
3353 <dim>1</dim>
3354 <dim>1</dim>
3355 <dim>384</dim>
3356 </port>
3357 </output>
3358 </layer>
3359 <layer id="220" name="__module.encoder.layer.2.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
3360 <data auto_broadcast="numpy" />
3361 <input>
3362 <port id="0" precision="FP32">
3363 <dim>-1</dim>
3364 <dim>-1</dim>
3365 <dim>384</dim>
3366 </port>
3367 <port id="1" precision="FP32">
3368 <dim>1</dim>
3369 <dim>1</dim>
3370 <dim>384</dim>
3371 </port>
3372 </input>
3373 <output>
3374 <port id="2" precision="FP32" names="341,hidden_states.19">
3375 <dim>-1</dim>
3376 <dim>-1</dim>
3377 <dim>384</dim>
3378 </port>
3379 </output>
3380 </layer>
3381 <layer id="221" name="self.encoder.layer.3.attention.self.query.weight" type="Const" version="opset1">
3382 <data element_type="f32" shape="384, 384" offset="406147232" size="589824" />
3383 <output>
3384 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.query.weight">
3385 <dim>384</dim>
3386 <dim>384</dim>
3387 </port>
3388 </output>
3389 </layer>
3390 <layer id="222" name="__module.encoder.layer.3.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
3391 <data transpose_a="false" transpose_b="true" />
3392 <input>
3393 <port id="0" precision="FP32">
3394 <dim>-1</dim>
3395 <dim>-1</dim>
3396 <dim>384</dim>
3397 </port>
3398 <port id="1" precision="FP32">
3399 <dim>384</dim>
3400 <dim>384</dim>
3401 </port>
3402 </input>
3403 <output>
3404 <port id="2" precision="FP32">
3405 <dim>-1</dim>
3406 <dim>-1</dim>
3407 <dim>384</dim>
3408 </port>
3409 </output>
3410 </layer>
3411 <layer id="223" name="Constant_6107402" type="Const" version="opset1">
3412 <data element_type="f32" shape="1, 1, 384" offset="406737056" size="1536" />
3413 <output>
3414 <port id="0" precision="FP32">
3415 <dim>1</dim>
3416 <dim>1</dim>
3417 <dim>384</dim>
3418 </port>
3419 </output>
3420 </layer>
3421 <layer id="224" name="__module.encoder.layer.3.attention.self.query/aten::linear/Add" type="Add" version="opset1">
3422 <data auto_broadcast="numpy" />
3423 <input>
3424 <port id="0" precision="FP32">
3425 <dim>-1</dim>
3426 <dim>-1</dim>
3427 <dim>384</dim>
3428 </port>
3429 <port id="1" precision="FP32">
3430 <dim>1</dim>
3431 <dim>1</dim>
3432 <dim>384</dim>
3433 </port>
3434 </input>
3435 <output>
3436 <port id="2" precision="FP32" names="354,x.37">
3437 <dim>-1</dim>
3438 <dim>-1</dim>
3439 <dim>384</dim>
3440 </port>
3441 </output>
3442 </layer>
3443 <layer id="225" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
3444 <data element_type="i64" shape="4" offset="385444888" size="32" />
3445 <output>
3446 <port id="0" precision="I64">
3447 <dim>4</dim>
3448 </port>
3449 </output>
3450 </layer>
3451 <layer id="226" name="__module.encoder.layer.3.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
3452 <data special_zero="true" />
3453 <input>
3454 <port id="0" precision="FP32">
3455 <dim>-1</dim>
3456 <dim>-1</dim>
3457 <dim>384</dim>
3458 </port>
3459 <port id="1" precision="I64">
3460 <dim>4</dim>
3461 </port>
3462 </input>
3463 <output>
3464 <port id="2" precision="FP32" names="358,x.39">
3465 <dim>-1</dim>
3466 <dim>-1</dim>
3467 <dim>12</dim>
3468 <dim>32</dim>
3469 </port>
3470 </output>
3471 </layer>
3472 <layer id="227" name="Constant_6098931" type="Const" version="opset1">
3473 <data element_type="i64" shape="4" offset="385444920" size="32" />
3474 <output>
3475 <port id="0" precision="I64" names="359">
3476 <dim>4</dim>
3477 </port>
3478 </output>
3479 </layer>
3480 <layer id="228" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
3481 <input>
3482 <port id="0" precision="FP32">
3483 <dim>-1</dim>
3484 <dim>-1</dim>
3485 <dim>12</dim>
3486 <dim>32</dim>
3487 </port>
3488 <port id="1" precision="I64">
3489 <dim>4</dim>
3490 </port>
3491 </input>
3492 <output>
3493 <port id="2" precision="FP32" names="360">
3494 <dim>-1</dim>
3495 <dim>12</dim>
3496 <dim>-1</dim>
3497 <dim>32</dim>
3498 </port>
3499 </output>
3500 </layer>
3501 <layer id="229" name="self.encoder.layer.3.attention.self.key.weight" type="Const" version="opset1">
3502 <data element_type="f32" shape="384, 384" offset="406738592" size="589824" />
3503 <output>
3504 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.key.weight">
3505 <dim>384</dim>
3506 <dim>384</dim>
3507 </port>
3508 </output>
3509 </layer>
3510 <layer id="230" name="__module.encoder.layer.3.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
3511 <data transpose_a="false" transpose_b="true" />
3512 <input>
3513 <port id="0" precision="FP32">
3514 <dim>-1</dim>
3515 <dim>-1</dim>
3516 <dim>384</dim>
3517 </port>
3518 <port id="1" precision="FP32">
3519 <dim>384</dim>
3520 <dim>384</dim>
3521 </port>
3522 </input>
3523 <output>
3524 <port id="2" precision="FP32">
3525 <dim>-1</dim>
3526 <dim>-1</dim>
3527 <dim>384</dim>
3528 </port>
3529 </output>
3530 </layer>
3531 <layer id="231" name="Constant_6107403" type="Const" version="opset1">
3532 <data element_type="f32" shape="1, 1, 384" offset="407328416" size="1536" />
3533 <output>
3534 <port id="0" precision="FP32">
3535 <dim>1</dim>
3536 <dim>1</dim>
3537 <dim>384</dim>
3538 </port>
3539 </output>
3540 </layer>
3541 <layer id="232" name="__module.encoder.layer.3.attention.self.key/aten::linear/Add" type="Add" version="opset1">
3542 <data auto_broadcast="numpy" />
3543 <input>
3544 <port id="0" precision="FP32">
3545 <dim>-1</dim>
3546 <dim>-1</dim>
3547 <dim>384</dim>
3548 </port>
3549 <port id="1" precision="FP32">
3550 <dim>1</dim>
3551 <dim>1</dim>
3552 <dim>384</dim>
3553 </port>
3554 </input>
3555 <output>
3556 <port id="2" precision="FP32" names="363,x.41">
3557 <dim>-1</dim>
3558 <dim>-1</dim>
3559 <dim>384</dim>
3560 </port>
3561 </output>
3562 </layer>
3563 <layer id="233" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
3564 <data element_type="i64" shape="4" offset="385444888" size="32" />
3565 <output>
3566 <port id="0" precision="I64">
3567 <dim>4</dim>
3568 </port>
3569 </output>
3570 </layer>
3571 <layer id="234" name="__module.encoder.layer.3.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
3572 <data special_zero="true" />
3573 <input>
3574 <port id="0" precision="FP32">
3575 <dim>-1</dim>
3576 <dim>-1</dim>
3577 <dim>384</dim>
3578 </port>
3579 <port id="1" precision="I64">
3580 <dim>4</dim>
3581 </port>
3582 </input>
3583 <output>
3584 <port id="2" precision="FP32" names="367,x.43">
3585 <dim>-1</dim>
3586 <dim>-1</dim>
3587 <dim>12</dim>
3588 <dim>32</dim>
3589 </port>
3590 </output>
3591 </layer>
3592 <layer id="235" name="Constant_6098954" type="Const" version="opset1">
3593 <data element_type="i64" shape="4" offset="385444920" size="32" />
3594 <output>
3595 <port id="0" precision="I64" names="368">
3596 <dim>4</dim>
3597 </port>
3598 </output>
3599 </layer>
3600 <layer id="236" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
3601 <input>
3602 <port id="0" precision="FP32">
3603 <dim>-1</dim>
3604 <dim>-1</dim>
3605 <dim>12</dim>
3606 <dim>32</dim>
3607 </port>
3608 <port id="1" precision="I64">
3609 <dim>4</dim>
3610 </port>
3611 </input>
3612 <output>
3613 <port id="2" precision="FP32" names="369">
3614 <dim>-1</dim>
3615 <dim>12</dim>
3616 <dim>-1</dim>
3617 <dim>32</dim>
3618 </port>
3619 </output>
3620 </layer>
3621 <layer id="237" name="self.encoder.layer.3.attention.self.value.weight" type="Const" version="opset1">
3622 <data element_type="f32" shape="384, 384" offset="407329952" size="589824" />
3623 <output>
3624 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.self.value.weight">
3625 <dim>384</dim>
3626 <dim>384</dim>
3627 </port>
3628 </output>
3629 </layer>
3630 <layer id="238" name="__module.encoder.layer.3.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
3631 <data transpose_a="false" transpose_b="true" />
3632 <input>
3633 <port id="0" precision="FP32">
3634 <dim>-1</dim>
3635 <dim>-1</dim>
3636 <dim>384</dim>
3637 </port>
3638 <port id="1" precision="FP32">
3639 <dim>384</dim>
3640 <dim>384</dim>
3641 </port>
3642 </input>
3643 <output>
3644 <port id="2" precision="FP32">
3645 <dim>-1</dim>
3646 <dim>-1</dim>
3647 <dim>384</dim>
3648 </port>
3649 </output>
3650 </layer>
3651 <layer id="239" name="Constant_6107404" type="Const" version="opset1">
3652 <data element_type="f32" shape="1, 1, 384" offset="407919776" size="1536" />
3653 <output>
3654 <port id="0" precision="FP32">
3655 <dim>1</dim>
3656 <dim>1</dim>
3657 <dim>384</dim>
3658 </port>
3659 </output>
3660 </layer>
3661 <layer id="240" name="__module.encoder.layer.3.attention.self.value/aten::linear/Add" type="Add" version="opset1">
3662 <data auto_broadcast="numpy" />
3663 <input>
3664 <port id="0" precision="FP32">
3665 <dim>-1</dim>
3666 <dim>-1</dim>
3667 <dim>384</dim>
3668 </port>
3669 <port id="1" precision="FP32">
3670 <dim>1</dim>
3671 <dim>1</dim>
3672 <dim>384</dim>
3673 </port>
3674 </input>
3675 <output>
3676 <port id="2" precision="FP32" names="372,x.45">
3677 <dim>-1</dim>
3678 <dim>-1</dim>
3679 <dim>384</dim>
3680 </port>
3681 </output>
3682 </layer>
3683 <layer id="241" name="__module.encoder.layer.3.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
3684 <data element_type="i64" shape="4" offset="385444888" size="32" />
3685 <output>
3686 <port id="0" precision="I64">
3687 <dim>4</dim>
3688 </port>
3689 </output>
3690 </layer>
3691 <layer id="242" name="__module.encoder.layer.3.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
3692 <data special_zero="true" />
3693 <input>
3694 <port id="0" precision="FP32">
3695 <dim>-1</dim>
3696 <dim>-1</dim>
3697 <dim>384</dim>
3698 </port>
3699 <port id="1" precision="I64">
3700 <dim>4</dim>
3701 </port>
3702 </input>
3703 <output>
3704 <port id="2" precision="FP32" names="376,x.47">
3705 <dim>-1</dim>
3706 <dim>-1</dim>
3707 <dim>12</dim>
3708 <dim>32</dim>
3709 </port>
3710 </output>
3711 </layer>
3712 <layer id="243" name="Constant_6098977" type="Const" version="opset1">
3713 <data element_type="i64" shape="4" offset="385444920" size="32" />
3714 <output>
3715 <port id="0" precision="I64" names="377">
3716 <dim>4</dim>
3717 </port>
3718 </output>
3719 </layer>
3720 <layer id="244" name="__module.encoder.layer.3.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
3721 <input>
3722 <port id="0" precision="FP32">
3723 <dim>-1</dim>
3724 <dim>-1</dim>
3725 <dim>12</dim>
3726 <dim>32</dim>
3727 </port>
3728 <port id="1" precision="I64">
3729 <dim>4</dim>
3730 </port>
3731 </input>
3732 <output>
3733 <port id="2" precision="FP32" names="378">
3734 <dim>-1</dim>
3735 <dim>12</dim>
3736 <dim>-1</dim>
3737 <dim>32</dim>
3738 </port>
3739 </output>
3740 </layer>
3741 <layer id="245" name="__module.encoder.layer.3.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
3742 <data causal="false" />
3743 <input>
3744 <port id="0" precision="FP32">
3745 <dim>-1</dim>
3746 <dim>12</dim>
3747 <dim>-1</dim>
3748 <dim>32</dim>
3749 </port>
3750 <port id="1" precision="FP32">
3751 <dim>-1</dim>
3752 <dim>12</dim>
3753 <dim>-1</dim>
3754 <dim>32</dim>
3755 </port>
3756 <port id="2" precision="FP32">
3757 <dim>-1</dim>
3758 <dim>12</dim>
3759 <dim>-1</dim>
3760 <dim>32</dim>
3761 </port>
3762 <port id="3" precision="FP32">
3763 <dim>-1</dim>
3764 <dim>1</dim>
3765 <dim>-1</dim>
3766 <dim>-1</dim>
3767 </port>
3768 </input>
3769 <output>
3770 <port id="4" precision="FP32" names="379,attn_output.13">
3771 <dim>-1</dim>
3772 <dim>12</dim>
3773 <dim>-1</dim>
3774 <dim>32</dim>
3775 </port>
3776 </output>
3777 </layer>
3778 <layer id="246" name="__module.encoder.layer.3.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
3779 <data element_type="i32" shape="4" offset="386627704" size="16" />
3780 <output>
3781 <port id="0" precision="I32">
3782 <dim>4</dim>
3783 </port>
3784 </output>
3785 </layer>
3786 <layer id="247" name="__module.encoder.layer.3.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
3787 <input>
3788 <port id="0" precision="FP32">
3789 <dim>-1</dim>
3790 <dim>12</dim>
3791 <dim>-1</dim>
3792 <dim>32</dim>
3793 </port>
3794 <port id="1" precision="I32">
3795 <dim>4</dim>
3796 </port>
3797 </input>
3798 <output>
3799 <port id="2" precision="FP32" names="380,attn_output.15">
3800 <dim>-1</dim>
3801 <dim>-1</dim>
3802 <dim>12</dim>
3803 <dim>32</dim>
3804 </port>
3805 </output>
3806 </layer>
3807 <layer id="248" name="Constant_6107617" type="Const" version="opset1">
3808 <data element_type="i64" shape="3" offset="386627720" size="24" />
3809 <output>
3810 <port id="0" precision="I64">
3811 <dim>3</dim>
3812 </port>
3813 </output>
3814 </layer>
3815 <layer id="249" name="__module.encoder.layer.3.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
3816 <data special_zero="true" />
3817 <input>
3818 <port id="0" precision="FP32">
3819 <dim>-1</dim>
3820 <dim>-1</dim>
3821 <dim>12</dim>
3822 <dim>32</dim>
3823 </port>
3824 <port id="1" precision="I64">
3825 <dim>3</dim>
3826 </port>
3827 </input>
3828 <output>
3829 <port id="2" precision="FP32" names="382">
3830 <dim>-1</dim>
3831 <dim>-1</dim>
3832 <dim>384</dim>
3833 </port>
3834 </output>
3835 </layer>
3836 <layer id="250" name="self.encoder.layer.3.attention.output.dense.weight" type="Const" version="opset1">
3837 <data element_type="f32" shape="384, 384" offset="407921312" size="589824" />
3838 <output>
3839 <port id="0" precision="FP32" names="self.encoder.layer.3.attention.output.dense.weight">
3840 <dim>384</dim>
3841 <dim>384</dim>
3842 </port>
3843 </output>
3844 </layer>
3845 <layer id="251" name="__module.encoder.layer.3.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
3846 <data transpose_a="false" transpose_b="true" />
3847 <input>
3848 <port id="0" precision="FP32">
3849 <dim>-1</dim>
3850 <dim>-1</dim>
3851 <dim>384</dim>
3852 </port>
3853 <port id="1" precision="FP32">
3854 <dim>384</dim>
3855 <dim>384</dim>
3856 </port>
3857 </input>
3858 <output>
3859 <port id="2" precision="FP32">
3860 <dim>-1</dim>
3861 <dim>-1</dim>
3862 <dim>384</dim>
3863 </port>
3864 </output>
3865 </layer>
3866 <layer id="252" name="Constant_6107405" type="Const" version="opset1">
3867 <data element_type="f32" shape="1, 1, 384" offset="408511136" size="1536" />
3868 <output>
3869 <port id="0" precision="FP32">
3870 <dim>1</dim>
3871 <dim>1</dim>
3872 <dim>384</dim>
3873 </port>
3874 </output>
3875 </layer>
3876 <layer id="253" name="__module.encoder.layer.3.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
3877 <data auto_broadcast="numpy" />
3878 <input>
3879 <port id="0" precision="FP32">
3880 <dim>-1</dim>
3881 <dim>-1</dim>
3882 <dim>384</dim>
3883 </port>
3884 <port id="1" precision="FP32">
3885 <dim>1</dim>
3886 <dim>1</dim>
3887 <dim>384</dim>
3888 </port>
3889 </input>
3890 <output>
3891 <port id="2" precision="FP32" names="388,input.15">
3892 <dim>-1</dim>
3893 <dim>-1</dim>
3894 <dim>384</dim>
3895 </port>
3896 </output>
3897 </layer>
3898 <layer id="254" name="__module.encoder.layer.3.attention.output/aten::add/Add" type="Add" version="opset1">
3899 <data auto_broadcast="numpy" />
3900 <input>
3901 <port id="0" precision="FP32">
3902 <dim>-1</dim>
3903 <dim>-1</dim>
3904 <dim>384</dim>
3905 </port>
3906 <port id="1" precision="FP32">
3907 <dim>-1</dim>
3908 <dim>-1</dim>
3909 <dim>384</dim>
3910 </port>
3911 </input>
3912 <output>
3913 <port id="2" precision="FP32" names="390">
3914 <dim>-1</dim>
3915 <dim>-1</dim>
3916 <dim>384</dim>
3917 </port>
3918 </output>
3919 </layer>
3920 <layer id="255" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
3921 <data element_type="i32" shape="1" offset="384850452" size="4" />
3922 <output>
3923 <port id="0" precision="I32">
3924 <dim>1</dim>
3925 </port>
3926 </output>
3927 </layer>
3928 <layer id="256" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
3929 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
3930 <input>
3931 <port id="0" precision="FP32">
3932 <dim>-1</dim>
3933 <dim>-1</dim>
3934 <dim>384</dim>
3935 </port>
3936 <port id="1" precision="I32">
3937 <dim>1</dim>
3938 </port>
3939 </input>
3940 <output>
3941 <port id="2" precision="FP32">
3942 <dim>-1</dim>
3943 <dim>-1</dim>
3944 <dim>384</dim>
3945 </port>
3946 </output>
3947 </layer>
3948 <layer id="257" name="Constant_6107406" type="Const" version="opset1">
3949 <data element_type="f32" shape="1, 1, 384" offset="408512672" size="1536" />
3950 <output>
3951 <port id="0" precision="FP32">
3952 <dim>1</dim>
3953 <dim>1</dim>
3954 <dim>384</dim>
3955 </port>
3956 </output>
3957 </layer>
3958 <layer id="258" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
3959 <data auto_broadcast="numpy" />
3960 <input>
3961 <port id="0" precision="FP32">
3962 <dim>-1</dim>
3963 <dim>-1</dim>
3964 <dim>384</dim>
3965 </port>
3966 <port id="1" precision="FP32">
3967 <dim>1</dim>
3968 <dim>1</dim>
3969 <dim>384</dim>
3970 </port>
3971 </input>
3972 <output>
3973 <port id="2" precision="FP32">
3974 <dim>-1</dim>
3975 <dim>-1</dim>
3976 <dim>384</dim>
3977 </port>
3978 </output>
3979 </layer>
3980 <layer id="259" name="Constant_6107407" type="Const" version="opset1">
3981 <data element_type="f32" shape="1, 1, 384" offset="408514208" size="1536" />
3982 <output>
3983 <port id="0" precision="FP32">
3984 <dim>1</dim>
3985 <dim>1</dim>
3986 <dim>384</dim>
3987 </port>
3988 </output>
3989 </layer>
3990 <layer id="260" name="__module.encoder.layer.3.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
3991 <data auto_broadcast="numpy" />
3992 <input>
3993 <port id="0" precision="FP32">
3994 <dim>-1</dim>
3995 <dim>-1</dim>
3996 <dim>384</dim>
3997 </port>
3998 <port id="1" precision="FP32">
3999 <dim>1</dim>
4000 <dim>1</dim>
4001 <dim>384</dim>
4002 </port>
4003 </input>
4004 <output>
4005 <port id="2" precision="FP32" names="394,input_tensor.7">
4006 <dim>-1</dim>
4007 <dim>-1</dim>
4008 <dim>384</dim>
4009 </port>
4010 </output>
4011 </layer>
4012 <layer id="261" name="self.encoder.layer.3.intermediate.dense.weight" type="Const" version="opset1">
4013 <data element_type="f32" shape="1536, 384" offset="408515744" size="2359296" />
4014 <output>
4015 <port id="0" precision="FP32" names="self.encoder.layer.3.intermediate.dense.weight">
4016 <dim>1536</dim>
4017 <dim>384</dim>
4018 </port>
4019 </output>
4020 </layer>
4021 <layer id="262" name="__module.encoder.layer.3.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
4022 <data transpose_a="false" transpose_b="true" />
4023 <input>
4024 <port id="0" precision="FP32">
4025 <dim>-1</dim>
4026 <dim>-1</dim>
4027 <dim>384</dim>
4028 </port>
4029 <port id="1" precision="FP32">
4030 <dim>1536</dim>
4031 <dim>384</dim>
4032 </port>
4033 </input>
4034 <output>
4035 <port id="2" precision="FP32">
4036 <dim>-1</dim>
4037 <dim>-1</dim>
4038 <dim>1536</dim>
4039 </port>
4040 </output>
4041 </layer>
4042 <layer id="263" name="Constant_6107408" type="Const" version="opset1">
4043 <data element_type="f32" shape="1, 1, 1536" offset="410875040" size="6144" />
4044 <output>
4045 <port id="0" precision="FP32">
4046 <dim>1</dim>
4047 <dim>1</dim>
4048 <dim>1536</dim>
4049 </port>
4050 </output>
4051 </layer>
4052 <layer id="264" name="__module.encoder.layer.3.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
4053 <data auto_broadcast="numpy" />
4054 <input>
4055 <port id="0" precision="FP32">
4056 <dim>-1</dim>
4057 <dim>-1</dim>
4058 <dim>1536</dim>
4059 </port>
4060 <port id="1" precision="FP32">
4061 <dim>1</dim>
4062 <dim>1</dim>
4063 <dim>1536</dim>
4064 </port>
4065 </input>
4066 <output>
4067 <port id="2" precision="FP32" names="399">
4068 <dim>-1</dim>
4069 <dim>-1</dim>
4070 <dim>1536</dim>
4071 </port>
4072 </output>
4073 </layer>
4074 <layer id="265" name="__module.encoder.layer.3.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
4075 <data approximation_mode="ERF" />
4076 <input>
4077 <port id="0" precision="FP32">
4078 <dim>-1</dim>
4079 <dim>-1</dim>
4080 <dim>1536</dim>
4081 </port>
4082 </input>
4083 <output>
4084 <port id="1" precision="FP32" names="400">
4085 <dim>-1</dim>
4086 <dim>-1</dim>
4087 <dim>1536</dim>
4088 </port>
4089 </output>
4090 </layer>
4091 <layer id="266" name="self.encoder.layer.3.output.dense.weight" type="Const" version="opset1">
4092 <data element_type="f32" shape="384, 1536" offset="410881184" size="2359296" />
4093 <output>
4094 <port id="0" precision="FP32" names="self.encoder.layer.3.output.dense.weight">
4095 <dim>384</dim>
4096 <dim>1536</dim>
4097 </port>
4098 </output>
4099 </layer>
4100 <layer id="267" name="__module.encoder.layer.3.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
4101 <data transpose_a="false" transpose_b="true" />
4102 <input>
4103 <port id="0" precision="FP32">
4104 <dim>-1</dim>
4105 <dim>-1</dim>
4106 <dim>1536</dim>
4107 </port>
4108 <port id="1" precision="FP32">
4109 <dim>384</dim>
4110 <dim>1536</dim>
4111 </port>
4112 </input>
4113 <output>
4114 <port id="2" precision="FP32">
4115 <dim>-1</dim>
4116 <dim>-1</dim>
4117 <dim>384</dim>
4118 </port>
4119 </output>
4120 </layer>
4121 <layer id="268" name="Constant_6107409" type="Const" version="opset1">
4122 <data element_type="f32" shape="1, 1, 384" offset="413240480" size="1536" />
4123 <output>
4124 <port id="0" precision="FP32">
4125 <dim>1</dim>
4126 <dim>1</dim>
4127 <dim>384</dim>
4128 </port>
4129 </output>
4130 </layer>
4131 <layer id="269" name="__module.encoder.layer.3.output.dense/aten::linear/Add" type="Add" version="opset1">
4132 <data auto_broadcast="numpy" />
4133 <input>
4134 <port id="0" precision="FP32">
4135 <dim>-1</dim>
4136 <dim>-1</dim>
4137 <dim>384</dim>
4138 </port>
4139 <port id="1" precision="FP32">
4140 <dim>1</dim>
4141 <dim>1</dim>
4142 <dim>384</dim>
4143 </port>
4144 </input>
4145 <output>
4146 <port id="2" precision="FP32" names="406,input.17">
4147 <dim>-1</dim>
4148 <dim>-1</dim>
4149 <dim>384</dim>
4150 </port>
4151 </output>
4152 </layer>
4153 <layer id="270" name="__module.encoder.layer.3.output/aten::add/Add" type="Add" version="opset1">
4154 <data auto_broadcast="numpy" />
4155 <input>
4156 <port id="0" precision="FP32">
4157 <dim>-1</dim>
4158 <dim>-1</dim>
4159 <dim>384</dim>
4160 </port>
4161 <port id="1" precision="FP32">
4162 <dim>-1</dim>
4163 <dim>-1</dim>
4164 <dim>384</dim>
4165 </port>
4166 </input>
4167 <output>
4168 <port id="2" precision="FP32" names="408">
4169 <dim>-1</dim>
4170 <dim>-1</dim>
4171 <dim>384</dim>
4172 </port>
4173 </output>
4174 </layer>
4175 <layer id="271" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
4176 <data element_type="i32" shape="1" offset="384850452" size="4" />
4177 <output>
4178 <port id="0" precision="I32">
4179 <dim>1</dim>
4180 </port>
4181 </output>
4182 </layer>
4183 <layer id="272" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
4184 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
4185 <input>
4186 <port id="0" precision="FP32">
4187 <dim>-1</dim>
4188 <dim>-1</dim>
4189 <dim>384</dim>
4190 </port>
4191 <port id="1" precision="I32">
4192 <dim>1</dim>
4193 </port>
4194 </input>
4195 <output>
4196 <port id="2" precision="FP32">
4197 <dim>-1</dim>
4198 <dim>-1</dim>
4199 <dim>384</dim>
4200 </port>
4201 </output>
4202 </layer>
4203 <layer id="273" name="Constant_6107410" type="Const" version="opset1">
4204 <data element_type="f32" shape="1, 1, 384" offset="413242016" size="1536" />
4205 <output>
4206 <port id="0" precision="FP32">
4207 <dim>1</dim>
4208 <dim>1</dim>
4209 <dim>384</dim>
4210 </port>
4211 </output>
4212 </layer>
4213 <layer id="274" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
4214 <data auto_broadcast="numpy" />
4215 <input>
4216 <port id="0" precision="FP32">
4217 <dim>-1</dim>
4218 <dim>-1</dim>
4219 <dim>384</dim>
4220 </port>
4221 <port id="1" precision="FP32">
4222 <dim>1</dim>
4223 <dim>1</dim>
4224 <dim>384</dim>
4225 </port>
4226 </input>
4227 <output>
4228 <port id="2" precision="FP32">
4229 <dim>-1</dim>
4230 <dim>-1</dim>
4231 <dim>384</dim>
4232 </port>
4233 </output>
4234 </layer>
4235 <layer id="275" name="Constant_6107411" type="Const" version="opset1">
4236 <data element_type="f32" shape="1, 1, 384" offset="413243552" size="1536" />
4237 <output>
4238 <port id="0" precision="FP32">
4239 <dim>1</dim>
4240 <dim>1</dim>
4241 <dim>384</dim>
4242 </port>
4243 </output>
4244 </layer>
4245 <layer id="276" name="__module.encoder.layer.3.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
4246 <data auto_broadcast="numpy" />
4247 <input>
4248 <port id="0" precision="FP32">
4249 <dim>-1</dim>
4250 <dim>-1</dim>
4251 <dim>384</dim>
4252 </port>
4253 <port id="1" precision="FP32">
4254 <dim>1</dim>
4255 <dim>1</dim>
4256 <dim>384</dim>
4257 </port>
4258 </input>
4259 <output>
4260 <port id="2" precision="FP32" names="412,hidden_states.25">
4261 <dim>-1</dim>
4262 <dim>-1</dim>
4263 <dim>384</dim>
4264 </port>
4265 </output>
4266 </layer>
4267 <layer id="277" name="self.encoder.layer.4.attention.self.query.weight" type="Const" version="opset1">
4268 <data element_type="f32" shape="384, 384" offset="413245088" size="589824" />
4269 <output>
4270 <port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.query.weight">
4271 <dim>384</dim>
4272 <dim>384</dim>
4273 </port>
4274 </output>
4275 </layer>
4276 <layer id="278" name="__module.encoder.layer.4.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
4277 <data transpose_a="false" transpose_b="true" />
4278 <input>
4279 <port id="0" precision="FP32">
4280 <dim>-1</dim>
4281 <dim>-1</dim>
4282 <dim>384</dim>
4283 </port>
4284 <port id="1" precision="FP32">
4285 <dim>384</dim>
4286 <dim>384</dim>
4287 </port>
4288 </input>
4289 <output>
4290 <port id="2" precision="FP32">
4291 <dim>-1</dim>
4292 <dim>-1</dim>
4293 <dim>384</dim>
4294 </port>
4295 </output>
4296 </layer>
4297 <layer id="279" name="Constant_6107412" type="Const" version="opset1">
4298 <data element_type="f32" shape="1, 1, 384" offset="413834912" size="1536" />
4299 <output>
4300 <port id="0" precision="FP32">
4301 <dim>1</dim>
4302 <dim>1</dim>
4303 <dim>384</dim>
4304 </port>
4305 </output>
4306 </layer>
4307 <layer id="280" name="__module.encoder.layer.4.attention.self.query/aten::linear/Add" type="Add" version="opset1">
4308 <data auto_broadcast="numpy" />
4309 <input>
4310 <port id="0" precision="FP32">
4311 <dim>-1</dim>
4312 <dim>-1</dim>
4313 <dim>384</dim>
4314 </port>
4315 <port id="1" precision="FP32">
4316 <dim>1</dim>
4317 <dim>1</dim>
4318 <dim>384</dim>
4319 </port>
4320 </input>
4321 <output>
4322 <port id="2" precision="FP32" names="425,x.49">
4323 <dim>-1</dim>
4324 <dim>-1</dim>
4325 <dim>384</dim>
4326 </port>
4327 </output>
4328 </layer>
4329 <layer id="281" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
4330 <data element_type="i64" shape="4" offset="385444888" size="32" />
4331 <output>
4332 <port id="0" precision="I64">
4333 <dim>4</dim>
4334 </port>
4335 </output>
4336 </layer>
4337 <layer id="282" name="__module.encoder.layer.4.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
4338 <data special_zero="true" />
4339 <input>
4340 <port id="0" precision="FP32">
4341 <dim>-1</dim>
4342 <dim>-1</dim>
4343 <dim>384</dim>
4344 </port>
4345 <port id="1" precision="I64">
4346 <dim>4</dim>
4347 </port>
4348 </input>
4349 <output>
4350 <port id="2" precision="FP32" names="429,x.51">
4351 <dim>-1</dim>
4352 <dim>-1</dim>
4353 <dim>12</dim>
4354 <dim>32</dim>
4355 </port>
4356 </output>
4357 </layer>
4358 <layer id="283" name="Constant_6099157" type="Const" version="opset1">
4359 <data element_type="i64" shape="4" offset="385444920" size="32" />
4360 <output>
4361 <port id="0" precision="I64" names="430">
4362 <dim>4</dim>
4363 </port>
4364 </output>
4365 </layer>
4366 <layer id="284" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
4367 <input>
4368 <port id="0" precision="FP32">
4369 <dim>-1</dim>
4370 <dim>-1</dim>
4371 <dim>12</dim>
4372 <dim>32</dim>
4373 </port>
4374 <port id="1" precision="I64">
4375 <dim>4</dim>
4376 </port>
4377 </input>
4378 <output>
4379 <port id="2" precision="FP32" names="431">
4380 <dim>-1</dim>
4381 <dim>12</dim>
4382 <dim>-1</dim>
4383 <dim>32</dim>
4384 </port>
4385 </output>
4386 </layer>
4387 <layer id="285" name="self.encoder.layer.4.attention.self.key.weight" type="Const" version="opset1">
4388 <data element_type="f32" shape="384, 384" offset="413836448" size="589824" />
4389 <output>
4390 <port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.key.weight">
4391 <dim>384</dim>
4392 <dim>384</dim>
4393 </port>
4394 </output>
4395 </layer>
4396 <layer id="286" name="__module.encoder.layer.4.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
4397 <data transpose_a="false" transpose_b="true" />
4398 <input>
4399 <port id="0" precision="FP32">
4400 <dim>-1</dim>
4401 <dim>-1</dim>
4402 <dim>384</dim>
4403 </port>
4404 <port id="1" precision="FP32">
4405 <dim>384</dim>
4406 <dim>384</dim>
4407 </port>
4408 </input>
4409 <output>
4410 <port id="2" precision="FP32">
4411 <dim>-1</dim>
4412 <dim>-1</dim>
4413 <dim>384</dim>
4414 </port>
4415 </output>
4416 </layer>
4417 <layer id="287" name="Constant_6107413" type="Const" version="opset1">
4418 <data element_type="f32" shape="1, 1, 384" offset="414426272" size="1536" />
4419 <output>
4420 <port id="0" precision="FP32">
4421 <dim>1</dim>
4422 <dim>1</dim>
4423 <dim>384</dim>
4424 </port>
4425 </output>
4426 </layer>
4427 <layer id="288" name="__module.encoder.layer.4.attention.self.key/aten::linear/Add" type="Add" version="opset1">
4428 <data auto_broadcast="numpy" />
4429 <input>
4430 <port id="0" precision="FP32">
4431 <dim>-1</dim>
4432 <dim>-1</dim>
4433 <dim>384</dim>
4434 </port>
4435 <port id="1" precision="FP32">
4436 <dim>1</dim>
4437 <dim>1</dim>
4438 <dim>384</dim>
4439 </port>
4440 </input>
4441 <output>
4442 <port id="2" precision="FP32" names="434,x.53">
4443 <dim>-1</dim>
4444 <dim>-1</dim>
4445 <dim>384</dim>
4446 </port>
4447 </output>
4448 </layer>
4449 <layer id="289" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
4450 <data element_type="i64" shape="4" offset="385444888" size="32" />
4451 <output>
4452 <port id="0" precision="I64">
4453 <dim>4</dim>
4454 </port>
4455 </output>
4456 </layer>
4457 <layer id="290" name="__module.encoder.layer.4.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
4458 <data special_zero="true" />
4459 <input>
4460 <port id="0" precision="FP32">
4461 <dim>-1</dim>
4462 <dim>-1</dim>
4463 <dim>384</dim>
4464 </port>
4465 <port id="1" precision="I64">
4466 <dim>4</dim>
4467 </port>
4468 </input>
4469 <output>
4470 <port id="2" precision="FP32" names="438,x.55">
4471 <dim>-1</dim>
4472 <dim>-1</dim>
4473 <dim>12</dim>
4474 <dim>32</dim>
4475 </port>
4476 </output>
4477 </layer>
4478 <layer id="291" name="Constant_6099180" type="Const" version="opset1">
4479 <data element_type="i64" shape="4" offset="385444920" size="32" />
4480 <output>
4481 <port id="0" precision="I64" names="439">
4482 <dim>4</dim>
4483 </port>
4484 </output>
4485 </layer>
4486 <layer id="292" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
4487 <input>
4488 <port id="0" precision="FP32">
4489 <dim>-1</dim>
4490 <dim>-1</dim>
4491 <dim>12</dim>
4492 <dim>32</dim>
4493 </port>
4494 <port id="1" precision="I64">
4495 <dim>4</dim>
4496 </port>
4497 </input>
4498 <output>
4499 <port id="2" precision="FP32" names="440">
4500 <dim>-1</dim>
4501 <dim>12</dim>
4502 <dim>-1</dim>
4503 <dim>32</dim>
4504 </port>
4505 </output>
4506 </layer>
4507 <layer id="293" name="self.encoder.layer.4.attention.self.value.weight" type="Const" version="opset1">
4508 <data element_type="f32" shape="384, 384" offset="414427808" size="589824" />
4509 <output>
4510 <port id="0" precision="FP32" names="self.encoder.layer.4.attention.self.value.weight">
4511 <dim>384</dim>
4512 <dim>384</dim>
4513 </port>
4514 </output>
4515 </layer>
4516 <layer id="294" name="__module.encoder.layer.4.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
4517 <data transpose_a="false" transpose_b="true" />
4518 <input>
4519 <port id="0" precision="FP32">
4520 <dim>-1</dim>
4521 <dim>-1</dim>
4522 <dim>384</dim>
4523 </port>
4524 <port id="1" precision="FP32">
4525 <dim>384</dim>
4526 <dim>384</dim>
4527 </port>
4528 </input>
4529 <output>
4530 <port id="2" precision="FP32">
4531 <dim>-1</dim>
4532 <dim>-1</dim>
4533 <dim>384</dim>
4534 </port>
4535 </output>
4536 </layer>
4537 <layer id="295" name="Constant_6107414" type="Const" version="opset1">
4538 <data element_type="f32" shape="1, 1, 384" offset="415017632" size="1536" />
4539 <output>
4540 <port id="0" precision="FP32">
4541 <dim>1</dim>
4542 <dim>1</dim>
4543 <dim>384</dim>
4544 </port>
4545 </output>
4546 </layer>
4547 <layer id="296" name="__module.encoder.layer.4.attention.self.value/aten::linear/Add" type="Add" version="opset1">
4548 <data auto_broadcast="numpy" />
4549 <input>
4550 <port id="0" precision="FP32">
4551 <dim>-1</dim>
4552 <dim>-1</dim>
4553 <dim>384</dim>
4554 </port>
4555 <port id="1" precision="FP32">
4556 <dim>1</dim>
4557 <dim>1</dim>
4558 <dim>384</dim>
4559 </port>
4560 </input>
4561 <output>
4562 <port id="2" precision="FP32" names="443,x.57">
4563 <dim>-1</dim>
4564 <dim>-1</dim>
4565 <dim>384</dim>
4566 </port>
4567 </output>
4568 </layer>
4569 <layer id="297" name="__module.encoder.layer.4.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
4570 <data element_type="i64" shape="4" offset="385444888" size="32" />
4571 <output>
4572 <port id="0" precision="I64">
4573 <dim>4</dim>
4574 </port>
4575 </output>
4576 </layer>
4577 <layer id="298" name="__module.encoder.layer.4.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
4578 <data special_zero="true" />
4579 <input>
4580 <port id="0" precision="FP32">
4581 <dim>-1</dim>
4582 <dim>-1</dim>
4583 <dim>384</dim>
4584 </port>
4585 <port id="1" precision="I64">
4586 <dim>4</dim>
4587 </port>
4588 </input>
4589 <output>
4590 <port id="2" precision="FP32" names="447,x.59">
4591 <dim>-1</dim>
4592 <dim>-1</dim>
4593 <dim>12</dim>
4594 <dim>32</dim>
4595 </port>
4596 </output>
4597 </layer>
4598 <layer id="299" name="Constant_6099203" type="Const" version="opset1">
4599 <data element_type="i64" shape="4" offset="385444920" size="32" />
4600 <output>
4601 <port id="0" precision="I64" names="448">
4602 <dim>4</dim>
4603 </port>
4604 </output>
4605 </layer>
4606 <layer id="300" name="__module.encoder.layer.4.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
4607 <input>
4608 <port id="0" precision="FP32">
4609 <dim>-1</dim>
4610 <dim>-1</dim>
4611 <dim>12</dim>
4612 <dim>32</dim>
4613 </port>
4614 <port id="1" precision="I64">
4615 <dim>4</dim>
4616 </port>
4617 </input>
4618 <output>
4619 <port id="2" precision="FP32" names="449">
4620 <dim>-1</dim>
4621 <dim>12</dim>
4622 <dim>-1</dim>
4623 <dim>32</dim>
4624 </port>
4625 </output>
4626 </layer>
4627 <layer id="301" name="__module.encoder.layer.4.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
4628 <data causal="false" />
4629 <input>
4630 <port id="0" precision="FP32">
4631 <dim>-1</dim>
4632 <dim>12</dim>
4633 <dim>-1</dim>
4634 <dim>32</dim>
4635 </port>
4636 <port id="1" precision="FP32">
4637 <dim>-1</dim>
4638 <dim>12</dim>
4639 <dim>-1</dim>
4640 <dim>32</dim>
4641 </port>
4642 <port id="2" precision="FP32">
4643 <dim>-1</dim>
4644 <dim>12</dim>
4645 <dim>-1</dim>
4646 <dim>32</dim>
4647 </port>
4648 <port id="3" precision="FP32">
4649 <dim>-1</dim>
4650 <dim>1</dim>
4651 <dim>-1</dim>
4652 <dim>-1</dim>
4653 </port>
4654 </input>
4655 <output>
4656 <port id="4" precision="FP32" names="450,attn_output.17">
4657 <dim>-1</dim>
4658 <dim>12</dim>
4659 <dim>-1</dim>
4660 <dim>32</dim>
4661 </port>
4662 </output>
4663 </layer>
4664 <layer id="302" name="__module.encoder.layer.4.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
4665 <data element_type="i32" shape="4" offset="386627704" size="16" />
4666 <output>
4667 <port id="0" precision="I32">
4668 <dim>4</dim>
4669 </port>
4670 </output>
4671 </layer>
4672 <layer id="303" name="__module.encoder.layer.4.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
4673 <input>
4674 <port id="0" precision="FP32">
4675 <dim>-1</dim>
4676 <dim>12</dim>
4677 <dim>-1</dim>
4678 <dim>32</dim>
4679 </port>
4680 <port id="1" precision="I32">
4681 <dim>4</dim>
4682 </port>
4683 </input>
4684 <output>
4685 <port id="2" precision="FP32" names="451,attn_output.19">
4686 <dim>-1</dim>
4687 <dim>-1</dim>
4688 <dim>12</dim>
4689 <dim>32</dim>
4690 </port>
4691 </output>
4692 </layer>
4693 <layer id="304" name="Constant_6107618" type="Const" version="opset1">
4694 <data element_type="i64" shape="3" offset="386627720" size="24" />
4695 <output>
4696 <port id="0" precision="I64">
4697 <dim>3</dim>
4698 </port>
4699 </output>
4700 </layer>
4701 <layer id="305" name="__module.encoder.layer.4.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
4702 <data special_zero="true" />
4703 <input>
4704 <port id="0" precision="FP32">
4705 <dim>-1</dim>
4706 <dim>-1</dim>
4707 <dim>12</dim>
4708 <dim>32</dim>
4709 </port>
4710 <port id="1" precision="I64">
4711 <dim>3</dim>
4712 </port>
4713 </input>
4714 <output>
4715 <port id="2" precision="FP32" names="453">
4716 <dim>-1</dim>
4717 <dim>-1</dim>
4718 <dim>384</dim>
4719 </port>
4720 </output>
4721 </layer>
4722 <layer id="306" name="self.encoder.layer.4.attention.output.dense.weight" type="Const" version="opset1">
4723 <data element_type="f32" shape="384, 384" offset="415019168" size="589824" />
4724 <output>
4725 <port id="0" precision="FP32" names="self.encoder.layer.4.attention.output.dense.weight">
4726 <dim>384</dim>
4727 <dim>384</dim>
4728 </port>
4729 </output>
4730 </layer>
4731 <layer id="307" name="__module.encoder.layer.4.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
4732 <data transpose_a="false" transpose_b="true" />
4733 <input>
4734 <port id="0" precision="FP32">
4735 <dim>-1</dim>
4736 <dim>-1</dim>
4737 <dim>384</dim>
4738 </port>
4739 <port id="1" precision="FP32">
4740 <dim>384</dim>
4741 <dim>384</dim>
4742 </port>
4743 </input>
4744 <output>
4745 <port id="2" precision="FP32">
4746 <dim>-1</dim>
4747 <dim>-1</dim>
4748 <dim>384</dim>
4749 </port>
4750 </output>
4751 </layer>
4752 <layer id="308" name="Constant_6107415" type="Const" version="opset1">
4753 <data element_type="f32" shape="1, 1, 384" offset="415608992" size="1536" />
4754 <output>
4755 <port id="0" precision="FP32">
4756 <dim>1</dim>
4757 <dim>1</dim>
4758 <dim>384</dim>
4759 </port>
4760 </output>
4761 </layer>
4762 <layer id="309" name="__module.encoder.layer.4.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
4763 <data auto_broadcast="numpy" />
4764 <input>
4765 <port id="0" precision="FP32">
4766 <dim>-1</dim>
4767 <dim>-1</dim>
4768 <dim>384</dim>
4769 </port>
4770 <port id="1" precision="FP32">
4771 <dim>1</dim>
4772 <dim>1</dim>
4773 <dim>384</dim>
4774 </port>
4775 </input>
4776 <output>
4777 <port id="2" precision="FP32" names="459,input.19">
4778 <dim>-1</dim>
4779 <dim>-1</dim>
4780 <dim>384</dim>
4781 </port>
4782 </output>
4783 </layer>
4784 <layer id="310" name="__module.encoder.layer.4.attention.output/aten::add/Add" type="Add" version="opset1">
4785 <data auto_broadcast="numpy" />
4786 <input>
4787 <port id="0" precision="FP32">
4788 <dim>-1</dim>
4789 <dim>-1</dim>
4790 <dim>384</dim>
4791 </port>
4792 <port id="1" precision="FP32">
4793 <dim>-1</dim>
4794 <dim>-1</dim>
4795 <dim>384</dim>
4796 </port>
4797 </input>
4798 <output>
4799 <port id="2" precision="FP32" names="461">
4800 <dim>-1</dim>
4801 <dim>-1</dim>
4802 <dim>384</dim>
4803 </port>
4804 </output>
4805 </layer>
4806 <layer id="311" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
4807 <data element_type="i32" shape="1" offset="384850452" size="4" />
4808 <output>
4809 <port id="0" precision="I32">
4810 <dim>1</dim>
4811 </port>
4812 </output>
4813 </layer>
4814 <layer id="312" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
4815 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
4816 <input>
4817 <port id="0" precision="FP32">
4818 <dim>-1</dim>
4819 <dim>-1</dim>
4820 <dim>384</dim>
4821 </port>
4822 <port id="1" precision="I32">
4823 <dim>1</dim>
4824 </port>
4825 </input>
4826 <output>
4827 <port id="2" precision="FP32">
4828 <dim>-1</dim>
4829 <dim>-1</dim>
4830 <dim>384</dim>
4831 </port>
4832 </output>
4833 </layer>
4834 <layer id="313" name="Constant_6107416" type="Const" version="opset1">
4835 <data element_type="f32" shape="1, 1, 384" offset="415610528" size="1536" />
4836 <output>
4837 <port id="0" precision="FP32">
4838 <dim>1</dim>
4839 <dim>1</dim>
4840 <dim>384</dim>
4841 </port>
4842 </output>
4843 </layer>
4844 <layer id="314" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
4845 <data auto_broadcast="numpy" />
4846 <input>
4847 <port id="0" precision="FP32">
4848 <dim>-1</dim>
4849 <dim>-1</dim>
4850 <dim>384</dim>
4851 </port>
4852 <port id="1" precision="FP32">
4853 <dim>1</dim>
4854 <dim>1</dim>
4855 <dim>384</dim>
4856 </port>
4857 </input>
4858 <output>
4859 <port id="2" precision="FP32">
4860 <dim>-1</dim>
4861 <dim>-1</dim>
4862 <dim>384</dim>
4863 </port>
4864 </output>
4865 </layer>
4866 <layer id="315" name="Constant_6107417" type="Const" version="opset1">
4867 <data element_type="f32" shape="1, 1, 384" offset="415612064" size="1536" />
4868 <output>
4869 <port id="0" precision="FP32">
4870 <dim>1</dim>
4871 <dim>1</dim>
4872 <dim>384</dim>
4873 </port>
4874 </output>
4875 </layer>
4876 <layer id="316" name="__module.encoder.layer.4.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
4877 <data auto_broadcast="numpy" />
4878 <input>
4879 <port id="0" precision="FP32">
4880 <dim>-1</dim>
4881 <dim>-1</dim>
4882 <dim>384</dim>
4883 </port>
4884 <port id="1" precision="FP32">
4885 <dim>1</dim>
4886 <dim>1</dim>
4887 <dim>384</dim>
4888 </port>
4889 </input>
4890 <output>
4891 <port id="2" precision="FP32" names="465,input_tensor.9">
4892 <dim>-1</dim>
4893 <dim>-1</dim>
4894 <dim>384</dim>
4895 </port>
4896 </output>
4897 </layer>
4898 <layer id="317" name="self.encoder.layer.4.intermediate.dense.weight" type="Const" version="opset1">
4899 <data element_type="f32" shape="1536, 384" offset="415613600" size="2359296" />
4900 <output>
4901 <port id="0" precision="FP32" names="self.encoder.layer.4.intermediate.dense.weight">
4902 <dim>1536</dim>
4903 <dim>384</dim>
4904 </port>
4905 </output>
4906 </layer>
4907 <layer id="318" name="__module.encoder.layer.4.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
4908 <data transpose_a="false" transpose_b="true" />
4909 <input>
4910 <port id="0" precision="FP32">
4911 <dim>-1</dim>
4912 <dim>-1</dim>
4913 <dim>384</dim>
4914 </port>
4915 <port id="1" precision="FP32">
4916 <dim>1536</dim>
4917 <dim>384</dim>
4918 </port>
4919 </input>
4920 <output>
4921 <port id="2" precision="FP32">
4922 <dim>-1</dim>
4923 <dim>-1</dim>
4924 <dim>1536</dim>
4925 </port>
4926 </output>
4927 </layer>
4928 <layer id="319" name="Constant_6107418" type="Const" version="opset1">
4929 <data element_type="f32" shape="1, 1, 1536" offset="417972896" size="6144" />
4930 <output>
4931 <port id="0" precision="FP32">
4932 <dim>1</dim>
4933 <dim>1</dim>
4934 <dim>1536</dim>
4935 </port>
4936 </output>
4937 </layer>
4938 <layer id="320" name="__module.encoder.layer.4.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
4939 <data auto_broadcast="numpy" />
4940 <input>
4941 <port id="0" precision="FP32">
4942 <dim>-1</dim>
4943 <dim>-1</dim>
4944 <dim>1536</dim>
4945 </port>
4946 <port id="1" precision="FP32">
4947 <dim>1</dim>
4948 <dim>1</dim>
4949 <dim>1536</dim>
4950 </port>
4951 </input>
4952 <output>
4953 <port id="2" precision="FP32" names="470">
4954 <dim>-1</dim>
4955 <dim>-1</dim>
4956 <dim>1536</dim>
4957 </port>
4958 </output>
4959 </layer>
4960 <layer id="321" name="__module.encoder.layer.4.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
4961 <data approximation_mode="ERF" />
4962 <input>
4963 <port id="0" precision="FP32">
4964 <dim>-1</dim>
4965 <dim>-1</dim>
4966 <dim>1536</dim>
4967 </port>
4968 </input>
4969 <output>
4970 <port id="1" precision="FP32" names="471">
4971 <dim>-1</dim>
4972 <dim>-1</dim>
4973 <dim>1536</dim>
4974 </port>
4975 </output>
4976 </layer>
4977 <layer id="322" name="self.encoder.layer.4.output.dense.weight" type="Const" version="opset1">
4978 <data element_type="f32" shape="384, 1536" offset="417979040" size="2359296" />
4979 <output>
4980 <port id="0" precision="FP32" names="self.encoder.layer.4.output.dense.weight">
4981 <dim>384</dim>
4982 <dim>1536</dim>
4983 </port>
4984 </output>
4985 </layer>
4986 <layer id="323" name="__module.encoder.layer.4.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
4987 <data transpose_a="false" transpose_b="true" />
4988 <input>
4989 <port id="0" precision="FP32">
4990 <dim>-1</dim>
4991 <dim>-1</dim>
4992 <dim>1536</dim>
4993 </port>
4994 <port id="1" precision="FP32">
4995 <dim>384</dim>
4996 <dim>1536</dim>
4997 </port>
4998 </input>
4999 <output>
5000 <port id="2" precision="FP32">
5001 <dim>-1</dim>
5002 <dim>-1</dim>
5003 <dim>384</dim>
5004 </port>
5005 </output>
5006 </layer>
5007 <layer id="324" name="Constant_6107419" type="Const" version="opset1">
5008 <data element_type="f32" shape="1, 1, 384" offset="420338336" size="1536" />
5009 <output>
5010 <port id="0" precision="FP32">
5011 <dim>1</dim>
5012 <dim>1</dim>
5013 <dim>384</dim>
5014 </port>
5015 </output>
5016 </layer>
5017 <layer id="325" name="__module.encoder.layer.4.output.dense/aten::linear/Add" type="Add" version="opset1">
5018 <data auto_broadcast="numpy" />
5019 <input>
5020 <port id="0" precision="FP32">
5021 <dim>-1</dim>
5022 <dim>-1</dim>
5023 <dim>384</dim>
5024 </port>
5025 <port id="1" precision="FP32">
5026 <dim>1</dim>
5027 <dim>1</dim>
5028 <dim>384</dim>
5029 </port>
5030 </input>
5031 <output>
5032 <port id="2" precision="FP32" names="477,input.21">
5033 <dim>-1</dim>
5034 <dim>-1</dim>
5035 <dim>384</dim>
5036 </port>
5037 </output>
5038 </layer>
5039 <layer id="326" name="__module.encoder.layer.4.output/aten::add/Add" type="Add" version="opset1">
5040 <data auto_broadcast="numpy" />
5041 <input>
5042 <port id="0" precision="FP32">
5043 <dim>-1</dim>
5044 <dim>-1</dim>
5045 <dim>384</dim>
5046 </port>
5047 <port id="1" precision="FP32">
5048 <dim>-1</dim>
5049 <dim>-1</dim>
5050 <dim>384</dim>
5051 </port>
5052 </input>
5053 <output>
5054 <port id="2" precision="FP32" names="479">
5055 <dim>-1</dim>
5056 <dim>-1</dim>
5057 <dim>384</dim>
5058 </port>
5059 </output>
5060 </layer>
5061 <layer id="327" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
5062 <data element_type="i32" shape="1" offset="384850452" size="4" />
5063 <output>
5064 <port id="0" precision="I32">
5065 <dim>1</dim>
5066 </port>
5067 </output>
5068 </layer>
5069 <layer id="328" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
5070 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
5071 <input>
5072 <port id="0" precision="FP32">
5073 <dim>-1</dim>
5074 <dim>-1</dim>
5075 <dim>384</dim>
5076 </port>
5077 <port id="1" precision="I32">
5078 <dim>1</dim>
5079 </port>
5080 </input>
5081 <output>
5082 <port id="2" precision="FP32">
5083 <dim>-1</dim>
5084 <dim>-1</dim>
5085 <dim>384</dim>
5086 </port>
5087 </output>
5088 </layer>
5089 <layer id="329" name="Constant_6107420" type="Const" version="opset1">
5090 <data element_type="f32" shape="1, 1, 384" offset="420339872" size="1536" />
5091 <output>
5092 <port id="0" precision="FP32">
5093 <dim>1</dim>
5094 <dim>1</dim>
5095 <dim>384</dim>
5096 </port>
5097 </output>
5098 </layer>
5099 <layer id="330" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
5100 <data auto_broadcast="numpy" />
5101 <input>
5102 <port id="0" precision="FP32">
5103 <dim>-1</dim>
5104 <dim>-1</dim>
5105 <dim>384</dim>
5106 </port>
5107 <port id="1" precision="FP32">
5108 <dim>1</dim>
5109 <dim>1</dim>
5110 <dim>384</dim>
5111 </port>
5112 </input>
5113 <output>
5114 <port id="2" precision="FP32">
5115 <dim>-1</dim>
5116 <dim>-1</dim>
5117 <dim>384</dim>
5118 </port>
5119 </output>
5120 </layer>
5121 <layer id="331" name="Constant_6107421" type="Const" version="opset1">
5122 <data element_type="f32" shape="1, 1, 384" offset="420341408" size="1536" />
5123 <output>
5124 <port id="0" precision="FP32">
5125 <dim>1</dim>
5126 <dim>1</dim>
5127 <dim>384</dim>
5128 </port>
5129 </output>
5130 </layer>
5131 <layer id="332" name="__module.encoder.layer.4.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
5132 <data auto_broadcast="numpy" />
5133 <input>
5134 <port id="0" precision="FP32">
5135 <dim>-1</dim>
5136 <dim>-1</dim>
5137 <dim>384</dim>
5138 </port>
5139 <port id="1" precision="FP32">
5140 <dim>1</dim>
5141 <dim>1</dim>
5142 <dim>384</dim>
5143 </port>
5144 </input>
5145 <output>
5146 <port id="2" precision="FP32" names="483,hidden_states.31">
5147 <dim>-1</dim>
5148 <dim>-1</dim>
5149 <dim>384</dim>
5150 </port>
5151 </output>
5152 </layer>
5153 <layer id="333" name="self.encoder.layer.5.attention.self.query.weight" type="Const" version="opset1">
5154 <data element_type="f32" shape="384, 384" offset="420342944" size="589824" />
5155 <output>
5156 <port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.query.weight">
5157 <dim>384</dim>
5158 <dim>384</dim>
5159 </port>
5160 </output>
5161 </layer>
5162 <layer id="334" name="__module.encoder.layer.5.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
5163 <data transpose_a="false" transpose_b="true" />
5164 <input>
5165 <port id="0" precision="FP32">
5166 <dim>-1</dim>
5167 <dim>-1</dim>
5168 <dim>384</dim>
5169 </port>
5170 <port id="1" precision="FP32">
5171 <dim>384</dim>
5172 <dim>384</dim>
5173 </port>
5174 </input>
5175 <output>
5176 <port id="2" precision="FP32">
5177 <dim>-1</dim>
5178 <dim>-1</dim>
5179 <dim>384</dim>
5180 </port>
5181 </output>
5182 </layer>
5183 <layer id="335" name="Constant_6107422" type="Const" version="opset1">
5184 <data element_type="f32" shape="1, 1, 384" offset="420932768" size="1536" />
5185 <output>
5186 <port id="0" precision="FP32">
5187 <dim>1</dim>
5188 <dim>1</dim>
5189 <dim>384</dim>
5190 </port>
5191 </output>
5192 </layer>
5193 <layer id="336" name="__module.encoder.layer.5.attention.self.query/aten::linear/Add" type="Add" version="opset1">
5194 <data auto_broadcast="numpy" />
5195 <input>
5196 <port id="0" precision="FP32">
5197 <dim>-1</dim>
5198 <dim>-1</dim>
5199 <dim>384</dim>
5200 </port>
5201 <port id="1" precision="FP32">
5202 <dim>1</dim>
5203 <dim>1</dim>
5204 <dim>384</dim>
5205 </port>
5206 </input>
5207 <output>
5208 <port id="2" precision="FP32" names="496,x.61">
5209 <dim>-1</dim>
5210 <dim>-1</dim>
5211 <dim>384</dim>
5212 </port>
5213 </output>
5214 </layer>
5215 <layer id="337" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
5216 <data element_type="i64" shape="4" offset="385444888" size="32" />
5217 <output>
5218 <port id="0" precision="I64">
5219 <dim>4</dim>
5220 </port>
5221 </output>
5222 </layer>
5223 <layer id="338" name="__module.encoder.layer.5.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
5224 <data special_zero="true" />
5225 <input>
5226 <port id="0" precision="FP32">
5227 <dim>-1</dim>
5228 <dim>-1</dim>
5229 <dim>384</dim>
5230 </port>
5231 <port id="1" precision="I64">
5232 <dim>4</dim>
5233 </port>
5234 </input>
5235 <output>
5236 <port id="2" precision="FP32" names="500,x.63">
5237 <dim>-1</dim>
5238 <dim>-1</dim>
5239 <dim>12</dim>
5240 <dim>32</dim>
5241 </port>
5242 </output>
5243 </layer>
5244 <layer id="339" name="Constant_6099383" type="Const" version="opset1">
5245 <data element_type="i64" shape="4" offset="385444920" size="32" />
5246 <output>
5247 <port id="0" precision="I64" names="501">
5248 <dim>4</dim>
5249 </port>
5250 </output>
5251 </layer>
5252 <layer id="340" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
5253 <input>
5254 <port id="0" precision="FP32">
5255 <dim>-1</dim>
5256 <dim>-1</dim>
5257 <dim>12</dim>
5258 <dim>32</dim>
5259 </port>
5260 <port id="1" precision="I64">
5261 <dim>4</dim>
5262 </port>
5263 </input>
5264 <output>
5265 <port id="2" precision="FP32" names="502">
5266 <dim>-1</dim>
5267 <dim>12</dim>
5268 <dim>-1</dim>
5269 <dim>32</dim>
5270 </port>
5271 </output>
5272 </layer>
5273 <layer id="341" name="self.encoder.layer.5.attention.self.key.weight" type="Const" version="opset1">
5274 <data element_type="f32" shape="384, 384" offset="420934304" size="589824" />
5275 <output>
5276 <port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.key.weight">
5277 <dim>384</dim>
5278 <dim>384</dim>
5279 </port>
5280 </output>
5281 </layer>
5282 <layer id="342" name="__module.encoder.layer.5.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
5283 <data transpose_a="false" transpose_b="true" />
5284 <input>
5285 <port id="0" precision="FP32">
5286 <dim>-1</dim>
5287 <dim>-1</dim>
5288 <dim>384</dim>
5289 </port>
5290 <port id="1" precision="FP32">
5291 <dim>384</dim>
5292 <dim>384</dim>
5293 </port>
5294 </input>
5295 <output>
5296 <port id="2" precision="FP32">
5297 <dim>-1</dim>
5298 <dim>-1</dim>
5299 <dim>384</dim>
5300 </port>
5301 </output>
5302 </layer>
5303 <layer id="343" name="Constant_6107423" type="Const" version="opset1">
5304 <data element_type="f32" shape="1, 1, 384" offset="421524128" size="1536" />
5305 <output>
5306 <port id="0" precision="FP32">
5307 <dim>1</dim>
5308 <dim>1</dim>
5309 <dim>384</dim>
5310 </port>
5311 </output>
5312 </layer>
5313 <layer id="344" name="__module.encoder.layer.5.attention.self.key/aten::linear/Add" type="Add" version="opset1">
5314 <data auto_broadcast="numpy" />
5315 <input>
5316 <port id="0" precision="FP32">
5317 <dim>-1</dim>
5318 <dim>-1</dim>
5319 <dim>384</dim>
5320 </port>
5321 <port id="1" precision="FP32">
5322 <dim>1</dim>
5323 <dim>1</dim>
5324 <dim>384</dim>
5325 </port>
5326 </input>
5327 <output>
5328 <port id="2" precision="FP32" names="505,x.65">
5329 <dim>-1</dim>
5330 <dim>-1</dim>
5331 <dim>384</dim>
5332 </port>
5333 </output>
5334 </layer>
5335 <layer id="345" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
5336 <data element_type="i64" shape="4" offset="385444888" size="32" />
5337 <output>
5338 <port id="0" precision="I64">
5339 <dim>4</dim>
5340 </port>
5341 </output>
5342 </layer>
5343 <layer id="346" name="__module.encoder.layer.5.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
5344 <data special_zero="true" />
5345 <input>
5346 <port id="0" precision="FP32">
5347 <dim>-1</dim>
5348 <dim>-1</dim>
5349 <dim>384</dim>
5350 </port>
5351 <port id="1" precision="I64">
5352 <dim>4</dim>
5353 </port>
5354 </input>
5355 <output>
5356 <port id="2" precision="FP32" names="509,x.67">
5357 <dim>-1</dim>
5358 <dim>-1</dim>
5359 <dim>12</dim>
5360 <dim>32</dim>
5361 </port>
5362 </output>
5363 </layer>
5364 <layer id="347" name="Constant_6099406" type="Const" version="opset1">
5365 <data element_type="i64" shape="4" offset="385444920" size="32" />
5366 <output>
5367 <port id="0" precision="I64" names="510">
5368 <dim>4</dim>
5369 </port>
5370 </output>
5371 </layer>
5372 <layer id="348" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
5373 <input>
5374 <port id="0" precision="FP32">
5375 <dim>-1</dim>
5376 <dim>-1</dim>
5377 <dim>12</dim>
5378 <dim>32</dim>
5379 </port>
5380 <port id="1" precision="I64">
5381 <dim>4</dim>
5382 </port>
5383 </input>
5384 <output>
5385 <port id="2" precision="FP32" names="511">
5386 <dim>-1</dim>
5387 <dim>12</dim>
5388 <dim>-1</dim>
5389 <dim>32</dim>
5390 </port>
5391 </output>
5392 </layer>
5393 <layer id="349" name="self.encoder.layer.5.attention.self.value.weight" type="Const" version="opset1">
5394 <data element_type="f32" shape="384, 384" offset="421525664" size="589824" />
5395 <output>
5396 <port id="0" precision="FP32" names="self.encoder.layer.5.attention.self.value.weight">
5397 <dim>384</dim>
5398 <dim>384</dim>
5399 </port>
5400 </output>
5401 </layer>
5402 <layer id="350" name="__module.encoder.layer.5.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
5403 <data transpose_a="false" transpose_b="true" />
5404 <input>
5405 <port id="0" precision="FP32">
5406 <dim>-1</dim>
5407 <dim>-1</dim>
5408 <dim>384</dim>
5409 </port>
5410 <port id="1" precision="FP32">
5411 <dim>384</dim>
5412 <dim>384</dim>
5413 </port>
5414 </input>
5415 <output>
5416 <port id="2" precision="FP32">
5417 <dim>-1</dim>
5418 <dim>-1</dim>
5419 <dim>384</dim>
5420 </port>
5421 </output>
5422 </layer>
5423 <layer id="351" name="Constant_6107424" type="Const" version="opset1">
5424 <data element_type="f32" shape="1, 1, 384" offset="422115488" size="1536" />
5425 <output>
5426 <port id="0" precision="FP32">
5427 <dim>1</dim>
5428 <dim>1</dim>
5429 <dim>384</dim>
5430 </port>
5431 </output>
5432 </layer>
5433 <layer id="352" name="__module.encoder.layer.5.attention.self.value/aten::linear/Add" type="Add" version="opset1">
5434 <data auto_broadcast="numpy" />
5435 <input>
5436 <port id="0" precision="FP32">
5437 <dim>-1</dim>
5438 <dim>-1</dim>
5439 <dim>384</dim>
5440 </port>
5441 <port id="1" precision="FP32">
5442 <dim>1</dim>
5443 <dim>1</dim>
5444 <dim>384</dim>
5445 </port>
5446 </input>
5447 <output>
5448 <port id="2" precision="FP32" names="514,x.69">
5449 <dim>-1</dim>
5450 <dim>-1</dim>
5451 <dim>384</dim>
5452 </port>
5453 </output>
5454 </layer>
5455 <layer id="353" name="__module.encoder.layer.5.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
5456 <data element_type="i64" shape="4" offset="385444888" size="32" />
5457 <output>
5458 <port id="0" precision="I64">
5459 <dim>4</dim>
5460 </port>
5461 </output>
5462 </layer>
5463 <layer id="354" name="__module.encoder.layer.5.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
5464 <data special_zero="true" />
5465 <input>
5466 <port id="0" precision="FP32">
5467 <dim>-1</dim>
5468 <dim>-1</dim>
5469 <dim>384</dim>
5470 </port>
5471 <port id="1" precision="I64">
5472 <dim>4</dim>
5473 </port>
5474 </input>
5475 <output>
5476 <port id="2" precision="FP32" names="518,x.71">
5477 <dim>-1</dim>
5478 <dim>-1</dim>
5479 <dim>12</dim>
5480 <dim>32</dim>
5481 </port>
5482 </output>
5483 </layer>
5484 <layer id="355" name="Constant_6099429" type="Const" version="opset1">
5485 <data element_type="i64" shape="4" offset="385444920" size="32" />
5486 <output>
5487 <port id="0" precision="I64" names="519">
5488 <dim>4</dim>
5489 </port>
5490 </output>
5491 </layer>
5492 <layer id="356" name="__module.encoder.layer.5.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
5493 <input>
5494 <port id="0" precision="FP32">
5495 <dim>-1</dim>
5496 <dim>-1</dim>
5497 <dim>12</dim>
5498 <dim>32</dim>
5499 </port>
5500 <port id="1" precision="I64">
5501 <dim>4</dim>
5502 </port>
5503 </input>
5504 <output>
5505 <port id="2" precision="FP32" names="520">
5506 <dim>-1</dim>
5507 <dim>12</dim>
5508 <dim>-1</dim>
5509 <dim>32</dim>
5510 </port>
5511 </output>
5512 </layer>
5513 <layer id="357" name="__module.encoder.layer.5.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
5514 <data causal="false" />
5515 <input>
5516 <port id="0" precision="FP32">
5517 <dim>-1</dim>
5518 <dim>12</dim>
5519 <dim>-1</dim>
5520 <dim>32</dim>
5521 </port>
5522 <port id="1" precision="FP32">
5523 <dim>-1</dim>
5524 <dim>12</dim>
5525 <dim>-1</dim>
5526 <dim>32</dim>
5527 </port>
5528 <port id="2" precision="FP32">
5529 <dim>-1</dim>
5530 <dim>12</dim>
5531 <dim>-1</dim>
5532 <dim>32</dim>
5533 </port>
5534 <port id="3" precision="FP32">
5535 <dim>-1</dim>
5536 <dim>1</dim>
5537 <dim>-1</dim>
5538 <dim>-1</dim>
5539 </port>
5540 </input>
5541 <output>
5542 <port id="4" precision="FP32" names="521,attn_output.21">
5543 <dim>-1</dim>
5544 <dim>12</dim>
5545 <dim>-1</dim>
5546 <dim>32</dim>
5547 </port>
5548 </output>
5549 </layer>
5550 <layer id="358" name="__module.encoder.layer.5.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
5551 <data element_type="i32" shape="4" offset="386627704" size="16" />
5552 <output>
5553 <port id="0" precision="I32">
5554 <dim>4</dim>
5555 </port>
5556 </output>
5557 </layer>
5558 <layer id="359" name="__module.encoder.layer.5.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
5559 <input>
5560 <port id="0" precision="FP32">
5561 <dim>-1</dim>
5562 <dim>12</dim>
5563 <dim>-1</dim>
5564 <dim>32</dim>
5565 </port>
5566 <port id="1" precision="I32">
5567 <dim>4</dim>
5568 </port>
5569 </input>
5570 <output>
5571 <port id="2" precision="FP32" names="522,attn_output.23">
5572 <dim>-1</dim>
5573 <dim>-1</dim>
5574 <dim>12</dim>
5575 <dim>32</dim>
5576 </port>
5577 </output>
5578 </layer>
5579 <layer id="360" name="Constant_6107619" type="Const" version="opset1">
5580 <data element_type="i64" shape="3" offset="386627720" size="24" />
5581 <output>
5582 <port id="0" precision="I64">
5583 <dim>3</dim>
5584 </port>
5585 </output>
5586 </layer>
5587 <layer id="361" name="__module.encoder.layer.5.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
5588 <data special_zero="true" />
5589 <input>
5590 <port id="0" precision="FP32">
5591 <dim>-1</dim>
5592 <dim>-1</dim>
5593 <dim>12</dim>
5594 <dim>32</dim>
5595 </port>
5596 <port id="1" precision="I64">
5597 <dim>3</dim>
5598 </port>
5599 </input>
5600 <output>
5601 <port id="2" precision="FP32" names="524">
5602 <dim>-1</dim>
5603 <dim>-1</dim>
5604 <dim>384</dim>
5605 </port>
5606 </output>
5607 </layer>
5608 <layer id="362" name="self.encoder.layer.5.attention.output.dense.weight" type="Const" version="opset1">
5609 <data element_type="f32" shape="384, 384" offset="422117024" size="589824" />
5610 <output>
5611 <port id="0" precision="FP32" names="self.encoder.layer.5.attention.output.dense.weight">
5612 <dim>384</dim>
5613 <dim>384</dim>
5614 </port>
5615 </output>
5616 </layer>
5617 <layer id="363" name="__module.encoder.layer.5.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
5618 <data transpose_a="false" transpose_b="true" />
5619 <input>
5620 <port id="0" precision="FP32">
5621 <dim>-1</dim>
5622 <dim>-1</dim>
5623 <dim>384</dim>
5624 </port>
5625 <port id="1" precision="FP32">
5626 <dim>384</dim>
5627 <dim>384</dim>
5628 </port>
5629 </input>
5630 <output>
5631 <port id="2" precision="FP32">
5632 <dim>-1</dim>
5633 <dim>-1</dim>
5634 <dim>384</dim>
5635 </port>
5636 </output>
5637 </layer>
5638 <layer id="364" name="Constant_6107425" type="Const" version="opset1">
5639 <data element_type="f32" shape="1, 1, 384" offset="422706848" size="1536" />
5640 <output>
5641 <port id="0" precision="FP32">
5642 <dim>1</dim>
5643 <dim>1</dim>
5644 <dim>384</dim>
5645 </port>
5646 </output>
5647 </layer>
5648 <layer id="365" name="__module.encoder.layer.5.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
5649 <data auto_broadcast="numpy" />
5650 <input>
5651 <port id="0" precision="FP32">
5652 <dim>-1</dim>
5653 <dim>-1</dim>
5654 <dim>384</dim>
5655 </port>
5656 <port id="1" precision="FP32">
5657 <dim>1</dim>
5658 <dim>1</dim>
5659 <dim>384</dim>
5660 </port>
5661 </input>
5662 <output>
5663 <port id="2" precision="FP32" names="530,input.23">
5664 <dim>-1</dim>
5665 <dim>-1</dim>
5666 <dim>384</dim>
5667 </port>
5668 </output>
5669 </layer>
5670 <layer id="366" name="__module.encoder.layer.5.attention.output/aten::add/Add" type="Add" version="opset1">
5671 <data auto_broadcast="numpy" />
5672 <input>
5673 <port id="0" precision="FP32">
5674 <dim>-1</dim>
5675 <dim>-1</dim>
5676 <dim>384</dim>
5677 </port>
5678 <port id="1" precision="FP32">
5679 <dim>-1</dim>
5680 <dim>-1</dim>
5681 <dim>384</dim>
5682 </port>
5683 </input>
5684 <output>
5685 <port id="2" precision="FP32" names="532">
5686 <dim>-1</dim>
5687 <dim>-1</dim>
5688 <dim>384</dim>
5689 </port>
5690 </output>
5691 </layer>
5692 <layer id="367" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
5693 <data element_type="i32" shape="1" offset="384850452" size="4" />
5694 <output>
5695 <port id="0" precision="I32">
5696 <dim>1</dim>
5697 </port>
5698 </output>
5699 </layer>
5700 <layer id="368" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
5701 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
5702 <input>
5703 <port id="0" precision="FP32">
5704 <dim>-1</dim>
5705 <dim>-1</dim>
5706 <dim>384</dim>
5707 </port>
5708 <port id="1" precision="I32">
5709 <dim>1</dim>
5710 </port>
5711 </input>
5712 <output>
5713 <port id="2" precision="FP32">
5714 <dim>-1</dim>
5715 <dim>-1</dim>
5716 <dim>384</dim>
5717 </port>
5718 </output>
5719 </layer>
5720 <layer id="369" name="Constant_6107426" type="Const" version="opset1">
5721 <data element_type="f32" shape="1, 1, 384" offset="422708384" size="1536" />
5722 <output>
5723 <port id="0" precision="FP32">
5724 <dim>1</dim>
5725 <dim>1</dim>
5726 <dim>384</dim>
5727 </port>
5728 </output>
5729 </layer>
5730 <layer id="370" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
5731 <data auto_broadcast="numpy" />
5732 <input>
5733 <port id="0" precision="FP32">
5734 <dim>-1</dim>
5735 <dim>-1</dim>
5736 <dim>384</dim>
5737 </port>
5738 <port id="1" precision="FP32">
5739 <dim>1</dim>
5740 <dim>1</dim>
5741 <dim>384</dim>
5742 </port>
5743 </input>
5744 <output>
5745 <port id="2" precision="FP32">
5746 <dim>-1</dim>
5747 <dim>-1</dim>
5748 <dim>384</dim>
5749 </port>
5750 </output>
5751 </layer>
5752 <layer id="371" name="Constant_6107427" type="Const" version="opset1">
5753 <data element_type="f32" shape="1, 1, 384" offset="422709920" size="1536" />
5754 <output>
5755 <port id="0" precision="FP32">
5756 <dim>1</dim>
5757 <dim>1</dim>
5758 <dim>384</dim>
5759 </port>
5760 </output>
5761 </layer>
5762 <layer id="372" name="__module.encoder.layer.5.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
5763 <data auto_broadcast="numpy" />
5764 <input>
5765 <port id="0" precision="FP32">
5766 <dim>-1</dim>
5767 <dim>-1</dim>
5768 <dim>384</dim>
5769 </port>
5770 <port id="1" precision="FP32">
5771 <dim>1</dim>
5772 <dim>1</dim>
5773 <dim>384</dim>
5774 </port>
5775 </input>
5776 <output>
5777 <port id="2" precision="FP32" names="536,input_tensor.11">
5778 <dim>-1</dim>
5779 <dim>-1</dim>
5780 <dim>384</dim>
5781 </port>
5782 </output>
5783 </layer>
5784 <layer id="373" name="self.encoder.layer.5.intermediate.dense.weight" type="Const" version="opset1">
5785 <data element_type="f32" shape="1536, 384" offset="422711456" size="2359296" />
5786 <output>
5787 <port id="0" precision="FP32" names="self.encoder.layer.5.intermediate.dense.weight">
5788 <dim>1536</dim>
5789 <dim>384</dim>
5790 </port>
5791 </output>
5792 </layer>
5793 <layer id="374" name="__module.encoder.layer.5.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
5794 <data transpose_a="false" transpose_b="true" />
5795 <input>
5796 <port id="0" precision="FP32">
5797 <dim>-1</dim>
5798 <dim>-1</dim>
5799 <dim>384</dim>
5800 </port>
5801 <port id="1" precision="FP32">
5802 <dim>1536</dim>
5803 <dim>384</dim>
5804 </port>
5805 </input>
5806 <output>
5807 <port id="2" precision="FP32">
5808 <dim>-1</dim>
5809 <dim>-1</dim>
5810 <dim>1536</dim>
5811 </port>
5812 </output>
5813 </layer>
5814 <layer id="375" name="Constant_6107428" type="Const" version="opset1">
5815 <data element_type="f32" shape="1, 1, 1536" offset="425070752" size="6144" />
5816 <output>
5817 <port id="0" precision="FP32">
5818 <dim>1</dim>
5819 <dim>1</dim>
5820 <dim>1536</dim>
5821 </port>
5822 </output>
5823 </layer>
5824 <layer id="376" name="__module.encoder.layer.5.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
5825 <data auto_broadcast="numpy" />
5826 <input>
5827 <port id="0" precision="FP32">
5828 <dim>-1</dim>
5829 <dim>-1</dim>
5830 <dim>1536</dim>
5831 </port>
5832 <port id="1" precision="FP32">
5833 <dim>1</dim>
5834 <dim>1</dim>
5835 <dim>1536</dim>
5836 </port>
5837 </input>
5838 <output>
5839 <port id="2" precision="FP32" names="541">
5840 <dim>-1</dim>
5841 <dim>-1</dim>
5842 <dim>1536</dim>
5843 </port>
5844 </output>
5845 </layer>
5846 <layer id="377" name="__module.encoder.layer.5.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
5847 <data approximation_mode="ERF" />
5848 <input>
5849 <port id="0" precision="FP32">
5850 <dim>-1</dim>
5851 <dim>-1</dim>
5852 <dim>1536</dim>
5853 </port>
5854 </input>
5855 <output>
5856 <port id="1" precision="FP32" names="542">
5857 <dim>-1</dim>
5858 <dim>-1</dim>
5859 <dim>1536</dim>
5860 </port>
5861 </output>
5862 </layer>
5863 <layer id="378" name="self.encoder.layer.5.output.dense.weight" type="Const" version="opset1">
5864 <data element_type="f32" shape="384, 1536" offset="425076896" size="2359296" />
5865 <output>
5866 <port id="0" precision="FP32" names="self.encoder.layer.5.output.dense.weight">
5867 <dim>384</dim>
5868 <dim>1536</dim>
5869 </port>
5870 </output>
5871 </layer>
5872 <layer id="379" name="__module.encoder.layer.5.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
5873 <data transpose_a="false" transpose_b="true" />
5874 <input>
5875 <port id="0" precision="FP32">
5876 <dim>-1</dim>
5877 <dim>-1</dim>
5878 <dim>1536</dim>
5879 </port>
5880 <port id="1" precision="FP32">
5881 <dim>384</dim>
5882 <dim>1536</dim>
5883 </port>
5884 </input>
5885 <output>
5886 <port id="2" precision="FP32">
5887 <dim>-1</dim>
5888 <dim>-1</dim>
5889 <dim>384</dim>
5890 </port>
5891 </output>
5892 </layer>
5893 <layer id="380" name="Constant_6107429" type="Const" version="opset1">
5894 <data element_type="f32" shape="1, 1, 384" offset="427436192" size="1536" />
5895 <output>
5896 <port id="0" precision="FP32">
5897 <dim>1</dim>
5898 <dim>1</dim>
5899 <dim>384</dim>
5900 </port>
5901 </output>
5902 </layer>
5903 <layer id="381" name="__module.encoder.layer.5.output.dense/aten::linear/Add" type="Add" version="opset1">
5904 <data auto_broadcast="numpy" />
5905 <input>
5906 <port id="0" precision="FP32">
5907 <dim>-1</dim>
5908 <dim>-1</dim>
5909 <dim>384</dim>
5910 </port>
5911 <port id="1" precision="FP32">
5912 <dim>1</dim>
5913 <dim>1</dim>
5914 <dim>384</dim>
5915 </port>
5916 </input>
5917 <output>
5918 <port id="2" precision="FP32" names="548,input.25">
5919 <dim>-1</dim>
5920 <dim>-1</dim>
5921 <dim>384</dim>
5922 </port>
5923 </output>
5924 </layer>
5925 <layer id="382" name="__module.encoder.layer.5.output/aten::add/Add" type="Add" version="opset1">
5926 <data auto_broadcast="numpy" />
5927 <input>
5928 <port id="0" precision="FP32">
5929 <dim>-1</dim>
5930 <dim>-1</dim>
5931 <dim>384</dim>
5932 </port>
5933 <port id="1" precision="FP32">
5934 <dim>-1</dim>
5935 <dim>-1</dim>
5936 <dim>384</dim>
5937 </port>
5938 </input>
5939 <output>
5940 <port id="2" precision="FP32" names="550">
5941 <dim>-1</dim>
5942 <dim>-1</dim>
5943 <dim>384</dim>
5944 </port>
5945 </output>
5946 </layer>
5947 <layer id="383" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
5948 <data element_type="i32" shape="1" offset="384850452" size="4" />
5949 <output>
5950 <port id="0" precision="I32">
5951 <dim>1</dim>
5952 </port>
5953 </output>
5954 </layer>
5955 <layer id="384" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
5956 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
5957 <input>
5958 <port id="0" precision="FP32">
5959 <dim>-1</dim>
5960 <dim>-1</dim>
5961 <dim>384</dim>
5962 </port>
5963 <port id="1" precision="I32">
5964 <dim>1</dim>
5965 </port>
5966 </input>
5967 <output>
5968 <port id="2" precision="FP32">
5969 <dim>-1</dim>
5970 <dim>-1</dim>
5971 <dim>384</dim>
5972 </port>
5973 </output>
5974 </layer>
5975 <layer id="385" name="Constant_6107430" type="Const" version="opset1">
5976 <data element_type="f32" shape="1, 1, 384" offset="427437728" size="1536" />
5977 <output>
5978 <port id="0" precision="FP32">
5979 <dim>1</dim>
5980 <dim>1</dim>
5981 <dim>384</dim>
5982 </port>
5983 </output>
5984 </layer>
5985 <layer id="386" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
5986 <data auto_broadcast="numpy" />
5987 <input>
5988 <port id="0" precision="FP32">
5989 <dim>-1</dim>
5990 <dim>-1</dim>
5991 <dim>384</dim>
5992 </port>
5993 <port id="1" precision="FP32">
5994 <dim>1</dim>
5995 <dim>1</dim>
5996 <dim>384</dim>
5997 </port>
5998 </input>
5999 <output>
6000 <port id="2" precision="FP32">
6001 <dim>-1</dim>
6002 <dim>-1</dim>
6003 <dim>384</dim>
6004 </port>
6005 </output>
6006 </layer>
6007 <layer id="387" name="Constant_6107431" type="Const" version="opset1">
6008 <data element_type="f32" shape="1, 1, 384" offset="427439264" size="1536" />
6009 <output>
6010 <port id="0" precision="FP32">
6011 <dim>1</dim>
6012 <dim>1</dim>
6013 <dim>384</dim>
6014 </port>
6015 </output>
6016 </layer>
6017 <layer id="388" name="__module.encoder.layer.5.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
6018 <data auto_broadcast="numpy" />
6019 <input>
6020 <port id="0" precision="FP32">
6021 <dim>-1</dim>
6022 <dim>-1</dim>
6023 <dim>384</dim>
6024 </port>
6025 <port id="1" precision="FP32">
6026 <dim>1</dim>
6027 <dim>1</dim>
6028 <dim>384</dim>
6029 </port>
6030 </input>
6031 <output>
6032 <port id="2" precision="FP32" names="554,hidden_states.37">
6033 <dim>-1</dim>
6034 <dim>-1</dim>
6035 <dim>384</dim>
6036 </port>
6037 </output>
6038 </layer>
6039 <layer id="389" name="self.encoder.layer.6.attention.self.query.weight" type="Const" version="opset1">
6040 <data element_type="f32" shape="384, 384" offset="427440800" size="589824" />
6041 <output>
6042 <port id="0" precision="FP32" names="self.encoder.layer.6.attention.self.query.weight">
6043 <dim>384</dim>
6044 <dim>384</dim>
6045 </port>
6046 </output>
6047 </layer>
6048 <layer id="390" name="__module.encoder.layer.6.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
6049 <data transpose_a="false" transpose_b="true" />
6050 <input>
6051 <port id="0" precision="FP32">
6052 <dim>-1</dim>
6053 <dim>-1</dim>
6054 <dim>384</dim>
6055 </port>
6056 <port id="1" precision="FP32">
6057 <dim>384</dim>
6058 <dim>384</dim>
6059 </port>
6060 </input>
6061 <output>
6062 <port id="2" precision="FP32">
6063 <dim>-1</dim>
6064 <dim>-1</dim>
6065 <dim>384</dim>
6066 </port>
6067 </output>
6068 </layer>
6069 <layer id="391" name="Constant_6107432" type="Const" version="opset1">
6070 <data element_type="f32" shape="1, 1, 384" offset="428030624" size="1536" />
6071 <output>
6072 <port id="0" precision="FP32">
6073 <dim>1</dim>
6074 <dim>1</dim>
6075 <dim>384</dim>
6076 </port>
6077 </output>
6078 </layer>
6079 <layer id="392" name="__module.encoder.layer.6.attention.self.query/aten::linear/Add" type="Add" version="opset1">
6080 <data auto_broadcast="numpy" />
6081 <input>
6082 <port id="0" precision="FP32">
6083 <dim>-1</dim>
6084 <dim>-1</dim>
6085 <dim>384</dim>
6086 </port>
6087 <port id="1" precision="FP32">
6088 <dim>1</dim>
6089 <dim>1</dim>
6090 <dim>384</dim>
6091 </port>
6092 </input>
6093 <output>
6094 <port id="2" precision="FP32" names="567,x.73">
6095 <dim>-1</dim>
6096 <dim>-1</dim>
6097 <dim>384</dim>
6098 </port>
6099 </output>
6100 </layer>
6101 <layer id="393" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
6102 <data element_type="i64" shape="4" offset="385444888" size="32" />
6103 <output>
6104 <port id="0" precision="I64">
6105 <dim>4</dim>
6106 </port>
6107 </output>
6108 </layer>
6109 <layer id="394" name="__module.encoder.layer.6.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
6110 <data special_zero="true" />
6111 <input>
6112 <port id="0" precision="FP32">
6113 <dim>-1</dim>
6114 <dim>-1</dim>
6115 <dim>384</dim>
6116 </port>
6117 <port id="1" precision="I64">
6118 <dim>4</dim>
6119 </port>
6120 </input>
6121 <output>
6122 <port id="2" precision="FP32" names="571,x.75">
6123 <dim>-1</dim>
6124 <dim>-1</dim>
6125 <dim>12</dim>
6126 <dim>32</dim>
6127 </port>
6128 </output>
6129 </layer>
6130 <layer id="395" name="Constant_6099609" type="Const" version="opset1">
6131 <data element_type="i64" shape="4" offset="385444920" size="32" />
6132 <output>
6133 <port id="0" precision="I64" names="572">
6134 <dim>4</dim>
6135 </port>
6136 </output>
6137 </layer>
6138 <layer id="396" name="__module.encoder.layer.6.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
6139 <input>
6140 <port id="0" precision="FP32">
6141 <dim>-1</dim>
6142 <dim>-1</dim>
6143 <dim>12</dim>
6144 <dim>32</dim>
6145 </port>
6146 <port id="1" precision="I64">
6147 <dim>4</dim>
6148 </port>
6149 </input>
6150 <output>
6151 <port id="2" precision="FP32" names="573">
6152 <dim>-1</dim>
6153 <dim>12</dim>
6154 <dim>-1</dim>
6155 <dim>32</dim>
6156 </port>
6157 </output>
6158 </layer>
6159 <layer id="397" name="self.encoder.layer.6.attention.self.key.weight" type="Const" version="opset1">
6160 <data element_type="f32" shape="384, 384" offset="428032160" size="589824" />
6161 <output>
6162 <port id="0" precision="FP32" names="self.encoder.layer.6.attention.self.key.weight">
6163 <dim>384</dim>
6164 <dim>384</dim>
6165 </port>
6166 </output>
6167 </layer>
6168 <layer id="398" name="__module.encoder.layer.6.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
6169 <data transpose_a="false" transpose_b="true" />
6170 <input>
6171 <port id="0" precision="FP32">
6172 <dim>-1</dim>
6173 <dim>-1</dim>
6174 <dim>384</dim>
6175 </port>
6176 <port id="1" precision="FP32">
6177 <dim>384</dim>
6178 <dim>384</dim>
6179 </port>
6180 </input>
6181 <output>
6182 <port id="2" precision="FP32">
6183 <dim>-1</dim>
6184 <dim>-1</dim>
6185 <dim>384</dim>
6186 </port>
6187 </output>
6188 </layer>
6189 <layer id="399" name="Constant_6107433" type="Const" version="opset1">
6190 <data element_type="f32" shape="1, 1, 384" offset="428621984" size="1536" />
6191 <output>
6192 <port id="0" precision="FP32">
6193 <dim>1</dim>
6194 <dim>1</dim>
6195 <dim>384</dim>
6196 </port>
6197 </output>
6198 </layer>
6199 <layer id="400" name="__module.encoder.layer.6.attention.self.key/aten::linear/Add" type="Add" version="opset1">
6200 <data auto_broadcast="numpy" />
6201 <input>
6202 <port id="0" precision="FP32">
6203 <dim>-1</dim>
6204 <dim>-1</dim>
6205 <dim>384</dim>
6206 </port>
6207 <port id="1" precision="FP32">
6208 <dim>1</dim>
6209 <dim>1</dim>
6210 <dim>384</dim>
6211 </port>
6212 </input>
6213 <output>
6214 <port id="2" precision="FP32" names="576,x.77">
6215 <dim>-1</dim>
6216 <dim>-1</dim>
6217 <dim>384</dim>
6218 </port>
6219 </output>
6220 </layer>
6221 <layer id="401" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
6222 <data element_type="i64" shape="4" offset="385444888" size="32" />
6223 <output>
6224 <port id="0" precision="I64">
6225 <dim>4</dim>
6226 </port>
6227 </output>
6228 </layer>
6229 <layer id="402" name="__module.encoder.layer.6.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
6230 <data special_zero="true" />
6231 <input>
6232 <port id="0" precision="FP32">
6233 <dim>-1</dim>
6234 <dim>-1</dim>
6235 <dim>384</dim>
6236 </port>
6237 <port id="1" precision="I64">
6238 <dim>4</dim>
6239 </port>
6240 </input>
6241 <output>
6242 <port id="2" precision="FP32" names="580,x.79">
6243 <dim>-1</dim>
6244 <dim>-1</dim>
6245 <dim>12</dim>
6246 <dim>32</dim>
6247 </port>
6248 </output>
6249 </layer>
6250 <layer id="403" name="Constant_6099632" type="Const" version="opset1">
6251 <data element_type="i64" shape="4" offset="385444920" size="32" />
6252 <output>
6253 <port id="0" precision="I64" names="581">
6254 <dim>4</dim>
6255 </port>
6256 </output>
6257 </layer>
6258 <layer id="404" name="__module.encoder.layer.6.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
6259 <input>
6260 <port id="0" precision="FP32">
6261 <dim>-1</dim>
6262 <dim>-1</dim>
6263 <dim>12</dim>
6264 <dim>32</dim>
6265 </port>
6266 <port id="1" precision="I64">
6267 <dim>4</dim>
6268 </port>
6269 </input>
6270 <output>
6271 <port id="2" precision="FP32" names="582">
6272 <dim>-1</dim>
6273 <dim>12</dim>
6274 <dim>-1</dim>
6275 <dim>32</dim>
6276 </port>
6277 </output>
6278 </layer>
6279 <layer id="405" name="self.encoder.layer.6.attention.self.value.weight" type="Const" version="opset1">
6280 <data element_type="f32" shape="384, 384" offset="428623520" size="589824" />
6281 <output>
6282 <port id="0" precision="FP32" names="self.encoder.layer.6.attention.self.value.weight">
6283 <dim>384</dim>
6284 <dim>384</dim>
6285 </port>
6286 </output>
6287 </layer>
6288 <layer id="406" name="__module.encoder.layer.6.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
6289 <data transpose_a="false" transpose_b="true" />
6290 <input>
6291 <port id="0" precision="FP32">
6292 <dim>-1</dim>
6293 <dim>-1</dim>
6294 <dim>384</dim>
6295 </port>
6296 <port id="1" precision="FP32">
6297 <dim>384</dim>
6298 <dim>384</dim>
6299 </port>
6300 </input>
6301 <output>
6302 <port id="2" precision="FP32">
6303 <dim>-1</dim>
6304 <dim>-1</dim>
6305 <dim>384</dim>
6306 </port>
6307 </output>
6308 </layer>
6309 <layer id="407" name="Constant_6107434" type="Const" version="opset1">
6310 <data element_type="f32" shape="1, 1, 384" offset="429213344" size="1536" />
6311 <output>
6312 <port id="0" precision="FP32">
6313 <dim>1</dim>
6314 <dim>1</dim>
6315 <dim>384</dim>
6316 </port>
6317 </output>
6318 </layer>
6319 <layer id="408" name="__module.encoder.layer.6.attention.self.value/aten::linear/Add" type="Add" version="opset1">
6320 <data auto_broadcast="numpy" />
6321 <input>
6322 <port id="0" precision="FP32">
6323 <dim>-1</dim>
6324 <dim>-1</dim>
6325 <dim>384</dim>
6326 </port>
6327 <port id="1" precision="FP32">
6328 <dim>1</dim>
6329 <dim>1</dim>
6330 <dim>384</dim>
6331 </port>
6332 </input>
6333 <output>
6334 <port id="2" precision="FP32" names="585,x.81">
6335 <dim>-1</dim>
6336 <dim>-1</dim>
6337 <dim>384</dim>
6338 </port>
6339 </output>
6340 </layer>
6341 <layer id="409" name="__module.encoder.layer.6.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
6342 <data element_type="i64" shape="4" offset="385444888" size="32" />
6343 <output>
6344 <port id="0" precision="I64">
6345 <dim>4</dim>
6346 </port>
6347 </output>
6348 </layer>
6349 <layer id="410" name="__module.encoder.layer.6.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
6350 <data special_zero="true" />
6351 <input>
6352 <port id="0" precision="FP32">
6353 <dim>-1</dim>
6354 <dim>-1</dim>
6355 <dim>384</dim>
6356 </port>
6357 <port id="1" precision="I64">
6358 <dim>4</dim>
6359 </port>
6360 </input>
6361 <output>
6362 <port id="2" precision="FP32" names="589,x.83">
6363 <dim>-1</dim>
6364 <dim>-1</dim>
6365 <dim>12</dim>
6366 <dim>32</dim>
6367 </port>
6368 </output>
6369 </layer>
6370 <layer id="411" name="Constant_6099655" type="Const" version="opset1">
6371 <data element_type="i64" shape="4" offset="385444920" size="32" />
6372 <output>
6373 <port id="0" precision="I64" names="590">
6374 <dim>4</dim>
6375 </port>
6376 </output>
6377 </layer>
6378 <layer id="412" name="__module.encoder.layer.6.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
6379 <input>
6380 <port id="0" precision="FP32">
6381 <dim>-1</dim>
6382 <dim>-1</dim>
6383 <dim>12</dim>
6384 <dim>32</dim>
6385 </port>
6386 <port id="1" precision="I64">
6387 <dim>4</dim>
6388 </port>
6389 </input>
6390 <output>
6391 <port id="2" precision="FP32" names="591">
6392 <dim>-1</dim>
6393 <dim>12</dim>
6394 <dim>-1</dim>
6395 <dim>32</dim>
6396 </port>
6397 </output>
6398 </layer>
6399 <layer id="413" name="__module.encoder.layer.6.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
6400 <data causal="false" />
6401 <input>
6402 <port id="0" precision="FP32">
6403 <dim>-1</dim>
6404 <dim>12</dim>
6405 <dim>-1</dim>
6406 <dim>32</dim>
6407 </port>
6408 <port id="1" precision="FP32">
6409 <dim>-1</dim>
6410 <dim>12</dim>
6411 <dim>-1</dim>
6412 <dim>32</dim>
6413 </port>
6414 <port id="2" precision="FP32">
6415 <dim>-1</dim>
6416 <dim>12</dim>
6417 <dim>-1</dim>
6418 <dim>32</dim>
6419 </port>
6420 <port id="3" precision="FP32">
6421 <dim>-1</dim>
6422 <dim>1</dim>
6423 <dim>-1</dim>
6424 <dim>-1</dim>
6425 </port>
6426 </input>
6427 <output>
6428 <port id="4" precision="FP32" names="592,attn_output.25">
6429 <dim>-1</dim>
6430 <dim>12</dim>
6431 <dim>-1</dim>
6432 <dim>32</dim>
6433 </port>
6434 </output>
6435 </layer>
6436 <layer id="414" name="__module.encoder.layer.6.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
6437 <data element_type="i32" shape="4" offset="386627704" size="16" />
6438 <output>
6439 <port id="0" precision="I32">
6440 <dim>4</dim>
6441 </port>
6442 </output>
6443 </layer>
6444 <layer id="415" name="__module.encoder.layer.6.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
6445 <input>
6446 <port id="0" precision="FP32">
6447 <dim>-1</dim>
6448 <dim>12</dim>
6449 <dim>-1</dim>
6450 <dim>32</dim>
6451 </port>
6452 <port id="1" precision="I32">
6453 <dim>4</dim>
6454 </port>
6455 </input>
6456 <output>
6457 <port id="2" precision="FP32" names="593,attn_output.27">
6458 <dim>-1</dim>
6459 <dim>-1</dim>
6460 <dim>12</dim>
6461 <dim>32</dim>
6462 </port>
6463 </output>
6464 </layer>
6465 <layer id="416" name="Constant_6107620" type="Const" version="opset1">
6466 <data element_type="i64" shape="3" offset="386627720" size="24" />
6467 <output>
6468 <port id="0" precision="I64">
6469 <dim>3</dim>
6470 </port>
6471 </output>
6472 </layer>
6473 <layer id="417" name="__module.encoder.layer.6.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
6474 <data special_zero="true" />
6475 <input>
6476 <port id="0" precision="FP32">
6477 <dim>-1</dim>
6478 <dim>-1</dim>
6479 <dim>12</dim>
6480 <dim>32</dim>
6481 </port>
6482 <port id="1" precision="I64">
6483 <dim>3</dim>
6484 </port>
6485 </input>
6486 <output>
6487 <port id="2" precision="FP32" names="595">
6488 <dim>-1</dim>
6489 <dim>-1</dim>
6490 <dim>384</dim>
6491 </port>
6492 </output>
6493 </layer>
6494 <layer id="418" name="self.encoder.layer.6.attention.output.dense.weight" type="Const" version="opset1">
6495 <data element_type="f32" shape="384, 384" offset="429214880" size="589824" />
6496 <output>
6497 <port id="0" precision="FP32" names="self.encoder.layer.6.attention.output.dense.weight">
6498 <dim>384</dim>
6499 <dim>384</dim>
6500 </port>
6501 </output>
6502 </layer>
6503 <layer id="419" name="__module.encoder.layer.6.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
6504 <data transpose_a="false" transpose_b="true" />
6505 <input>
6506 <port id="0" precision="FP32">
6507 <dim>-1</dim>
6508 <dim>-1</dim>
6509 <dim>384</dim>
6510 </port>
6511 <port id="1" precision="FP32">
6512 <dim>384</dim>
6513 <dim>384</dim>
6514 </port>
6515 </input>
6516 <output>
6517 <port id="2" precision="FP32">
6518 <dim>-1</dim>
6519 <dim>-1</dim>
6520 <dim>384</dim>
6521 </port>
6522 </output>
6523 </layer>
6524 <layer id="420" name="Constant_6107435" type="Const" version="opset1">
6525 <data element_type="f32" shape="1, 1, 384" offset="429804704" size="1536" />
6526 <output>
6527 <port id="0" precision="FP32">
6528 <dim>1</dim>
6529 <dim>1</dim>
6530 <dim>384</dim>
6531 </port>
6532 </output>
6533 </layer>
6534 <layer id="421" name="__module.encoder.layer.6.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
6535 <data auto_broadcast="numpy" />
6536 <input>
6537 <port id="0" precision="FP32">
6538 <dim>-1</dim>
6539 <dim>-1</dim>
6540 <dim>384</dim>
6541 </port>
6542 <port id="1" precision="FP32">
6543 <dim>1</dim>
6544 <dim>1</dim>
6545 <dim>384</dim>
6546 </port>
6547 </input>
6548 <output>
6549 <port id="2" precision="FP32" names="601,input.27">
6550 <dim>-1</dim>
6551 <dim>-1</dim>
6552 <dim>384</dim>
6553 </port>
6554 </output>
6555 </layer>
6556 <layer id="422" name="__module.encoder.layer.6.attention.output/aten::add/Add" type="Add" version="opset1">
6557 <data auto_broadcast="numpy" />
6558 <input>
6559 <port id="0" precision="FP32">
6560 <dim>-1</dim>
6561 <dim>-1</dim>
6562 <dim>384</dim>
6563 </port>
6564 <port id="1" precision="FP32">
6565 <dim>-1</dim>
6566 <dim>-1</dim>
6567 <dim>384</dim>
6568 </port>
6569 </input>
6570 <output>
6571 <port id="2" precision="FP32" names="603">
6572 <dim>-1</dim>
6573 <dim>-1</dim>
6574 <dim>384</dim>
6575 </port>
6576 </output>
6577 </layer>
6578 <layer id="423" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
6579 <data element_type="i32" shape="1" offset="384850452" size="4" />
6580 <output>
6581 <port id="0" precision="I32">
6582 <dim>1</dim>
6583 </port>
6584 </output>
6585 </layer>
6586 <layer id="424" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
6587 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
6588 <input>
6589 <port id="0" precision="FP32">
6590 <dim>-1</dim>
6591 <dim>-1</dim>
6592 <dim>384</dim>
6593 </port>
6594 <port id="1" precision="I32">
6595 <dim>1</dim>
6596 </port>
6597 </input>
6598 <output>
6599 <port id="2" precision="FP32">
6600 <dim>-1</dim>
6601 <dim>-1</dim>
6602 <dim>384</dim>
6603 </port>
6604 </output>
6605 </layer>
6606 <layer id="425" name="Constant_6107436" type="Const" version="opset1">
6607 <data element_type="f32" shape="1, 1, 384" offset="429806240" size="1536" />
6608 <output>
6609 <port id="0" precision="FP32">
6610 <dim>1</dim>
6611 <dim>1</dim>
6612 <dim>384</dim>
6613 </port>
6614 </output>
6615 </layer>
6616 <layer id="426" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
6617 <data auto_broadcast="numpy" />
6618 <input>
6619 <port id="0" precision="FP32">
6620 <dim>-1</dim>
6621 <dim>-1</dim>
6622 <dim>384</dim>
6623 </port>
6624 <port id="1" precision="FP32">
6625 <dim>1</dim>
6626 <dim>1</dim>
6627 <dim>384</dim>
6628 </port>
6629 </input>
6630 <output>
6631 <port id="2" precision="FP32">
6632 <dim>-1</dim>
6633 <dim>-1</dim>
6634 <dim>384</dim>
6635 </port>
6636 </output>
6637 </layer>
6638 <layer id="427" name="Constant_6107437" type="Const" version="opset1">
6639 <data element_type="f32" shape="1, 1, 384" offset="429807776" size="1536" />
6640 <output>
6641 <port id="0" precision="FP32">
6642 <dim>1</dim>
6643 <dim>1</dim>
6644 <dim>384</dim>
6645 </port>
6646 </output>
6647 </layer>
6648 <layer id="428" name="__module.encoder.layer.6.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
6649 <data auto_broadcast="numpy" />
6650 <input>
6651 <port id="0" precision="FP32">
6652 <dim>-1</dim>
6653 <dim>-1</dim>
6654 <dim>384</dim>
6655 </port>
6656 <port id="1" precision="FP32">
6657 <dim>1</dim>
6658 <dim>1</dim>
6659 <dim>384</dim>
6660 </port>
6661 </input>
6662 <output>
6663 <port id="2" precision="FP32" names="607,input_tensor.13">
6664 <dim>-1</dim>
6665 <dim>-1</dim>
6666 <dim>384</dim>
6667 </port>
6668 </output>
6669 </layer>
6670 <layer id="429" name="self.encoder.layer.6.intermediate.dense.weight" type="Const" version="opset1">
6671 <data element_type="f32" shape="1536, 384" offset="429809312" size="2359296" />
6672 <output>
6673 <port id="0" precision="FP32" names="self.encoder.layer.6.intermediate.dense.weight">
6674 <dim>1536</dim>
6675 <dim>384</dim>
6676 </port>
6677 </output>
6678 </layer>
6679 <layer id="430" name="__module.encoder.layer.6.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
6680 <data transpose_a="false" transpose_b="true" />
6681 <input>
6682 <port id="0" precision="FP32">
6683 <dim>-1</dim>
6684 <dim>-1</dim>
6685 <dim>384</dim>
6686 </port>
6687 <port id="1" precision="FP32">
6688 <dim>1536</dim>
6689 <dim>384</dim>
6690 </port>
6691 </input>
6692 <output>
6693 <port id="2" precision="FP32">
6694 <dim>-1</dim>
6695 <dim>-1</dim>
6696 <dim>1536</dim>
6697 </port>
6698 </output>
6699 </layer>
6700 <layer id="431" name="Constant_6107438" type="Const" version="opset1">
6701 <data element_type="f32" shape="1, 1, 1536" offset="432168608" size="6144" />
6702 <output>
6703 <port id="0" precision="FP32">
6704 <dim>1</dim>
6705 <dim>1</dim>
6706 <dim>1536</dim>
6707 </port>
6708 </output>
6709 </layer>
6710 <layer id="432" name="__module.encoder.layer.6.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
6711 <data auto_broadcast="numpy" />
6712 <input>
6713 <port id="0" precision="FP32">
6714 <dim>-1</dim>
6715 <dim>-1</dim>
6716 <dim>1536</dim>
6717 </port>
6718 <port id="1" precision="FP32">
6719 <dim>1</dim>
6720 <dim>1</dim>
6721 <dim>1536</dim>
6722 </port>
6723 </input>
6724 <output>
6725 <port id="2" precision="FP32" names="612">
6726 <dim>-1</dim>
6727 <dim>-1</dim>
6728 <dim>1536</dim>
6729 </port>
6730 </output>
6731 </layer>
6732 <layer id="433" name="__module.encoder.layer.6.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
6733 <data approximation_mode="ERF" />
6734 <input>
6735 <port id="0" precision="FP32">
6736 <dim>-1</dim>
6737 <dim>-1</dim>
6738 <dim>1536</dim>
6739 </port>
6740 </input>
6741 <output>
6742 <port id="1" precision="FP32" names="613">
6743 <dim>-1</dim>
6744 <dim>-1</dim>
6745 <dim>1536</dim>
6746 </port>
6747 </output>
6748 </layer>
6749 <layer id="434" name="self.encoder.layer.6.output.dense.weight" type="Const" version="opset1">
6750 <data element_type="f32" shape="384, 1536" offset="432174752" size="2359296" />
6751 <output>
6752 <port id="0" precision="FP32" names="self.encoder.layer.6.output.dense.weight">
6753 <dim>384</dim>
6754 <dim>1536</dim>
6755 </port>
6756 </output>
6757 </layer>
6758 <layer id="435" name="__module.encoder.layer.6.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
6759 <data transpose_a="false" transpose_b="true" />
6760 <input>
6761 <port id="0" precision="FP32">
6762 <dim>-1</dim>
6763 <dim>-1</dim>
6764 <dim>1536</dim>
6765 </port>
6766 <port id="1" precision="FP32">
6767 <dim>384</dim>
6768 <dim>1536</dim>
6769 </port>
6770 </input>
6771 <output>
6772 <port id="2" precision="FP32">
6773 <dim>-1</dim>
6774 <dim>-1</dim>
6775 <dim>384</dim>
6776 </port>
6777 </output>
6778 </layer>
6779 <layer id="436" name="Constant_6107439" type="Const" version="opset1">
6780 <data element_type="f32" shape="1, 1, 384" offset="434534048" size="1536" />
6781 <output>
6782 <port id="0" precision="FP32">
6783 <dim>1</dim>
6784 <dim>1</dim>
6785 <dim>384</dim>
6786 </port>
6787 </output>
6788 </layer>
6789 <layer id="437" name="__module.encoder.layer.6.output.dense/aten::linear/Add" type="Add" version="opset1">
6790 <data auto_broadcast="numpy" />
6791 <input>
6792 <port id="0" precision="FP32">
6793 <dim>-1</dim>
6794 <dim>-1</dim>
6795 <dim>384</dim>
6796 </port>
6797 <port id="1" precision="FP32">
6798 <dim>1</dim>
6799 <dim>1</dim>
6800 <dim>384</dim>
6801 </port>
6802 </input>
6803 <output>
6804 <port id="2" precision="FP32" names="619,input.29">
6805 <dim>-1</dim>
6806 <dim>-1</dim>
6807 <dim>384</dim>
6808 </port>
6809 </output>
6810 </layer>
6811 <layer id="438" name="__module.encoder.layer.6.output/aten::add/Add" type="Add" version="opset1">
6812 <data auto_broadcast="numpy" />
6813 <input>
6814 <port id="0" precision="FP32">
6815 <dim>-1</dim>
6816 <dim>-1</dim>
6817 <dim>384</dim>
6818 </port>
6819 <port id="1" precision="FP32">
6820 <dim>-1</dim>
6821 <dim>-1</dim>
6822 <dim>384</dim>
6823 </port>
6824 </input>
6825 <output>
6826 <port id="2" precision="FP32" names="621">
6827 <dim>-1</dim>
6828 <dim>-1</dim>
6829 <dim>384</dim>
6830 </port>
6831 </output>
6832 </layer>
6833 <layer id="439" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
6834 <data element_type="i32" shape="1" offset="384850452" size="4" />
6835 <output>
6836 <port id="0" precision="I32">
6837 <dim>1</dim>
6838 </port>
6839 </output>
6840 </layer>
6841 <layer id="440" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
6842 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
6843 <input>
6844 <port id="0" precision="FP32">
6845 <dim>-1</dim>
6846 <dim>-1</dim>
6847 <dim>384</dim>
6848 </port>
6849 <port id="1" precision="I32">
6850 <dim>1</dim>
6851 </port>
6852 </input>
6853 <output>
6854 <port id="2" precision="FP32">
6855 <dim>-1</dim>
6856 <dim>-1</dim>
6857 <dim>384</dim>
6858 </port>
6859 </output>
6860 </layer>
6861 <layer id="441" name="Constant_6107440" type="Const" version="opset1">
6862 <data element_type="f32" shape="1, 1, 384" offset="434535584" size="1536" />
6863 <output>
6864 <port id="0" precision="FP32">
6865 <dim>1</dim>
6866 <dim>1</dim>
6867 <dim>384</dim>
6868 </port>
6869 </output>
6870 </layer>
6871 <layer id="442" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
6872 <data auto_broadcast="numpy" />
6873 <input>
6874 <port id="0" precision="FP32">
6875 <dim>-1</dim>
6876 <dim>-1</dim>
6877 <dim>384</dim>
6878 </port>
6879 <port id="1" precision="FP32">
6880 <dim>1</dim>
6881 <dim>1</dim>
6882 <dim>384</dim>
6883 </port>
6884 </input>
6885 <output>
6886 <port id="2" precision="FP32">
6887 <dim>-1</dim>
6888 <dim>-1</dim>
6889 <dim>384</dim>
6890 </port>
6891 </output>
6892 </layer>
6893 <layer id="443" name="Constant_6107441" type="Const" version="opset1">
6894 <data element_type="f32" shape="1, 1, 384" offset="434537120" size="1536" />
6895 <output>
6896 <port id="0" precision="FP32">
6897 <dim>1</dim>
6898 <dim>1</dim>
6899 <dim>384</dim>
6900 </port>
6901 </output>
6902 </layer>
6903 <layer id="444" name="__module.encoder.layer.6.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
6904 <data auto_broadcast="numpy" />
6905 <input>
6906 <port id="0" precision="FP32">
6907 <dim>-1</dim>
6908 <dim>-1</dim>
6909 <dim>384</dim>
6910 </port>
6911 <port id="1" precision="FP32">
6912 <dim>1</dim>
6913 <dim>1</dim>
6914 <dim>384</dim>
6915 </port>
6916 </input>
6917 <output>
6918 <port id="2" precision="FP32" names="625,hidden_states.43">
6919 <dim>-1</dim>
6920 <dim>-1</dim>
6921 <dim>384</dim>
6922 </port>
6923 </output>
6924 </layer>
6925 <layer id="445" name="self.encoder.layer.7.attention.self.query.weight" type="Const" version="opset1">
6926 <data element_type="f32" shape="384, 384" offset="434538656" size="589824" />
6927 <output>
6928 <port id="0" precision="FP32" names="self.encoder.layer.7.attention.self.query.weight">
6929 <dim>384</dim>
6930 <dim>384</dim>
6931 </port>
6932 </output>
6933 </layer>
6934 <layer id="446" name="__module.encoder.layer.7.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
6935 <data transpose_a="false" transpose_b="true" />
6936 <input>
6937 <port id="0" precision="FP32">
6938 <dim>-1</dim>
6939 <dim>-1</dim>
6940 <dim>384</dim>
6941 </port>
6942 <port id="1" precision="FP32">
6943 <dim>384</dim>
6944 <dim>384</dim>
6945 </port>
6946 </input>
6947 <output>
6948 <port id="2" precision="FP32">
6949 <dim>-1</dim>
6950 <dim>-1</dim>
6951 <dim>384</dim>
6952 </port>
6953 </output>
6954 </layer>
6955 <layer id="447" name="Constant_6107442" type="Const" version="opset1">
6956 <data element_type="f32" shape="1, 1, 384" offset="435128480" size="1536" />
6957 <output>
6958 <port id="0" precision="FP32">
6959 <dim>1</dim>
6960 <dim>1</dim>
6961 <dim>384</dim>
6962 </port>
6963 </output>
6964 </layer>
6965 <layer id="448" name="__module.encoder.layer.7.attention.self.query/aten::linear/Add" type="Add" version="opset1">
6966 <data auto_broadcast="numpy" />
6967 <input>
6968 <port id="0" precision="FP32">
6969 <dim>-1</dim>
6970 <dim>-1</dim>
6971 <dim>384</dim>
6972 </port>
6973 <port id="1" precision="FP32">
6974 <dim>1</dim>
6975 <dim>1</dim>
6976 <dim>384</dim>
6977 </port>
6978 </input>
6979 <output>
6980 <port id="2" precision="FP32" names="638,x.85">
6981 <dim>-1</dim>
6982 <dim>-1</dim>
6983 <dim>384</dim>
6984 </port>
6985 </output>
6986 </layer>
6987 <layer id="449" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
6988 <data element_type="i64" shape="4" offset="385444888" size="32" />
6989 <output>
6990 <port id="0" precision="I64">
6991 <dim>4</dim>
6992 </port>
6993 </output>
6994 </layer>
6995 <layer id="450" name="__module.encoder.layer.7.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
6996 <data special_zero="true" />
6997 <input>
6998 <port id="0" precision="FP32">
6999 <dim>-1</dim>
7000 <dim>-1</dim>
7001 <dim>384</dim>
7002 </port>
7003 <port id="1" precision="I64">
7004 <dim>4</dim>
7005 </port>
7006 </input>
7007 <output>
7008 <port id="2" precision="FP32" names="642,x.87">
7009 <dim>-1</dim>
7010 <dim>-1</dim>
7011 <dim>12</dim>
7012 <dim>32</dim>
7013 </port>
7014 </output>
7015 </layer>
7016 <layer id="451" name="Constant_6099835" type="Const" version="opset1">
7017 <data element_type="i64" shape="4" offset="385444920" size="32" />
7018 <output>
7019 <port id="0" precision="I64" names="643">
7020 <dim>4</dim>
7021 </port>
7022 </output>
7023 </layer>
7024 <layer id="452" name="__module.encoder.layer.7.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
7025 <input>
7026 <port id="0" precision="FP32">
7027 <dim>-1</dim>
7028 <dim>-1</dim>
7029 <dim>12</dim>
7030 <dim>32</dim>
7031 </port>
7032 <port id="1" precision="I64">
7033 <dim>4</dim>
7034 </port>
7035 </input>
7036 <output>
7037 <port id="2" precision="FP32" names="644">
7038 <dim>-1</dim>
7039 <dim>12</dim>
7040 <dim>-1</dim>
7041 <dim>32</dim>
7042 </port>
7043 </output>
7044 </layer>
7045 <layer id="453" name="self.encoder.layer.7.attention.self.key.weight" type="Const" version="opset1">
7046 <data element_type="f32" shape="384, 384" offset="435130016" size="589824" />
7047 <output>
7048 <port id="0" precision="FP32" names="self.encoder.layer.7.attention.self.key.weight">
7049 <dim>384</dim>
7050 <dim>384</dim>
7051 </port>
7052 </output>
7053 </layer>
7054 <layer id="454" name="__module.encoder.layer.7.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
7055 <data transpose_a="false" transpose_b="true" />
7056 <input>
7057 <port id="0" precision="FP32">
7058 <dim>-1</dim>
7059 <dim>-1</dim>
7060 <dim>384</dim>
7061 </port>
7062 <port id="1" precision="FP32">
7063 <dim>384</dim>
7064 <dim>384</dim>
7065 </port>
7066 </input>
7067 <output>
7068 <port id="2" precision="FP32">
7069 <dim>-1</dim>
7070 <dim>-1</dim>
7071 <dim>384</dim>
7072 </port>
7073 </output>
7074 </layer>
7075 <layer id="455" name="Constant_6107443" type="Const" version="opset1">
7076 <data element_type="f32" shape="1, 1, 384" offset="435719840" size="1536" />
7077 <output>
7078 <port id="0" precision="FP32">
7079 <dim>1</dim>
7080 <dim>1</dim>
7081 <dim>384</dim>
7082 </port>
7083 </output>
7084 </layer>
7085 <layer id="456" name="__module.encoder.layer.7.attention.self.key/aten::linear/Add" type="Add" version="opset1">
7086 <data auto_broadcast="numpy" />
7087 <input>
7088 <port id="0" precision="FP32">
7089 <dim>-1</dim>
7090 <dim>-1</dim>
7091 <dim>384</dim>
7092 </port>
7093 <port id="1" precision="FP32">
7094 <dim>1</dim>
7095 <dim>1</dim>
7096 <dim>384</dim>
7097 </port>
7098 </input>
7099 <output>
7100 <port id="2" precision="FP32" names="647,x.89">
7101 <dim>-1</dim>
7102 <dim>-1</dim>
7103 <dim>384</dim>
7104 </port>
7105 </output>
7106 </layer>
7107 <layer id="457" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
7108 <data element_type="i64" shape="4" offset="385444888" size="32" />
7109 <output>
7110 <port id="0" precision="I64">
7111 <dim>4</dim>
7112 </port>
7113 </output>
7114 </layer>
7115 <layer id="458" name="__module.encoder.layer.7.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
7116 <data special_zero="true" />
7117 <input>
7118 <port id="0" precision="FP32">
7119 <dim>-1</dim>
7120 <dim>-1</dim>
7121 <dim>384</dim>
7122 </port>
7123 <port id="1" precision="I64">
7124 <dim>4</dim>
7125 </port>
7126 </input>
7127 <output>
7128 <port id="2" precision="FP32" names="651,x.91">
7129 <dim>-1</dim>
7130 <dim>-1</dim>
7131 <dim>12</dim>
7132 <dim>32</dim>
7133 </port>
7134 </output>
7135 </layer>
7136 <layer id="459" name="Constant_6099858" type="Const" version="opset1">
7137 <data element_type="i64" shape="4" offset="385444920" size="32" />
7138 <output>
7139 <port id="0" precision="I64" names="652">
7140 <dim>4</dim>
7141 </port>
7142 </output>
7143 </layer>
7144 <layer id="460" name="__module.encoder.layer.7.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
7145 <input>
7146 <port id="0" precision="FP32">
7147 <dim>-1</dim>
7148 <dim>-1</dim>
7149 <dim>12</dim>
7150 <dim>32</dim>
7151 </port>
7152 <port id="1" precision="I64">
7153 <dim>4</dim>
7154 </port>
7155 </input>
7156 <output>
7157 <port id="2" precision="FP32" names="653">
7158 <dim>-1</dim>
7159 <dim>12</dim>
7160 <dim>-1</dim>
7161 <dim>32</dim>
7162 </port>
7163 </output>
7164 </layer>
7165 <layer id="461" name="self.encoder.layer.7.attention.self.value.weight" type="Const" version="opset1">
7166 <data element_type="f32" shape="384, 384" offset="435721376" size="589824" />
7167 <output>
7168 <port id="0" precision="FP32" names="self.encoder.layer.7.attention.self.value.weight">
7169 <dim>384</dim>
7170 <dim>384</dim>
7171 </port>
7172 </output>
7173 </layer>
7174 <layer id="462" name="__module.encoder.layer.7.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
7175 <data transpose_a="false" transpose_b="true" />
7176 <input>
7177 <port id="0" precision="FP32">
7178 <dim>-1</dim>
7179 <dim>-1</dim>
7180 <dim>384</dim>
7181 </port>
7182 <port id="1" precision="FP32">
7183 <dim>384</dim>
7184 <dim>384</dim>
7185 </port>
7186 </input>
7187 <output>
7188 <port id="2" precision="FP32">
7189 <dim>-1</dim>
7190 <dim>-1</dim>
7191 <dim>384</dim>
7192 </port>
7193 </output>
7194 </layer>
7195 <layer id="463" name="Constant_6107444" type="Const" version="opset1">
7196 <data element_type="f32" shape="1, 1, 384" offset="436311200" size="1536" />
7197 <output>
7198 <port id="0" precision="FP32">
7199 <dim>1</dim>
7200 <dim>1</dim>
7201 <dim>384</dim>
7202 </port>
7203 </output>
7204 </layer>
7205 <layer id="464" name="__module.encoder.layer.7.attention.self.value/aten::linear/Add" type="Add" version="opset1">
7206 <data auto_broadcast="numpy" />
7207 <input>
7208 <port id="0" precision="FP32">
7209 <dim>-1</dim>
7210 <dim>-1</dim>
7211 <dim>384</dim>
7212 </port>
7213 <port id="1" precision="FP32">
7214 <dim>1</dim>
7215 <dim>1</dim>
7216 <dim>384</dim>
7217 </port>
7218 </input>
7219 <output>
7220 <port id="2" precision="FP32" names="656,x.93">
7221 <dim>-1</dim>
7222 <dim>-1</dim>
7223 <dim>384</dim>
7224 </port>
7225 </output>
7226 </layer>
7227 <layer id="465" name="__module.encoder.layer.7.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
7228 <data element_type="i64" shape="4" offset="385444888" size="32" />
7229 <output>
7230 <port id="0" precision="I64">
7231 <dim>4</dim>
7232 </port>
7233 </output>
7234 </layer>
7235 <layer id="466" name="__module.encoder.layer.7.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
7236 <data special_zero="true" />
7237 <input>
7238 <port id="0" precision="FP32">
7239 <dim>-1</dim>
7240 <dim>-1</dim>
7241 <dim>384</dim>
7242 </port>
7243 <port id="1" precision="I64">
7244 <dim>4</dim>
7245 </port>
7246 </input>
7247 <output>
7248 <port id="2" precision="FP32" names="660,x.95">
7249 <dim>-1</dim>
7250 <dim>-1</dim>
7251 <dim>12</dim>
7252 <dim>32</dim>
7253 </port>
7254 </output>
7255 </layer>
7256 <layer id="467" name="Constant_6099881" type="Const" version="opset1">
7257 <data element_type="i64" shape="4" offset="385444920" size="32" />
7258 <output>
7259 <port id="0" precision="I64" names="661">
7260 <dim>4</dim>
7261 </port>
7262 </output>
7263 </layer>
7264 <layer id="468" name="__module.encoder.layer.7.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
7265 <input>
7266 <port id="0" precision="FP32">
7267 <dim>-1</dim>
7268 <dim>-1</dim>
7269 <dim>12</dim>
7270 <dim>32</dim>
7271 </port>
7272 <port id="1" precision="I64">
7273 <dim>4</dim>
7274 </port>
7275 </input>
7276 <output>
7277 <port id="2" precision="FP32" names="662">
7278 <dim>-1</dim>
7279 <dim>12</dim>
7280 <dim>-1</dim>
7281 <dim>32</dim>
7282 </port>
7283 </output>
7284 </layer>
7285 <layer id="469" name="__module.encoder.layer.7.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
7286 <data causal="false" />
7287 <input>
7288 <port id="0" precision="FP32">
7289 <dim>-1</dim>
7290 <dim>12</dim>
7291 <dim>-1</dim>
7292 <dim>32</dim>
7293 </port>
7294 <port id="1" precision="FP32">
7295 <dim>-1</dim>
7296 <dim>12</dim>
7297 <dim>-1</dim>
7298 <dim>32</dim>
7299 </port>
7300 <port id="2" precision="FP32">
7301 <dim>-1</dim>
7302 <dim>12</dim>
7303 <dim>-1</dim>
7304 <dim>32</dim>
7305 </port>
7306 <port id="3" precision="FP32">
7307 <dim>-1</dim>
7308 <dim>1</dim>
7309 <dim>-1</dim>
7310 <dim>-1</dim>
7311 </port>
7312 </input>
7313 <output>
7314 <port id="4" precision="FP32" names="663,attn_output.29">
7315 <dim>-1</dim>
7316 <dim>12</dim>
7317 <dim>-1</dim>
7318 <dim>32</dim>
7319 </port>
7320 </output>
7321 </layer>
7322 <layer id="470" name="__module.encoder.layer.7.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
7323 <data element_type="i32" shape="4" offset="386627704" size="16" />
7324 <output>
7325 <port id="0" precision="I32">
7326 <dim>4</dim>
7327 </port>
7328 </output>
7329 </layer>
7330 <layer id="471" name="__module.encoder.layer.7.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
7331 <input>
7332 <port id="0" precision="FP32">
7333 <dim>-1</dim>
7334 <dim>12</dim>
7335 <dim>-1</dim>
7336 <dim>32</dim>
7337 </port>
7338 <port id="1" precision="I32">
7339 <dim>4</dim>
7340 </port>
7341 </input>
7342 <output>
7343 <port id="2" precision="FP32" names="664,attn_output.31">
7344 <dim>-1</dim>
7345 <dim>-1</dim>
7346 <dim>12</dim>
7347 <dim>32</dim>
7348 </port>
7349 </output>
7350 </layer>
7351 <layer id="472" name="Constant_6107621" type="Const" version="opset1">
7352 <data element_type="i64" shape="3" offset="386627720" size="24" />
7353 <output>
7354 <port id="0" precision="I64">
7355 <dim>3</dim>
7356 </port>
7357 </output>
7358 </layer>
7359 <layer id="473" name="__module.encoder.layer.7.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
7360 <data special_zero="true" />
7361 <input>
7362 <port id="0" precision="FP32">
7363 <dim>-1</dim>
7364 <dim>-1</dim>
7365 <dim>12</dim>
7366 <dim>32</dim>
7367 </port>
7368 <port id="1" precision="I64">
7369 <dim>3</dim>
7370 </port>
7371 </input>
7372 <output>
7373 <port id="2" precision="FP32" names="666">
7374 <dim>-1</dim>
7375 <dim>-1</dim>
7376 <dim>384</dim>
7377 </port>
7378 </output>
7379 </layer>
7380 <layer id="474" name="self.encoder.layer.7.attention.output.dense.weight" type="Const" version="opset1">
7381 <data element_type="f32" shape="384, 384" offset="436312736" size="589824" />
7382 <output>
7383 <port id="0" precision="FP32" names="self.encoder.layer.7.attention.output.dense.weight">
7384 <dim>384</dim>
7385 <dim>384</dim>
7386 </port>
7387 </output>
7388 </layer>
7389 <layer id="475" name="__module.encoder.layer.7.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
7390 <data transpose_a="false" transpose_b="true" />
7391 <input>
7392 <port id="0" precision="FP32">
7393 <dim>-1</dim>
7394 <dim>-1</dim>
7395 <dim>384</dim>
7396 </port>
7397 <port id="1" precision="FP32">
7398 <dim>384</dim>
7399 <dim>384</dim>
7400 </port>
7401 </input>
7402 <output>
7403 <port id="2" precision="FP32">
7404 <dim>-1</dim>
7405 <dim>-1</dim>
7406 <dim>384</dim>
7407 </port>
7408 </output>
7409 </layer>
7410 <layer id="476" name="Constant_6107445" type="Const" version="opset1">
7411 <data element_type="f32" shape="1, 1, 384" offset="436902560" size="1536" />
7412 <output>
7413 <port id="0" precision="FP32">
7414 <dim>1</dim>
7415 <dim>1</dim>
7416 <dim>384</dim>
7417 </port>
7418 </output>
7419 </layer>
7420 <layer id="477" name="__module.encoder.layer.7.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
7421 <data auto_broadcast="numpy" />
7422 <input>
7423 <port id="0" precision="FP32">
7424 <dim>-1</dim>
7425 <dim>-1</dim>
7426 <dim>384</dim>
7427 </port>
7428 <port id="1" precision="FP32">
7429 <dim>1</dim>
7430 <dim>1</dim>
7431 <dim>384</dim>
7432 </port>
7433 </input>
7434 <output>
7435 <port id="2" precision="FP32" names="672,input.31">
7436 <dim>-1</dim>
7437 <dim>-1</dim>
7438 <dim>384</dim>
7439 </port>
7440 </output>
7441 </layer>
7442 <layer id="478" name="__module.encoder.layer.7.attention.output/aten::add/Add" type="Add" version="opset1">
7443 <data auto_broadcast="numpy" />
7444 <input>
7445 <port id="0" precision="FP32">
7446 <dim>-1</dim>
7447 <dim>-1</dim>
7448 <dim>384</dim>
7449 </port>
7450 <port id="1" precision="FP32">
7451 <dim>-1</dim>
7452 <dim>-1</dim>
7453 <dim>384</dim>
7454 </port>
7455 </input>
7456 <output>
7457 <port id="2" precision="FP32" names="674">
7458 <dim>-1</dim>
7459 <dim>-1</dim>
7460 <dim>384</dim>
7461 </port>
7462 </output>
7463 </layer>
7464 <layer id="479" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
7465 <data element_type="i32" shape="1" offset="384850452" size="4" />
7466 <output>
7467 <port id="0" precision="I32">
7468 <dim>1</dim>
7469 </port>
7470 </output>
7471 </layer>
7472 <layer id="480" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
7473 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
7474 <input>
7475 <port id="0" precision="FP32">
7476 <dim>-1</dim>
7477 <dim>-1</dim>
7478 <dim>384</dim>
7479 </port>
7480 <port id="1" precision="I32">
7481 <dim>1</dim>
7482 </port>
7483 </input>
7484 <output>
7485 <port id="2" precision="FP32">
7486 <dim>-1</dim>
7487 <dim>-1</dim>
7488 <dim>384</dim>
7489 </port>
7490 </output>
7491 </layer>
7492 <layer id="481" name="Constant_6107446" type="Const" version="opset1">
7493 <data element_type="f32" shape="1, 1, 384" offset="436904096" size="1536" />
7494 <output>
7495 <port id="0" precision="FP32">
7496 <dim>1</dim>
7497 <dim>1</dim>
7498 <dim>384</dim>
7499 </port>
7500 </output>
7501 </layer>
7502 <layer id="482" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
7503 <data auto_broadcast="numpy" />
7504 <input>
7505 <port id="0" precision="FP32">
7506 <dim>-1</dim>
7507 <dim>-1</dim>
7508 <dim>384</dim>
7509 </port>
7510 <port id="1" precision="FP32">
7511 <dim>1</dim>
7512 <dim>1</dim>
7513 <dim>384</dim>
7514 </port>
7515 </input>
7516 <output>
7517 <port id="2" precision="FP32">
7518 <dim>-1</dim>
7519 <dim>-1</dim>
7520 <dim>384</dim>
7521 </port>
7522 </output>
7523 </layer>
7524 <layer id="483" name="Constant_6107447" type="Const" version="opset1">
7525 <data element_type="f32" shape="1, 1, 384" offset="436905632" size="1536" />
7526 <output>
7527 <port id="0" precision="FP32">
7528 <dim>1</dim>
7529 <dim>1</dim>
7530 <dim>384</dim>
7531 </port>
7532 </output>
7533 </layer>
7534 <layer id="484" name="__module.encoder.layer.7.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
7535 <data auto_broadcast="numpy" />
7536 <input>
7537 <port id="0" precision="FP32">
7538 <dim>-1</dim>
7539 <dim>-1</dim>
7540 <dim>384</dim>
7541 </port>
7542 <port id="1" precision="FP32">
7543 <dim>1</dim>
7544 <dim>1</dim>
7545 <dim>384</dim>
7546 </port>
7547 </input>
7548 <output>
7549 <port id="2" precision="FP32" names="678,input_tensor.15">
7550 <dim>-1</dim>
7551 <dim>-1</dim>
7552 <dim>384</dim>
7553 </port>
7554 </output>
7555 </layer>
7556 <layer id="485" name="self.encoder.layer.7.intermediate.dense.weight" type="Const" version="opset1">
7557 <data element_type="f32" shape="1536, 384" offset="436907168" size="2359296" />
7558 <output>
7559 <port id="0" precision="FP32" names="self.encoder.layer.7.intermediate.dense.weight">
7560 <dim>1536</dim>
7561 <dim>384</dim>
7562 </port>
7563 </output>
7564 </layer>
7565 <layer id="486" name="__module.encoder.layer.7.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
7566 <data transpose_a="false" transpose_b="true" />
7567 <input>
7568 <port id="0" precision="FP32">
7569 <dim>-1</dim>
7570 <dim>-1</dim>
7571 <dim>384</dim>
7572 </port>
7573 <port id="1" precision="FP32">
7574 <dim>1536</dim>
7575 <dim>384</dim>
7576 </port>
7577 </input>
7578 <output>
7579 <port id="2" precision="FP32">
7580 <dim>-1</dim>
7581 <dim>-1</dim>
7582 <dim>1536</dim>
7583 </port>
7584 </output>
7585 </layer>
7586 <layer id="487" name="Constant_6107448" type="Const" version="opset1">
7587 <data element_type="f32" shape="1, 1, 1536" offset="439266464" size="6144" />
7588 <output>
7589 <port id="0" precision="FP32">
7590 <dim>1</dim>
7591 <dim>1</dim>
7592 <dim>1536</dim>
7593 </port>
7594 </output>
7595 </layer>
7596 <layer id="488" name="__module.encoder.layer.7.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
7597 <data auto_broadcast="numpy" />
7598 <input>
7599 <port id="0" precision="FP32">
7600 <dim>-1</dim>
7601 <dim>-1</dim>
7602 <dim>1536</dim>
7603 </port>
7604 <port id="1" precision="FP32">
7605 <dim>1</dim>
7606 <dim>1</dim>
7607 <dim>1536</dim>
7608 </port>
7609 </input>
7610 <output>
7611 <port id="2" precision="FP32" names="683">
7612 <dim>-1</dim>
7613 <dim>-1</dim>
7614 <dim>1536</dim>
7615 </port>
7616 </output>
7617 </layer>
7618 <layer id="489" name="__module.encoder.layer.7.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
7619 <data approximation_mode="ERF" />
7620 <input>
7621 <port id="0" precision="FP32">
7622 <dim>-1</dim>
7623 <dim>-1</dim>
7624 <dim>1536</dim>
7625 </port>
7626 </input>
7627 <output>
7628 <port id="1" precision="FP32" names="684">
7629 <dim>-1</dim>
7630 <dim>-1</dim>
7631 <dim>1536</dim>
7632 </port>
7633 </output>
7634 </layer>
7635 <layer id="490" name="self.encoder.layer.7.output.dense.weight" type="Const" version="opset1">
7636 <data element_type="f32" shape="384, 1536" offset="439272608" size="2359296" />
7637 <output>
7638 <port id="0" precision="FP32" names="self.encoder.layer.7.output.dense.weight">
7639 <dim>384</dim>
7640 <dim>1536</dim>
7641 </port>
7642 </output>
7643 </layer>
7644 <layer id="491" name="__module.encoder.layer.7.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
7645 <data transpose_a="false" transpose_b="true" />
7646 <input>
7647 <port id="0" precision="FP32">
7648 <dim>-1</dim>
7649 <dim>-1</dim>
7650 <dim>1536</dim>
7651 </port>
7652 <port id="1" precision="FP32">
7653 <dim>384</dim>
7654 <dim>1536</dim>
7655 </port>
7656 </input>
7657 <output>
7658 <port id="2" precision="FP32">
7659 <dim>-1</dim>
7660 <dim>-1</dim>
7661 <dim>384</dim>
7662 </port>
7663 </output>
7664 </layer>
7665 <layer id="492" name="Constant_6107449" type="Const" version="opset1">
7666 <data element_type="f32" shape="1, 1, 384" offset="441631904" size="1536" />
7667 <output>
7668 <port id="0" precision="FP32">
7669 <dim>1</dim>
7670 <dim>1</dim>
7671 <dim>384</dim>
7672 </port>
7673 </output>
7674 </layer>
7675 <layer id="493" name="__module.encoder.layer.7.output.dense/aten::linear/Add" type="Add" version="opset1">
7676 <data auto_broadcast="numpy" />
7677 <input>
7678 <port id="0" precision="FP32">
7679 <dim>-1</dim>
7680 <dim>-1</dim>
7681 <dim>384</dim>
7682 </port>
7683 <port id="1" precision="FP32">
7684 <dim>1</dim>
7685 <dim>1</dim>
7686 <dim>384</dim>
7687 </port>
7688 </input>
7689 <output>
7690 <port id="2" precision="FP32" names="690,input.33">
7691 <dim>-1</dim>
7692 <dim>-1</dim>
7693 <dim>384</dim>
7694 </port>
7695 </output>
7696 </layer>
7697 <layer id="494" name="__module.encoder.layer.7.output/aten::add/Add" type="Add" version="opset1">
7698 <data auto_broadcast="numpy" />
7699 <input>
7700 <port id="0" precision="FP32">
7701 <dim>-1</dim>
7702 <dim>-1</dim>
7703 <dim>384</dim>
7704 </port>
7705 <port id="1" precision="FP32">
7706 <dim>-1</dim>
7707 <dim>-1</dim>
7708 <dim>384</dim>
7709 </port>
7710 </input>
7711 <output>
7712 <port id="2" precision="FP32" names="692">
7713 <dim>-1</dim>
7714 <dim>-1</dim>
7715 <dim>384</dim>
7716 </port>
7717 </output>
7718 </layer>
7719 <layer id="495" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
7720 <data element_type="i32" shape="1" offset="384850452" size="4" />
7721 <output>
7722 <port id="0" precision="I32">
7723 <dim>1</dim>
7724 </port>
7725 </output>
7726 </layer>
7727 <layer id="496" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
7728 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
7729 <input>
7730 <port id="0" precision="FP32">
7731 <dim>-1</dim>
7732 <dim>-1</dim>
7733 <dim>384</dim>
7734 </port>
7735 <port id="1" precision="I32">
7736 <dim>1</dim>
7737 </port>
7738 </input>
7739 <output>
7740 <port id="2" precision="FP32">
7741 <dim>-1</dim>
7742 <dim>-1</dim>
7743 <dim>384</dim>
7744 </port>
7745 </output>
7746 </layer>
7747 <layer id="497" name="Constant_6107450" type="Const" version="opset1">
7748 <data element_type="f32" shape="1, 1, 384" offset="441633440" size="1536" />
7749 <output>
7750 <port id="0" precision="FP32">
7751 <dim>1</dim>
7752 <dim>1</dim>
7753 <dim>384</dim>
7754 </port>
7755 </output>
7756 </layer>
7757 <layer id="498" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
7758 <data auto_broadcast="numpy" />
7759 <input>
7760 <port id="0" precision="FP32">
7761 <dim>-1</dim>
7762 <dim>-1</dim>
7763 <dim>384</dim>
7764 </port>
7765 <port id="1" precision="FP32">
7766 <dim>1</dim>
7767 <dim>1</dim>
7768 <dim>384</dim>
7769 </port>
7770 </input>
7771 <output>
7772 <port id="2" precision="FP32">
7773 <dim>-1</dim>
7774 <dim>-1</dim>
7775 <dim>384</dim>
7776 </port>
7777 </output>
7778 </layer>
7779 <layer id="499" name="Constant_6107451" type="Const" version="opset1">
7780 <data element_type="f32" shape="1, 1, 384" offset="441634976" size="1536" />
7781 <output>
7782 <port id="0" precision="FP32">
7783 <dim>1</dim>
7784 <dim>1</dim>
7785 <dim>384</dim>
7786 </port>
7787 </output>
7788 </layer>
7789 <layer id="500" name="__module.encoder.layer.7.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
7790 <data auto_broadcast="numpy" />
7791 <input>
7792 <port id="0" precision="FP32">
7793 <dim>-1</dim>
7794 <dim>-1</dim>
7795 <dim>384</dim>
7796 </port>
7797 <port id="1" precision="FP32">
7798 <dim>1</dim>
7799 <dim>1</dim>
7800 <dim>384</dim>
7801 </port>
7802 </input>
7803 <output>
7804 <port id="2" precision="FP32" names="696,hidden_states.49">
7805 <dim>-1</dim>
7806 <dim>-1</dim>
7807 <dim>384</dim>
7808 </port>
7809 </output>
7810 </layer>
7811 <layer id="501" name="self.encoder.layer.8.attention.self.query.weight" type="Const" version="opset1">
7812 <data element_type="f32" shape="384, 384" offset="441636512" size="589824" />
7813 <output>
7814 <port id="0" precision="FP32" names="self.encoder.layer.8.attention.self.query.weight">
7815 <dim>384</dim>
7816 <dim>384</dim>
7817 </port>
7818 </output>
7819 </layer>
7820 <layer id="502" name="__module.encoder.layer.8.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
7821 <data transpose_a="false" transpose_b="true" />
7822 <input>
7823 <port id="0" precision="FP32">
7824 <dim>-1</dim>
7825 <dim>-1</dim>
7826 <dim>384</dim>
7827 </port>
7828 <port id="1" precision="FP32">
7829 <dim>384</dim>
7830 <dim>384</dim>
7831 </port>
7832 </input>
7833 <output>
7834 <port id="2" precision="FP32">
7835 <dim>-1</dim>
7836 <dim>-1</dim>
7837 <dim>384</dim>
7838 </port>
7839 </output>
7840 </layer>
7841 <layer id="503" name="Constant_6107452" type="Const" version="opset1">
7842 <data element_type="f32" shape="1, 1, 384" offset="442226336" size="1536" />
7843 <output>
7844 <port id="0" precision="FP32">
7845 <dim>1</dim>
7846 <dim>1</dim>
7847 <dim>384</dim>
7848 </port>
7849 </output>
7850 </layer>
7851 <layer id="504" name="__module.encoder.layer.8.attention.self.query/aten::linear/Add" type="Add" version="opset1">
7852 <data auto_broadcast="numpy" />
7853 <input>
7854 <port id="0" precision="FP32">
7855 <dim>-1</dim>
7856 <dim>-1</dim>
7857 <dim>384</dim>
7858 </port>
7859 <port id="1" precision="FP32">
7860 <dim>1</dim>
7861 <dim>1</dim>
7862 <dim>384</dim>
7863 </port>
7864 </input>
7865 <output>
7866 <port id="2" precision="FP32" names="709,x.97">
7867 <dim>-1</dim>
7868 <dim>-1</dim>
7869 <dim>384</dim>
7870 </port>
7871 </output>
7872 </layer>
7873 <layer id="505" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
7874 <data element_type="i64" shape="4" offset="385444888" size="32" />
7875 <output>
7876 <port id="0" precision="I64">
7877 <dim>4</dim>
7878 </port>
7879 </output>
7880 </layer>
7881 <layer id="506" name="__module.encoder.layer.8.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
7882 <data special_zero="true" />
7883 <input>
7884 <port id="0" precision="FP32">
7885 <dim>-1</dim>
7886 <dim>-1</dim>
7887 <dim>384</dim>
7888 </port>
7889 <port id="1" precision="I64">
7890 <dim>4</dim>
7891 </port>
7892 </input>
7893 <output>
7894 <port id="2" precision="FP32" names="713,x.99">
7895 <dim>-1</dim>
7896 <dim>-1</dim>
7897 <dim>12</dim>
7898 <dim>32</dim>
7899 </port>
7900 </output>
7901 </layer>
7902 <layer id="507" name="Constant_6100061" type="Const" version="opset1">
7903 <data element_type="i64" shape="4" offset="385444920" size="32" />
7904 <output>
7905 <port id="0" precision="I64" names="714">
7906 <dim>4</dim>
7907 </port>
7908 </output>
7909 </layer>
7910 <layer id="508" name="__module.encoder.layer.8.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
7911 <input>
7912 <port id="0" precision="FP32">
7913 <dim>-1</dim>
7914 <dim>-1</dim>
7915 <dim>12</dim>
7916 <dim>32</dim>
7917 </port>
7918 <port id="1" precision="I64">
7919 <dim>4</dim>
7920 </port>
7921 </input>
7922 <output>
7923 <port id="2" precision="FP32" names="715">
7924 <dim>-1</dim>
7925 <dim>12</dim>
7926 <dim>-1</dim>
7927 <dim>32</dim>
7928 </port>
7929 </output>
7930 </layer>
7931 <layer id="509" name="self.encoder.layer.8.attention.self.key.weight" type="Const" version="opset1">
7932 <data element_type="f32" shape="384, 384" offset="442227872" size="589824" />
7933 <output>
7934 <port id="0" precision="FP32" names="self.encoder.layer.8.attention.self.key.weight">
7935 <dim>384</dim>
7936 <dim>384</dim>
7937 </port>
7938 </output>
7939 </layer>
7940 <layer id="510" name="__module.encoder.layer.8.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
7941 <data transpose_a="false" transpose_b="true" />
7942 <input>
7943 <port id="0" precision="FP32">
7944 <dim>-1</dim>
7945 <dim>-1</dim>
7946 <dim>384</dim>
7947 </port>
7948 <port id="1" precision="FP32">
7949 <dim>384</dim>
7950 <dim>384</dim>
7951 </port>
7952 </input>
7953 <output>
7954 <port id="2" precision="FP32">
7955 <dim>-1</dim>
7956 <dim>-1</dim>
7957 <dim>384</dim>
7958 </port>
7959 </output>
7960 </layer>
7961 <layer id="511" name="Constant_6107453" type="Const" version="opset1">
7962 <data element_type="f32" shape="1, 1, 384" offset="442817696" size="1536" />
7963 <output>
7964 <port id="0" precision="FP32">
7965 <dim>1</dim>
7966 <dim>1</dim>
7967 <dim>384</dim>
7968 </port>
7969 </output>
7970 </layer>
7971 <layer id="512" name="__module.encoder.layer.8.attention.self.key/aten::linear/Add" type="Add" version="opset1">
7972 <data auto_broadcast="numpy" />
7973 <input>
7974 <port id="0" precision="FP32">
7975 <dim>-1</dim>
7976 <dim>-1</dim>
7977 <dim>384</dim>
7978 </port>
7979 <port id="1" precision="FP32">
7980 <dim>1</dim>
7981 <dim>1</dim>
7982 <dim>384</dim>
7983 </port>
7984 </input>
7985 <output>
7986 <port id="2" precision="FP32" names="718,x.101">
7987 <dim>-1</dim>
7988 <dim>-1</dim>
7989 <dim>384</dim>
7990 </port>
7991 </output>
7992 </layer>
7993 <layer id="513" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
7994 <data element_type="i64" shape="4" offset="385444888" size="32" />
7995 <output>
7996 <port id="0" precision="I64">
7997 <dim>4</dim>
7998 </port>
7999 </output>
8000 </layer>
8001 <layer id="514" name="__module.encoder.layer.8.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
8002 <data special_zero="true" />
8003 <input>
8004 <port id="0" precision="FP32">
8005 <dim>-1</dim>
8006 <dim>-1</dim>
8007 <dim>384</dim>
8008 </port>
8009 <port id="1" precision="I64">
8010 <dim>4</dim>
8011 </port>
8012 </input>
8013 <output>
8014 <port id="2" precision="FP32" names="722,x.103">
8015 <dim>-1</dim>
8016 <dim>-1</dim>
8017 <dim>12</dim>
8018 <dim>32</dim>
8019 </port>
8020 </output>
8021 </layer>
8022 <layer id="515" name="Constant_6100084" type="Const" version="opset1">
8023 <data element_type="i64" shape="4" offset="385444920" size="32" />
8024 <output>
8025 <port id="0" precision="I64" names="723">
8026 <dim>4</dim>
8027 </port>
8028 </output>
8029 </layer>
8030 <layer id="516" name="__module.encoder.layer.8.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
8031 <input>
8032 <port id="0" precision="FP32">
8033 <dim>-1</dim>
8034 <dim>-1</dim>
8035 <dim>12</dim>
8036 <dim>32</dim>
8037 </port>
8038 <port id="1" precision="I64">
8039 <dim>4</dim>
8040 </port>
8041 </input>
8042 <output>
8043 <port id="2" precision="FP32" names="724">
8044 <dim>-1</dim>
8045 <dim>12</dim>
8046 <dim>-1</dim>
8047 <dim>32</dim>
8048 </port>
8049 </output>
8050 </layer>
8051 <layer id="517" name="self.encoder.layer.8.attention.self.value.weight" type="Const" version="opset1">
8052 <data element_type="f32" shape="384, 384" offset="442819232" size="589824" />
8053 <output>
8054 <port id="0" precision="FP32" names="self.encoder.layer.8.attention.self.value.weight">
8055 <dim>384</dim>
8056 <dim>384</dim>
8057 </port>
8058 </output>
8059 </layer>
8060 <layer id="518" name="__module.encoder.layer.8.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
8061 <data transpose_a="false" transpose_b="true" />
8062 <input>
8063 <port id="0" precision="FP32">
8064 <dim>-1</dim>
8065 <dim>-1</dim>
8066 <dim>384</dim>
8067 </port>
8068 <port id="1" precision="FP32">
8069 <dim>384</dim>
8070 <dim>384</dim>
8071 </port>
8072 </input>
8073 <output>
8074 <port id="2" precision="FP32">
8075 <dim>-1</dim>
8076 <dim>-1</dim>
8077 <dim>384</dim>
8078 </port>
8079 </output>
8080 </layer>
8081 <layer id="519" name="Constant_6107454" type="Const" version="opset1">
8082 <data element_type="f32" shape="1, 1, 384" offset="443409056" size="1536" />
8083 <output>
8084 <port id="0" precision="FP32">
8085 <dim>1</dim>
8086 <dim>1</dim>
8087 <dim>384</dim>
8088 </port>
8089 </output>
8090 </layer>
8091 <layer id="520" name="__module.encoder.layer.8.attention.self.value/aten::linear/Add" type="Add" version="opset1">
8092 <data auto_broadcast="numpy" />
8093 <input>
8094 <port id="0" precision="FP32">
8095 <dim>-1</dim>
8096 <dim>-1</dim>
8097 <dim>384</dim>
8098 </port>
8099 <port id="1" precision="FP32">
8100 <dim>1</dim>
8101 <dim>1</dim>
8102 <dim>384</dim>
8103 </port>
8104 </input>
8105 <output>
8106 <port id="2" precision="FP32" names="727,x.105">
8107 <dim>-1</dim>
8108 <dim>-1</dim>
8109 <dim>384</dim>
8110 </port>
8111 </output>
8112 </layer>
8113 <layer id="521" name="__module.encoder.layer.8.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
8114 <data element_type="i64" shape="4" offset="385444888" size="32" />
8115 <output>
8116 <port id="0" precision="I64">
8117 <dim>4</dim>
8118 </port>
8119 </output>
8120 </layer>
8121 <layer id="522" name="__module.encoder.layer.8.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
8122 <data special_zero="true" />
8123 <input>
8124 <port id="0" precision="FP32">
8125 <dim>-1</dim>
8126 <dim>-1</dim>
8127 <dim>384</dim>
8128 </port>
8129 <port id="1" precision="I64">
8130 <dim>4</dim>
8131 </port>
8132 </input>
8133 <output>
8134 <port id="2" precision="FP32" names="731,x.107">
8135 <dim>-1</dim>
8136 <dim>-1</dim>
8137 <dim>12</dim>
8138 <dim>32</dim>
8139 </port>
8140 </output>
8141 </layer>
8142 <layer id="523" name="Constant_6100107" type="Const" version="opset1">
8143 <data element_type="i64" shape="4" offset="385444920" size="32" />
8144 <output>
8145 <port id="0" precision="I64" names="732">
8146 <dim>4</dim>
8147 </port>
8148 </output>
8149 </layer>
8150 <layer id="524" name="__module.encoder.layer.8.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
8151 <input>
8152 <port id="0" precision="FP32">
8153 <dim>-1</dim>
8154 <dim>-1</dim>
8155 <dim>12</dim>
8156 <dim>32</dim>
8157 </port>
8158 <port id="1" precision="I64">
8159 <dim>4</dim>
8160 </port>
8161 </input>
8162 <output>
8163 <port id="2" precision="FP32" names="733">
8164 <dim>-1</dim>
8165 <dim>12</dim>
8166 <dim>-1</dim>
8167 <dim>32</dim>
8168 </port>
8169 </output>
8170 </layer>
8171 <layer id="525" name="__module.encoder.layer.8.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
8172 <data causal="false" />
8173 <input>
8174 <port id="0" precision="FP32">
8175 <dim>-1</dim>
8176 <dim>12</dim>
8177 <dim>-1</dim>
8178 <dim>32</dim>
8179 </port>
8180 <port id="1" precision="FP32">
8181 <dim>-1</dim>
8182 <dim>12</dim>
8183 <dim>-1</dim>
8184 <dim>32</dim>
8185 </port>
8186 <port id="2" precision="FP32">
8187 <dim>-1</dim>
8188 <dim>12</dim>
8189 <dim>-1</dim>
8190 <dim>32</dim>
8191 </port>
8192 <port id="3" precision="FP32">
8193 <dim>-1</dim>
8194 <dim>1</dim>
8195 <dim>-1</dim>
8196 <dim>-1</dim>
8197 </port>
8198 </input>
8199 <output>
8200 <port id="4" precision="FP32" names="734,attn_output.33">
8201 <dim>-1</dim>
8202 <dim>12</dim>
8203 <dim>-1</dim>
8204 <dim>32</dim>
8205 </port>
8206 </output>
8207 </layer>
8208 <layer id="526" name="__module.encoder.layer.8.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
8209 <data element_type="i32" shape="4" offset="386627704" size="16" />
8210 <output>
8211 <port id="0" precision="I32">
8212 <dim>4</dim>
8213 </port>
8214 </output>
8215 </layer>
8216 <layer id="527" name="__module.encoder.layer.8.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
8217 <input>
8218 <port id="0" precision="FP32">
8219 <dim>-1</dim>
8220 <dim>12</dim>
8221 <dim>-1</dim>
8222 <dim>32</dim>
8223 </port>
8224 <port id="1" precision="I32">
8225 <dim>4</dim>
8226 </port>
8227 </input>
8228 <output>
8229 <port id="2" precision="FP32" names="735,attn_output.35">
8230 <dim>-1</dim>
8231 <dim>-1</dim>
8232 <dim>12</dim>
8233 <dim>32</dim>
8234 </port>
8235 </output>
8236 </layer>
8237 <layer id="528" name="Constant_6107622" type="Const" version="opset1">
8238 <data element_type="i64" shape="3" offset="386627720" size="24" />
8239 <output>
8240 <port id="0" precision="I64">
8241 <dim>3</dim>
8242 </port>
8243 </output>
8244 </layer>
8245 <layer id="529" name="__module.encoder.layer.8.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
8246 <data special_zero="true" />
8247 <input>
8248 <port id="0" precision="FP32">
8249 <dim>-1</dim>
8250 <dim>-1</dim>
8251 <dim>12</dim>
8252 <dim>32</dim>
8253 </port>
8254 <port id="1" precision="I64">
8255 <dim>3</dim>
8256 </port>
8257 </input>
8258 <output>
8259 <port id="2" precision="FP32" names="737">
8260 <dim>-1</dim>
8261 <dim>-1</dim>
8262 <dim>384</dim>
8263 </port>
8264 </output>
8265 </layer>
8266 <layer id="530" name="self.encoder.layer.8.attention.output.dense.weight" type="Const" version="opset1">
8267 <data element_type="f32" shape="384, 384" offset="443410592" size="589824" />
8268 <output>
8269 <port id="0" precision="FP32" names="self.encoder.layer.8.attention.output.dense.weight">
8270 <dim>384</dim>
8271 <dim>384</dim>
8272 </port>
8273 </output>
8274 </layer>
8275 <layer id="531" name="__module.encoder.layer.8.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
8276 <data transpose_a="false" transpose_b="true" />
8277 <input>
8278 <port id="0" precision="FP32">
8279 <dim>-1</dim>
8280 <dim>-1</dim>
8281 <dim>384</dim>
8282 </port>
8283 <port id="1" precision="FP32">
8284 <dim>384</dim>
8285 <dim>384</dim>
8286 </port>
8287 </input>
8288 <output>
8289 <port id="2" precision="FP32">
8290 <dim>-1</dim>
8291 <dim>-1</dim>
8292 <dim>384</dim>
8293 </port>
8294 </output>
8295 </layer>
8296 <layer id="532" name="Constant_6107455" type="Const" version="opset1">
8297 <data element_type="f32" shape="1, 1, 384" offset="444000416" size="1536" />
8298 <output>
8299 <port id="0" precision="FP32">
8300 <dim>1</dim>
8301 <dim>1</dim>
8302 <dim>384</dim>
8303 </port>
8304 </output>
8305 </layer>
8306 <layer id="533" name="__module.encoder.layer.8.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
8307 <data auto_broadcast="numpy" />
8308 <input>
8309 <port id="0" precision="FP32">
8310 <dim>-1</dim>
8311 <dim>-1</dim>
8312 <dim>384</dim>
8313 </port>
8314 <port id="1" precision="FP32">
8315 <dim>1</dim>
8316 <dim>1</dim>
8317 <dim>384</dim>
8318 </port>
8319 </input>
8320 <output>
8321 <port id="2" precision="FP32" names="743,input.35">
8322 <dim>-1</dim>
8323 <dim>-1</dim>
8324 <dim>384</dim>
8325 </port>
8326 </output>
8327 </layer>
8328 <layer id="534" name="__module.encoder.layer.8.attention.output/aten::add/Add" type="Add" version="opset1">
8329 <data auto_broadcast="numpy" />
8330 <input>
8331 <port id="0" precision="FP32">
8332 <dim>-1</dim>
8333 <dim>-1</dim>
8334 <dim>384</dim>
8335 </port>
8336 <port id="1" precision="FP32">
8337 <dim>-1</dim>
8338 <dim>-1</dim>
8339 <dim>384</dim>
8340 </port>
8341 </input>
8342 <output>
8343 <port id="2" precision="FP32" names="745">
8344 <dim>-1</dim>
8345 <dim>-1</dim>
8346 <dim>384</dim>
8347 </port>
8348 </output>
8349 </layer>
8350 <layer id="535" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
8351 <data element_type="i32" shape="1" offset="384850452" size="4" />
8352 <output>
8353 <port id="0" precision="I32">
8354 <dim>1</dim>
8355 </port>
8356 </output>
8357 </layer>
8358 <layer id="536" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
8359 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
8360 <input>
8361 <port id="0" precision="FP32">
8362 <dim>-1</dim>
8363 <dim>-1</dim>
8364 <dim>384</dim>
8365 </port>
8366 <port id="1" precision="I32">
8367 <dim>1</dim>
8368 </port>
8369 </input>
8370 <output>
8371 <port id="2" precision="FP32">
8372 <dim>-1</dim>
8373 <dim>-1</dim>
8374 <dim>384</dim>
8375 </port>
8376 </output>
8377 </layer>
8378 <layer id="537" name="Constant_6107456" type="Const" version="opset1">
8379 <data element_type="f32" shape="1, 1, 384" offset="444001952" size="1536" />
8380 <output>
8381 <port id="0" precision="FP32">
8382 <dim>1</dim>
8383 <dim>1</dim>
8384 <dim>384</dim>
8385 </port>
8386 </output>
8387 </layer>
8388 <layer id="538" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
8389 <data auto_broadcast="numpy" />
8390 <input>
8391 <port id="0" precision="FP32">
8392 <dim>-1</dim>
8393 <dim>-1</dim>
8394 <dim>384</dim>
8395 </port>
8396 <port id="1" precision="FP32">
8397 <dim>1</dim>
8398 <dim>1</dim>
8399 <dim>384</dim>
8400 </port>
8401 </input>
8402 <output>
8403 <port id="2" precision="FP32">
8404 <dim>-1</dim>
8405 <dim>-1</dim>
8406 <dim>384</dim>
8407 </port>
8408 </output>
8409 </layer>
8410 <layer id="539" name="Constant_6107457" type="Const" version="opset1">
8411 <data element_type="f32" shape="1, 1, 384" offset="444003488" size="1536" />
8412 <output>
8413 <port id="0" precision="FP32">
8414 <dim>1</dim>
8415 <dim>1</dim>
8416 <dim>384</dim>
8417 </port>
8418 </output>
8419 </layer>
8420 <layer id="540" name="__module.encoder.layer.8.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
8421 <data auto_broadcast="numpy" />
8422 <input>
8423 <port id="0" precision="FP32">
8424 <dim>-1</dim>
8425 <dim>-1</dim>
8426 <dim>384</dim>
8427 </port>
8428 <port id="1" precision="FP32">
8429 <dim>1</dim>
8430 <dim>1</dim>
8431 <dim>384</dim>
8432 </port>
8433 </input>
8434 <output>
8435 <port id="2" precision="FP32" names="749,input_tensor.17">
8436 <dim>-1</dim>
8437 <dim>-1</dim>
8438 <dim>384</dim>
8439 </port>
8440 </output>
8441 </layer>
8442 <layer id="541" name="self.encoder.layer.8.intermediate.dense.weight" type="Const" version="opset1">
8443 <data element_type="f32" shape="1536, 384" offset="444005024" size="2359296" />
8444 <output>
8445 <port id="0" precision="FP32" names="self.encoder.layer.8.intermediate.dense.weight">
8446 <dim>1536</dim>
8447 <dim>384</dim>
8448 </port>
8449 </output>
8450 </layer>
8451 <layer id="542" name="__module.encoder.layer.8.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
8452 <data transpose_a="false" transpose_b="true" />
8453 <input>
8454 <port id="0" precision="FP32">
8455 <dim>-1</dim>
8456 <dim>-1</dim>
8457 <dim>384</dim>
8458 </port>
8459 <port id="1" precision="FP32">
8460 <dim>1536</dim>
8461 <dim>384</dim>
8462 </port>
8463 </input>
8464 <output>
8465 <port id="2" precision="FP32">
8466 <dim>-1</dim>
8467 <dim>-1</dim>
8468 <dim>1536</dim>
8469 </port>
8470 </output>
8471 </layer>
8472 <layer id="543" name="Constant_6107458" type="Const" version="opset1">
8473 <data element_type="f32" shape="1, 1, 1536" offset="446364320" size="6144" />
8474 <output>
8475 <port id="0" precision="FP32">
8476 <dim>1</dim>
8477 <dim>1</dim>
8478 <dim>1536</dim>
8479 </port>
8480 </output>
8481 </layer>
8482 <layer id="544" name="__module.encoder.layer.8.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
8483 <data auto_broadcast="numpy" />
8484 <input>
8485 <port id="0" precision="FP32">
8486 <dim>-1</dim>
8487 <dim>-1</dim>
8488 <dim>1536</dim>
8489 </port>
8490 <port id="1" precision="FP32">
8491 <dim>1</dim>
8492 <dim>1</dim>
8493 <dim>1536</dim>
8494 </port>
8495 </input>
8496 <output>
8497 <port id="2" precision="FP32" names="754">
8498 <dim>-1</dim>
8499 <dim>-1</dim>
8500 <dim>1536</dim>
8501 </port>
8502 </output>
8503 </layer>
8504 <layer id="545" name="__module.encoder.layer.8.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
8505 <data approximation_mode="ERF" />
8506 <input>
8507 <port id="0" precision="FP32">
8508 <dim>-1</dim>
8509 <dim>-1</dim>
8510 <dim>1536</dim>
8511 </port>
8512 </input>
8513 <output>
8514 <port id="1" precision="FP32" names="755">
8515 <dim>-1</dim>
8516 <dim>-1</dim>
8517 <dim>1536</dim>
8518 </port>
8519 </output>
8520 </layer>
8521 <layer id="546" name="self.encoder.layer.8.output.dense.weight" type="Const" version="opset1">
8522 <data element_type="f32" shape="384, 1536" offset="446370464" size="2359296" />
8523 <output>
8524 <port id="0" precision="FP32" names="self.encoder.layer.8.output.dense.weight">
8525 <dim>384</dim>
8526 <dim>1536</dim>
8527 </port>
8528 </output>
8529 </layer>
8530 <layer id="547" name="__module.encoder.layer.8.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
8531 <data transpose_a="false" transpose_b="true" />
8532 <input>
8533 <port id="0" precision="FP32">
8534 <dim>-1</dim>
8535 <dim>-1</dim>
8536 <dim>1536</dim>
8537 </port>
8538 <port id="1" precision="FP32">
8539 <dim>384</dim>
8540 <dim>1536</dim>
8541 </port>
8542 </input>
8543 <output>
8544 <port id="2" precision="FP32">
8545 <dim>-1</dim>
8546 <dim>-1</dim>
8547 <dim>384</dim>
8548 </port>
8549 </output>
8550 </layer>
8551 <layer id="548" name="Constant_6107459" type="Const" version="opset1">
8552 <data element_type="f32" shape="1, 1, 384" offset="448729760" size="1536" />
8553 <output>
8554 <port id="0" precision="FP32">
8555 <dim>1</dim>
8556 <dim>1</dim>
8557 <dim>384</dim>
8558 </port>
8559 </output>
8560 </layer>
8561 <layer id="549" name="__module.encoder.layer.8.output.dense/aten::linear/Add" type="Add" version="opset1">
8562 <data auto_broadcast="numpy" />
8563 <input>
8564 <port id="0" precision="FP32">
8565 <dim>-1</dim>
8566 <dim>-1</dim>
8567 <dim>384</dim>
8568 </port>
8569 <port id="1" precision="FP32">
8570 <dim>1</dim>
8571 <dim>1</dim>
8572 <dim>384</dim>
8573 </port>
8574 </input>
8575 <output>
8576 <port id="2" precision="FP32" names="761,input.37">
8577 <dim>-1</dim>
8578 <dim>-1</dim>
8579 <dim>384</dim>
8580 </port>
8581 </output>
8582 </layer>
8583 <layer id="550" name="__module.encoder.layer.8.output/aten::add/Add" type="Add" version="opset1">
8584 <data auto_broadcast="numpy" />
8585 <input>
8586 <port id="0" precision="FP32">
8587 <dim>-1</dim>
8588 <dim>-1</dim>
8589 <dim>384</dim>
8590 </port>
8591 <port id="1" precision="FP32">
8592 <dim>-1</dim>
8593 <dim>-1</dim>
8594 <dim>384</dim>
8595 </port>
8596 </input>
8597 <output>
8598 <port id="2" precision="FP32" names="763">
8599 <dim>-1</dim>
8600 <dim>-1</dim>
8601 <dim>384</dim>
8602 </port>
8603 </output>
8604 </layer>
8605 <layer id="551" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
8606 <data element_type="i32" shape="1" offset="384850452" size="4" />
8607 <output>
8608 <port id="0" precision="I32">
8609 <dim>1</dim>
8610 </port>
8611 </output>
8612 </layer>
8613 <layer id="552" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
8614 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
8615 <input>
8616 <port id="0" precision="FP32">
8617 <dim>-1</dim>
8618 <dim>-1</dim>
8619 <dim>384</dim>
8620 </port>
8621 <port id="1" precision="I32">
8622 <dim>1</dim>
8623 </port>
8624 </input>
8625 <output>
8626 <port id="2" precision="FP32">
8627 <dim>-1</dim>
8628 <dim>-1</dim>
8629 <dim>384</dim>
8630 </port>
8631 </output>
8632 </layer>
8633 <layer id="553" name="Constant_6107460" type="Const" version="opset1">
8634 <data element_type="f32" shape="1, 1, 384" offset="448731296" size="1536" />
8635 <output>
8636 <port id="0" precision="FP32">
8637 <dim>1</dim>
8638 <dim>1</dim>
8639 <dim>384</dim>
8640 </port>
8641 </output>
8642 </layer>
8643 <layer id="554" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
8644 <data auto_broadcast="numpy" />
8645 <input>
8646 <port id="0" precision="FP32">
8647 <dim>-1</dim>
8648 <dim>-1</dim>
8649 <dim>384</dim>
8650 </port>
8651 <port id="1" precision="FP32">
8652 <dim>1</dim>
8653 <dim>1</dim>
8654 <dim>384</dim>
8655 </port>
8656 </input>
8657 <output>
8658 <port id="2" precision="FP32">
8659 <dim>-1</dim>
8660 <dim>-1</dim>
8661 <dim>384</dim>
8662 </port>
8663 </output>
8664 </layer>
8665 <layer id="555" name="Constant_6107461" type="Const" version="opset1">
8666 <data element_type="f32" shape="1, 1, 384" offset="448732832" size="1536" />
8667 <output>
8668 <port id="0" precision="FP32">
8669 <dim>1</dim>
8670 <dim>1</dim>
8671 <dim>384</dim>
8672 </port>
8673 </output>
8674 </layer>
8675 <layer id="556" name="__module.encoder.layer.8.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
8676 <data auto_broadcast="numpy" />
8677 <input>
8678 <port id="0" precision="FP32">
8679 <dim>-1</dim>
8680 <dim>-1</dim>
8681 <dim>384</dim>
8682 </port>
8683 <port id="1" precision="FP32">
8684 <dim>1</dim>
8685 <dim>1</dim>
8686 <dim>384</dim>
8687 </port>
8688 </input>
8689 <output>
8690 <port id="2" precision="FP32" names="767,hidden_states.55">
8691 <dim>-1</dim>
8692 <dim>-1</dim>
8693 <dim>384</dim>
8694 </port>
8695 </output>
8696 </layer>
8697 <layer id="557" name="self.encoder.layer.9.attention.self.query.weight" type="Const" version="opset1">
8698 <data element_type="f32" shape="384, 384" offset="448734368" size="589824" />
8699 <output>
8700 <port id="0" precision="FP32" names="self.encoder.layer.9.attention.self.query.weight">
8701 <dim>384</dim>
8702 <dim>384</dim>
8703 </port>
8704 </output>
8705 </layer>
8706 <layer id="558" name="__module.encoder.layer.9.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
8707 <data transpose_a="false" transpose_b="true" />
8708 <input>
8709 <port id="0" precision="FP32">
8710 <dim>-1</dim>
8711 <dim>-1</dim>
8712 <dim>384</dim>
8713 </port>
8714 <port id="1" precision="FP32">
8715 <dim>384</dim>
8716 <dim>384</dim>
8717 </port>
8718 </input>
8719 <output>
8720 <port id="2" precision="FP32">
8721 <dim>-1</dim>
8722 <dim>-1</dim>
8723 <dim>384</dim>
8724 </port>
8725 </output>
8726 </layer>
8727 <layer id="559" name="Constant_6107462" type="Const" version="opset1">
8728 <data element_type="f32" shape="1, 1, 384" offset="449324192" size="1536" />
8729 <output>
8730 <port id="0" precision="FP32">
8731 <dim>1</dim>
8732 <dim>1</dim>
8733 <dim>384</dim>
8734 </port>
8735 </output>
8736 </layer>
8737 <layer id="560" name="__module.encoder.layer.9.attention.self.query/aten::linear/Add" type="Add" version="opset1">
8738 <data auto_broadcast="numpy" />
8739 <input>
8740 <port id="0" precision="FP32">
8741 <dim>-1</dim>
8742 <dim>-1</dim>
8743 <dim>384</dim>
8744 </port>
8745 <port id="1" precision="FP32">
8746 <dim>1</dim>
8747 <dim>1</dim>
8748 <dim>384</dim>
8749 </port>
8750 </input>
8751 <output>
8752 <port id="2" precision="FP32" names="780,x.109">
8753 <dim>-1</dim>
8754 <dim>-1</dim>
8755 <dim>384</dim>
8756 </port>
8757 </output>
8758 </layer>
8759 <layer id="561" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
8760 <data element_type="i64" shape="4" offset="385444888" size="32" />
8761 <output>
8762 <port id="0" precision="I64">
8763 <dim>4</dim>
8764 </port>
8765 </output>
8766 </layer>
8767 <layer id="562" name="__module.encoder.layer.9.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
8768 <data special_zero="true" />
8769 <input>
8770 <port id="0" precision="FP32">
8771 <dim>-1</dim>
8772 <dim>-1</dim>
8773 <dim>384</dim>
8774 </port>
8775 <port id="1" precision="I64">
8776 <dim>4</dim>
8777 </port>
8778 </input>
8779 <output>
8780 <port id="2" precision="FP32" names="784,x.111">
8781 <dim>-1</dim>
8782 <dim>-1</dim>
8783 <dim>12</dim>
8784 <dim>32</dim>
8785 </port>
8786 </output>
8787 </layer>
8788 <layer id="563" name="Constant_6100287" type="Const" version="opset1">
8789 <data element_type="i64" shape="4" offset="385444920" size="32" />
8790 <output>
8791 <port id="0" precision="I64" names="785">
8792 <dim>4</dim>
8793 </port>
8794 </output>
8795 </layer>
8796 <layer id="564" name="__module.encoder.layer.9.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
8797 <input>
8798 <port id="0" precision="FP32">
8799 <dim>-1</dim>
8800 <dim>-1</dim>
8801 <dim>12</dim>
8802 <dim>32</dim>
8803 </port>
8804 <port id="1" precision="I64">
8805 <dim>4</dim>
8806 </port>
8807 </input>
8808 <output>
8809 <port id="2" precision="FP32" names="786">
8810 <dim>-1</dim>
8811 <dim>12</dim>
8812 <dim>-1</dim>
8813 <dim>32</dim>
8814 </port>
8815 </output>
8816 </layer>
8817 <layer id="565" name="self.encoder.layer.9.attention.self.key.weight" type="Const" version="opset1">
8818 <data element_type="f32" shape="384, 384" offset="449325728" size="589824" />
8819 <output>
8820 <port id="0" precision="FP32" names="self.encoder.layer.9.attention.self.key.weight">
8821 <dim>384</dim>
8822 <dim>384</dim>
8823 </port>
8824 </output>
8825 </layer>
8826 <layer id="566" name="__module.encoder.layer.9.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
8827 <data transpose_a="false" transpose_b="true" />
8828 <input>
8829 <port id="0" precision="FP32">
8830 <dim>-1</dim>
8831 <dim>-1</dim>
8832 <dim>384</dim>
8833 </port>
8834 <port id="1" precision="FP32">
8835 <dim>384</dim>
8836 <dim>384</dim>
8837 </port>
8838 </input>
8839 <output>
8840 <port id="2" precision="FP32">
8841 <dim>-1</dim>
8842 <dim>-1</dim>
8843 <dim>384</dim>
8844 </port>
8845 </output>
8846 </layer>
8847 <layer id="567" name="Constant_6107463" type="Const" version="opset1">
8848 <data element_type="f32" shape="1, 1, 384" offset="449915552" size="1536" />
8849 <output>
8850 <port id="0" precision="FP32">
8851 <dim>1</dim>
8852 <dim>1</dim>
8853 <dim>384</dim>
8854 </port>
8855 </output>
8856 </layer>
8857 <layer id="568" name="__module.encoder.layer.9.attention.self.key/aten::linear/Add" type="Add" version="opset1">
8858 <data auto_broadcast="numpy" />
8859 <input>
8860 <port id="0" precision="FP32">
8861 <dim>-1</dim>
8862 <dim>-1</dim>
8863 <dim>384</dim>
8864 </port>
8865 <port id="1" precision="FP32">
8866 <dim>1</dim>
8867 <dim>1</dim>
8868 <dim>384</dim>
8869 </port>
8870 </input>
8871 <output>
8872 <port id="2" precision="FP32" names="789,x.113">
8873 <dim>-1</dim>
8874 <dim>-1</dim>
8875 <dim>384</dim>
8876 </port>
8877 </output>
8878 </layer>
8879 <layer id="569" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
8880 <data element_type="i64" shape="4" offset="385444888" size="32" />
8881 <output>
8882 <port id="0" precision="I64">
8883 <dim>4</dim>
8884 </port>
8885 </output>
8886 </layer>
8887 <layer id="570" name="__module.encoder.layer.9.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
8888 <data special_zero="true" />
8889 <input>
8890 <port id="0" precision="FP32">
8891 <dim>-1</dim>
8892 <dim>-1</dim>
8893 <dim>384</dim>
8894 </port>
8895 <port id="1" precision="I64">
8896 <dim>4</dim>
8897 </port>
8898 </input>
8899 <output>
8900 <port id="2" precision="FP32" names="793,x.115">
8901 <dim>-1</dim>
8902 <dim>-1</dim>
8903 <dim>12</dim>
8904 <dim>32</dim>
8905 </port>
8906 </output>
8907 </layer>
8908 <layer id="571" name="Constant_6100310" type="Const" version="opset1">
8909 <data element_type="i64" shape="4" offset="385444920" size="32" />
8910 <output>
8911 <port id="0" precision="I64" names="794">
8912 <dim>4</dim>
8913 </port>
8914 </output>
8915 </layer>
8916 <layer id="572" name="__module.encoder.layer.9.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
8917 <input>
8918 <port id="0" precision="FP32">
8919 <dim>-1</dim>
8920 <dim>-1</dim>
8921 <dim>12</dim>
8922 <dim>32</dim>
8923 </port>
8924 <port id="1" precision="I64">
8925 <dim>4</dim>
8926 </port>
8927 </input>
8928 <output>
8929 <port id="2" precision="FP32" names="795">
8930 <dim>-1</dim>
8931 <dim>12</dim>
8932 <dim>-1</dim>
8933 <dim>32</dim>
8934 </port>
8935 </output>
8936 </layer>
8937 <layer id="573" name="self.encoder.layer.9.attention.self.value.weight" type="Const" version="opset1">
8938 <data element_type="f32" shape="384, 384" offset="449917088" size="589824" />
8939 <output>
8940 <port id="0" precision="FP32" names="self.encoder.layer.9.attention.self.value.weight">
8941 <dim>384</dim>
8942 <dim>384</dim>
8943 </port>
8944 </output>
8945 </layer>
8946 <layer id="574" name="__module.encoder.layer.9.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
8947 <data transpose_a="false" transpose_b="true" />
8948 <input>
8949 <port id="0" precision="FP32">
8950 <dim>-1</dim>
8951 <dim>-1</dim>
8952 <dim>384</dim>
8953 </port>
8954 <port id="1" precision="FP32">
8955 <dim>384</dim>
8956 <dim>384</dim>
8957 </port>
8958 </input>
8959 <output>
8960 <port id="2" precision="FP32">
8961 <dim>-1</dim>
8962 <dim>-1</dim>
8963 <dim>384</dim>
8964 </port>
8965 </output>
8966 </layer>
8967 <layer id="575" name="Constant_6107464" type="Const" version="opset1">
8968 <data element_type="f32" shape="1, 1, 384" offset="450506912" size="1536" />
8969 <output>
8970 <port id="0" precision="FP32">
8971 <dim>1</dim>
8972 <dim>1</dim>
8973 <dim>384</dim>
8974 </port>
8975 </output>
8976 </layer>
8977 <layer id="576" name="__module.encoder.layer.9.attention.self.value/aten::linear/Add" type="Add" version="opset1">
8978 <data auto_broadcast="numpy" />
8979 <input>
8980 <port id="0" precision="FP32">
8981 <dim>-1</dim>
8982 <dim>-1</dim>
8983 <dim>384</dim>
8984 </port>
8985 <port id="1" precision="FP32">
8986 <dim>1</dim>
8987 <dim>1</dim>
8988 <dim>384</dim>
8989 </port>
8990 </input>
8991 <output>
8992 <port id="2" precision="FP32" names="798,x.117">
8993 <dim>-1</dim>
8994 <dim>-1</dim>
8995 <dim>384</dim>
8996 </port>
8997 </output>
8998 </layer>
8999 <layer id="577" name="__module.encoder.layer.9.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
9000 <data element_type="i64" shape="4" offset="385444888" size="32" />
9001 <output>
9002 <port id="0" precision="I64">
9003 <dim>4</dim>
9004 </port>
9005 </output>
9006 </layer>
9007 <layer id="578" name="__module.encoder.layer.9.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
9008 <data special_zero="true" />
9009 <input>
9010 <port id="0" precision="FP32">
9011 <dim>-1</dim>
9012 <dim>-1</dim>
9013 <dim>384</dim>
9014 </port>
9015 <port id="1" precision="I64">
9016 <dim>4</dim>
9017 </port>
9018 </input>
9019 <output>
9020 <port id="2" precision="FP32" names="802,x.119">
9021 <dim>-1</dim>
9022 <dim>-1</dim>
9023 <dim>12</dim>
9024 <dim>32</dim>
9025 </port>
9026 </output>
9027 </layer>
9028 <layer id="579" name="Constant_6100333" type="Const" version="opset1">
9029 <data element_type="i64" shape="4" offset="385444920" size="32" />
9030 <output>
9031 <port id="0" precision="I64" names="803">
9032 <dim>4</dim>
9033 </port>
9034 </output>
9035 </layer>
9036 <layer id="580" name="__module.encoder.layer.9.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
9037 <input>
9038 <port id="0" precision="FP32">
9039 <dim>-1</dim>
9040 <dim>-1</dim>
9041 <dim>12</dim>
9042 <dim>32</dim>
9043 </port>
9044 <port id="1" precision="I64">
9045 <dim>4</dim>
9046 </port>
9047 </input>
9048 <output>
9049 <port id="2" precision="FP32" names="804">
9050 <dim>-1</dim>
9051 <dim>12</dim>
9052 <dim>-1</dim>
9053 <dim>32</dim>
9054 </port>
9055 </output>
9056 </layer>
9057 <layer id="581" name="__module.encoder.layer.9.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
9058 <data causal="false" />
9059 <input>
9060 <port id="0" precision="FP32">
9061 <dim>-1</dim>
9062 <dim>12</dim>
9063 <dim>-1</dim>
9064 <dim>32</dim>
9065 </port>
9066 <port id="1" precision="FP32">
9067 <dim>-1</dim>
9068 <dim>12</dim>
9069 <dim>-1</dim>
9070 <dim>32</dim>
9071 </port>
9072 <port id="2" precision="FP32">
9073 <dim>-1</dim>
9074 <dim>12</dim>
9075 <dim>-1</dim>
9076 <dim>32</dim>
9077 </port>
9078 <port id="3" precision="FP32">
9079 <dim>-1</dim>
9080 <dim>1</dim>
9081 <dim>-1</dim>
9082 <dim>-1</dim>
9083 </port>
9084 </input>
9085 <output>
9086 <port id="4" precision="FP32" names="805,attn_output.37">
9087 <dim>-1</dim>
9088 <dim>12</dim>
9089 <dim>-1</dim>
9090 <dim>32</dim>
9091 </port>
9092 </output>
9093 </layer>
9094 <layer id="582" name="__module.encoder.layer.9.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
9095 <data element_type="i32" shape="4" offset="386627704" size="16" />
9096 <output>
9097 <port id="0" precision="I32">
9098 <dim>4</dim>
9099 </port>
9100 </output>
9101 </layer>
9102 <layer id="583" name="__module.encoder.layer.9.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
9103 <input>
9104 <port id="0" precision="FP32">
9105 <dim>-1</dim>
9106 <dim>12</dim>
9107 <dim>-1</dim>
9108 <dim>32</dim>
9109 </port>
9110 <port id="1" precision="I32">
9111 <dim>4</dim>
9112 </port>
9113 </input>
9114 <output>
9115 <port id="2" precision="FP32" names="806,attn_output.39">
9116 <dim>-1</dim>
9117 <dim>-1</dim>
9118 <dim>12</dim>
9119 <dim>32</dim>
9120 </port>
9121 </output>
9122 </layer>
9123 <layer id="584" name="Constant_6107623" type="Const" version="opset1">
9124 <data element_type="i64" shape="3" offset="386627720" size="24" />
9125 <output>
9126 <port id="0" precision="I64">
9127 <dim>3</dim>
9128 </port>
9129 </output>
9130 </layer>
9131 <layer id="585" name="__module.encoder.layer.9.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
9132 <data special_zero="true" />
9133 <input>
9134 <port id="0" precision="FP32">
9135 <dim>-1</dim>
9136 <dim>-1</dim>
9137 <dim>12</dim>
9138 <dim>32</dim>
9139 </port>
9140 <port id="1" precision="I64">
9141 <dim>3</dim>
9142 </port>
9143 </input>
9144 <output>
9145 <port id="2" precision="FP32" names="808">
9146 <dim>-1</dim>
9147 <dim>-1</dim>
9148 <dim>384</dim>
9149 </port>
9150 </output>
9151 </layer>
9152 <layer id="586" name="self.encoder.layer.9.attention.output.dense.weight" type="Const" version="opset1">
9153 <data element_type="f32" shape="384, 384" offset="450508448" size="589824" />
9154 <output>
9155 <port id="0" precision="FP32" names="self.encoder.layer.9.attention.output.dense.weight">
9156 <dim>384</dim>
9157 <dim>384</dim>
9158 </port>
9159 </output>
9160 </layer>
9161 <layer id="587" name="__module.encoder.layer.9.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
9162 <data transpose_a="false" transpose_b="true" />
9163 <input>
9164 <port id="0" precision="FP32">
9165 <dim>-1</dim>
9166 <dim>-1</dim>
9167 <dim>384</dim>
9168 </port>
9169 <port id="1" precision="FP32">
9170 <dim>384</dim>
9171 <dim>384</dim>
9172 </port>
9173 </input>
9174 <output>
9175 <port id="2" precision="FP32">
9176 <dim>-1</dim>
9177 <dim>-1</dim>
9178 <dim>384</dim>
9179 </port>
9180 </output>
9181 </layer>
9182 <layer id="588" name="Constant_6107465" type="Const" version="opset1">
9183 <data element_type="f32" shape="1, 1, 384" offset="451098272" size="1536" />
9184 <output>
9185 <port id="0" precision="FP32">
9186 <dim>1</dim>
9187 <dim>1</dim>
9188 <dim>384</dim>
9189 </port>
9190 </output>
9191 </layer>
9192 <layer id="589" name="__module.encoder.layer.9.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
9193 <data auto_broadcast="numpy" />
9194 <input>
9195 <port id="0" precision="FP32">
9196 <dim>-1</dim>
9197 <dim>-1</dim>
9198 <dim>384</dim>
9199 </port>
9200 <port id="1" precision="FP32">
9201 <dim>1</dim>
9202 <dim>1</dim>
9203 <dim>384</dim>
9204 </port>
9205 </input>
9206 <output>
9207 <port id="2" precision="FP32" names="814,input.39">
9208 <dim>-1</dim>
9209 <dim>-1</dim>
9210 <dim>384</dim>
9211 </port>
9212 </output>
9213 </layer>
9214 <layer id="590" name="__module.encoder.layer.9.attention.output/aten::add/Add" type="Add" version="opset1">
9215 <data auto_broadcast="numpy" />
9216 <input>
9217 <port id="0" precision="FP32">
9218 <dim>-1</dim>
9219 <dim>-1</dim>
9220 <dim>384</dim>
9221 </port>
9222 <port id="1" precision="FP32">
9223 <dim>-1</dim>
9224 <dim>-1</dim>
9225 <dim>384</dim>
9226 </port>
9227 </input>
9228 <output>
9229 <port id="2" precision="FP32" names="816">
9230 <dim>-1</dim>
9231 <dim>-1</dim>
9232 <dim>384</dim>
9233 </port>
9234 </output>
9235 </layer>
9236 <layer id="591" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
9237 <data element_type="i32" shape="1" offset="384850452" size="4" />
9238 <output>
9239 <port id="0" precision="I32">
9240 <dim>1</dim>
9241 </port>
9242 </output>
9243 </layer>
9244 <layer id="592" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
9245 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
9246 <input>
9247 <port id="0" precision="FP32">
9248 <dim>-1</dim>
9249 <dim>-1</dim>
9250 <dim>384</dim>
9251 </port>
9252 <port id="1" precision="I32">
9253 <dim>1</dim>
9254 </port>
9255 </input>
9256 <output>
9257 <port id="2" precision="FP32">
9258 <dim>-1</dim>
9259 <dim>-1</dim>
9260 <dim>384</dim>
9261 </port>
9262 </output>
9263 </layer>
9264 <layer id="593" name="Constant_6107466" type="Const" version="opset1">
9265 <data element_type="f32" shape="1, 1, 384" offset="451099808" size="1536" />
9266 <output>
9267 <port id="0" precision="FP32">
9268 <dim>1</dim>
9269 <dim>1</dim>
9270 <dim>384</dim>
9271 </port>
9272 </output>
9273 </layer>
9274 <layer id="594" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
9275 <data auto_broadcast="numpy" />
9276 <input>
9277 <port id="0" precision="FP32">
9278 <dim>-1</dim>
9279 <dim>-1</dim>
9280 <dim>384</dim>
9281 </port>
9282 <port id="1" precision="FP32">
9283 <dim>1</dim>
9284 <dim>1</dim>
9285 <dim>384</dim>
9286 </port>
9287 </input>
9288 <output>
9289 <port id="2" precision="FP32">
9290 <dim>-1</dim>
9291 <dim>-1</dim>
9292 <dim>384</dim>
9293 </port>
9294 </output>
9295 </layer>
9296 <layer id="595" name="Constant_6107467" type="Const" version="opset1">
9297 <data element_type="f32" shape="1, 1, 384" offset="451101344" size="1536" />
9298 <output>
9299 <port id="0" precision="FP32">
9300 <dim>1</dim>
9301 <dim>1</dim>
9302 <dim>384</dim>
9303 </port>
9304 </output>
9305 </layer>
9306 <layer id="596" name="__module.encoder.layer.9.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
9307 <data auto_broadcast="numpy" />
9308 <input>
9309 <port id="0" precision="FP32">
9310 <dim>-1</dim>
9311 <dim>-1</dim>
9312 <dim>384</dim>
9313 </port>
9314 <port id="1" precision="FP32">
9315 <dim>1</dim>
9316 <dim>1</dim>
9317 <dim>384</dim>
9318 </port>
9319 </input>
9320 <output>
9321 <port id="2" precision="FP32" names="820,input_tensor.19">
9322 <dim>-1</dim>
9323 <dim>-1</dim>
9324 <dim>384</dim>
9325 </port>
9326 </output>
9327 </layer>
9328 <layer id="597" name="self.encoder.layer.9.intermediate.dense.weight" type="Const" version="opset1">
9329 <data element_type="f32" shape="1536, 384" offset="451102880" size="2359296" />
9330 <output>
9331 <port id="0" precision="FP32" names="self.encoder.layer.9.intermediate.dense.weight">
9332 <dim>1536</dim>
9333 <dim>384</dim>
9334 </port>
9335 </output>
9336 </layer>
9337 <layer id="598" name="__module.encoder.layer.9.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
9338 <data transpose_a="false" transpose_b="true" />
9339 <input>
9340 <port id="0" precision="FP32">
9341 <dim>-1</dim>
9342 <dim>-1</dim>
9343 <dim>384</dim>
9344 </port>
9345 <port id="1" precision="FP32">
9346 <dim>1536</dim>
9347 <dim>384</dim>
9348 </port>
9349 </input>
9350 <output>
9351 <port id="2" precision="FP32">
9352 <dim>-1</dim>
9353 <dim>-1</dim>
9354 <dim>1536</dim>
9355 </port>
9356 </output>
9357 </layer>
9358 <layer id="599" name="Constant_6107468" type="Const" version="opset1">
9359 <data element_type="f32" shape="1, 1, 1536" offset="453462176" size="6144" />
9360 <output>
9361 <port id="0" precision="FP32">
9362 <dim>1</dim>
9363 <dim>1</dim>
9364 <dim>1536</dim>
9365 </port>
9366 </output>
9367 </layer>
9368 <layer id="600" name="__module.encoder.layer.9.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
9369 <data auto_broadcast="numpy" />
9370 <input>
9371 <port id="0" precision="FP32">
9372 <dim>-1</dim>
9373 <dim>-1</dim>
9374 <dim>1536</dim>
9375 </port>
9376 <port id="1" precision="FP32">
9377 <dim>1</dim>
9378 <dim>1</dim>
9379 <dim>1536</dim>
9380 </port>
9381 </input>
9382 <output>
9383 <port id="2" precision="FP32" names="825">
9384 <dim>-1</dim>
9385 <dim>-1</dim>
9386 <dim>1536</dim>
9387 </port>
9388 </output>
9389 </layer>
9390 <layer id="601" name="__module.encoder.layer.9.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
9391 <data approximation_mode="ERF" />
9392 <input>
9393 <port id="0" precision="FP32">
9394 <dim>-1</dim>
9395 <dim>-1</dim>
9396 <dim>1536</dim>
9397 </port>
9398 </input>
9399 <output>
9400 <port id="1" precision="FP32" names="826">
9401 <dim>-1</dim>
9402 <dim>-1</dim>
9403 <dim>1536</dim>
9404 </port>
9405 </output>
9406 </layer>
9407 <layer id="602" name="self.encoder.layer.9.output.dense.weight" type="Const" version="opset1">
9408 <data element_type="f32" shape="384, 1536" offset="453468320" size="2359296" />
9409 <output>
9410 <port id="0" precision="FP32" names="self.encoder.layer.9.output.dense.weight">
9411 <dim>384</dim>
9412 <dim>1536</dim>
9413 </port>
9414 </output>
9415 </layer>
9416 <layer id="603" name="__module.encoder.layer.9.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
9417 <data transpose_a="false" transpose_b="true" />
9418 <input>
9419 <port id="0" precision="FP32">
9420 <dim>-1</dim>
9421 <dim>-1</dim>
9422 <dim>1536</dim>
9423 </port>
9424 <port id="1" precision="FP32">
9425 <dim>384</dim>
9426 <dim>1536</dim>
9427 </port>
9428 </input>
9429 <output>
9430 <port id="2" precision="FP32">
9431 <dim>-1</dim>
9432 <dim>-1</dim>
9433 <dim>384</dim>
9434 </port>
9435 </output>
9436 </layer>
9437 <layer id="604" name="Constant_6107469" type="Const" version="opset1">
9438 <data element_type="f32" shape="1, 1, 384" offset="455827616" size="1536" />
9439 <output>
9440 <port id="0" precision="FP32">
9441 <dim>1</dim>
9442 <dim>1</dim>
9443 <dim>384</dim>
9444 </port>
9445 </output>
9446 </layer>
9447 <layer id="605" name="__module.encoder.layer.9.output.dense/aten::linear/Add" type="Add" version="opset1">
9448 <data auto_broadcast="numpy" />
9449 <input>
9450 <port id="0" precision="FP32">
9451 <dim>-1</dim>
9452 <dim>-1</dim>
9453 <dim>384</dim>
9454 </port>
9455 <port id="1" precision="FP32">
9456 <dim>1</dim>
9457 <dim>1</dim>
9458 <dim>384</dim>
9459 </port>
9460 </input>
9461 <output>
9462 <port id="2" precision="FP32" names="832,input.41">
9463 <dim>-1</dim>
9464 <dim>-1</dim>
9465 <dim>384</dim>
9466 </port>
9467 </output>
9468 </layer>
9469 <layer id="606" name="__module.encoder.layer.9.output/aten::add/Add" type="Add" version="opset1">
9470 <data auto_broadcast="numpy" />
9471 <input>
9472 <port id="0" precision="FP32">
9473 <dim>-1</dim>
9474 <dim>-1</dim>
9475 <dim>384</dim>
9476 </port>
9477 <port id="1" precision="FP32">
9478 <dim>-1</dim>
9479 <dim>-1</dim>
9480 <dim>384</dim>
9481 </port>
9482 </input>
9483 <output>
9484 <port id="2" precision="FP32" names="834">
9485 <dim>-1</dim>
9486 <dim>-1</dim>
9487 <dim>384</dim>
9488 </port>
9489 </output>
9490 </layer>
9491 <layer id="607" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
9492 <data element_type="i32" shape="1" offset="384850452" size="4" />
9493 <output>
9494 <port id="0" precision="I32">
9495 <dim>1</dim>
9496 </port>
9497 </output>
9498 </layer>
9499 <layer id="608" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
9500 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
9501 <input>
9502 <port id="0" precision="FP32">
9503 <dim>-1</dim>
9504 <dim>-1</dim>
9505 <dim>384</dim>
9506 </port>
9507 <port id="1" precision="I32">
9508 <dim>1</dim>
9509 </port>
9510 </input>
9511 <output>
9512 <port id="2" precision="FP32">
9513 <dim>-1</dim>
9514 <dim>-1</dim>
9515 <dim>384</dim>
9516 </port>
9517 </output>
9518 </layer>
9519 <layer id="609" name="Constant_6107470" type="Const" version="opset1">
9520 <data element_type="f32" shape="1, 1, 384" offset="455829152" size="1536" />
9521 <output>
9522 <port id="0" precision="FP32">
9523 <dim>1</dim>
9524 <dim>1</dim>
9525 <dim>384</dim>
9526 </port>
9527 </output>
9528 </layer>
9529 <layer id="610" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
9530 <data auto_broadcast="numpy" />
9531 <input>
9532 <port id="0" precision="FP32">
9533 <dim>-1</dim>
9534 <dim>-1</dim>
9535 <dim>384</dim>
9536 </port>
9537 <port id="1" precision="FP32">
9538 <dim>1</dim>
9539 <dim>1</dim>
9540 <dim>384</dim>
9541 </port>
9542 </input>
9543 <output>
9544 <port id="2" precision="FP32">
9545 <dim>-1</dim>
9546 <dim>-1</dim>
9547 <dim>384</dim>
9548 </port>
9549 </output>
9550 </layer>
9551 <layer id="611" name="Constant_6107471" type="Const" version="opset1">
9552 <data element_type="f32" shape="1, 1, 384" offset="455830688" size="1536" />
9553 <output>
9554 <port id="0" precision="FP32">
9555 <dim>1</dim>
9556 <dim>1</dim>
9557 <dim>384</dim>
9558 </port>
9559 </output>
9560 </layer>
9561 <layer id="612" name="__module.encoder.layer.9.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
9562 <data auto_broadcast="numpy" />
9563 <input>
9564 <port id="0" precision="FP32">
9565 <dim>-1</dim>
9566 <dim>-1</dim>
9567 <dim>384</dim>
9568 </port>
9569 <port id="1" precision="FP32">
9570 <dim>1</dim>
9571 <dim>1</dim>
9572 <dim>384</dim>
9573 </port>
9574 </input>
9575 <output>
9576 <port id="2" precision="FP32" names="838,hidden_states.61">
9577 <dim>-1</dim>
9578 <dim>-1</dim>
9579 <dim>384</dim>
9580 </port>
9581 </output>
9582 </layer>
9583 <layer id="613" name="self.encoder.layer.10.attention.self.query.weight" type="Const" version="opset1">
9584 <data element_type="f32" shape="384, 384" offset="455832224" size="589824" />
9585 <output>
9586 <port id="0" precision="FP32" names="self.encoder.layer.10.attention.self.query.weight">
9587 <dim>384</dim>
9588 <dim>384</dim>
9589 </port>
9590 </output>
9591 </layer>
9592 <layer id="614" name="__module.encoder.layer.10.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
9593 <data transpose_a="false" transpose_b="true" />
9594 <input>
9595 <port id="0" precision="FP32">
9596 <dim>-1</dim>
9597 <dim>-1</dim>
9598 <dim>384</dim>
9599 </port>
9600 <port id="1" precision="FP32">
9601 <dim>384</dim>
9602 <dim>384</dim>
9603 </port>
9604 </input>
9605 <output>
9606 <port id="2" precision="FP32">
9607 <dim>-1</dim>
9608 <dim>-1</dim>
9609 <dim>384</dim>
9610 </port>
9611 </output>
9612 </layer>
9613 <layer id="615" name="Constant_6107472" type="Const" version="opset1">
9614 <data element_type="f32" shape="1, 1, 384" offset="456422048" size="1536" />
9615 <output>
9616 <port id="0" precision="FP32">
9617 <dim>1</dim>
9618 <dim>1</dim>
9619 <dim>384</dim>
9620 </port>
9621 </output>
9622 </layer>
9623 <layer id="616" name="__module.encoder.layer.10.attention.self.query/aten::linear/Add" type="Add" version="opset1">
9624 <data auto_broadcast="numpy" />
9625 <input>
9626 <port id="0" precision="FP32">
9627 <dim>-1</dim>
9628 <dim>-1</dim>
9629 <dim>384</dim>
9630 </port>
9631 <port id="1" precision="FP32">
9632 <dim>1</dim>
9633 <dim>1</dim>
9634 <dim>384</dim>
9635 </port>
9636 </input>
9637 <output>
9638 <port id="2" precision="FP32" names="851,x.121">
9639 <dim>-1</dim>
9640 <dim>-1</dim>
9641 <dim>384</dim>
9642 </port>
9643 </output>
9644 </layer>
9645 <layer id="617" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
9646 <data element_type="i64" shape="4" offset="385444888" size="32" />
9647 <output>
9648 <port id="0" precision="I64">
9649 <dim>4</dim>
9650 </port>
9651 </output>
9652 </layer>
9653 <layer id="618" name="__module.encoder.layer.10.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
9654 <data special_zero="true" />
9655 <input>
9656 <port id="0" precision="FP32">
9657 <dim>-1</dim>
9658 <dim>-1</dim>
9659 <dim>384</dim>
9660 </port>
9661 <port id="1" precision="I64">
9662 <dim>4</dim>
9663 </port>
9664 </input>
9665 <output>
9666 <port id="2" precision="FP32" names="855,x.123">
9667 <dim>-1</dim>
9668 <dim>-1</dim>
9669 <dim>12</dim>
9670 <dim>32</dim>
9671 </port>
9672 </output>
9673 </layer>
9674 <layer id="619" name="Constant_6100513" type="Const" version="opset1">
9675 <data element_type="i64" shape="4" offset="385444920" size="32" />
9676 <output>
9677 <port id="0" precision="I64" names="856">
9678 <dim>4</dim>
9679 </port>
9680 </output>
9681 </layer>
9682 <layer id="620" name="__module.encoder.layer.10.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
9683 <input>
9684 <port id="0" precision="FP32">
9685 <dim>-1</dim>
9686 <dim>-1</dim>
9687 <dim>12</dim>
9688 <dim>32</dim>
9689 </port>
9690 <port id="1" precision="I64">
9691 <dim>4</dim>
9692 </port>
9693 </input>
9694 <output>
9695 <port id="2" precision="FP32" names="857">
9696 <dim>-1</dim>
9697 <dim>12</dim>
9698 <dim>-1</dim>
9699 <dim>32</dim>
9700 </port>
9701 </output>
9702 </layer>
9703 <layer id="621" name="self.encoder.layer.10.attention.self.key.weight" type="Const" version="opset1">
9704 <data element_type="f32" shape="384, 384" offset="456423584" size="589824" />
9705 <output>
9706 <port id="0" precision="FP32" names="self.encoder.layer.10.attention.self.key.weight">
9707 <dim>384</dim>
9708 <dim>384</dim>
9709 </port>
9710 </output>
9711 </layer>
9712 <layer id="622" name="__module.encoder.layer.10.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
9713 <data transpose_a="false" transpose_b="true" />
9714 <input>
9715 <port id="0" precision="FP32">
9716 <dim>-1</dim>
9717 <dim>-1</dim>
9718 <dim>384</dim>
9719 </port>
9720 <port id="1" precision="FP32">
9721 <dim>384</dim>
9722 <dim>384</dim>
9723 </port>
9724 </input>
9725 <output>
9726 <port id="2" precision="FP32">
9727 <dim>-1</dim>
9728 <dim>-1</dim>
9729 <dim>384</dim>
9730 </port>
9731 </output>
9732 </layer>
9733 <layer id="623" name="Constant_6107473" type="Const" version="opset1">
9734 <data element_type="f32" shape="1, 1, 384" offset="457013408" size="1536" />
9735 <output>
9736 <port id="0" precision="FP32">
9737 <dim>1</dim>
9738 <dim>1</dim>
9739 <dim>384</dim>
9740 </port>
9741 </output>
9742 </layer>
9743 <layer id="624" name="__module.encoder.layer.10.attention.self.key/aten::linear/Add" type="Add" version="opset1">
9744 <data auto_broadcast="numpy" />
9745 <input>
9746 <port id="0" precision="FP32">
9747 <dim>-1</dim>
9748 <dim>-1</dim>
9749 <dim>384</dim>
9750 </port>
9751 <port id="1" precision="FP32">
9752 <dim>1</dim>
9753 <dim>1</dim>
9754 <dim>384</dim>
9755 </port>
9756 </input>
9757 <output>
9758 <port id="2" precision="FP32" names="860,x.125">
9759 <dim>-1</dim>
9760 <dim>-1</dim>
9761 <dim>384</dim>
9762 </port>
9763 </output>
9764 </layer>
9765 <layer id="625" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
9766 <data element_type="i64" shape="4" offset="385444888" size="32" />
9767 <output>
9768 <port id="0" precision="I64">
9769 <dim>4</dim>
9770 </port>
9771 </output>
9772 </layer>
9773 <layer id="626" name="__module.encoder.layer.10.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
9774 <data special_zero="true" />
9775 <input>
9776 <port id="0" precision="FP32">
9777 <dim>-1</dim>
9778 <dim>-1</dim>
9779 <dim>384</dim>
9780 </port>
9781 <port id="1" precision="I64">
9782 <dim>4</dim>
9783 </port>
9784 </input>
9785 <output>
9786 <port id="2" precision="FP32" names="864,x.127">
9787 <dim>-1</dim>
9788 <dim>-1</dim>
9789 <dim>12</dim>
9790 <dim>32</dim>
9791 </port>
9792 </output>
9793 </layer>
9794 <layer id="627" name="Constant_6100536" type="Const" version="opset1">
9795 <data element_type="i64" shape="4" offset="385444920" size="32" />
9796 <output>
9797 <port id="0" precision="I64" names="865">
9798 <dim>4</dim>
9799 </port>
9800 </output>
9801 </layer>
9802 <layer id="628" name="__module.encoder.layer.10.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
9803 <input>
9804 <port id="0" precision="FP32">
9805 <dim>-1</dim>
9806 <dim>-1</dim>
9807 <dim>12</dim>
9808 <dim>32</dim>
9809 </port>
9810 <port id="1" precision="I64">
9811 <dim>4</dim>
9812 </port>
9813 </input>
9814 <output>
9815 <port id="2" precision="FP32" names="866">
9816 <dim>-1</dim>
9817 <dim>12</dim>
9818 <dim>-1</dim>
9819 <dim>32</dim>
9820 </port>
9821 </output>
9822 </layer>
9823 <layer id="629" name="self.encoder.layer.10.attention.self.value.weight" type="Const" version="opset1">
9824 <data element_type="f32" shape="384, 384" offset="457014944" size="589824" />
9825 <output>
9826 <port id="0" precision="FP32" names="self.encoder.layer.10.attention.self.value.weight">
9827 <dim>384</dim>
9828 <dim>384</dim>
9829 </port>
9830 </output>
9831 </layer>
9832 <layer id="630" name="__module.encoder.layer.10.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
9833 <data transpose_a="false" transpose_b="true" />
9834 <input>
9835 <port id="0" precision="FP32">
9836 <dim>-1</dim>
9837 <dim>-1</dim>
9838 <dim>384</dim>
9839 </port>
9840 <port id="1" precision="FP32">
9841 <dim>384</dim>
9842 <dim>384</dim>
9843 </port>
9844 </input>
9845 <output>
9846 <port id="2" precision="FP32">
9847 <dim>-1</dim>
9848 <dim>-1</dim>
9849 <dim>384</dim>
9850 </port>
9851 </output>
9852 </layer>
9853 <layer id="631" name="Constant_6107474" type="Const" version="opset1">
9854 <data element_type="f32" shape="1, 1, 384" offset="457604768" size="1536" />
9855 <output>
9856 <port id="0" precision="FP32">
9857 <dim>1</dim>
9858 <dim>1</dim>
9859 <dim>384</dim>
9860 </port>
9861 </output>
9862 </layer>
9863 <layer id="632" name="__module.encoder.layer.10.attention.self.value/aten::linear/Add" type="Add" version="opset1">
9864 <data auto_broadcast="numpy" />
9865 <input>
9866 <port id="0" precision="FP32">
9867 <dim>-1</dim>
9868 <dim>-1</dim>
9869 <dim>384</dim>
9870 </port>
9871 <port id="1" precision="FP32">
9872 <dim>1</dim>
9873 <dim>1</dim>
9874 <dim>384</dim>
9875 </port>
9876 </input>
9877 <output>
9878 <port id="2" precision="FP32" names="869,x.129">
9879 <dim>-1</dim>
9880 <dim>-1</dim>
9881 <dim>384</dim>
9882 </port>
9883 </output>
9884 </layer>
9885 <layer id="633" name="__module.encoder.layer.10.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
9886 <data element_type="i64" shape="4" offset="385444888" size="32" />
9887 <output>
9888 <port id="0" precision="I64">
9889 <dim>4</dim>
9890 </port>
9891 </output>
9892 </layer>
9893 <layer id="634" name="__module.encoder.layer.10.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
9894 <data special_zero="true" />
9895 <input>
9896 <port id="0" precision="FP32">
9897 <dim>-1</dim>
9898 <dim>-1</dim>
9899 <dim>384</dim>
9900 </port>
9901 <port id="1" precision="I64">
9902 <dim>4</dim>
9903 </port>
9904 </input>
9905 <output>
9906 <port id="2" precision="FP32" names="873,x.131">
9907 <dim>-1</dim>
9908 <dim>-1</dim>
9909 <dim>12</dim>
9910 <dim>32</dim>
9911 </port>
9912 </output>
9913 </layer>
9914 <layer id="635" name="Constant_6100559" type="Const" version="opset1">
9915 <data element_type="i64" shape="4" offset="385444920" size="32" />
9916 <output>
9917 <port id="0" precision="I64" names="874">
9918 <dim>4</dim>
9919 </port>
9920 </output>
9921 </layer>
9922 <layer id="636" name="__module.encoder.layer.10.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
9923 <input>
9924 <port id="0" precision="FP32">
9925 <dim>-1</dim>
9926 <dim>-1</dim>
9927 <dim>12</dim>
9928 <dim>32</dim>
9929 </port>
9930 <port id="1" precision="I64">
9931 <dim>4</dim>
9932 </port>
9933 </input>
9934 <output>
9935 <port id="2" precision="FP32" names="875">
9936 <dim>-1</dim>
9937 <dim>12</dim>
9938 <dim>-1</dim>
9939 <dim>32</dim>
9940 </port>
9941 </output>
9942 </layer>
9943 <layer id="637" name="__module.encoder.layer.10.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
9944 <data causal="false" />
9945 <input>
9946 <port id="0" precision="FP32">
9947 <dim>-1</dim>
9948 <dim>12</dim>
9949 <dim>-1</dim>
9950 <dim>32</dim>
9951 </port>
9952 <port id="1" precision="FP32">
9953 <dim>-1</dim>
9954 <dim>12</dim>
9955 <dim>-1</dim>
9956 <dim>32</dim>
9957 </port>
9958 <port id="2" precision="FP32">
9959 <dim>-1</dim>
9960 <dim>12</dim>
9961 <dim>-1</dim>
9962 <dim>32</dim>
9963 </port>
9964 <port id="3" precision="FP32">
9965 <dim>-1</dim>
9966 <dim>1</dim>
9967 <dim>-1</dim>
9968 <dim>-1</dim>
9969 </port>
9970 </input>
9971 <output>
9972 <port id="4" precision="FP32" names="876,attn_output.41">
9973 <dim>-1</dim>
9974 <dim>12</dim>
9975 <dim>-1</dim>
9976 <dim>32</dim>
9977 </port>
9978 </output>
9979 </layer>
9980 <layer id="638" name="__module.encoder.layer.10.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
9981 <data element_type="i32" shape="4" offset="386627704" size="16" />
9982 <output>
9983 <port id="0" precision="I32">
9984 <dim>4</dim>
9985 </port>
9986 </output>
9987 </layer>
9988 <layer id="639" name="__module.encoder.layer.10.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
9989 <input>
9990 <port id="0" precision="FP32">
9991 <dim>-1</dim>
9992 <dim>12</dim>
9993 <dim>-1</dim>
9994 <dim>32</dim>
9995 </port>
9996 <port id="1" precision="I32">
9997 <dim>4</dim>
9998 </port>
9999 </input>
10000 <output>
10001 <port id="2" precision="FP32" names="877,attn_output.43">
10002 <dim>-1</dim>
10003 <dim>-1</dim>
10004 <dim>12</dim>
10005 <dim>32</dim>
10006 </port>
10007 </output>
10008 </layer>
10009 <layer id="640" name="Constant_6107624" type="Const" version="opset1">
10010 <data element_type="i64" shape="3" offset="386627720" size="24" />
10011 <output>
10012 <port id="0" precision="I64">
10013 <dim>3</dim>
10014 </port>
10015 </output>
10016 </layer>
10017 <layer id="641" name="__module.encoder.layer.10.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
10018 <data special_zero="true" />
10019 <input>
10020 <port id="0" precision="FP32">
10021 <dim>-1</dim>
10022 <dim>-1</dim>
10023 <dim>12</dim>
10024 <dim>32</dim>
10025 </port>
10026 <port id="1" precision="I64">
10027 <dim>3</dim>
10028 </port>
10029 </input>
10030 <output>
10031 <port id="2" precision="FP32" names="879">
10032 <dim>-1</dim>
10033 <dim>-1</dim>
10034 <dim>384</dim>
10035 </port>
10036 </output>
10037 </layer>
10038 <layer id="642" name="self.encoder.layer.10.attention.output.dense.weight" type="Const" version="opset1">
10039 <data element_type="f32" shape="384, 384" offset="457606304" size="589824" />
10040 <output>
10041 <port id="0" precision="FP32" names="self.encoder.layer.10.attention.output.dense.weight">
10042 <dim>384</dim>
10043 <dim>384</dim>
10044 </port>
10045 </output>
10046 </layer>
10047 <layer id="643" name="__module.encoder.layer.10.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
10048 <data transpose_a="false" transpose_b="true" />
10049 <input>
10050 <port id="0" precision="FP32">
10051 <dim>-1</dim>
10052 <dim>-1</dim>
10053 <dim>384</dim>
10054 </port>
10055 <port id="1" precision="FP32">
10056 <dim>384</dim>
10057 <dim>384</dim>
10058 </port>
10059 </input>
10060 <output>
10061 <port id="2" precision="FP32">
10062 <dim>-1</dim>
10063 <dim>-1</dim>
10064 <dim>384</dim>
10065 </port>
10066 </output>
10067 </layer>
10068 <layer id="644" name="Constant_6107475" type="Const" version="opset1">
10069 <data element_type="f32" shape="1, 1, 384" offset="458196128" size="1536" />
10070 <output>
10071 <port id="0" precision="FP32">
10072 <dim>1</dim>
10073 <dim>1</dim>
10074 <dim>384</dim>
10075 </port>
10076 </output>
10077 </layer>
10078 <layer id="645" name="__module.encoder.layer.10.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
10079 <data auto_broadcast="numpy" />
10080 <input>
10081 <port id="0" precision="FP32">
10082 <dim>-1</dim>
10083 <dim>-1</dim>
10084 <dim>384</dim>
10085 </port>
10086 <port id="1" precision="FP32">
10087 <dim>1</dim>
10088 <dim>1</dim>
10089 <dim>384</dim>
10090 </port>
10091 </input>
10092 <output>
10093 <port id="2" precision="FP32" names="885,input.43">
10094 <dim>-1</dim>
10095 <dim>-1</dim>
10096 <dim>384</dim>
10097 </port>
10098 </output>
10099 </layer>
10100 <layer id="646" name="__module.encoder.layer.10.attention.output/aten::add/Add" type="Add" version="opset1">
10101 <data auto_broadcast="numpy" />
10102 <input>
10103 <port id="0" precision="FP32">
10104 <dim>-1</dim>
10105 <dim>-1</dim>
10106 <dim>384</dim>
10107 </port>
10108 <port id="1" precision="FP32">
10109 <dim>-1</dim>
10110 <dim>-1</dim>
10111 <dim>384</dim>
10112 </port>
10113 </input>
10114 <output>
10115 <port id="2" precision="FP32" names="887">
10116 <dim>-1</dim>
10117 <dim>-1</dim>
10118 <dim>384</dim>
10119 </port>
10120 </output>
10121 </layer>
10122 <layer id="647" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
10123 <data element_type="i32" shape="1" offset="384850452" size="4" />
10124 <output>
10125 <port id="0" precision="I32">
10126 <dim>1</dim>
10127 </port>
10128 </output>
10129 </layer>
10130 <layer id="648" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
10131 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
10132 <input>
10133 <port id="0" precision="FP32">
10134 <dim>-1</dim>
10135 <dim>-1</dim>
10136 <dim>384</dim>
10137 </port>
10138 <port id="1" precision="I32">
10139 <dim>1</dim>
10140 </port>
10141 </input>
10142 <output>
10143 <port id="2" precision="FP32">
10144 <dim>-1</dim>
10145 <dim>-1</dim>
10146 <dim>384</dim>
10147 </port>
10148 </output>
10149 </layer>
10150 <layer id="649" name="Constant_6107476" type="Const" version="opset1">
10151 <data element_type="f32" shape="1, 1, 384" offset="458197664" size="1536" />
10152 <output>
10153 <port id="0" precision="FP32">
10154 <dim>1</dim>
10155 <dim>1</dim>
10156 <dim>384</dim>
10157 </port>
10158 </output>
10159 </layer>
10160 <layer id="650" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
10161 <data auto_broadcast="numpy" />
10162 <input>
10163 <port id="0" precision="FP32">
10164 <dim>-1</dim>
10165 <dim>-1</dim>
10166 <dim>384</dim>
10167 </port>
10168 <port id="1" precision="FP32">
10169 <dim>1</dim>
10170 <dim>1</dim>
10171 <dim>384</dim>
10172 </port>
10173 </input>
10174 <output>
10175 <port id="2" precision="FP32">
10176 <dim>-1</dim>
10177 <dim>-1</dim>
10178 <dim>384</dim>
10179 </port>
10180 </output>
10181 </layer>
10182 <layer id="651" name="Constant_6107477" type="Const" version="opset1">
10183 <data element_type="f32" shape="1, 1, 384" offset="458199200" size="1536" />
10184 <output>
10185 <port id="0" precision="FP32">
10186 <dim>1</dim>
10187 <dim>1</dim>
10188 <dim>384</dim>
10189 </port>
10190 </output>
10191 </layer>
10192 <layer id="652" name="__module.encoder.layer.10.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
10193 <data auto_broadcast="numpy" />
10194 <input>
10195 <port id="0" precision="FP32">
10196 <dim>-1</dim>
10197 <dim>-1</dim>
10198 <dim>384</dim>
10199 </port>
10200 <port id="1" precision="FP32">
10201 <dim>1</dim>
10202 <dim>1</dim>
10203 <dim>384</dim>
10204 </port>
10205 </input>
10206 <output>
10207 <port id="2" precision="FP32" names="891,input_tensor.21">
10208 <dim>-1</dim>
10209 <dim>-1</dim>
10210 <dim>384</dim>
10211 </port>
10212 </output>
10213 </layer>
10214 <layer id="653" name="self.encoder.layer.10.intermediate.dense.weight" type="Const" version="opset1">
10215 <data element_type="f32" shape="1536, 384" offset="458200736" size="2359296" />
10216 <output>
10217 <port id="0" precision="FP32" names="self.encoder.layer.10.intermediate.dense.weight">
10218 <dim>1536</dim>
10219 <dim>384</dim>
10220 </port>
10221 </output>
10222 </layer>
10223 <layer id="654" name="__module.encoder.layer.10.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
10224 <data transpose_a="false" transpose_b="true" />
10225 <input>
10226 <port id="0" precision="FP32">
10227 <dim>-1</dim>
10228 <dim>-1</dim>
10229 <dim>384</dim>
10230 </port>
10231 <port id="1" precision="FP32">
10232 <dim>1536</dim>
10233 <dim>384</dim>
10234 </port>
10235 </input>
10236 <output>
10237 <port id="2" precision="FP32">
10238 <dim>-1</dim>
10239 <dim>-1</dim>
10240 <dim>1536</dim>
10241 </port>
10242 </output>
10243 </layer>
10244 <layer id="655" name="Constant_6107478" type="Const" version="opset1">
10245 <data element_type="f32" shape="1, 1, 1536" offset="460560032" size="6144" />
10246 <output>
10247 <port id="0" precision="FP32">
10248 <dim>1</dim>
10249 <dim>1</dim>
10250 <dim>1536</dim>
10251 </port>
10252 </output>
10253 </layer>
10254 <layer id="656" name="__module.encoder.layer.10.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
10255 <data auto_broadcast="numpy" />
10256 <input>
10257 <port id="0" precision="FP32">
10258 <dim>-1</dim>
10259 <dim>-1</dim>
10260 <dim>1536</dim>
10261 </port>
10262 <port id="1" precision="FP32">
10263 <dim>1</dim>
10264 <dim>1</dim>
10265 <dim>1536</dim>
10266 </port>
10267 </input>
10268 <output>
10269 <port id="2" precision="FP32" names="896">
10270 <dim>-1</dim>
10271 <dim>-1</dim>
10272 <dim>1536</dim>
10273 </port>
10274 </output>
10275 </layer>
10276 <layer id="657" name="__module.encoder.layer.10.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
10277 <data approximation_mode="ERF" />
10278 <input>
10279 <port id="0" precision="FP32">
10280 <dim>-1</dim>
10281 <dim>-1</dim>
10282 <dim>1536</dim>
10283 </port>
10284 </input>
10285 <output>
10286 <port id="1" precision="FP32" names="897">
10287 <dim>-1</dim>
10288 <dim>-1</dim>
10289 <dim>1536</dim>
10290 </port>
10291 </output>
10292 </layer>
10293 <layer id="658" name="self.encoder.layer.10.output.dense.weight" type="Const" version="opset1">
10294 <data element_type="f32" shape="384, 1536" offset="460566176" size="2359296" />
10295 <output>
10296 <port id="0" precision="FP32" names="self.encoder.layer.10.output.dense.weight">
10297 <dim>384</dim>
10298 <dim>1536</dim>
10299 </port>
10300 </output>
10301 </layer>
10302 <layer id="659" name="__module.encoder.layer.10.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
10303 <data transpose_a="false" transpose_b="true" />
10304 <input>
10305 <port id="0" precision="FP32">
10306 <dim>-1</dim>
10307 <dim>-1</dim>
10308 <dim>1536</dim>
10309 </port>
10310 <port id="1" precision="FP32">
10311 <dim>384</dim>
10312 <dim>1536</dim>
10313 </port>
10314 </input>
10315 <output>
10316 <port id="2" precision="FP32">
10317 <dim>-1</dim>
10318 <dim>-1</dim>
10319 <dim>384</dim>
10320 </port>
10321 </output>
10322 </layer>
10323 <layer id="660" name="Constant_6107479" type="Const" version="opset1">
10324 <data element_type="f32" shape="1, 1, 384" offset="462925472" size="1536" />
10325 <output>
10326 <port id="0" precision="FP32">
10327 <dim>1</dim>
10328 <dim>1</dim>
10329 <dim>384</dim>
10330 </port>
10331 </output>
10332 </layer>
10333 <layer id="661" name="__module.encoder.layer.10.output.dense/aten::linear/Add" type="Add" version="opset1">
10334 <data auto_broadcast="numpy" />
10335 <input>
10336 <port id="0" precision="FP32">
10337 <dim>-1</dim>
10338 <dim>-1</dim>
10339 <dim>384</dim>
10340 </port>
10341 <port id="1" precision="FP32">
10342 <dim>1</dim>
10343 <dim>1</dim>
10344 <dim>384</dim>
10345 </port>
10346 </input>
10347 <output>
10348 <port id="2" precision="FP32" names="903,input.45">
10349 <dim>-1</dim>
10350 <dim>-1</dim>
10351 <dim>384</dim>
10352 </port>
10353 </output>
10354 </layer>
10355 <layer id="662" name="__module.encoder.layer.10.output/aten::add/Add" type="Add" version="opset1">
10356 <data auto_broadcast="numpy" />
10357 <input>
10358 <port id="0" precision="FP32">
10359 <dim>-1</dim>
10360 <dim>-1</dim>
10361 <dim>384</dim>
10362 </port>
10363 <port id="1" precision="FP32">
10364 <dim>-1</dim>
10365 <dim>-1</dim>
10366 <dim>384</dim>
10367 </port>
10368 </input>
10369 <output>
10370 <port id="2" precision="FP32" names="905">
10371 <dim>-1</dim>
10372 <dim>-1</dim>
10373 <dim>384</dim>
10374 </port>
10375 </output>
10376 </layer>
10377 <layer id="663" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
10378 <data element_type="i32" shape="1" offset="384850452" size="4" />
10379 <output>
10380 <port id="0" precision="I32">
10381 <dim>1</dim>
10382 </port>
10383 </output>
10384 </layer>
10385 <layer id="664" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
10386 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
10387 <input>
10388 <port id="0" precision="FP32">
10389 <dim>-1</dim>
10390 <dim>-1</dim>
10391 <dim>384</dim>
10392 </port>
10393 <port id="1" precision="I32">
10394 <dim>1</dim>
10395 </port>
10396 </input>
10397 <output>
10398 <port id="2" precision="FP32">
10399 <dim>-1</dim>
10400 <dim>-1</dim>
10401 <dim>384</dim>
10402 </port>
10403 </output>
10404 </layer>
10405 <layer id="665" name="Constant_6107480" type="Const" version="opset1">
10406 <data element_type="f32" shape="1, 1, 384" offset="462927008" size="1536" />
10407 <output>
10408 <port id="0" precision="FP32">
10409 <dim>1</dim>
10410 <dim>1</dim>
10411 <dim>384</dim>
10412 </port>
10413 </output>
10414 </layer>
10415 <layer id="666" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
10416 <data auto_broadcast="numpy" />
10417 <input>
10418 <port id="0" precision="FP32">
10419 <dim>-1</dim>
10420 <dim>-1</dim>
10421 <dim>384</dim>
10422 </port>
10423 <port id="1" precision="FP32">
10424 <dim>1</dim>
10425 <dim>1</dim>
10426 <dim>384</dim>
10427 </port>
10428 </input>
10429 <output>
10430 <port id="2" precision="FP32">
10431 <dim>-1</dim>
10432 <dim>-1</dim>
10433 <dim>384</dim>
10434 </port>
10435 </output>
10436 </layer>
10437 <layer id="667" name="Constant_6107481" type="Const" version="opset1">
10438 <data element_type="f32" shape="1, 1, 384" offset="462928544" size="1536" />
10439 <output>
10440 <port id="0" precision="FP32">
10441 <dim>1</dim>
10442 <dim>1</dim>
10443 <dim>384</dim>
10444 </port>
10445 </output>
10446 </layer>
10447 <layer id="668" name="__module.encoder.layer.10.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
10448 <data auto_broadcast="numpy" />
10449 <input>
10450 <port id="0" precision="FP32">
10451 <dim>-1</dim>
10452 <dim>-1</dim>
10453 <dim>384</dim>
10454 </port>
10455 <port id="1" precision="FP32">
10456 <dim>1</dim>
10457 <dim>1</dim>
10458 <dim>384</dim>
10459 </port>
10460 </input>
10461 <output>
10462 <port id="2" precision="FP32" names="909,hidden_states.67">
10463 <dim>-1</dim>
10464 <dim>-1</dim>
10465 <dim>384</dim>
10466 </port>
10467 </output>
10468 </layer>
10469 <layer id="669" name="self.encoder.layer.11.attention.self.query.weight" type="Const" version="opset1">
10470 <data element_type="f32" shape="384, 384" offset="462930080" size="589824" />
10471 <output>
10472 <port id="0" precision="FP32" names="self.encoder.layer.11.attention.self.query.weight">
10473 <dim>384</dim>
10474 <dim>384</dim>
10475 </port>
10476 </output>
10477 </layer>
10478 <layer id="670" name="__module.encoder.layer.11.attention.self.query/aten::linear/MatMul" type="MatMul" version="opset1">
10479 <data transpose_a="false" transpose_b="true" />
10480 <input>
10481 <port id="0" precision="FP32">
10482 <dim>-1</dim>
10483 <dim>-1</dim>
10484 <dim>384</dim>
10485 </port>
10486 <port id="1" precision="FP32">
10487 <dim>384</dim>
10488 <dim>384</dim>
10489 </port>
10490 </input>
10491 <output>
10492 <port id="2" precision="FP32">
10493 <dim>-1</dim>
10494 <dim>-1</dim>
10495 <dim>384</dim>
10496 </port>
10497 </output>
10498 </layer>
10499 <layer id="671" name="Constant_6107482" type="Const" version="opset1">
10500 <data element_type="f32" shape="1, 1, 384" offset="463519904" size="1536" />
10501 <output>
10502 <port id="0" precision="FP32">
10503 <dim>1</dim>
10504 <dim>1</dim>
10505 <dim>384</dim>
10506 </port>
10507 </output>
10508 </layer>
10509 <layer id="672" name="__module.encoder.layer.11.attention.self.query/aten::linear/Add" type="Add" version="opset1">
10510 <data auto_broadcast="numpy" />
10511 <input>
10512 <port id="0" precision="FP32">
10513 <dim>-1</dim>
10514 <dim>-1</dim>
10515 <dim>384</dim>
10516 </port>
10517 <port id="1" precision="FP32">
10518 <dim>1</dim>
10519 <dim>1</dim>
10520 <dim>384</dim>
10521 </port>
10522 </input>
10523 <output>
10524 <port id="2" precision="FP32" names="922,x.133">
10525 <dim>-1</dim>
10526 <dim>-1</dim>
10527 <dim>384</dim>
10528 </port>
10529 </output>
10530 </layer>
10531 <layer id="673" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat" type="Const" version="opset1">
10532 <data element_type="i64" shape="4" offset="385444888" size="32" />
10533 <output>
10534 <port id="0" precision="I64">
10535 <dim>4</dim>
10536 </port>
10537 </output>
10538 </layer>
10539 <layer id="674" name="__module.encoder.layer.11.attention.self/aten::view/Reshape" type="Reshape" version="opset1">
10540 <data special_zero="true" />
10541 <input>
10542 <port id="0" precision="FP32">
10543 <dim>-1</dim>
10544 <dim>-1</dim>
10545 <dim>384</dim>
10546 </port>
10547 <port id="1" precision="I64">
10548 <dim>4</dim>
10549 </port>
10550 </input>
10551 <output>
10552 <port id="2" precision="FP32" names="926,x.135">
10553 <dim>-1</dim>
10554 <dim>-1</dim>
10555 <dim>12</dim>
10556 <dim>32</dim>
10557 </port>
10558 </output>
10559 </layer>
10560 <layer id="675" name="Constant_6100739" type="Const" version="opset1">
10561 <data element_type="i64" shape="4" offset="385444920" size="32" />
10562 <output>
10563 <port id="0" precision="I64" names="927">
10564 <dim>4</dim>
10565 </port>
10566 </output>
10567 </layer>
10568 <layer id="676" name="__module.encoder.layer.11.attention.self/aten::permute/Transpose" type="Transpose" version="opset1">
10569 <input>
10570 <port id="0" precision="FP32">
10571 <dim>-1</dim>
10572 <dim>-1</dim>
10573 <dim>12</dim>
10574 <dim>32</dim>
10575 </port>
10576 <port id="1" precision="I64">
10577 <dim>4</dim>
10578 </port>
10579 </input>
10580 <output>
10581 <port id="2" precision="FP32" names="928">
10582 <dim>-1</dim>
10583 <dim>12</dim>
10584 <dim>-1</dim>
10585 <dim>32</dim>
10586 </port>
10587 </output>
10588 </layer>
10589 <layer id="677" name="self.encoder.layer.11.attention.self.key.weight" type="Const" version="opset1">
10590 <data element_type="f32" shape="384, 384" offset="463521440" size="589824" />
10591 <output>
10592 <port id="0" precision="FP32" names="self.encoder.layer.11.attention.self.key.weight">
10593 <dim>384</dim>
10594 <dim>384</dim>
10595 </port>
10596 </output>
10597 </layer>
10598 <layer id="678" name="__module.encoder.layer.11.attention.self.key/aten::linear/MatMul" type="MatMul" version="opset1">
10599 <data transpose_a="false" transpose_b="true" />
10600 <input>
10601 <port id="0" precision="FP32">
10602 <dim>-1</dim>
10603 <dim>-1</dim>
10604 <dim>384</dim>
10605 </port>
10606 <port id="1" precision="FP32">
10607 <dim>384</dim>
10608 <dim>384</dim>
10609 </port>
10610 </input>
10611 <output>
10612 <port id="2" precision="FP32">
10613 <dim>-1</dim>
10614 <dim>-1</dim>
10615 <dim>384</dim>
10616 </port>
10617 </output>
10618 </layer>
10619 <layer id="679" name="Constant_6107483" type="Const" version="opset1">
10620 <data element_type="f32" shape="1, 1, 384" offset="464111264" size="1536" />
10621 <output>
10622 <port id="0" precision="FP32">
10623 <dim>1</dim>
10624 <dim>1</dim>
10625 <dim>384</dim>
10626 </port>
10627 </output>
10628 </layer>
10629 <layer id="680" name="__module.encoder.layer.11.attention.self.key/aten::linear/Add" type="Add" version="opset1">
10630 <data auto_broadcast="numpy" />
10631 <input>
10632 <port id="0" precision="FP32">
10633 <dim>-1</dim>
10634 <dim>-1</dim>
10635 <dim>384</dim>
10636 </port>
10637 <port id="1" precision="FP32">
10638 <dim>1</dim>
10639 <dim>1</dim>
10640 <dim>384</dim>
10641 </port>
10642 </input>
10643 <output>
10644 <port id="2" precision="FP32" names="931,x.137">
10645 <dim>-1</dim>
10646 <dim>-1</dim>
10647 <dim>384</dim>
10648 </port>
10649 </output>
10650 </layer>
10651 <layer id="681" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat_1" type="Const" version="opset1">
10652 <data element_type="i64" shape="4" offset="385444888" size="32" />
10653 <output>
10654 <port id="0" precision="I64">
10655 <dim>4</dim>
10656 </port>
10657 </output>
10658 </layer>
10659 <layer id="682" name="__module.encoder.layer.11.attention.self/aten::view/Reshape_1" type="Reshape" version="opset1">
10660 <data special_zero="true" />
10661 <input>
10662 <port id="0" precision="FP32">
10663 <dim>-1</dim>
10664 <dim>-1</dim>
10665 <dim>384</dim>
10666 </port>
10667 <port id="1" precision="I64">
10668 <dim>4</dim>
10669 </port>
10670 </input>
10671 <output>
10672 <port id="2" precision="FP32" names="935,x.139">
10673 <dim>-1</dim>
10674 <dim>-1</dim>
10675 <dim>12</dim>
10676 <dim>32</dim>
10677 </port>
10678 </output>
10679 </layer>
10680 <layer id="683" name="Constant_6100762" type="Const" version="opset1">
10681 <data element_type="i64" shape="4" offset="385444920" size="32" />
10682 <output>
10683 <port id="0" precision="I64" names="936">
10684 <dim>4</dim>
10685 </port>
10686 </output>
10687 </layer>
10688 <layer id="684" name="__module.encoder.layer.11.attention.self/aten::permute/Transpose_1" type="Transpose" version="opset1">
10689 <input>
10690 <port id="0" precision="FP32">
10691 <dim>-1</dim>
10692 <dim>-1</dim>
10693 <dim>12</dim>
10694 <dim>32</dim>
10695 </port>
10696 <port id="1" precision="I64">
10697 <dim>4</dim>
10698 </port>
10699 </input>
10700 <output>
10701 <port id="2" precision="FP32" names="937">
10702 <dim>-1</dim>
10703 <dim>12</dim>
10704 <dim>-1</dim>
10705 <dim>32</dim>
10706 </port>
10707 </output>
10708 </layer>
10709 <layer id="685" name="self.encoder.layer.11.attention.self.value.weight" type="Const" version="opset1">
10710 <data element_type="f32" shape="384, 384" offset="464112800" size="589824" />
10711 <output>
10712 <port id="0" precision="FP32" names="self.encoder.layer.11.attention.self.value.weight">
10713 <dim>384</dim>
10714 <dim>384</dim>
10715 </port>
10716 </output>
10717 </layer>
10718 <layer id="686" name="__module.encoder.layer.11.attention.self.value/aten::linear/MatMul" type="MatMul" version="opset1">
10719 <data transpose_a="false" transpose_b="true" />
10720 <input>
10721 <port id="0" precision="FP32">
10722 <dim>-1</dim>
10723 <dim>-1</dim>
10724 <dim>384</dim>
10725 </port>
10726 <port id="1" precision="FP32">
10727 <dim>384</dim>
10728 <dim>384</dim>
10729 </port>
10730 </input>
10731 <output>
10732 <port id="2" precision="FP32">
10733 <dim>-1</dim>
10734 <dim>-1</dim>
10735 <dim>384</dim>
10736 </port>
10737 </output>
10738 </layer>
10739 <layer id="687" name="Constant_6107484" type="Const" version="opset1">
10740 <data element_type="f32" shape="1, 1, 384" offset="464702624" size="1536" />
10741 <output>
10742 <port id="0" precision="FP32">
10743 <dim>1</dim>
10744 <dim>1</dim>
10745 <dim>384</dim>
10746 </port>
10747 </output>
10748 </layer>
10749 <layer id="688" name="__module.encoder.layer.11.attention.self.value/aten::linear/Add" type="Add" version="opset1">
10750 <data auto_broadcast="numpy" />
10751 <input>
10752 <port id="0" precision="FP32">
10753 <dim>-1</dim>
10754 <dim>-1</dim>
10755 <dim>384</dim>
10756 </port>
10757 <port id="1" precision="FP32">
10758 <dim>1</dim>
10759 <dim>1</dim>
10760 <dim>384</dim>
10761 </port>
10762 </input>
10763 <output>
10764 <port id="2" precision="FP32" names="940,x.141">
10765 <dim>-1</dim>
10766 <dim>-1</dim>
10767 <dim>384</dim>
10768 </port>
10769 </output>
10770 </layer>
10771 <layer id="689" name="__module.encoder.layer.11.attention.self/prim::ListConstruct/Concat_2" type="Const" version="opset1">
10772 <data element_type="i64" shape="4" offset="385444888" size="32" />
10773 <output>
10774 <port id="0" precision="I64">
10775 <dim>4</dim>
10776 </port>
10777 </output>
10778 </layer>
10779 <layer id="690" name="__module.encoder.layer.11.attention.self/aten::view/Reshape_2" type="Reshape" version="opset1">
10780 <data special_zero="true" />
10781 <input>
10782 <port id="0" precision="FP32">
10783 <dim>-1</dim>
10784 <dim>-1</dim>
10785 <dim>384</dim>
10786 </port>
10787 <port id="1" precision="I64">
10788 <dim>4</dim>
10789 </port>
10790 </input>
10791 <output>
10792 <port id="2" precision="FP32" names="944,x">
10793 <dim>-1</dim>
10794 <dim>-1</dim>
10795 <dim>12</dim>
10796 <dim>32</dim>
10797 </port>
10798 </output>
10799 </layer>
10800 <layer id="691" name="Constant_6100785" type="Const" version="opset1">
10801 <data element_type="i64" shape="4" offset="385444920" size="32" />
10802 <output>
10803 <port id="0" precision="I64" names="945">
10804 <dim>4</dim>
10805 </port>
10806 </output>
10807 </layer>
10808 <layer id="692" name="__module.encoder.layer.11.attention.self/aten::permute/Transpose_2" type="Transpose" version="opset1">
10809 <input>
10810 <port id="0" precision="FP32">
10811 <dim>-1</dim>
10812 <dim>-1</dim>
10813 <dim>12</dim>
10814 <dim>32</dim>
10815 </port>
10816 <port id="1" precision="I64">
10817 <dim>4</dim>
10818 </port>
10819 </input>
10820 <output>
10821 <port id="2" precision="FP32" names="946">
10822 <dim>-1</dim>
10823 <dim>12</dim>
10824 <dim>-1</dim>
10825 <dim>32</dim>
10826 </port>
10827 </output>
10828 </layer>
10829 <layer id="693" name="__module.encoder.layer.11.attention.self/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
10830 <data causal="false" />
10831 <input>
10832 <port id="0" precision="FP32">
10833 <dim>-1</dim>
10834 <dim>12</dim>
10835 <dim>-1</dim>
10836 <dim>32</dim>
10837 </port>
10838 <port id="1" precision="FP32">
10839 <dim>-1</dim>
10840 <dim>12</dim>
10841 <dim>-1</dim>
10842 <dim>32</dim>
10843 </port>
10844 <port id="2" precision="FP32">
10845 <dim>-1</dim>
10846 <dim>12</dim>
10847 <dim>-1</dim>
10848 <dim>32</dim>
10849 </port>
10850 <port id="3" precision="FP32">
10851 <dim>-1</dim>
10852 <dim>1</dim>
10853 <dim>-1</dim>
10854 <dim>-1</dim>
10855 </port>
10856 </input>
10857 <output>
10858 <port id="4" precision="FP32" names="947,attn_output.45">
10859 <dim>-1</dim>
10860 <dim>12</dim>
10861 <dim>-1</dim>
10862 <dim>32</dim>
10863 </port>
10864 </output>
10865 </layer>
10866 <layer id="694" name="__module.encoder.layer.11.attention.self/aten::transpose/ScatterElementsUpdate" type="Const" version="opset1">
10867 <data element_type="i32" shape="4" offset="386627704" size="16" />
10868 <output>
10869 <port id="0" precision="I32">
10870 <dim>4</dim>
10871 </port>
10872 </output>
10873 </layer>
10874 <layer id="695" name="__module.encoder.layer.11.attention.self/aten::transpose/Transpose" type="Transpose" version="opset1">
10875 <input>
10876 <port id="0" precision="FP32">
10877 <dim>-1</dim>
10878 <dim>12</dim>
10879 <dim>-1</dim>
10880 <dim>32</dim>
10881 </port>
10882 <port id="1" precision="I32">
10883 <dim>4</dim>
10884 </port>
10885 </input>
10886 <output>
10887 <port id="2" precision="FP32" names="948,attn_output">
10888 <dim>-1</dim>
10889 <dim>-1</dim>
10890 <dim>12</dim>
10891 <dim>32</dim>
10892 </port>
10893 </output>
10894 </layer>
10895 <layer id="696" name="Constant_6107625" type="Const" version="opset1">
10896 <data element_type="i64" shape="3" offset="386627720" size="24" />
10897 <output>
10898 <port id="0" precision="I64">
10899 <dim>3</dim>
10900 </port>
10901 </output>
10902 </layer>
10903 <layer id="697" name="__module.encoder.layer.11.attention.self/aten::reshape/Reshape" type="Reshape" version="opset1">
10904 <data special_zero="true" />
10905 <input>
10906 <port id="0" precision="FP32">
10907 <dim>-1</dim>
10908 <dim>-1</dim>
10909 <dim>12</dim>
10910 <dim>32</dim>
10911 </port>
10912 <port id="1" precision="I64">
10913 <dim>3</dim>
10914 </port>
10915 </input>
10916 <output>
10917 <port id="2" precision="FP32" names="950">
10918 <dim>-1</dim>
10919 <dim>-1</dim>
10920 <dim>384</dim>
10921 </port>
10922 </output>
10923 </layer>
10924 <layer id="698" name="self.encoder.layer.11.attention.output.dense.weight" type="Const" version="opset1">
10925 <data element_type="f32" shape="384, 384" offset="464704160" size="589824" />
10926 <output>
10927 <port id="0" precision="FP32" names="self.encoder.layer.11.attention.output.dense.weight">
10928 <dim>384</dim>
10929 <dim>384</dim>
10930 </port>
10931 </output>
10932 </layer>
10933 <layer id="699" name="__module.encoder.layer.11.attention.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
10934 <data transpose_a="false" transpose_b="true" />
10935 <input>
10936 <port id="0" precision="FP32">
10937 <dim>-1</dim>
10938 <dim>-1</dim>
10939 <dim>384</dim>
10940 </port>
10941 <port id="1" precision="FP32">
10942 <dim>384</dim>
10943 <dim>384</dim>
10944 </port>
10945 </input>
10946 <output>
10947 <port id="2" precision="FP32">
10948 <dim>-1</dim>
10949 <dim>-1</dim>
10950 <dim>384</dim>
10951 </port>
10952 </output>
10953 </layer>
10954 <layer id="700" name="Constant_6107485" type="Const" version="opset1">
10955 <data element_type="f32" shape="1, 1, 384" offset="465293984" size="1536" />
10956 <output>
10957 <port id="0" precision="FP32">
10958 <dim>1</dim>
10959 <dim>1</dim>
10960 <dim>384</dim>
10961 </port>
10962 </output>
10963 </layer>
10964 <layer id="701" name="__module.encoder.layer.11.attention.output.dense/aten::linear/Add" type="Add" version="opset1">
10965 <data auto_broadcast="numpy" />
10966 <input>
10967 <port id="0" precision="FP32">
10968 <dim>-1</dim>
10969 <dim>-1</dim>
10970 <dim>384</dim>
10971 </port>
10972 <port id="1" precision="FP32">
10973 <dim>1</dim>
10974 <dim>1</dim>
10975 <dim>384</dim>
10976 </port>
10977 </input>
10978 <output>
10979 <port id="2" precision="FP32" names="956,input.47">
10980 <dim>-1</dim>
10981 <dim>-1</dim>
10982 <dim>384</dim>
10983 </port>
10984 </output>
10985 </layer>
10986 <layer id="702" name="__module.encoder.layer.11.attention.output/aten::add/Add" type="Add" version="opset1">
10987 <data auto_broadcast="numpy" />
10988 <input>
10989 <port id="0" precision="FP32">
10990 <dim>-1</dim>
10991 <dim>-1</dim>
10992 <dim>384</dim>
10993 </port>
10994 <port id="1" precision="FP32">
10995 <dim>-1</dim>
10996 <dim>-1</dim>
10997 <dim>384</dim>
10998 </port>
10999 </input>
11000 <output>
11001 <port id="2" precision="FP32" names="958">
11002 <dim>-1</dim>
11003 <dim>-1</dim>
11004 <dim>384</dim>
11005 </port>
11006 </output>
11007 </layer>
11008 <layer id="703" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
11009 <data element_type="i32" shape="1" offset="384850452" size="4" />
11010 <output>
11011 <port id="0" precision="I32">
11012 <dim>1</dim>
11013 </port>
11014 </output>
11015 </layer>
11016 <layer id="704" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
11017 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
11018 <input>
11019 <port id="0" precision="FP32">
11020 <dim>-1</dim>
11021 <dim>-1</dim>
11022 <dim>384</dim>
11023 </port>
11024 <port id="1" precision="I32">
11025 <dim>1</dim>
11026 </port>
11027 </input>
11028 <output>
11029 <port id="2" precision="FP32">
11030 <dim>-1</dim>
11031 <dim>-1</dim>
11032 <dim>384</dim>
11033 </port>
11034 </output>
11035 </layer>
11036 <layer id="705" name="Constant_6107486" type="Const" version="opset1">
11037 <data element_type="f32" shape="1, 1, 384" offset="465295520" size="1536" />
11038 <output>
11039 <port id="0" precision="FP32">
11040 <dim>1</dim>
11041 <dim>1</dim>
11042 <dim>384</dim>
11043 </port>
11044 </output>
11045 </layer>
11046 <layer id="706" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
11047 <data auto_broadcast="numpy" />
11048 <input>
11049 <port id="0" precision="FP32">
11050 <dim>-1</dim>
11051 <dim>-1</dim>
11052 <dim>384</dim>
11053 </port>
11054 <port id="1" precision="FP32">
11055 <dim>1</dim>
11056 <dim>1</dim>
11057 <dim>384</dim>
11058 </port>
11059 </input>
11060 <output>
11061 <port id="2" precision="FP32">
11062 <dim>-1</dim>
11063 <dim>-1</dim>
11064 <dim>384</dim>
11065 </port>
11066 </output>
11067 </layer>
11068 <layer id="707" name="Constant_6107487" type="Const" version="opset1">
11069 <data element_type="f32" shape="1, 1, 384" offset="465297056" size="1536" />
11070 <output>
11071 <port id="0" precision="FP32">
11072 <dim>1</dim>
11073 <dim>1</dim>
11074 <dim>384</dim>
11075 </port>
11076 </output>
11077 </layer>
11078 <layer id="708" name="__module.encoder.layer.11.attention.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
11079 <data auto_broadcast="numpy" />
11080 <input>
11081 <port id="0" precision="FP32">
11082 <dim>-1</dim>
11083 <dim>-1</dim>
11084 <dim>384</dim>
11085 </port>
11086 <port id="1" precision="FP32">
11087 <dim>1</dim>
11088 <dim>1</dim>
11089 <dim>384</dim>
11090 </port>
11091 </input>
11092 <output>
11093 <port id="2" precision="FP32" names="962,input_tensor">
11094 <dim>-1</dim>
11095 <dim>-1</dim>
11096 <dim>384</dim>
11097 </port>
11098 </output>
11099 </layer>
11100 <layer id="709" name="self.encoder.layer.11.intermediate.dense.weight" type="Const" version="opset1">
11101 <data element_type="f32" shape="1536, 384" offset="465298592" size="2359296" />
11102 <output>
11103 <port id="0" precision="FP32" names="self.encoder.layer.11.intermediate.dense.weight">
11104 <dim>1536</dim>
11105 <dim>384</dim>
11106 </port>
11107 </output>
11108 </layer>
11109 <layer id="710" name="__module.encoder.layer.11.intermediate.dense/aten::linear/MatMul" type="MatMul" version="opset1">
11110 <data transpose_a="false" transpose_b="true" />
11111 <input>
11112 <port id="0" precision="FP32">
11113 <dim>-1</dim>
11114 <dim>-1</dim>
11115 <dim>384</dim>
11116 </port>
11117 <port id="1" precision="FP32">
11118 <dim>1536</dim>
11119 <dim>384</dim>
11120 </port>
11121 </input>
11122 <output>
11123 <port id="2" precision="FP32">
11124 <dim>-1</dim>
11125 <dim>-1</dim>
11126 <dim>1536</dim>
11127 </port>
11128 </output>
11129 </layer>
11130 <layer id="711" name="Constant_6107488" type="Const" version="opset1">
11131 <data element_type="f32" shape="1, 1, 1536" offset="467657888" size="6144" />
11132 <output>
11133 <port id="0" precision="FP32">
11134 <dim>1</dim>
11135 <dim>1</dim>
11136 <dim>1536</dim>
11137 </port>
11138 </output>
11139 </layer>
11140 <layer id="712" name="__module.encoder.layer.11.intermediate.dense/aten::linear/Add" type="Add" version="opset1">
11141 <data auto_broadcast="numpy" />
11142 <input>
11143 <port id="0" precision="FP32">
11144 <dim>-1</dim>
11145 <dim>-1</dim>
11146 <dim>1536</dim>
11147 </port>
11148 <port id="1" precision="FP32">
11149 <dim>1</dim>
11150 <dim>1</dim>
11151 <dim>1536</dim>
11152 </port>
11153 </input>
11154 <output>
11155 <port id="2" precision="FP32" names="967">
11156 <dim>-1</dim>
11157 <dim>-1</dim>
11158 <dim>1536</dim>
11159 </port>
11160 </output>
11161 </layer>
11162 <layer id="713" name="__module.encoder.layer.11.intermediate.intermediate_act_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
11163 <data approximation_mode="ERF" />
11164 <input>
11165 <port id="0" precision="FP32">
11166 <dim>-1</dim>
11167 <dim>-1</dim>
11168 <dim>1536</dim>
11169 </port>
11170 </input>
11171 <output>
11172 <port id="1" precision="FP32" names="968">
11173 <dim>-1</dim>
11174 <dim>-1</dim>
11175 <dim>1536</dim>
11176 </port>
11177 </output>
11178 </layer>
11179 <layer id="714" name="self.encoder.layer.11.output.dense.weight" type="Const" version="opset1">
11180 <data element_type="f32" shape="384, 1536" offset="467664032" size="2359296" />
11181 <output>
11182 <port id="0" precision="FP32" names="self.encoder.layer.11.output.dense.weight">
11183 <dim>384</dim>
11184 <dim>1536</dim>
11185 </port>
11186 </output>
11187 </layer>
11188 <layer id="715" name="__module.encoder.layer.11.output.dense/aten::linear/MatMul" type="MatMul" version="opset1">
11189 <data transpose_a="false" transpose_b="true" />
11190 <input>
11191 <port id="0" precision="FP32">
11192 <dim>-1</dim>
11193 <dim>-1</dim>
11194 <dim>1536</dim>
11195 </port>
11196 <port id="1" precision="FP32">
11197 <dim>384</dim>
11198 <dim>1536</dim>
11199 </port>
11200 </input>
11201 <output>
11202 <port id="2" precision="FP32">
11203 <dim>-1</dim>
11204 <dim>-1</dim>
11205 <dim>384</dim>
11206 </port>
11207 </output>
11208 </layer>
11209 <layer id="716" name="Constant_6107489" type="Const" version="opset1">
11210 <data element_type="f32" shape="1, 1, 384" offset="470023328" size="1536" />
11211 <output>
11212 <port id="0" precision="FP32">
11213 <dim>1</dim>
11214 <dim>1</dim>
11215 <dim>384</dim>
11216 </port>
11217 </output>
11218 </layer>
11219 <layer id="717" name="__module.encoder.layer.11.output.dense/aten::linear/Add" type="Add" version="opset1">
11220 <data auto_broadcast="numpy" />
11221 <input>
11222 <port id="0" precision="FP32">
11223 <dim>-1</dim>
11224 <dim>-1</dim>
11225 <dim>384</dim>
11226 </port>
11227 <port id="1" precision="FP32">
11228 <dim>1</dim>
11229 <dim>1</dim>
11230 <dim>384</dim>
11231 </port>
11232 </input>
11233 <output>
11234 <port id="2" precision="FP32" names="974,input">
11235 <dim>-1</dim>
11236 <dim>-1</dim>
11237 <dim>384</dim>
11238 </port>
11239 </output>
11240 </layer>
11241 <layer id="718" name="__module.encoder.layer.11.output/aten::add/Add" type="Add" version="opset1">
11242 <data auto_broadcast="numpy" />
11243 <input>
11244 <port id="0" precision="FP32">
11245 <dim>-1</dim>
11246 <dim>-1</dim>
11247 <dim>384</dim>
11248 </port>
11249 <port id="1" precision="FP32">
11250 <dim>-1</dim>
11251 <dim>-1</dim>
11252 <dim>384</dim>
11253 </port>
11254 </input>
11255 <output>
11256 <port id="2" precision="FP32" names="976">
11257 <dim>-1</dim>
11258 <dim>-1</dim>
11259 <dim>384</dim>
11260 </port>
11261 </output>
11262 </layer>
11263 <layer id="719" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/Multiply" type="Const" version="opset1">
11264 <data element_type="i32" shape="1" offset="384850452" size="4" />
11265 <output>
11266 <port id="0" precision="I32">
11267 <dim>1</dim>
11268 </port>
11269 </output>
11270 </layer>
11271 <layer id="720" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/MVN" type="MVN" version="opset6">
11272 <data eps="9.999999960041972e-13" normalize_variance="true" eps_mode="INSIDE_SQRT" />
11273 <input>
11274 <port id="0" precision="FP32">
11275 <dim>-1</dim>
11276 <dim>-1</dim>
11277 <dim>384</dim>
11278 </port>
11279 <port id="1" precision="I32">
11280 <dim>1</dim>
11281 </port>
11282 </input>
11283 <output>
11284 <port id="2" precision="FP32">
11285 <dim>-1</dim>
11286 <dim>-1</dim>
11287 <dim>384</dim>
11288 </port>
11289 </output>
11290 </layer>
11291 <layer id="721" name="Constant_6107490" type="Const" version="opset1">
11292 <data element_type="f32" shape="1, 1, 384" offset="470024864" size="1536" />
11293 <output>
11294 <port id="0" precision="FP32">
11295 <dim>1</dim>
11296 <dim>1</dim>
11297 <dim>384</dim>
11298 </port>
11299 </output>
11300 </layer>
11301 <layer id="722" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
11302 <data auto_broadcast="numpy" />
11303 <input>
11304 <port id="0" precision="FP32">
11305 <dim>-1</dim>
11306 <dim>-1</dim>
11307 <dim>384</dim>
11308 </port>
11309 <port id="1" precision="FP32">
11310 <dim>1</dim>
11311 <dim>1</dim>
11312 <dim>384</dim>
11313 </port>
11314 </input>
11315 <output>
11316 <port id="2" precision="FP32">
11317 <dim>-1</dim>
11318 <dim>-1</dim>
11319 <dim>384</dim>
11320 </port>
11321 </output>
11322 </layer>
11323 <layer id="723" name="Constant_6107491" type="Const" version="opset1">
11324 <data element_type="f32" shape="1, 1, 384" offset="470026400" size="1536" />
11325 <output>
11326 <port id="0" precision="FP32">
11327 <dim>1</dim>
11328 <dim>1</dim>
11329 <dim>384</dim>
11330 </port>
11331 </output>
11332 </layer>
11333 <layer id="724" name="__module.encoder.layer.11.output.LayerNorm/aten::layer_norm/Add" type="Add" version="opset1">
11334 <data auto_broadcast="numpy" />
11335 <input>
11336 <port id="0" precision="FP32">
11337 <dim>-1</dim>
11338 <dim>-1</dim>
11339 <dim>384</dim>
11340 </port>
11341 <port id="1" precision="FP32">
11342 <dim>1</dim>
11343 <dim>1</dim>
11344 <dim>384</dim>
11345 </port>
11346 </input>
11347 <output>
11348 <port id="2" precision="FP32" names="last_hidden_state">
11349 <dim>-1</dim>
11350 <dim>-1</dim>
11351 <dim>384</dim>
11352 </port>
11353 </output>
11354 </layer>
11355 <layer id="725" name="Result_6102677" type="Result" version="opset1">
11356 <input>
11357 <port id="0" precision="FP32">
11358 <dim>-1</dim>
11359 <dim>-1</dim>
11360 <dim>384</dim>
11361 </port>
11362 </input>
11363 </layer>
11364 </layers>
11365 <edges>
11366 <edge from-layer="0" from-port="0" to-layer="8" to-port="0" />
11367 <edge from-layer="1" from-port="0" to-layer="58" to-port="0" />
11368 <edge from-layer="2" from-port="0" to-layer="15" to-port="0" />
11369 <edge from-layer="2" from-port="0" to-layer="4" to-port="0" />
11370 <edge from-layer="3" from-port="0" to-layer="6" to-port="0" />
11371 <edge from-layer="4" from-port="1" to-layer="6" to-port="1" />
11372 <edge from-layer="5" from-port="0" to-layer="6" to-port="2" />
11373 <edge from-layer="6" from-port="3" to-layer="11" to-port="0" />
11374 <edge from-layer="7" from-port="0" to-layer="10" to-port="0" />
11375 <edge from-layer="8" from-port="1" to-layer="10" to-port="1" />
11376 <edge from-layer="9" from-port="0" to-layer="10" to-port="2" />
11377 <edge from-layer="10" from-port="3" to-layer="11" to-port="1" />
11378 <edge from-layer="11" from-port="2" to-layer="25" to-port="0" />
11379 <edge from-layer="12" from-port="0" to-layer="24" to-port="0" />
11380 <edge from-layer="13" from-port="0" to-layer="21" to-port="0" />
11381 <edge from-layer="14" from-port="0" to-layer="21" to-port="1" />
11382 <edge from-layer="15" from-port="1" to-layer="63" to-port="0" />
11383 <edge from-layer="15" from-port="1" to-layer="67" to-port="0" />
11384 <edge from-layer="15" from-port="1" to-layer="18" to-port="0" />
11385 <edge from-layer="16" from-port="0" to-layer="18" to-port="1" />
11386 <edge from-layer="17" from-port="0" to-layer="18" to-port="2" />
11387 <edge from-layer="18" from-port="3" to-layer="21" to-port="2" />
11388 <edge from-layer="19" from-port="0" to-layer="21" to-port="3" />
11389 <edge from-layer="20" from-port="0" to-layer="21" to-port="4" />
11390 <edge from-layer="21" from-port="5" to-layer="22" to-port="0" />
11391 <edge from-layer="22" from-port="1" to-layer="24" to-port="1" />
11392 <edge from-layer="23" from-port="0" to-layer="24" to-port="2" />
11393 <edge from-layer="24" from-port="3" to-layer="25" to-port="1" />
11394 <edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
11395 <edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
11396 <edge from-layer="27" from-port="2" to-layer="29" to-port="0" />
11397 <edge from-layer="28" from-port="0" to-layer="29" to-port="1" />
11398 <edge from-layer="29" from-port="2" to-layer="31" to-port="0" />
11399 <edge from-layer="30" from-port="0" to-layer="31" to-port="1" />
11400 <edge from-layer="31" from-port="2" to-layer="86" to-port="1" />
11401 <edge from-layer="31" from-port="2" to-layer="49" to-port="0" />
11402 <edge from-layer="31" from-port="2" to-layer="41" to-port="0" />
11403 <edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
11404 <edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
11405 <edge from-layer="33" from-port="2" to-layer="35" to-port="0" />
11406 <edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
11407 <edge from-layer="35" from-port="2" to-layer="37" to-port="0" />
11408 <edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
11409 <edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
11410 <edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
11411 <edge from-layer="39" from-port="2" to-layer="77" to-port="0" />
11412 <edge from-layer="40" from-port="0" to-layer="41" to-port="1" />
11413 <edge from-layer="41" from-port="2" to-layer="43" to-port="0" />
11414 <edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
11415 <edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
11416 <edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
11417 <edge from-layer="45" from-port="2" to-layer="47" to-port="0" />
11418 <edge from-layer="46" from-port="0" to-layer="47" to-port="1" />
11419 <edge from-layer="47" from-port="2" to-layer="77" to-port="1" />
11420 <edge from-layer="48" from-port="0" to-layer="49" to-port="1" />
11421 <edge from-layer="49" from-port="2" to-layer="51" to-port="0" />
11422 <edge from-layer="50" from-port="0" to-layer="51" to-port="1" />
11423 <edge from-layer="51" from-port="2" to-layer="53" to-port="0" />
11424 <edge from-layer="52" from-port="0" to-layer="53" to-port="1" />
11425 <edge from-layer="53" from-port="2" to-layer="55" to-port="0" />
11426 <edge from-layer="54" from-port="0" to-layer="55" to-port="1" />
11427 <edge from-layer="55" from-port="2" to-layer="77" to-port="2" />
11428 <edge from-layer="56" from-port="0" to-layer="73" to-port="0" />
11429 <edge from-layer="57" from-port="0" to-layer="58" to-port="1" />
11430 <edge from-layer="58" from-port="2" to-layer="60" to-port="0" />
11431 <edge from-layer="59" from-port="0" to-layer="60" to-port="1" />
11432 <edge from-layer="60" from-port="2" to-layer="69" to-port="0" />
11433 <edge from-layer="61" from-port="0" to-layer="63" to-port="1" />
11434 <edge from-layer="62" from-port="0" to-layer="63" to-port="2" />
11435 <edge from-layer="63" from-port="3" to-layer="68" to-port="0" />
11436 <edge from-layer="64" from-port="0" to-layer="68" to-port="1" />
11437 <edge from-layer="65" from-port="0" to-layer="67" to-port="1" />
11438 <edge from-layer="66" from-port="0" to-layer="67" to-port="2" />
11439 <edge from-layer="67" from-port="3" to-layer="68" to-port="2" />
11440 <edge from-layer="68" from-port="3" to-layer="69" to-port="1" />
11441 <edge from-layer="69" from-port="2" to-layer="70" to-port="0" />
11442 <edge from-layer="70" from-port="1" to-layer="72" to-port="0" />
11443 <edge from-layer="71" from-port="0" to-layer="72" to-port="1" />
11444 <edge from-layer="72" from-port="2" to-layer="73" to-port="1" />
11445 <edge from-layer="73" from-port="2" to-layer="74" to-port="0" />
11446 <edge from-layer="73" from-port="2" to-layer="76" to-port="2" />
11447 <edge from-layer="74" from-port="1" to-layer="76" to-port="0" />
11448 <edge from-layer="75" from-port="0" to-layer="76" to-port="1" />
11449 <edge from-layer="76" from-port="3" to-layer="133" to-port="3" />
11450 <edge from-layer="76" from-port="3" to-layer="413" to-port="3" />
11451 <edge from-layer="76" from-port="3" to-layer="189" to-port="3" />
11452 <edge from-layer="76" from-port="3" to-layer="245" to-port="3" />
11453 <edge from-layer="76" from-port="3" to-layer="301" to-port="3" />
11454 <edge from-layer="76" from-port="3" to-layer="357" to-port="3" />
11455 <edge from-layer="76" from-port="3" to-layer="693" to-port="3" />
11456 <edge from-layer="76" from-port="3" to-layer="637" to-port="3" />
11457 <edge from-layer="76" from-port="3" to-layer="581" to-port="3" />
11458 <edge from-layer="76" from-port="3" to-layer="525" to-port="3" />
11459 <edge from-layer="76" from-port="3" to-layer="469" to-port="3" />
11460 <edge from-layer="76" from-port="3" to-layer="77" to-port="3" />
11461 <edge from-layer="77" from-port="4" to-layer="79" to-port="0" />
11462 <edge from-layer="78" from-port="0" to-layer="79" to-port="1" />
11463 <edge from-layer="79" from-port="2" to-layer="81" to-port="0" />
11464 <edge from-layer="80" from-port="0" to-layer="81" to-port="1" />
11465 <edge from-layer="81" from-port="2" to-layer="83" to-port="0" />
11466 <edge from-layer="82" from-port="0" to-layer="83" to-port="1" />
11467 <edge from-layer="83" from-port="2" to-layer="85" to-port="0" />
11468 <edge from-layer="84" from-port="0" to-layer="85" to-port="1" />
11469 <edge from-layer="85" from-port="2" to-layer="86" to-port="0" />
11470 <edge from-layer="86" from-port="2" to-layer="88" to-port="0" />
11471 <edge from-layer="87" from-port="0" to-layer="88" to-port="1" />
11472 <edge from-layer="88" from-port="2" to-layer="90" to-port="0" />
11473 <edge from-layer="89" from-port="0" to-layer="90" to-port="1" />
11474 <edge from-layer="90" from-port="2" to-layer="92" to-port="0" />
11475 <edge from-layer="91" from-port="0" to-layer="92" to-port="1" />
11476 <edge from-layer="92" from-port="2" to-layer="102" to-port="1" />
11477 <edge from-layer="92" from-port="2" to-layer="94" to-port="0" />
11478 <edge from-layer="93" from-port="0" to-layer="94" to-port="1" />
11479 <edge from-layer="94" from-port="2" to-layer="96" to-port="0" />
11480 <edge from-layer="95" from-port="0" to-layer="96" to-port="1" />
11481 <edge from-layer="96" from-port="2" to-layer="97" to-port="0" />
11482 <edge from-layer="97" from-port="1" to-layer="99" to-port="0" />
11483 <edge from-layer="98" from-port="0" to-layer="99" to-port="1" />
11484 <edge from-layer="99" from-port="2" to-layer="101" to-port="0" />
11485 <edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
11486 <edge from-layer="101" from-port="2" to-layer="102" to-port="0" />
11487 <edge from-layer="102" from-port="2" to-layer="104" to-port="0" />
11488 <edge from-layer="103" from-port="0" to-layer="104" to-port="1" />
11489 <edge from-layer="104" from-port="2" to-layer="106" to-port="0" />
11490 <edge from-layer="105" from-port="0" to-layer="106" to-port="1" />
11491 <edge from-layer="106" from-port="2" to-layer="108" to-port="0" />
11492 <edge from-layer="107" from-port="0" to-layer="108" to-port="1" />
11493 <edge from-layer="108" from-port="2" to-layer="118" to-port="0" />
11494 <edge from-layer="108" from-port="2" to-layer="110" to-port="0" />
11495 <edge from-layer="108" from-port="2" to-layer="126" to-port="0" />
11496 <edge from-layer="108" from-port="2" to-layer="142" to-port="1" />
11497 <edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
11498 <edge from-layer="110" from-port="2" to-layer="112" to-port="0" />
11499 <edge from-layer="111" from-port="0" to-layer="112" to-port="1" />
11500 <edge from-layer="112" from-port="2" to-layer="114" to-port="0" />
11501 <edge from-layer="113" from-port="0" to-layer="114" to-port="1" />
11502 <edge from-layer="114" from-port="2" to-layer="116" to-port="0" />
11503 <edge from-layer="115" from-port="0" to-layer="116" to-port="1" />
11504 <edge from-layer="116" from-port="2" to-layer="133" to-port="0" />
11505 <edge from-layer="117" from-port="0" to-layer="118" to-port="1" />
11506 <edge from-layer="118" from-port="2" to-layer="120" to-port="0" />
11507 <edge from-layer="119" from-port="0" to-layer="120" to-port="1" />
11508 <edge from-layer="120" from-port="2" to-layer="122" to-port="0" />
11509 <edge from-layer="121" from-port="0" to-layer="122" to-port="1" />
11510 <edge from-layer="122" from-port="2" to-layer="124" to-port="0" />
11511 <edge from-layer="123" from-port="0" to-layer="124" to-port="1" />
11512 <edge from-layer="124" from-port="2" to-layer="133" to-port="1" />
11513 <edge from-layer="125" from-port="0" to-layer="126" to-port="1" />
11514 <edge from-layer="126" from-port="2" to-layer="128" to-port="0" />
11515 <edge from-layer="127" from-port="0" to-layer="128" to-port="1" />
11516 <edge from-layer="128" from-port="2" to-layer="130" to-port="0" />
11517 <edge from-layer="129" from-port="0" to-layer="130" to-port="1" />
11518 <edge from-layer="130" from-port="2" to-layer="132" to-port="0" />
11519 <edge from-layer="131" from-port="0" to-layer="132" to-port="1" />
11520 <edge from-layer="132" from-port="2" to-layer="133" to-port="2" />
11521 <edge from-layer="133" from-port="4" to-layer="135" to-port="0" />
11522 <edge from-layer="134" from-port="0" to-layer="135" to-port="1" />
11523 <edge from-layer="135" from-port="2" to-layer="137" to-port="0" />
11524 <edge from-layer="136" from-port="0" to-layer="137" to-port="1" />
11525 <edge from-layer="137" from-port="2" to-layer="139" to-port="0" />
11526 <edge from-layer="138" from-port="0" to-layer="139" to-port="1" />
11527 <edge from-layer="139" from-port="2" to-layer="141" to-port="0" />
11528 <edge from-layer="140" from-port="0" to-layer="141" to-port="1" />
11529 <edge from-layer="141" from-port="2" to-layer="142" to-port="0" />
11530 <edge from-layer="142" from-port="2" to-layer="144" to-port="0" />
11531 <edge from-layer="143" from-port="0" to-layer="144" to-port="1" />
11532 <edge from-layer="144" from-port="2" to-layer="146" to-port="0" />
11533 <edge from-layer="145" from-port="0" to-layer="146" to-port="1" />
11534 <edge from-layer="146" from-port="2" to-layer="148" to-port="0" />
11535 <edge from-layer="147" from-port="0" to-layer="148" to-port="1" />
11536 <edge from-layer="148" from-port="2" to-layer="158" to-port="1" />
11537 <edge from-layer="148" from-port="2" to-layer="150" to-port="0" />
11538 <edge from-layer="149" from-port="0" to-layer="150" to-port="1" />
11539 <edge from-layer="150" from-port="2" to-layer="152" to-port="0" />
11540 <edge from-layer="151" from-port="0" to-layer="152" to-port="1" />
11541 <edge from-layer="152" from-port="2" to-layer="153" to-port="0" />
11542 <edge from-layer="153" from-port="1" to-layer="155" to-port="0" />
11543 <edge from-layer="154" from-port="0" to-layer="155" to-port="1" />
11544 <edge from-layer="155" from-port="2" to-layer="157" to-port="0" />
11545 <edge from-layer="156" from-port="0" to-layer="157" to-port="1" />
11546 <edge from-layer="157" from-port="2" to-layer="158" to-port="0" />
11547 <edge from-layer="158" from-port="2" to-layer="160" to-port="0" />
11548 <edge from-layer="159" from-port="0" to-layer="160" to-port="1" />
11549 <edge from-layer="160" from-port="2" to-layer="162" to-port="0" />
11550 <edge from-layer="161" from-port="0" to-layer="162" to-port="1" />
11551 <edge from-layer="162" from-port="2" to-layer="164" to-port="0" />
11552 <edge from-layer="163" from-port="0" to-layer="164" to-port="1" />
11553 <edge from-layer="164" from-port="2" to-layer="198" to-port="1" />
11554 <edge from-layer="164" from-port="2" to-layer="166" to-port="0" />
11555 <edge from-layer="164" from-port="2" to-layer="182" to-port="0" />
11556 <edge from-layer="164" from-port="2" to-layer="174" to-port="0" />
11557 <edge from-layer="165" from-port="0" to-layer="166" to-port="1" />
11558 <edge from-layer="166" from-port="2" to-layer="168" to-port="0" />
11559 <edge from-layer="167" from-port="0" to-layer="168" to-port="1" />
11560 <edge from-layer="168" from-port="2" to-layer="170" to-port="0" />
11561 <edge from-layer="169" from-port="0" to-layer="170" to-port="1" />
11562 <edge from-layer="170" from-port="2" to-layer="172" to-port="0" />
11563 <edge from-layer="171" from-port="0" to-layer="172" to-port="1" />
11564 <edge from-layer="172" from-port="2" to-layer="189" to-port="0" />
11565 <edge from-layer="173" from-port="0" to-layer="174" to-port="1" />
11566 <edge from-layer="174" from-port="2" to-layer="176" to-port="0" />
11567 <edge from-layer="175" from-port="0" to-layer="176" to-port="1" />
11568 <edge from-layer="176" from-port="2" to-layer="178" to-port="0" />
11569 <edge from-layer="177" from-port="0" to-layer="178" to-port="1" />
11570 <edge from-layer="178" from-port="2" to-layer="180" to-port="0" />
11571 <edge from-layer="179" from-port="0" to-layer="180" to-port="1" />
11572 <edge from-layer="180" from-port="2" to-layer="189" to-port="1" />
11573 <edge from-layer="181" from-port="0" to-layer="182" to-port="1" />
11574 <edge from-layer="182" from-port="2" to-layer="184" to-port="0" />
11575 <edge from-layer="183" from-port="0" to-layer="184" to-port="1" />
11576 <edge from-layer="184" from-port="2" to-layer="186" to-port="0" />
11577 <edge from-layer="185" from-port="0" to-layer="186" to-port="1" />
11578 <edge from-layer="186" from-port="2" to-layer="188" to-port="0" />
11579 <edge from-layer="187" from-port="0" to-layer="188" to-port="1" />
11580 <edge from-layer="188" from-port="2" to-layer="189" to-port="2" />
11581 <edge from-layer="189" from-port="4" to-layer="191" to-port="0" />
11582 <edge from-layer="190" from-port="0" to-layer="191" to-port="1" />
11583 <edge from-layer="191" from-port="2" to-layer="193" to-port="0" />
11584 <edge from-layer="192" from-port="0" to-layer="193" to-port="1" />
11585 <edge from-layer="193" from-port="2" to-layer="195" to-port="0" />
11586 <edge from-layer="194" from-port="0" to-layer="195" to-port="1" />
11587 <edge from-layer="195" from-port="2" to-layer="197" to-port="0" />
11588 <edge from-layer="196" from-port="0" to-layer="197" to-port="1" />
11589 <edge from-layer="197" from-port="2" to-layer="198" to-port="0" />
11590 <edge from-layer="198" from-port="2" to-layer="200" to-port="0" />
11591 <edge from-layer="199" from-port="0" to-layer="200" to-port="1" />
11592 <edge from-layer="200" from-port="2" to-layer="202" to-port="0" />
11593 <edge from-layer="201" from-port="0" to-layer="202" to-port="1" />
11594 <edge from-layer="202" from-port="2" to-layer="204" to-port="0" />
11595 <edge from-layer="203" from-port="0" to-layer="204" to-port="1" />
11596 <edge from-layer="204" from-port="2" to-layer="214" to-port="1" />
11597 <edge from-layer="204" from-port="2" to-layer="206" to-port="0" />
11598 <edge from-layer="205" from-port="0" to-layer="206" to-port="1" />
11599 <edge from-layer="206" from-port="2" to-layer="208" to-port="0" />
11600 <edge from-layer="207" from-port="0" to-layer="208" to-port="1" />
11601 <edge from-layer="208" from-port="2" to-layer="209" to-port="0" />
11602 <edge from-layer="209" from-port="1" to-layer="211" to-port="0" />
11603 <edge from-layer="210" from-port="0" to-layer="211" to-port="1" />
11604 <edge from-layer="211" from-port="2" to-layer="213" to-port="0" />
11605 <edge from-layer="212" from-port="0" to-layer="213" to-port="1" />
11606 <edge from-layer="213" from-port="2" to-layer="214" to-port="0" />
11607 <edge from-layer="214" from-port="2" to-layer="216" to-port="0" />
11608 <edge from-layer="215" from-port="0" to-layer="216" to-port="1" />
11609 <edge from-layer="216" from-port="2" to-layer="218" to-port="0" />
11610 <edge from-layer="217" from-port="0" to-layer="218" to-port="1" />
11611 <edge from-layer="218" from-port="2" to-layer="220" to-port="0" />
11612 <edge from-layer="219" from-port="0" to-layer="220" to-port="1" />
11613 <edge from-layer="220" from-port="2" to-layer="230" to-port="0" />
11614 <edge from-layer="220" from-port="2" to-layer="238" to-port="0" />
11615 <edge from-layer="220" from-port="2" to-layer="222" to-port="0" />
11616 <edge from-layer="220" from-port="2" to-layer="254" to-port="1" />
11617 <edge from-layer="221" from-port="0" to-layer="222" to-port="1" />
11618 <edge from-layer="222" from-port="2" to-layer="224" to-port="0" />
11619 <edge from-layer="223" from-port="0" to-layer="224" to-port="1" />
11620 <edge from-layer="224" from-port="2" to-layer="226" to-port="0" />
11621 <edge from-layer="225" from-port="0" to-layer="226" to-port="1" />
11622 <edge from-layer="226" from-port="2" to-layer="228" to-port="0" />
11623 <edge from-layer="227" from-port="0" to-layer="228" to-port="1" />
11624 <edge from-layer="228" from-port="2" to-layer="245" to-port="0" />
11625 <edge from-layer="229" from-port="0" to-layer="230" to-port="1" />
11626 <edge from-layer="230" from-port="2" to-layer="232" to-port="0" />
11627 <edge from-layer="231" from-port="0" to-layer="232" to-port="1" />
11628 <edge from-layer="232" from-port="2" to-layer="234" to-port="0" />
11629 <edge from-layer="233" from-port="0" to-layer="234" to-port="1" />
11630 <edge from-layer="234" from-port="2" to-layer="236" to-port="0" />
11631 <edge from-layer="235" from-port="0" to-layer="236" to-port="1" />
11632 <edge from-layer="236" from-port="2" to-layer="245" to-port="1" />
11633 <edge from-layer="237" from-port="0" to-layer="238" to-port="1" />
11634 <edge from-layer="238" from-port="2" to-layer="240" to-port="0" />
11635 <edge from-layer="239" from-port="0" to-layer="240" to-port="1" />
11636 <edge from-layer="240" from-port="2" to-layer="242" to-port="0" />
11637 <edge from-layer="241" from-port="0" to-layer="242" to-port="1" />
11638 <edge from-layer="242" from-port="2" to-layer="244" to-port="0" />
11639 <edge from-layer="243" from-port="0" to-layer="244" to-port="1" />
11640 <edge from-layer="244" from-port="2" to-layer="245" to-port="2" />
11641 <edge from-layer="245" from-port="4" to-layer="247" to-port="0" />
11642 <edge from-layer="246" from-port="0" to-layer="247" to-port="1" />
11643 <edge from-layer="247" from-port="2" to-layer="249" to-port="0" />
11644 <edge from-layer="248" from-port="0" to-layer="249" to-port="1" />
11645 <edge from-layer="249" from-port="2" to-layer="251" to-port="0" />
11646 <edge from-layer="250" from-port="0" to-layer="251" to-port="1" />
11647 <edge from-layer="251" from-port="2" to-layer="253" to-port="0" />
11648 <edge from-layer="252" from-port="0" to-layer="253" to-port="1" />
11649 <edge from-layer="253" from-port="2" to-layer="254" to-port="0" />
11650 <edge from-layer="254" from-port="2" to-layer="256" to-port="0" />
11651 <edge from-layer="255" from-port="0" to-layer="256" to-port="1" />
11652 <edge from-layer="256" from-port="2" to-layer="258" to-port="0" />
11653 <edge from-layer="257" from-port="0" to-layer="258" to-port="1" />
11654 <edge from-layer="258" from-port="2" to-layer="260" to-port="0" />
11655 <edge from-layer="259" from-port="0" to-layer="260" to-port="1" />
11656 <edge from-layer="260" from-port="2" to-layer="262" to-port="0" />
11657 <edge from-layer="260" from-port="2" to-layer="270" to-port="1" />
11658 <edge from-layer="261" from-port="0" to-layer="262" to-port="1" />
11659 <edge from-layer="262" from-port="2" to-layer="264" to-port="0" />
11660 <edge from-layer="263" from-port="0" to-layer="264" to-port="1" />
11661 <edge from-layer="264" from-port="2" to-layer="265" to-port="0" />
11662 <edge from-layer="265" from-port="1" to-layer="267" to-port="0" />
11663 <edge from-layer="266" from-port="0" to-layer="267" to-port="1" />
11664 <edge from-layer="267" from-port="2" to-layer="269" to-port="0" />
11665 <edge from-layer="268" from-port="0" to-layer="269" to-port="1" />
11666 <edge from-layer="269" from-port="2" to-layer="270" to-port="0" />
11667 <edge from-layer="270" from-port="2" to-layer="272" to-port="0" />
11668 <edge from-layer="271" from-port="0" to-layer="272" to-port="1" />
11669 <edge from-layer="272" from-port="2" to-layer="274" to-port="0" />
11670 <edge from-layer="273" from-port="0" to-layer="274" to-port="1" />
11671 <edge from-layer="274" from-port="2" to-layer="276" to-port="0" />
11672 <edge from-layer="275" from-port="0" to-layer="276" to-port="1" />
11673 <edge from-layer="276" from-port="2" to-layer="294" to-port="0" />
11674 <edge from-layer="276" from-port="2" to-layer="286" to-port="0" />
11675 <edge from-layer="276" from-port="2" to-layer="278" to-port="0" />
11676 <edge from-layer="276" from-port="2" to-layer="310" to-port="1" />
11677 <edge from-layer="277" from-port="0" to-layer="278" to-port="1" />
11678 <edge from-layer="278" from-port="2" to-layer="280" to-port="0" />
11679 <edge from-layer="279" from-port="0" to-layer="280" to-port="1" />
11680 <edge from-layer="280" from-port="2" to-layer="282" to-port="0" />
11681 <edge from-layer="281" from-port="0" to-layer="282" to-port="1" />
11682 <edge from-layer="282" from-port="2" to-layer="284" to-port="0" />
11683 <edge from-layer="283" from-port="0" to-layer="284" to-port="1" />
11684 <edge from-layer="284" from-port="2" to-layer="301" to-port="0" />
11685 <edge from-layer="285" from-port="0" to-layer="286" to-port="1" />
11686 <edge from-layer="286" from-port="2" to-layer="288" to-port="0" />
11687 <edge from-layer="287" from-port="0" to-layer="288" to-port="1" />
11688 <edge from-layer="288" from-port="2" to-layer="290" to-port="0" />
11689 <edge from-layer="289" from-port="0" to-layer="290" to-port="1" />
11690 <edge from-layer="290" from-port="2" to-layer="292" to-port="0" />
11691 <edge from-layer="291" from-port="0" to-layer="292" to-port="1" />
11692 <edge from-layer="292" from-port="2" to-layer="301" to-port="1" />
11693 <edge from-layer="293" from-port="0" to-layer="294" to-port="1" />
11694 <edge from-layer="294" from-port="2" to-layer="296" to-port="0" />
11695 <edge from-layer="295" from-port="0" to-layer="296" to-port="1" />
11696 <edge from-layer="296" from-port="2" to-layer="298" to-port="0" />
11697 <edge from-layer="297" from-port="0" to-layer="298" to-port="1" />
11698 <edge from-layer="298" from-port="2" to-layer="300" to-port="0" />
11699 <edge from-layer="299" from-port="0" to-layer="300" to-port="1" />
11700 <edge from-layer="300" from-port="2" to-layer="301" to-port="2" />
11701 <edge from-layer="301" from-port="4" to-layer="303" to-port="0" />
11702 <edge from-layer="302" from-port="0" to-layer="303" to-port="1" />
11703 <edge from-layer="303" from-port="2" to-layer="305" to-port="0" />
11704 <edge from-layer="304" from-port="0" to-layer="305" to-port="1" />
11705 <edge from-layer="305" from-port="2" to-layer="307" to-port="0" />
11706 <edge from-layer="306" from-port="0" to-layer="307" to-port="1" />
11707 <edge from-layer="307" from-port="2" to-layer="309" to-port="0" />
11708 <edge from-layer="308" from-port="0" to-layer="309" to-port="1" />
11709 <edge from-layer="309" from-port="2" to-layer="310" to-port="0" />
11710 <edge from-layer="310" from-port="2" to-layer="312" to-port="0" />
11711 <edge from-layer="311" from-port="0" to-layer="312" to-port="1" />
11712 <edge from-layer="312" from-port="2" to-layer="314" to-port="0" />
11713 <edge from-layer="313" from-port="0" to-layer="314" to-port="1" />
11714 <edge from-layer="314" from-port="2" to-layer="316" to-port="0" />
11715 <edge from-layer="315" from-port="0" to-layer="316" to-port="1" />
11716 <edge from-layer="316" from-port="2" to-layer="318" to-port="0" />
11717 <edge from-layer="316" from-port="2" to-layer="326" to-port="1" />
11718 <edge from-layer="317" from-port="0" to-layer="318" to-port="1" />
11719 <edge from-layer="318" from-port="2" to-layer="320" to-port="0" />
11720 <edge from-layer="319" from-port="0" to-layer="320" to-port="1" />
11721 <edge from-layer="320" from-port="2" to-layer="321" to-port="0" />
11722 <edge from-layer="321" from-port="1" to-layer="323" to-port="0" />
11723 <edge from-layer="322" from-port="0" to-layer="323" to-port="1" />
11724 <edge from-layer="323" from-port="2" to-layer="325" to-port="0" />
11725 <edge from-layer="324" from-port="0" to-layer="325" to-port="1" />
11726 <edge from-layer="325" from-port="2" to-layer="326" to-port="0" />
11727 <edge from-layer="326" from-port="2" to-layer="328" to-port="0" />
11728 <edge from-layer="327" from-port="0" to-layer="328" to-port="1" />
11729 <edge from-layer="328" from-port="2" to-layer="330" to-port="0" />
11730 <edge from-layer="329" from-port="0" to-layer="330" to-port="1" />
11731 <edge from-layer="330" from-port="2" to-layer="332" to-port="0" />
11732 <edge from-layer="331" from-port="0" to-layer="332" to-port="1" />
11733 <edge from-layer="332" from-port="2" to-layer="334" to-port="0" />
11734 <edge from-layer="332" from-port="2" to-layer="342" to-port="0" />
11735 <edge from-layer="332" from-port="2" to-layer="366" to-port="1" />
11736 <edge from-layer="332" from-port="2" to-layer="350" to-port="0" />
11737 <edge from-layer="333" from-port="0" to-layer="334" to-port="1" />
11738 <edge from-layer="334" from-port="2" to-layer="336" to-port="0" />
11739 <edge from-layer="335" from-port="0" to-layer="336" to-port="1" />
11740 <edge from-layer="336" from-port="2" to-layer="338" to-port="0" />
11741 <edge from-layer="337" from-port="0" to-layer="338" to-port="1" />
11742 <edge from-layer="338" from-port="2" to-layer="340" to-port="0" />
11743 <edge from-layer="339" from-port="0" to-layer="340" to-port="1" />
11744 <edge from-layer="340" from-port="2" to-layer="357" to-port="0" />
11745 <edge from-layer="341" from-port="0" to-layer="342" to-port="1" />
11746 <edge from-layer="342" from-port="2" to-layer="344" to-port="0" />
11747 <edge from-layer="343" from-port="0" to-layer="344" to-port="1" />
11748 <edge from-layer="344" from-port="2" to-layer="346" to-port="0" />
11749 <edge from-layer="345" from-port="0" to-layer="346" to-port="1" />
11750 <edge from-layer="346" from-port="2" to-layer="348" to-port="0" />
11751 <edge from-layer="347" from-port="0" to-layer="348" to-port="1" />
11752 <edge from-layer="348" from-port="2" to-layer="357" to-port="1" />
11753 <edge from-layer="349" from-port="0" to-layer="350" to-port="1" />
11754 <edge from-layer="350" from-port="2" to-layer="352" to-port="0" />
11755 <edge from-layer="351" from-port="0" to-layer="352" to-port="1" />
11756 <edge from-layer="352" from-port="2" to-layer="354" to-port="0" />
11757 <edge from-layer="353" from-port="0" to-layer="354" to-port="1" />
11758 <edge from-layer="354" from-port="2" to-layer="356" to-port="0" />
11759 <edge from-layer="355" from-port="0" to-layer="356" to-port="1" />
11760 <edge from-layer="356" from-port="2" to-layer="357" to-port="2" />
11761 <edge from-layer="357" from-port="4" to-layer="359" to-port="0" />
11762 <edge from-layer="358" from-port="0" to-layer="359" to-port="1" />
11763 <edge from-layer="359" from-port="2" to-layer="361" to-port="0" />
11764 <edge from-layer="360" from-port="0" to-layer="361" to-port="1" />
11765 <edge from-layer="361" from-port="2" to-layer="363" to-port="0" />
11766 <edge from-layer="362" from-port="0" to-layer="363" to-port="1" />
11767 <edge from-layer="363" from-port="2" to-layer="365" to-port="0" />
11768 <edge from-layer="364" from-port="0" to-layer="365" to-port="1" />
11769 <edge from-layer="365" from-port="2" to-layer="366" to-port="0" />
11770 <edge from-layer="366" from-port="2" to-layer="368" to-port="0" />
11771 <edge from-layer="367" from-port="0" to-layer="368" to-port="1" />
11772 <edge from-layer="368" from-port="2" to-layer="370" to-port="0" />
11773 <edge from-layer="369" from-port="0" to-layer="370" to-port="1" />
11774 <edge from-layer="370" from-port="2" to-layer="372" to-port="0" />
11775 <edge from-layer="371" from-port="0" to-layer="372" to-port="1" />
11776 <edge from-layer="372" from-port="2" to-layer="374" to-port="0" />
11777 <edge from-layer="372" from-port="2" to-layer="382" to-port="1" />
11778 <edge from-layer="373" from-port="0" to-layer="374" to-port="1" />
11779 <edge from-layer="374" from-port="2" to-layer="376" to-port="0" />
11780 <edge from-layer="375" from-port="0" to-layer="376" to-port="1" />
11781 <edge from-layer="376" from-port="2" to-layer="377" to-port="0" />
11782 <edge from-layer="377" from-port="1" to-layer="379" to-port="0" />
11783 <edge from-layer="378" from-port="0" to-layer="379" to-port="1" />
11784 <edge from-layer="379" from-port="2" to-layer="381" to-port="0" />
11785 <edge from-layer="380" from-port="0" to-layer="381" to-port="1" />
11786 <edge from-layer="381" from-port="2" to-layer="382" to-port="0" />
11787 <edge from-layer="382" from-port="2" to-layer="384" to-port="0" />
11788 <edge from-layer="383" from-port="0" to-layer="384" to-port="1" />
11789 <edge from-layer="384" from-port="2" to-layer="386" to-port="0" />
11790 <edge from-layer="385" from-port="0" to-layer="386" to-port="1" />
11791 <edge from-layer="386" from-port="2" to-layer="388" to-port="0" />
11792 <edge from-layer="387" from-port="0" to-layer="388" to-port="1" />
11793 <edge from-layer="388" from-port="2" to-layer="422" to-port="1" />
11794 <edge from-layer="388" from-port="2" to-layer="406" to-port="0" />
11795 <edge from-layer="388" from-port="2" to-layer="390" to-port="0" />
11796 <edge from-layer="388" from-port="2" to-layer="398" to-port="0" />
11797 <edge from-layer="389" from-port="0" to-layer="390" to-port="1" />
11798 <edge from-layer="390" from-port="2" to-layer="392" to-port="0" />
11799 <edge from-layer="391" from-port="0" to-layer="392" to-port="1" />
11800 <edge from-layer="392" from-port="2" to-layer="394" to-port="0" />
11801 <edge from-layer="393" from-port="0" to-layer="394" to-port="1" />
11802 <edge from-layer="394" from-port="2" to-layer="396" to-port="0" />
11803 <edge from-layer="395" from-port="0" to-layer="396" to-port="1" />
11804 <edge from-layer="396" from-port="2" to-layer="413" to-port="0" />
11805 <edge from-layer="397" from-port="0" to-layer="398" to-port="1" />
11806 <edge from-layer="398" from-port="2" to-layer="400" to-port="0" />
11807 <edge from-layer="399" from-port="0" to-layer="400" to-port="1" />
11808 <edge from-layer="400" from-port="2" to-layer="402" to-port="0" />
11809 <edge from-layer="401" from-port="0" to-layer="402" to-port="1" />
11810 <edge from-layer="402" from-port="2" to-layer="404" to-port="0" />
11811 <edge from-layer="403" from-port="0" to-layer="404" to-port="1" />
11812 <edge from-layer="404" from-port="2" to-layer="413" to-port="1" />
11813 <edge from-layer="405" from-port="0" to-layer="406" to-port="1" />
11814 <edge from-layer="406" from-port="2" to-layer="408" to-port="0" />
11815 <edge from-layer="407" from-port="0" to-layer="408" to-port="1" />
11816 <edge from-layer="408" from-port="2" to-layer="410" to-port="0" />
11817 <edge from-layer="409" from-port="0" to-layer="410" to-port="1" />
11818 <edge from-layer="410" from-port="2" to-layer="412" to-port="0" />
11819 <edge from-layer="411" from-port="0" to-layer="412" to-port="1" />
11820 <edge from-layer="412" from-port="2" to-layer="413" to-port="2" />
11821 <edge from-layer="413" from-port="4" to-layer="415" to-port="0" />
11822 <edge from-layer="414" from-port="0" to-layer="415" to-port="1" />
11823 <edge from-layer="415" from-port="2" to-layer="417" to-port="0" />
11824 <edge from-layer="416" from-port="0" to-layer="417" to-port="1" />
11825 <edge from-layer="417" from-port="2" to-layer="419" to-port="0" />
11826 <edge from-layer="418" from-port="0" to-layer="419" to-port="1" />
11827 <edge from-layer="419" from-port="2" to-layer="421" to-port="0" />
11828 <edge from-layer="420" from-port="0" to-layer="421" to-port="1" />
11829 <edge from-layer="421" from-port="2" to-layer="422" to-port="0" />
11830 <edge from-layer="422" from-port="2" to-layer="424" to-port="0" />
11831 <edge from-layer="423" from-port="0" to-layer="424" to-port="1" />
11832 <edge from-layer="424" from-port="2" to-layer="426" to-port="0" />
11833 <edge from-layer="425" from-port="0" to-layer="426" to-port="1" />
11834 <edge from-layer="426" from-port="2" to-layer="428" to-port="0" />
11835 <edge from-layer="427" from-port="0" to-layer="428" to-port="1" />
11836 <edge from-layer="428" from-port="2" to-layer="430" to-port="0" />
11837 <edge from-layer="428" from-port="2" to-layer="438" to-port="1" />
11838 <edge from-layer="429" from-port="0" to-layer="430" to-port="1" />
11839 <edge from-layer="430" from-port="2" to-layer="432" to-port="0" />
11840 <edge from-layer="431" from-port="0" to-layer="432" to-port="1" />
11841 <edge from-layer="432" from-port="2" to-layer="433" to-port="0" />
11842 <edge from-layer="433" from-port="1" to-layer="435" to-port="0" />
11843 <edge from-layer="434" from-port="0" to-layer="435" to-port="1" />
11844 <edge from-layer="435" from-port="2" to-layer="437" to-port="0" />
11845 <edge from-layer="436" from-port="0" to-layer="437" to-port="1" />
11846 <edge from-layer="437" from-port="2" to-layer="438" to-port="0" />
11847 <edge from-layer="438" from-port="2" to-layer="440" to-port="0" />
11848 <edge from-layer="439" from-port="0" to-layer="440" to-port="1" />
11849 <edge from-layer="440" from-port="2" to-layer="442" to-port="0" />
11850 <edge from-layer="441" from-port="0" to-layer="442" to-port="1" />
11851 <edge from-layer="442" from-port="2" to-layer="444" to-port="0" />
11852 <edge from-layer="443" from-port="0" to-layer="444" to-port="1" />
11853 <edge from-layer="444" from-port="2" to-layer="478" to-port="1" />
11854 <edge from-layer="444" from-port="2" to-layer="462" to-port="0" />
11855 <edge from-layer="444" from-port="2" to-layer="454" to-port="0" />
11856 <edge from-layer="444" from-port="2" to-layer="446" to-port="0" />
11857 <edge from-layer="445" from-port="0" to-layer="446" to-port="1" />
11858 <edge from-layer="446" from-port="2" to-layer="448" to-port="0" />
11859 <edge from-layer="447" from-port="0" to-layer="448" to-port="1" />
11860 <edge from-layer="448" from-port="2" to-layer="450" to-port="0" />
11861 <edge from-layer="449" from-port="0" to-layer="450" to-port="1" />
11862 <edge from-layer="450" from-port="2" to-layer="452" to-port="0" />
11863 <edge from-layer="451" from-port="0" to-layer="452" to-port="1" />
11864 <edge from-layer="452" from-port="2" to-layer="469" to-port="0" />
11865 <edge from-layer="453" from-port="0" to-layer="454" to-port="1" />
11866 <edge from-layer="454" from-port="2" to-layer="456" to-port="0" />
11867 <edge from-layer="455" from-port="0" to-layer="456" to-port="1" />
11868 <edge from-layer="456" from-port="2" to-layer="458" to-port="0" />
11869 <edge from-layer="457" from-port="0" to-layer="458" to-port="1" />
11870 <edge from-layer="458" from-port="2" to-layer="460" to-port="0" />
11871 <edge from-layer="459" from-port="0" to-layer="460" to-port="1" />
11872 <edge from-layer="460" from-port="2" to-layer="469" to-port="1" />
11873 <edge from-layer="461" from-port="0" to-layer="462" to-port="1" />
11874 <edge from-layer="462" from-port="2" to-layer="464" to-port="0" />
11875 <edge from-layer="463" from-port="0" to-layer="464" to-port="1" />
11876 <edge from-layer="464" from-port="2" to-layer="466" to-port="0" />
11877 <edge from-layer="465" from-port="0" to-layer="466" to-port="1" />
11878 <edge from-layer="466" from-port="2" to-layer="468" to-port="0" />
11879 <edge from-layer="467" from-port="0" to-layer="468" to-port="1" />
11880 <edge from-layer="468" from-port="2" to-layer="469" to-port="2" />
11881 <edge from-layer="469" from-port="4" to-layer="471" to-port="0" />
11882 <edge from-layer="470" from-port="0" to-layer="471" to-port="1" />
11883 <edge from-layer="471" from-port="2" to-layer="473" to-port="0" />
11884 <edge from-layer="472" from-port="0" to-layer="473" to-port="1" />
11885 <edge from-layer="473" from-port="2" to-layer="475" to-port="0" />
11886 <edge from-layer="474" from-port="0" to-layer="475" to-port="1" />
11887 <edge from-layer="475" from-port="2" to-layer="477" to-port="0" />
11888 <edge from-layer="476" from-port="0" to-layer="477" to-port="1" />
11889 <edge from-layer="477" from-port="2" to-layer="478" to-port="0" />
11890 <edge from-layer="478" from-port="2" to-layer="480" to-port="0" />
11891 <edge from-layer="479" from-port="0" to-layer="480" to-port="1" />
11892 <edge from-layer="480" from-port="2" to-layer="482" to-port="0" />
11893 <edge from-layer="481" from-port="0" to-layer="482" to-port="1" />
11894 <edge from-layer="482" from-port="2" to-layer="484" to-port="0" />
11895 <edge from-layer="483" from-port="0" to-layer="484" to-port="1" />
11896 <edge from-layer="484" from-port="2" to-layer="486" to-port="0" />
11897 <edge from-layer="484" from-port="2" to-layer="494" to-port="1" />
11898 <edge from-layer="485" from-port="0" to-layer="486" to-port="1" />
11899 <edge from-layer="486" from-port="2" to-layer="488" to-port="0" />
11900 <edge from-layer="487" from-port="0" to-layer="488" to-port="1" />
11901 <edge from-layer="488" from-port="2" to-layer="489" to-port="0" />
11902 <edge from-layer="489" from-port="1" to-layer="491" to-port="0" />
11903 <edge from-layer="490" from-port="0" to-layer="491" to-port="1" />
11904 <edge from-layer="491" from-port="2" to-layer="493" to-port="0" />
11905 <edge from-layer="492" from-port="0" to-layer="493" to-port="1" />
11906 <edge from-layer="493" from-port="2" to-layer="494" to-port="0" />
11907 <edge from-layer="494" from-port="2" to-layer="496" to-port="0" />
11908 <edge from-layer="495" from-port="0" to-layer="496" to-port="1" />
11909 <edge from-layer="496" from-port="2" to-layer="498" to-port="0" />
11910 <edge from-layer="497" from-port="0" to-layer="498" to-port="1" />
11911 <edge from-layer="498" from-port="2" to-layer="500" to-port="0" />
11912 <edge from-layer="499" from-port="0" to-layer="500" to-port="1" />
11913 <edge from-layer="500" from-port="2" to-layer="510" to-port="0" />
11914 <edge from-layer="500" from-port="2" to-layer="534" to-port="1" />
11915 <edge from-layer="500" from-port="2" to-layer="518" to-port="0" />
11916 <edge from-layer="500" from-port="2" to-layer="502" to-port="0" />
11917 <edge from-layer="501" from-port="0" to-layer="502" to-port="1" />
11918 <edge from-layer="502" from-port="2" to-layer="504" to-port="0" />
11919 <edge from-layer="503" from-port="0" to-layer="504" to-port="1" />
11920 <edge from-layer="504" from-port="2" to-layer="506" to-port="0" />
11921 <edge from-layer="505" from-port="0" to-layer="506" to-port="1" />
11922 <edge from-layer="506" from-port="2" to-layer="508" to-port="0" />
11923 <edge from-layer="507" from-port="0" to-layer="508" to-port="1" />
11924 <edge from-layer="508" from-port="2" to-layer="525" to-port="0" />
11925 <edge from-layer="509" from-port="0" to-layer="510" to-port="1" />
11926 <edge from-layer="510" from-port="2" to-layer="512" to-port="0" />
11927 <edge from-layer="511" from-port="0" to-layer="512" to-port="1" />
11928 <edge from-layer="512" from-port="2" to-layer="514" to-port="0" />
11929 <edge from-layer="513" from-port="0" to-layer="514" to-port="1" />
11930 <edge from-layer="514" from-port="2" to-layer="516" to-port="0" />
11931 <edge from-layer="515" from-port="0" to-layer="516" to-port="1" />
11932 <edge from-layer="516" from-port="2" to-layer="525" to-port="1" />
11933 <edge from-layer="517" from-port="0" to-layer="518" to-port="1" />
11934 <edge from-layer="518" from-port="2" to-layer="520" to-port="0" />
11935 <edge from-layer="519" from-port="0" to-layer="520" to-port="1" />
11936 <edge from-layer="520" from-port="2" to-layer="522" to-port="0" />
11937 <edge from-layer="521" from-port="0" to-layer="522" to-port="1" />
11938 <edge from-layer="522" from-port="2" to-layer="524" to-port="0" />
11939 <edge from-layer="523" from-port="0" to-layer="524" to-port="1" />
11940 <edge from-layer="524" from-port="2" to-layer="525" to-port="2" />
11941 <edge from-layer="525" from-port="4" to-layer="527" to-port="0" />
11942 <edge from-layer="526" from-port="0" to-layer="527" to-port="1" />
11943 <edge from-layer="527" from-port="2" to-layer="529" to-port="0" />
11944 <edge from-layer="528" from-port="0" to-layer="529" to-port="1" />
11945 <edge from-layer="529" from-port="2" to-layer="531" to-port="0" />
11946 <edge from-layer="530" from-port="0" to-layer="531" to-port="1" />
11947 <edge from-layer="531" from-port="2" to-layer="533" to-port="0" />
11948 <edge from-layer="532" from-port="0" to-layer="533" to-port="1" />
11949 <edge from-layer="533" from-port="2" to-layer="534" to-port="0" />
11950 <edge from-layer="534" from-port="2" to-layer="536" to-port="0" />
11951 <edge from-layer="535" from-port="0" to-layer="536" to-port="1" />
11952 <edge from-layer="536" from-port="2" to-layer="538" to-port="0" />
11953 <edge from-layer="537" from-port="0" to-layer="538" to-port="1" />
11954 <edge from-layer="538" from-port="2" to-layer="540" to-port="0" />
11955 <edge from-layer="539" from-port="0" to-layer="540" to-port="1" />
11956 <edge from-layer="540" from-port="2" to-layer="542" to-port="0" />
11957 <edge from-layer="540" from-port="2" to-layer="550" to-port="1" />
11958 <edge from-layer="541" from-port="0" to-layer="542" to-port="1" />
11959 <edge from-layer="542" from-port="2" to-layer="544" to-port="0" />
11960 <edge from-layer="543" from-port="0" to-layer="544" to-port="1" />
11961 <edge from-layer="544" from-port="2" to-layer="545" to-port="0" />
11962 <edge from-layer="545" from-port="1" to-layer="547" to-port="0" />
11963 <edge from-layer="546" from-port="0" to-layer="547" to-port="1" />
11964 <edge from-layer="547" from-port="2" to-layer="549" to-port="0" />
11965 <edge from-layer="548" from-port="0" to-layer="549" to-port="1" />
11966 <edge from-layer="549" from-port="2" to-layer="550" to-port="0" />
11967 <edge from-layer="550" from-port="2" to-layer="552" to-port="0" />
11968 <edge from-layer="551" from-port="0" to-layer="552" to-port="1" />
11969 <edge from-layer="552" from-port="2" to-layer="554" to-port="0" />
11970 <edge from-layer="553" from-port="0" to-layer="554" to-port="1" />
11971 <edge from-layer="554" from-port="2" to-layer="556" to-port="0" />
11972 <edge from-layer="555" from-port="0" to-layer="556" to-port="1" />
11973 <edge from-layer="556" from-port="2" to-layer="590" to-port="1" />
11974 <edge from-layer="556" from-port="2" to-layer="574" to-port="0" />
11975 <edge from-layer="556" from-port="2" to-layer="566" to-port="0" />
11976 <edge from-layer="556" from-port="2" to-layer="558" to-port="0" />
11977 <edge from-layer="557" from-port="0" to-layer="558" to-port="1" />
11978 <edge from-layer="558" from-port="2" to-layer="560" to-port="0" />
11979 <edge from-layer="559" from-port="0" to-layer="560" to-port="1" />
11980 <edge from-layer="560" from-port="2" to-layer="562" to-port="0" />
11981 <edge from-layer="561" from-port="0" to-layer="562" to-port="1" />
11982 <edge from-layer="562" from-port="2" to-layer="564" to-port="0" />
11983 <edge from-layer="563" from-port="0" to-layer="564" to-port="1" />
11984 <edge from-layer="564" from-port="2" to-layer="581" to-port="0" />
11985 <edge from-layer="565" from-port="0" to-layer="566" to-port="1" />
11986 <edge from-layer="566" from-port="2" to-layer="568" to-port="0" />
11987 <edge from-layer="567" from-port="0" to-layer="568" to-port="1" />
11988 <edge from-layer="568" from-port="2" to-layer="570" to-port="0" />
11989 <edge from-layer="569" from-port="0" to-layer="570" to-port="1" />
11990 <edge from-layer="570" from-port="2" to-layer="572" to-port="0" />
11991 <edge from-layer="571" from-port="0" to-layer="572" to-port="1" />
11992 <edge from-layer="572" from-port="2" to-layer="581" to-port="1" />
11993 <edge from-layer="573" from-port="0" to-layer="574" to-port="1" />
11994 <edge from-layer="574" from-port="2" to-layer="576" to-port="0" />
11995 <edge from-layer="575" from-port="0" to-layer="576" to-port="1" />
11996 <edge from-layer="576" from-port="2" to-layer="578" to-port="0" />
11997 <edge from-layer="577" from-port="0" to-layer="578" to-port="1" />
11998 <edge from-layer="578" from-port="2" to-layer="580" to-port="0" />
11999 <edge from-layer="579" from-port="0" to-layer="580" to-port="1" />
12000 <edge from-layer="580" from-port="2" to-layer="581" to-port="2" />
12001 <edge from-layer="581" from-port="4" to-layer="583" to-port="0" />
12002 <edge from-layer="582" from-port="0" to-layer="583" to-port="1" />
12003 <edge from-layer="583" from-port="2" to-layer="585" to-port="0" />
12004 <edge from-layer="584" from-port="0" to-layer="585" to-port="1" />
12005 <edge from-layer="585" from-port="2" to-layer="587" to-port="0" />
12006 <edge from-layer="586" from-port="0" to-layer="587" to-port="1" />
12007 <edge from-layer="587" from-port="2" to-layer="589" to-port="0" />
12008 <edge from-layer="588" from-port="0" to-layer="589" to-port="1" />
12009 <edge from-layer="589" from-port="2" to-layer="590" to-port="0" />
12010 <edge from-layer="590" from-port="2" to-layer="592" to-port="0" />
12011 <edge from-layer="591" from-port="0" to-layer="592" to-port="1" />
12012 <edge from-layer="592" from-port="2" to-layer="594" to-port="0" />
12013 <edge from-layer="593" from-port="0" to-layer="594" to-port="1" />
12014 <edge from-layer="594" from-port="2" to-layer="596" to-port="0" />
12015 <edge from-layer="595" from-port="0" to-layer="596" to-port="1" />
12016 <edge from-layer="596" from-port="2" to-layer="598" to-port="0" />
12017 <edge from-layer="596" from-port="2" to-layer="606" to-port="1" />
12018 <edge from-layer="597" from-port="0" to-layer="598" to-port="1" />
12019 <edge from-layer="598" from-port="2" to-layer="600" to-port="0" />
12020 <edge from-layer="599" from-port="0" to-layer="600" to-port="1" />
12021 <edge from-layer="600" from-port="2" to-layer="601" to-port="0" />
12022 <edge from-layer="601" from-port="1" to-layer="603" to-port="0" />
12023 <edge from-layer="602" from-port="0" to-layer="603" to-port="1" />
12024 <edge from-layer="603" from-port="2" to-layer="605" to-port="0" />
12025 <edge from-layer="604" from-port="0" to-layer="605" to-port="1" />
12026 <edge from-layer="605" from-port="2" to-layer="606" to-port="0" />
12027 <edge from-layer="606" from-port="2" to-layer="608" to-port="0" />
12028 <edge from-layer="607" from-port="0" to-layer="608" to-port="1" />
12029 <edge from-layer="608" from-port="2" to-layer="610" to-port="0" />
12030 <edge from-layer="609" from-port="0" to-layer="610" to-port="1" />
12031 <edge from-layer="610" from-port="2" to-layer="612" to-port="0" />
12032 <edge from-layer="611" from-port="0" to-layer="612" to-port="1" />
12033 <edge from-layer="612" from-port="2" to-layer="646" to-port="1" />
12034 <edge from-layer="612" from-port="2" to-layer="630" to-port="0" />
12035 <edge from-layer="612" from-port="2" to-layer="622" to-port="0" />
12036 <edge from-layer="612" from-port="2" to-layer="614" to-port="0" />
12037 <edge from-layer="613" from-port="0" to-layer="614" to-port="1" />
12038 <edge from-layer="614" from-port="2" to-layer="616" to-port="0" />
12039 <edge from-layer="615" from-port="0" to-layer="616" to-port="1" />
12040 <edge from-layer="616" from-port="2" to-layer="618" to-port="0" />
12041 <edge from-layer="617" from-port="0" to-layer="618" to-port="1" />
12042 <edge from-layer="618" from-port="2" to-layer="620" to-port="0" />
12043 <edge from-layer="619" from-port="0" to-layer="620" to-port="1" />
12044 <edge from-layer="620" from-port="2" to-layer="637" to-port="0" />
12045 <edge from-layer="621" from-port="0" to-layer="622" to-port="1" />
12046 <edge from-layer="622" from-port="2" to-layer="624" to-port="0" />
12047 <edge from-layer="623" from-port="0" to-layer="624" to-port="1" />
12048 <edge from-layer="624" from-port="2" to-layer="626" to-port="0" />
12049 <edge from-layer="625" from-port="0" to-layer="626" to-port="1" />
12050 <edge from-layer="626" from-port="2" to-layer="628" to-port="0" />
12051 <edge from-layer="627" from-port="0" to-layer="628" to-port="1" />
12052 <edge from-layer="628" from-port="2" to-layer="637" to-port="1" />
12053 <edge from-layer="629" from-port="0" to-layer="630" to-port="1" />
12054 <edge from-layer="630" from-port="2" to-layer="632" to-port="0" />
12055 <edge from-layer="631" from-port="0" to-layer="632" to-port="1" />
12056 <edge from-layer="632" from-port="2" to-layer="634" to-port="0" />
12057 <edge from-layer="633" from-port="0" to-layer="634" to-port="1" />
12058 <edge from-layer="634" from-port="2" to-layer="636" to-port="0" />
12059 <edge from-layer="635" from-port="0" to-layer="636" to-port="1" />
12060 <edge from-layer="636" from-port="2" to-layer="637" to-port="2" />
12061 <edge from-layer="637" from-port="4" to-layer="639" to-port="0" />
12062 <edge from-layer="638" from-port="0" to-layer="639" to-port="1" />
12063 <edge from-layer="639" from-port="2" to-layer="641" to-port="0" />
12064 <edge from-layer="640" from-port="0" to-layer="641" to-port="1" />
12065 <edge from-layer="641" from-port="2" to-layer="643" to-port="0" />
12066 <edge from-layer="642" from-port="0" to-layer="643" to-port="1" />
12067 <edge from-layer="643" from-port="2" to-layer="645" to-port="0" />
12068 <edge from-layer="644" from-port="0" to-layer="645" to-port="1" />
12069 <edge from-layer="645" from-port="2" to-layer="646" to-port="0" />
12070 <edge from-layer="646" from-port="2" to-layer="648" to-port="0" />
12071 <edge from-layer="647" from-port="0" to-layer="648" to-port="1" />
12072 <edge from-layer="648" from-port="2" to-layer="650" to-port="0" />
12073 <edge from-layer="649" from-port="0" to-layer="650" to-port="1" />
12074 <edge from-layer="650" from-port="2" to-layer="652" to-port="0" />
12075 <edge from-layer="651" from-port="0" to-layer="652" to-port="1" />
12076 <edge from-layer="652" from-port="2" to-layer="654" to-port="0" />
12077 <edge from-layer="652" from-port="2" to-layer="662" to-port="1" />
12078 <edge from-layer="653" from-port="0" to-layer="654" to-port="1" />
12079 <edge from-layer="654" from-port="2" to-layer="656" to-port="0" />
12080 <edge from-layer="655" from-port="0" to-layer="656" to-port="1" />
12081 <edge from-layer="656" from-port="2" to-layer="657" to-port="0" />
12082 <edge from-layer="657" from-port="1" to-layer="659" to-port="0" />
12083 <edge from-layer="658" from-port="0" to-layer="659" to-port="1" />
12084 <edge from-layer="659" from-port="2" to-layer="661" to-port="0" />
12085 <edge from-layer="660" from-port="0" to-layer="661" to-port="1" />
12086 <edge from-layer="661" from-port="2" to-layer="662" to-port="0" />
12087 <edge from-layer="662" from-port="2" to-layer="664" to-port="0" />
12088 <edge from-layer="663" from-port="0" to-layer="664" to-port="1" />
12089 <edge from-layer="664" from-port="2" to-layer="666" to-port="0" />
12090 <edge from-layer="665" from-port="0" to-layer="666" to-port="1" />
12091 <edge from-layer="666" from-port="2" to-layer="668" to-port="0" />
12092 <edge from-layer="667" from-port="0" to-layer="668" to-port="1" />
12093 <edge from-layer="668" from-port="2" to-layer="702" to-port="1" />
12094 <edge from-layer="668" from-port="2" to-layer="686" to-port="0" />
12095 <edge from-layer="668" from-port="2" to-layer="678" to-port="0" />
12096 <edge from-layer="668" from-port="2" to-layer="670" to-port="0" />
12097 <edge from-layer="669" from-port="0" to-layer="670" to-port="1" />
12098 <edge from-layer="670" from-port="2" to-layer="672" to-port="0" />
12099 <edge from-layer="671" from-port="0" to-layer="672" to-port="1" />
12100 <edge from-layer="672" from-port="2" to-layer="674" to-port="0" />
12101 <edge from-layer="673" from-port="0" to-layer="674" to-port="1" />
12102 <edge from-layer="674" from-port="2" to-layer="676" to-port="0" />
12103 <edge from-layer="675" from-port="0" to-layer="676" to-port="1" />
12104 <edge from-layer="676" from-port="2" to-layer="693" to-port="0" />
12105 <edge from-layer="677" from-port="0" to-layer="678" to-port="1" />
12106 <edge from-layer="678" from-port="2" to-layer="680" to-port="0" />
12107 <edge from-layer="679" from-port="0" to-layer="680" to-port="1" />
12108 <edge from-layer="680" from-port="2" to-layer="682" to-port="0" />
12109 <edge from-layer="681" from-port="0" to-layer="682" to-port="1" />
12110 <edge from-layer="682" from-port="2" to-layer="684" to-port="0" />
12111 <edge from-layer="683" from-port="0" to-layer="684" to-port="1" />
12112 <edge from-layer="684" from-port="2" to-layer="693" to-port="1" />
12113 <edge from-layer="685" from-port="0" to-layer="686" to-port="1" />
12114 <edge from-layer="686" from-port="2" to-layer="688" to-port="0" />
12115 <edge from-layer="687" from-port="0" to-layer="688" to-port="1" />
12116 <edge from-layer="688" from-port="2" to-layer="690" to-port="0" />
12117 <edge from-layer="689" from-port="0" to-layer="690" to-port="1" />
12118 <edge from-layer="690" from-port="2" to-layer="692" to-port="0" />
12119 <edge from-layer="691" from-port="0" to-layer="692" to-port="1" />
12120 <edge from-layer="692" from-port="2" to-layer="693" to-port="2" />
12121 <edge from-layer="693" from-port="4" to-layer="695" to-port="0" />
12122 <edge from-layer="694" from-port="0" to-layer="695" to-port="1" />
12123 <edge from-layer="695" from-port="2" to-layer="697" to-port="0" />
12124 <edge from-layer="696" from-port="0" to-layer="697" to-port="1" />
12125 <edge from-layer="697" from-port="2" to-layer="699" to-port="0" />
12126 <edge from-layer="698" from-port="0" to-layer="699" to-port="1" />
12127 <edge from-layer="699" from-port="2" to-layer="701" to-port="0" />
12128 <edge from-layer="700" from-port="0" to-layer="701" to-port="1" />
12129 <edge from-layer="701" from-port="2" to-layer="702" to-port="0" />
12130 <edge from-layer="702" from-port="2" to-layer="704" to-port="0" />
12131 <edge from-layer="703" from-port="0" to-layer="704" to-port="1" />
12132 <edge from-layer="704" from-port="2" to-layer="706" to-port="0" />
12133 <edge from-layer="705" from-port="0" to-layer="706" to-port="1" />
12134 <edge from-layer="706" from-port="2" to-layer="708" to-port="0" />
12135 <edge from-layer="707" from-port="0" to-layer="708" to-port="1" />
12136 <edge from-layer="708" from-port="2" to-layer="710" to-port="0" />
12137 <edge from-layer="708" from-port="2" to-layer="718" to-port="1" />
12138 <edge from-layer="709" from-port="0" to-layer="710" to-port="1" />
12139 <edge from-layer="710" from-port="2" to-layer="712" to-port="0" />
12140 <edge from-layer="711" from-port="0" to-layer="712" to-port="1" />
12141 <edge from-layer="712" from-port="2" to-layer="713" to-port="0" />
12142 <edge from-layer="713" from-port="1" to-layer="715" to-port="0" />
12143 <edge from-layer="714" from-port="0" to-layer="715" to-port="1" />
12144 <edge from-layer="715" from-port="2" to-layer="717" to-port="0" />
12145 <edge from-layer="716" from-port="0" to-layer="717" to-port="1" />
12146 <edge from-layer="717" from-port="2" to-layer="718" to-port="0" />
12147 <edge from-layer="718" from-port="2" to-layer="720" to-port="0" />
12148 <edge from-layer="719" from-port="0" to-layer="720" to-port="1" />
12149 <edge from-layer="720" from-port="2" to-layer="722" to-port="0" />
12150 <edge from-layer="721" from-port="0" to-layer="722" to-port="1" />
12151 <edge from-layer="722" from-port="2" to-layer="724" to-port="0" />
12152 <edge from-layer="723" from-port="0" to-layer="724" to-port="1" />
12153 <edge from-layer="724" from-port="2" to-layer="725" to-port="0" />
12154 </edges>
12155 <rt_info>
12156 <Runtime_version value="2024.4.1-16618-643f23d1318-releases/2024/4" />
12157 <conversion_parameters>
12158 <framework value="pytorch" />
12159 <is_python_object value="True" />
12160 </conversion_parameters>
12161 <optimum>
12162 <optimum_intel_version value="1.20.1" />
12163 <optimum_version value="1.23.3" />
12164 <pytorch_version value="2.5.1" />
12165 <transformers_version value="4.46.2" />
12166 </optimum>
12167 </rt_info>
12168 </net>
12169