SmolVLM2-500M-Video-Instruct-openvino / openvino_vision_embeddings_model.xml
<?xml version="1.0"?>
<net name="Model2518" version="11">
<layers>
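<!-- Vision embeddings sub-graph. Const layers do not store weights inline; each references an
     offset/size range in the companion weights (.bin) file. Graph inputs: pixel_values
     (f32, [batch, 3, H, W]), patch_attention_mask (boolean, rank 3, later flattened to
     [batch, num_patches]), and patch_position_ids (i64, [batch, num_patches]). -->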
<layer id="2" name="pixel_values" type="Parameter" version="opset1">
<data shape="?,3,?,?" element_type="f32" />
<output>
<port id="0" precision="FP32" names="pixel_values">
<dim>-1</dim>
<dim>3</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1" name="patch_attention_mask" type="Parameter" version="opset1">
<data shape="?,?,?" element_type="boolean" />
<output>
<port id="0" precision="BOOL" names="patch_attention_mask">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="0" name="patch_position_ids" type="Parameter" version="opset1">
<data shape="?,?" element_type="i64" />
<output>
<port id="0" precision="I64" names="patch_position_ids">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
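<!-- Patch embedding: a 16x16 convolution with stride 16 and 768 output channels splits the image
     into non-overlapping patches and projects each patch to a 768-dim vector; a bias is added below. -->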
<layer id="3" name="self.vision_model.embeddings.patch_embedding.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3, 16, 16" offset="0" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.embeddings.patch_embedding.weight">
<dim>768</dim>
<dim>3</dim>
<dim>16</dim>
<dim>16</dim>
</port>
</output>
</layer>
<layer id="4" name="__module.vision_model.embeddings.patch_embedding/aten::_convolution_mode/Convolution" type="Convolution" version="opset1">
<data strides="16, 16" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="valid" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>3</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3</dim>
<dim>16</dim>
<dim>16</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>768</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="5" name="__module.vision_model.embeddings.patch_embedding/aten::_convolution_mode/Reshape" type="Const" version="opset1">
<data element_type="f32" shape="1, 768, 1, 1" offset="2359296" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>768</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="6" name="__module.vision_model.embeddings.patch_embedding/aten::_convolution_mode/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>768</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>768</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="42,patch_embeds">
<dim>-1</dim>
<dim>768</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
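<!-- The [batch, 768, h, w] feature map is flattened over the spatial dims and transposed
     to [batch, num_patches, 768]. -->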
<layer id="7" name="Constant_3175779" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="2362368" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="8" name="__module.vision_model.embeddings/aten::flatten/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>768</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="43">
<dim>-1</dim>
<dim>768</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="9" name="__module.vision_model.embeddings/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="3" offset="2362392" size="12" />
<output>
<port id="0" precision="I32">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="10" name="__module.vision_model.embeddings/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>768</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="44,embeddings.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
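<!-- Learned position embeddings: a 1024x768 table gathered by patch_position_ids
     (converted to i32), yielding one 768-dim vector per patch position. -->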
<layer id="11" name="self.vision_model.embeddings.position_embedding.weight" type="Const" version="opset1">
<data element_type="f32" shape="1024, 768" offset="2362404" size="3145728" />
<output>
<port id="0" precision="FP32" names="self.vision_model.embeddings.position_embedding.weight">
<dim>1024</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="12" name="__module.vision_model.embeddings.position_embedding/aten::embedding/Convert" type="Convert" version="opset1">
<data destination_type="i32" />
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="13" name="__module.vision_model.embeddings.position_embedding/aten::embedding/Constant" type="Const" version="opset1">
<data element_type="i32" shape="" offset="5508132" size="4" />
<output>
<port id="0" precision="I32" />
</output>
</layer>
<layer id="14" name="__module.vision_model.embeddings.position_embedding/aten::embedding/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="FP32">
<dim>1024</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="2" precision="I32" />
</input>
<output>
<port id="3" precision="FP32" names="47">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
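<!-- Position embeddings are added to the patch embeddings to form the encoder input. -->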
<layer id="15" name="__module.vision_model.embeddings/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="48,residual.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
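<!-- Encoder layer 0. layer_norm1: mean-variance normalization (MVN, eps ~1e-6) over the last axis,
     followed by a learned scale and shift. -->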
<layer id="16" name="__module.vision_model.encoder.layers.0.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="17" name="__module.vision_model.encoder.layers.0.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="18" name="Constant_3175443" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="5508140" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="19" name="__module.vision_model.encoder.layers.0.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="20" name="Constant_3175444" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="5511212" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="21" name="__module.vision_model.encoder.layers.0.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="94,hidden_states.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
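<!-- Self-attention projections: q/k/v are 768->768 linear layers (MatMul with transposed weights plus bias),
     reshaped to [batch, seq, 12, 64] and transposed to [batch, 12, seq, 64], i.e. 12 heads of size 64. -->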
<layer id="22" name="self.vision_model.encoder.layers.0.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="5514284" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.0.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="23" name="__module.vision_model.encoder.layers.0.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="24" name="Constant_3175445" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="7873580" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="25" name="__module.vision_model.encoder.layers.0.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="103,query_states.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="26" name="Constant_3175781" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="27" name="__module.vision_model.encoder.layers.0.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="111">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="28" name="__module.vision_model.encoder.layers.0.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="29" name="__module.vision_model.encoder.layers.0.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="112">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="30" name="self.vision_model.encoder.layers.0.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="7876700" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.0.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="31" name="__module.vision_model.encoder.layers.0.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="32" name="Constant_3175446" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="10235996" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="33" name="__module.vision_model.encoder.layers.0.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="106,key_states.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="34" name="Constant_3175782" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="35" name="__module.vision_model.encoder.layers.0.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="114">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="36" name="__module.vision_model.encoder.layers.0.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="37" name="__module.vision_model.encoder.layers.0.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="115">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="38" name="self.vision_model.encoder.layers.0.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="10239068" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.0.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="39" name="__module.vision_model.encoder.layers.0.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="40" name="Constant_3175447" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="12598364" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="41" name="__module.vision_model.encoder.layers.0.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="109,value_states.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="42" name="Constant_3175783" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="43" name="__module.vision_model.encoder.layers.0.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="117">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="44" name="__module.vision_model.encoder.layers.0.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="45" name="__module.vision_model.encoder.layers.0.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="118">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
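<!-- Attention-mask preparation: patch_attention_mask is flattened to [batch, num_patches],
     unsqueezed to [batch, 1, 1, num_patches], broadcast to a 4-D mask, inverted (1 - mask),
     and masked positions are filled with a constant fill value (offset 12601496) via Select,
     producing the mask passed to scaled dot-product attention. -->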
<layer id="46" name="Constant_3175449" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1, 1" offset="12601436" size="4" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="47" name="Constant_3175780" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="12601440" size="16" />
<output>
<port id="0" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="48" name="__module.vision_model/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="BOOL">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="BOOL" names="50,53,patch_attention_mask_1">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="49" name="22" type="Const" version="opset1">
<data element_type="i64" shape="" offset="12601456" size="8" />
<output>
<port id="0" precision="I64" names="22" />
</output>
</layer>
<layer id="50" name="__module.vision_model/aten::unsqueeze/Unsqueeze" type="Unsqueeze" version="opset1">
<input>
<port id="0" precision="BOOL">
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="BOOL" names="54">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="51" name="25" type="Const" version="opset1">
<data element_type="i64" shape="" offset="12601464" size="8" />
<output>
<port id="0" precision="I64" names="25" />
</output>
</layer>
<layer id="52" name="__module.vision_model/aten::unsqueeze/Unsqueeze_1" type="Unsqueeze" version="opset1">
<input>
<port id="0" precision="BOOL">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="BOOL" names="55,56">
<dim>-1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="53" name="ShapeOf_3175665" type="ShapeOf" version="opset3">
<data output_type="i64" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>768</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="54" name="Constant_3175666" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="12601472" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="55" name="Constant_3175667" type="Const" version="opset1">
<data element_type="i64" shape="" offset="12601472" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="56" name="Gather_3175668" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="35,51,762">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="57" name="Constant_3174666" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="12601456" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="58" name="__module.vision_model/aten::size/ShapeOf_1" type="ShapeOf" version="opset3">
<data output_type="i64" />
<input>
<port id="0" precision="BOOL">
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="59" name="Constant_3174772" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="12601480" size="16" />
<output>
<port id="0" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="60" name="Constant_3174773" type="Const" version="opset1">
<data element_type="i64" shape="" offset="12601472" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="61" name="Gather_3174774" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="I64">
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="62" name="__module.vision_model/prim::ListConstruct_1" type="Concat" version="opset1">
<data axis="0" />
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="63" name="__module.vision_model/aten::expand/Broadcast" type="Broadcast" version="opset3">
<data mode="bidirectional" />
<input>
<port id="0" precision="BOOL">
<dim>-1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="BOOL" names="58">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="64" name="__module.vision_model/aten::to/Convert" type="Convert" version="opset1">
<data destination_type="f32" />
<input>
<port id="0" precision="BOOL">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="59">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="65" name="Constant_3175448" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1, 1" offset="12601436" size="4" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="66" name="__module.vision_model/aten::rsub/Multiply" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="67" name="__module.vision_model/aten::rsub/Subtract" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="60,inverted_mask">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="68" name="__module.vision_model/aten::to/Convert_1" type="Convert" version="opset1">
<data destination_type="boolean" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="BOOL" names="61">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="69" name="__module.vision_model/aten::masked_fill/ConvertLike" type="Const" version="opset1">
<data element_type="f32" shape="" offset="12601496" size="4" />
<output>
<port id="0" precision="FP32" />
</output>
</layer>
<layer id="70" name="__module.vision_model/aten::masked_fill/Select" type="Select" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="BOOL">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="3" precision="FP32" names="62">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="71" name="__module.vision_model.encoder.layers.0.self_attn/aten::scaled_dot_product_attention/ConvertLike" type="Const" version="opset1">
<data element_type="f32" shape="" offset="12601500" size="4" />
<output>
<port id="0" precision="FP32" />
</output>
</layer>
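<!-- Scaled dot-product attention (opset13) over the 12 heads; port 4 supplies the scalar scale constant. -->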
<layer id="72" name="__module.vision_model.encoder.layers.0.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="119,attn_output.1">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
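<!-- Attention output: heads are transposed back and merged to [batch, seq, 768], projected by out_proj
     (768->768 linear with bias), and added to the residual. -->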
<layer id="73" name="__module.vision_model.encoder.layers.0.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="74" name="__module.vision_model.encoder.layers.0.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="120">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="75" name="Constant_3175784" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="76" name="__module.vision_model.encoder.layers.0.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="123">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="77" name="self.vision_model.encoder.layers.0.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="12601528" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.0.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="78" name="__module.vision_model.encoder.layers.0.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="79" name="Constant_3175450" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="14960824" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="80" name="__module.vision_model.encoder.layers.0.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="126,hidden_states.3">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="81" name="__module.vision_model.encoder.layers.0/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="127,residual.3">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
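<!-- layer_norm2 followed by the MLP: fc1 (768->3072) with bias, tanh-approximated GELU,
     fc2 (3072->768) with bias, and a second residual connection. -->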
<layer id="82" name="__module.vision_model.encoder.layers.0.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="83" name="__module.vision_model.encoder.layers.0.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="84" name="Constant_3175451" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="14963896" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="85" name="__module.vision_model.encoder.layers.0.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="86" name="Constant_3175452" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="14966968" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="87" name="__module.vision_model.encoder.layers.0.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="131">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="88" name="self.vision_model.encoder.layers.0.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="14970040" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.0.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="89" name="__module.vision_model.encoder.layers.0.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="90" name="Constant_3175453" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="24407224" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="91" name="__module.vision_model.encoder.layers.0.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="136">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="92" name="__module.vision_model.encoder.layers.0.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="137">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="93" name="self.vision_model.encoder.layers.0.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="24419512" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.0.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="94" name="__module.vision_model.encoder.layers.0.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="95" name="Constant_3175454" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="33856696" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="96" name="__module.vision_model.encoder.layers.0.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="140,hidden_states.5">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="97" name="__module.vision_model.encoder.layers.0/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="141,residual.5">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
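<!-- Encoder layer 1: same structure as layer 0 (layer_norm1, 12-head self-attention, layer_norm2, MLP),
     with its own weight constants. -->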
<layer id="98" name="__module.vision_model.encoder.layers.1.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="99" name="__module.vision_model.encoder.layers.1.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="100" name="Constant_3175455" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="33859768" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="101" name="__module.vision_model.encoder.layers.1.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="102" name="Constant_3175456" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="33862840" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="103" name="__module.vision_model.encoder.layers.1.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="149,hidden_states.7">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="104" name="self.vision_model.encoder.layers.1.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="33865912" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.1.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="105" name="__module.vision_model.encoder.layers.1.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="106" name="Constant_3175457" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="36225208" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="107" name="__module.vision_model.encoder.layers.1.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="158,query_states.3">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="108" name="Constant_3175785" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="109" name="__module.vision_model.encoder.layers.1.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="166">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="110" name="__module.vision_model.encoder.layers.1.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="111" name="__module.vision_model.encoder.layers.1.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="167">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="112" name="self.vision_model.encoder.layers.1.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="36228280" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.1.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="113" name="__module.vision_model.encoder.layers.1.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="114" name="Constant_3175458" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="38587576" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="115" name="__module.vision_model.encoder.layers.1.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="161,key_states.3">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="116" name="Constant_3175786" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="117" name="__module.vision_model.encoder.layers.1.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="169">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="118" name="__module.vision_model.encoder.layers.1.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="119" name="__module.vision_model.encoder.layers.1.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="170">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="120" name="self.vision_model.encoder.layers.1.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="38590648" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.1.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="121" name="__module.vision_model.encoder.layers.1.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="122" name="Constant_3175459" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="40949944" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="123" name="__module.vision_model.encoder.layers.1.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="164,value_states.3">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="124" name="Constant_3175787" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="125" name="__module.vision_model.encoder.layers.1.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="172">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="126" name="__module.vision_model.encoder.layers.1.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="127" name="__module.vision_model.encoder.layers.1.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="173">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="128" name="__module.vision_model.encoder.layers.1.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="174,attn_output.5">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="129" name="__module.vision_model.encoder.layers.1.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="130" name="__module.vision_model.encoder.layers.1.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="175">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="131" name="Constant_3175788" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="132" name="__module.vision_model.encoder.layers.1.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="178">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="133" name="self.vision_model.encoder.layers.1.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="40953016" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.1.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="134" name="__module.vision_model.encoder.layers.1.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="135" name="Constant_3175460" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="43312312" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="136" name="__module.vision_model.encoder.layers.1.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="181,hidden_states.9">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="137" name="__module.vision_model.encoder.layers.1/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="182,residual.7">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="138" name="__module.vision_model.encoder.layers.1.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="139" name="__module.vision_model.encoder.layers.1.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="140" name="Constant_3175461" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="43315384" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="141" name="__module.vision_model.encoder.layers.1.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="142" name="Constant_3175462" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="43318456" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="143" name="__module.vision_model.encoder.layers.1.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="186">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="144" name="self.vision_model.encoder.layers.1.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="43321528" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.1.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="145" name="__module.vision_model.encoder.layers.1.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="146" name="Constant_3175463" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="52758712" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="147" name="__module.vision_model.encoder.layers.1.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="191">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="148" name="__module.vision_model.encoder.layers.1.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="192">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="149" name="self.vision_model.encoder.layers.1.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="52771000" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.1.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="150" name="__module.vision_model.encoder.layers.1.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="151" name="Constant_3175464" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="62208184" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="152" name="__module.vision_model.encoder.layers.1.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="195,hidden_states.11">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="153" name="__module.vision_model.encoder.layers.1/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="196,residual.9">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
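<!-- Encoder layer 2 begins here, repeating the same block structure. -->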
<layer id="154" name="__module.vision_model.encoder.layers.2.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="155" name="__module.vision_model.encoder.layers.2.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="156" name="Constant_3175465" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="62211256" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="157" name="__module.vision_model.encoder.layers.2.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="158" name="Constant_3175466" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="62214328" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="159" name="__module.vision_model.encoder.layers.2.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="204,hidden_states.13">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="160" name="self.vision_model.encoder.layers.2.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="62217400" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.2.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="161" name="__module.vision_model.encoder.layers.2.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="162" name="Constant_3175467" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="64576696" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="163" name="__module.vision_model.encoder.layers.2.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="213,query_states.5">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="164" name="Constant_3175789" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="165" name="__module.vision_model.encoder.layers.2.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="221">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="166" name="__module.vision_model.encoder.layers.2.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="167" name="__module.vision_model.encoder.layers.2.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="222">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="168" name="self.vision_model.encoder.layers.2.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="64579768" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.2.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="169" name="__module.vision_model.encoder.layers.2.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="170" name="Constant_3175468" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="66939064" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="171" name="__module.vision_model.encoder.layers.2.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="216,key_states.5">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="172" name="Constant_3175790" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="173" name="__module.vision_model.encoder.layers.2.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="224">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="174" name="__module.vision_model.encoder.layers.2.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="175" name="__module.vision_model.encoder.layers.2.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="225">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="176" name="self.vision_model.encoder.layers.2.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="66942136" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.2.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="177" name="__module.vision_model.encoder.layers.2.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="178" name="Constant_3175469" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="69301432" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="179" name="__module.vision_model.encoder.layers.2.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="219,value_states.5">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="180" name="Constant_3175791" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="181" name="__module.vision_model.encoder.layers.2.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="227">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="182" name="__module.vision_model.encoder.layers.2.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="183" name="__module.vision_model.encoder.layers.2.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="228">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="184" name="__module.vision_model.encoder.layers.2.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="229,attn_output.9">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="185" name="__module.vision_model.encoder.layers.2.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="186" name="__module.vision_model.encoder.layers.2.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="230">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="187" name="Constant_3175792" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="188" name="__module.vision_model.encoder.layers.2.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="233">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="189" name="self.vision_model.encoder.layers.2.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="69304504" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.2.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="190" name="__module.vision_model.encoder.layers.2.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="191" name="Constant_3175470" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="71663800" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="192" name="__module.vision_model.encoder.layers.2.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="236,hidden_states.15">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="193" name="__module.vision_model.encoder.layers.2/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="237,residual.11">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="194" name="__module.vision_model.encoder.layers.2.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="195" name="__module.vision_model.encoder.layers.2.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="196" name="Constant_3175471" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="71666872" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="197" name="__module.vision_model.encoder.layers.2.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="198" name="Constant_3175472" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="71669944" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="199" name="__module.vision_model.encoder.layers.2.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="241">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="200" name="self.vision_model.encoder.layers.2.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="71673016" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.2.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="201" name="__module.vision_model.encoder.layers.2.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="202" name="Constant_3175473" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="81110200" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="203" name="__module.vision_model.encoder.layers.2.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="246">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="204" name="__module.vision_model.encoder.layers.2.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="247">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="205" name="self.vision_model.encoder.layers.2.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="81122488" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.2.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="206" name="__module.vision_model.encoder.layers.2.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="207" name="Constant_3175474" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="90559672" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="208" name="__module.vision_model.encoder.layers.2.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="250,hidden_states.17">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="209" name="__module.vision_model.encoder.layers.2/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="251,residual.13">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="210" name="__module.vision_model.encoder.layers.3.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="211" name="__module.vision_model.encoder.layers.3.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="212" name="Constant_3175475" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="90562744" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="213" name="__module.vision_model.encoder.layers.3.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="214" name="Constant_3175476" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="90565816" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="215" name="__module.vision_model.encoder.layers.3.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="259,hidden_states.19">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="216" name="self.vision_model.encoder.layers.3.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="90568888" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.3.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="217" name="__module.vision_model.encoder.layers.3.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="218" name="Constant_3175477" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="92928184" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="219" name="__module.vision_model.encoder.layers.3.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="268,query_states.7">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="220" name="Constant_3175793" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="221" name="__module.vision_model.encoder.layers.3.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="276">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="222" name="__module.vision_model.encoder.layers.3.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="223" name="__module.vision_model.encoder.layers.3.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="277">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="224" name="self.vision_model.encoder.layers.3.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="92931256" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.3.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="225" name="__module.vision_model.encoder.layers.3.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="226" name="Constant_3175478" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="95290552" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="227" name="__module.vision_model.encoder.layers.3.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="271,key_states.7">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="228" name="Constant_3175794" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="229" name="__module.vision_model.encoder.layers.3.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="279">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="230" name="__module.vision_model.encoder.layers.3.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="231" name="__module.vision_model.encoder.layers.3.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="280">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="232" name="self.vision_model.encoder.layers.3.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="95293624" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.3.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="233" name="__module.vision_model.encoder.layers.3.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="234" name="Constant_3175479" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="97652920" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="235" name="__module.vision_model.encoder.layers.3.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="274,value_states.7">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="236" name="Constant_3175795" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="237" name="__module.vision_model.encoder.layers.3.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="282">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="238" name="__module.vision_model.encoder.layers.3.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="239" name="__module.vision_model.encoder.layers.3.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="283">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="240" name="__module.vision_model.encoder.layers.3.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="284,attn_output.13">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="241" name="__module.vision_model.encoder.layers.3.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="242" name="__module.vision_model.encoder.layers.3.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="285">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="243" name="Constant_3175796" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="244" name="__module.vision_model.encoder.layers.3.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="288">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="245" name="self.vision_model.encoder.layers.3.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="97655992" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.3.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="246" name="__module.vision_model.encoder.layers.3.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="247" name="Constant_3175480" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="100015288" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="248" name="__module.vision_model.encoder.layers.3.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="291,hidden_states.21">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="249" name="__module.vision_model.encoder.layers.3/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="292,residual.15">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="250" name="__module.vision_model.encoder.layers.3.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="251" name="__module.vision_model.encoder.layers.3.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="252" name="Constant_3175481" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="100018360" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="253" name="__module.vision_model.encoder.layers.3.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="254" name="Constant_3175482" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="100021432" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="255" name="__module.vision_model.encoder.layers.3.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="296">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="256" name="self.vision_model.encoder.layers.3.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="100024504" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.3.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="257" name="__module.vision_model.encoder.layers.3.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="258" name="Constant_3175483" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="109461688" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="259" name="__module.vision_model.encoder.layers.3.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="301">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="260" name="__module.vision_model.encoder.layers.3.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="302">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="261" name="self.vision_model.encoder.layers.3.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="109473976" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.3.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="262" name="__module.vision_model.encoder.layers.3.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="263" name="Constant_3175484" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="118911160" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="264" name="__module.vision_model.encoder.layers.3.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="305,hidden_states.23">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="265" name="__module.vision_model.encoder.layers.3/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="306,residual.17">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="266" name="__module.vision_model.encoder.layers.4.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="267" name="__module.vision_model.encoder.layers.4.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="268" name="Constant_3175485" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="118914232" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="269" name="__module.vision_model.encoder.layers.4.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="270" name="Constant_3175486" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="118917304" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="271" name="__module.vision_model.encoder.layers.4.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="314,hidden_states.25">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="272" name="self.vision_model.encoder.layers.4.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="118920376" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.4.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="273" name="__module.vision_model.encoder.layers.4.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="274" name="Constant_3175487" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="121279672" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="275" name="__module.vision_model.encoder.layers.4.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="323,query_states.9">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="276" name="Constant_3175797" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="277" name="__module.vision_model.encoder.layers.4.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="331">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="278" name="__module.vision_model.encoder.layers.4.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="279" name="__module.vision_model.encoder.layers.4.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="332">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="280" name="self.vision_model.encoder.layers.4.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="121282744" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.4.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="281" name="__module.vision_model.encoder.layers.4.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="282" name="Constant_3175488" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="123642040" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="283" name="__module.vision_model.encoder.layers.4.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="326,key_states.9">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="284" name="Constant_3175798" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="285" name="__module.vision_model.encoder.layers.4.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="334">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="286" name="__module.vision_model.encoder.layers.4.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="287" name="__module.vision_model.encoder.layers.4.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="335">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="288" name="self.vision_model.encoder.layers.4.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="123645112" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.4.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="289" name="__module.vision_model.encoder.layers.4.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="290" name="Constant_3175489" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="126004408" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="291" name="__module.vision_model.encoder.layers.4.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="329,value_states.9">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="292" name="Constant_3175799" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="293" name="__module.vision_model.encoder.layers.4.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="337">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="294" name="__module.vision_model.encoder.layers.4.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="295" name="__module.vision_model.encoder.layers.4.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="338">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="296" name="__module.vision_model.encoder.layers.4.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="339,attn_output.17">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="297" name="__module.vision_model.encoder.layers.4.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="298" name="__module.vision_model.encoder.layers.4.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="340">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="299" name="Constant_3175800" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="300" name="__module.vision_model.encoder.layers.4.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="343">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="301" name="self.vision_model.encoder.layers.4.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="126007480" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.4.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="302" name="__module.vision_model.encoder.layers.4.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="303" name="Constant_3175490" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="128366776" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="304" name="__module.vision_model.encoder.layers.4.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="346,hidden_states.27">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="305" name="__module.vision_model.encoder.layers.4/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="347,residual.19">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="306" name="__module.vision_model.encoder.layers.4.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="307" name="__module.vision_model.encoder.layers.4.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="308" name="Constant_3175491" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="128369848" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="309" name="__module.vision_model.encoder.layers.4.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="310" name="Constant_3175492" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="128372920" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="311" name="__module.vision_model.encoder.layers.4.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="351">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="312" name="self.vision_model.encoder.layers.4.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="128375992" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.4.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="313" name="__module.vision_model.encoder.layers.4.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="314" name="Constant_3175493" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="137813176" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="315" name="__module.vision_model.encoder.layers.4.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="356">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="316" name="__module.vision_model.encoder.layers.4.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="357">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="317" name="self.vision_model.encoder.layers.4.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="137825464" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.4.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="318" name="__module.vision_model.encoder.layers.4.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="319" name="Constant_3175494" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="147262648" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="320" name="__module.vision_model.encoder.layers.4.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="360,hidden_states.29">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="321" name="__module.vision_model.encoder.layers.4/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="361,residual.21">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="322" name="__module.vision_model.encoder.layers.5.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="323" name="__module.vision_model.encoder.layers.5.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="324" name="Constant_3175495" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="147265720" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="325" name="__module.vision_model.encoder.layers.5.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="326" name="Constant_3175496" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="147268792" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="327" name="__module.vision_model.encoder.layers.5.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="369,hidden_states.31">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="328" name="self.vision_model.encoder.layers.5.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="147271864" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.5.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="329" name="__module.vision_model.encoder.layers.5.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="330" name="Constant_3175497" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="149631160" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="331" name="__module.vision_model.encoder.layers.5.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="378,query_states.11">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="332" name="Constant_3175801" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="333" name="__module.vision_model.encoder.layers.5.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="386">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="334" name="__module.vision_model.encoder.layers.5.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="335" name="__module.vision_model.encoder.layers.5.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="387">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="336" name="self.vision_model.encoder.layers.5.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="149634232" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.5.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="337" name="__module.vision_model.encoder.layers.5.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="338" name="Constant_3175498" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="151993528" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="339" name="__module.vision_model.encoder.layers.5.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="381,key_states.11">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="340" name="Constant_3175802" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="341" name="__module.vision_model.encoder.layers.5.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="389">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="342" name="__module.vision_model.encoder.layers.5.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="343" name="__module.vision_model.encoder.layers.5.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="390">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="344" name="self.vision_model.encoder.layers.5.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="151996600" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.5.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="345" name="__module.vision_model.encoder.layers.5.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="346" name="Constant_3175499" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="154355896" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="347" name="__module.vision_model.encoder.layers.5.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="384,value_states.11">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="348" name="Constant_3175803" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="349" name="__module.vision_model.encoder.layers.5.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="392">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="350" name="__module.vision_model.encoder.layers.5.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="351" name="__module.vision_model.encoder.layers.5.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="393">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="352" name="__module.vision_model.encoder.layers.5.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="394,attn_output.21">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="353" name="__module.vision_model.encoder.layers.5.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="354" name="__module.vision_model.encoder.layers.5.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="395">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="355" name="Constant_3175804" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="356" name="__module.vision_model.encoder.layers.5.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="398">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="357" name="self.vision_model.encoder.layers.5.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="154358968" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.5.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="358" name="__module.vision_model.encoder.layers.5.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="359" name="Constant_3175500" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="156718264" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="360" name="__module.vision_model.encoder.layers.5.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="401,hidden_states.33">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="361" name="__module.vision_model.encoder.layers.5/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="402,residual.23">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="362" name="__module.vision_model.encoder.layers.5.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="363" name="__module.vision_model.encoder.layers.5.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="364" name="Constant_3175501" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="156721336" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="365" name="__module.vision_model.encoder.layers.5.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="366" name="Constant_3175502" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="156724408" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="367" name="__module.vision_model.encoder.layers.5.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="406">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="368" name="self.vision_model.encoder.layers.5.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="156727480" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.5.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="369" name="__module.vision_model.encoder.layers.5.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="370" name="Constant_3175503" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="166164664" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="371" name="__module.vision_model.encoder.layers.5.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="411">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="372" name="__module.vision_model.encoder.layers.5.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="412">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="373" name="self.vision_model.encoder.layers.5.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="166176952" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.5.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="374" name="__module.vision_model.encoder.layers.5.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="375" name="Constant_3175504" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="175614136" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="376" name="__module.vision_model.encoder.layers.5.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="415,hidden_states.35">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="377" name="__module.vision_model.encoder.layers.5/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="416,residual.25">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="378" name="__module.vision_model.encoder.layers.6.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="379" name="__module.vision_model.encoder.layers.6.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="380" name="Constant_3175505" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="175617208" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="381" name="__module.vision_model.encoder.layers.6.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="382" name="Constant_3175506" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="175620280" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="383" name="__module.vision_model.encoder.layers.6.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="424,hidden_states.37">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="384" name="self.vision_model.encoder.layers.6.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="175623352" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.6.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="385" name="__module.vision_model.encoder.layers.6.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="386" name="Constant_3175507" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="177982648" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="387" name="__module.vision_model.encoder.layers.6.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="433,query_states.13">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="388" name="Constant_3175805" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="389" name="__module.vision_model.encoder.layers.6.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="441">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="390" name="__module.vision_model.encoder.layers.6.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="391" name="__module.vision_model.encoder.layers.6.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="442">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="392" name="self.vision_model.encoder.layers.6.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="177985720" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.6.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="393" name="__module.vision_model.encoder.layers.6.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="394" name="Constant_3175508" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="180345016" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="395" name="__module.vision_model.encoder.layers.6.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="436,key_states.13">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="396" name="Constant_3175806" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="397" name="__module.vision_model.encoder.layers.6.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="444">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="398" name="__module.vision_model.encoder.layers.6.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="399" name="__module.vision_model.encoder.layers.6.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="445">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="400" name="self.vision_model.encoder.layers.6.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="180348088" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.6.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="401" name="__module.vision_model.encoder.layers.6.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="402" name="Constant_3175509" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="182707384" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="403" name="__module.vision_model.encoder.layers.6.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="439,value_states.13">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="404" name="Constant_3175807" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="405" name="__module.vision_model.encoder.layers.6.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="447">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="406" name="__module.vision_model.encoder.layers.6.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="407" name="__module.vision_model.encoder.layers.6.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="448">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="408" name="__module.vision_model.encoder.layers.6.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="449,attn_output.25">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="409" name="__module.vision_model.encoder.layers.6.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="410" name="__module.vision_model.encoder.layers.6.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="450">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="411" name="Constant_3175808" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="412" name="__module.vision_model.encoder.layers.6.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="453">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="413" name="self.vision_model.encoder.layers.6.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="182710456" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.6.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="414" name="__module.vision_model.encoder.layers.6.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="415" name="Constant_3175510" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="185069752" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="416" name="__module.vision_model.encoder.layers.6.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="456,hidden_states.39">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="417" name="__module.vision_model.encoder.layers.6/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="457,residual.27">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="418" name="__module.vision_model.encoder.layers.6.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="419" name="__module.vision_model.encoder.layers.6.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="420" name="Constant_3175511" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="185072824" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="421" name="__module.vision_model.encoder.layers.6.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="422" name="Constant_3175512" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="185075896" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="423" name="__module.vision_model.encoder.layers.6.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="461">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="424" name="self.vision_model.encoder.layers.6.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="185078968" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.6.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="425" name="__module.vision_model.encoder.layers.6.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="426" name="Constant_3175513" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="194516152" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="427" name="__module.vision_model.encoder.layers.6.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="466">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="428" name="__module.vision_model.encoder.layers.6.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="467">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="429" name="self.vision_model.encoder.layers.6.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="194528440" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.6.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="430" name="__module.vision_model.encoder.layers.6.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="431" name="Constant_3175514" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="203965624" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="432" name="__module.vision_model.encoder.layers.6.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="470,hidden_states.41">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="433" name="__module.vision_model.encoder.layers.6/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="471,residual.29">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="434" name="__module.vision_model.encoder.layers.7.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="435" name="__module.vision_model.encoder.layers.7.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="436" name="Constant_3175515" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="203968696" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="437" name="__module.vision_model.encoder.layers.7.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="438" name="Constant_3175516" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="203971768" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="439" name="__module.vision_model.encoder.layers.7.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="479,hidden_states.43">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="440" name="self.vision_model.encoder.layers.7.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="203974840" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.7.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="441" name="__module.vision_model.encoder.layers.7.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="442" name="Constant_3175517" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="206334136" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="443" name="__module.vision_model.encoder.layers.7.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="488,query_states.15">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="444" name="Constant_3175809" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="445" name="__module.vision_model.encoder.layers.7.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="496">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="446" name="__module.vision_model.encoder.layers.7.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="447" name="__module.vision_model.encoder.layers.7.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="497">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="448" name="self.vision_model.encoder.layers.7.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="206337208" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.7.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="449" name="__module.vision_model.encoder.layers.7.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="450" name="Constant_3175518" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="208696504" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="451" name="__module.vision_model.encoder.layers.7.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="491,key_states.15">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="452" name="Constant_3175810" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="453" name="__module.vision_model.encoder.layers.7.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="499">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="454" name="__module.vision_model.encoder.layers.7.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="455" name="__module.vision_model.encoder.layers.7.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="500">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="456" name="self.vision_model.encoder.layers.7.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="208699576" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.7.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="457" name="__module.vision_model.encoder.layers.7.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="458" name="Constant_3175519" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="211058872" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="459" name="__module.vision_model.encoder.layers.7.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="494,value_states.15">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="460" name="Constant_3175811" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="461" name="__module.vision_model.encoder.layers.7.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="502">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="462" name="__module.vision_model.encoder.layers.7.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="463" name="__module.vision_model.encoder.layers.7.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="503">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="464" name="__module.vision_model.encoder.layers.7.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="504,attn_output.29">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="465" name="__module.vision_model.encoder.layers.7.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="466" name="__module.vision_model.encoder.layers.7.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="505">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="467" name="Constant_3175812" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="468" name="__module.vision_model.encoder.layers.7.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="508">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="469" name="self.vision_model.encoder.layers.7.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="211061944" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.7.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="470" name="__module.vision_model.encoder.layers.7.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="471" name="Constant_3175520" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="213421240" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="472" name="__module.vision_model.encoder.layers.7.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="511,hidden_states.45">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="473" name="__module.vision_model.encoder.layers.7/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="512,residual.31">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="474" name="__module.vision_model.encoder.layers.7.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="475" name="__module.vision_model.encoder.layers.7.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="476" name="Constant_3175521" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="213424312" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="477" name="__module.vision_model.encoder.layers.7.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="478" name="Constant_3175522" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="213427384" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="479" name="__module.vision_model.encoder.layers.7.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="516">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="480" name="self.vision_model.encoder.layers.7.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="213430456" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.7.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="481" name="__module.vision_model.encoder.layers.7.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="482" name="Constant_3175523" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="222867640" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="483" name="__module.vision_model.encoder.layers.7.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="521">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="484" name="__module.vision_model.encoder.layers.7.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="522">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="485" name="self.vision_model.encoder.layers.7.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="222879928" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.7.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="486" name="__module.vision_model.encoder.layers.7.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="487" name="Constant_3175524" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="232317112" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="488" name="__module.vision_model.encoder.layers.7.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="525,hidden_states.47">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="489" name="__module.vision_model.encoder.layers.7/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="526,residual.33">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="490" name="__module.vision_model.encoder.layers.8.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="491" name="__module.vision_model.encoder.layers.8.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="492" name="Constant_3175525" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="232320184" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="493" name="__module.vision_model.encoder.layers.8.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="494" name="Constant_3175526" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="232323256" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="495" name="__module.vision_model.encoder.layers.8.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="534,hidden_states.49">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="496" name="self.vision_model.encoder.layers.8.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="232326328" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.8.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="497" name="__module.vision_model.encoder.layers.8.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="498" name="Constant_3175527" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="234685624" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="499" name="__module.vision_model.encoder.layers.8.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="543,query_states.17">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="500" name="Constant_3175813" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="501" name="__module.vision_model.encoder.layers.8.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="551">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="502" name="__module.vision_model.encoder.layers.8.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="503" name="__module.vision_model.encoder.layers.8.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="552">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="504" name="self.vision_model.encoder.layers.8.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="234688696" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.8.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="505" name="__module.vision_model.encoder.layers.8.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="506" name="Constant_3175528" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="237047992" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="507" name="__module.vision_model.encoder.layers.8.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="546,key_states.17">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="508" name="Constant_3175814" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="509" name="__module.vision_model.encoder.layers.8.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="554">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="510" name="__module.vision_model.encoder.layers.8.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="511" name="__module.vision_model.encoder.layers.8.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="555">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="512" name="self.vision_model.encoder.layers.8.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="237051064" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.8.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="513" name="__module.vision_model.encoder.layers.8.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="514" name="Constant_3175529" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="239410360" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="515" name="__module.vision_model.encoder.layers.8.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="549,value_states.17">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="516" name="Constant_3175815" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="517" name="__module.vision_model.encoder.layers.8.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="557">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="518" name="__module.vision_model.encoder.layers.8.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="519" name="__module.vision_model.encoder.layers.8.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="558">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="520" name="__module.vision_model.encoder.layers.8.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="559,attn_output.33">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="521" name="__module.vision_model.encoder.layers.8.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="522" name="__module.vision_model.encoder.layers.8.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="560">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="523" name="Constant_3175816" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="524" name="__module.vision_model.encoder.layers.8.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="563">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="525" name="self.vision_model.encoder.layers.8.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="239413432" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.8.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="526" name="__module.vision_model.encoder.layers.8.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="527" name="Constant_3175530" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="241772728" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="528" name="__module.vision_model.encoder.layers.8.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="566,hidden_states.51">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="529" name="__module.vision_model.encoder.layers.8/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="567,residual.35">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="530" name="__module.vision_model.encoder.layers.8.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="531" name="__module.vision_model.encoder.layers.8.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="532" name="Constant_3175531" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="241775800" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="533" name="__module.vision_model.encoder.layers.8.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="534" name="Constant_3175532" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="241778872" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="535" name="__module.vision_model.encoder.layers.8.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="571">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="536" name="self.vision_model.encoder.layers.8.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="241781944" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.8.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="537" name="__module.vision_model.encoder.layers.8.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="538" name="Constant_3175533" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="251219128" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="539" name="__module.vision_model.encoder.layers.8.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="576">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="540" name="__module.vision_model.encoder.layers.8.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="577">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="541" name="self.vision_model.encoder.layers.8.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="251231416" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.8.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="542" name="__module.vision_model.encoder.layers.8.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="543" name="Constant_3175534" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="260668600" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="544" name="__module.vision_model.encoder.layers.8.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="580,hidden_states.53">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="545" name="__module.vision_model.encoder.layers.8/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="581,residual.37">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="546" name="__module.vision_model.encoder.layers.9.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="547" name="__module.vision_model.encoder.layers.9.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="548" name="Constant_3175535" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="260671672" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="549" name="__module.vision_model.encoder.layers.9.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="550" name="Constant_3175536" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="260674744" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="551" name="__module.vision_model.encoder.layers.9.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="589,hidden_states.55">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="552" name="self.vision_model.encoder.layers.9.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="260677816" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.9.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="553" name="__module.vision_model.encoder.layers.9.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="554" name="Constant_3175537" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="263037112" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="555" name="__module.vision_model.encoder.layers.9.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="598,query_states.19">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="556" name="Constant_3175817" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="557" name="__module.vision_model.encoder.layers.9.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="606">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="558" name="__module.vision_model.encoder.layers.9.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="559" name="__module.vision_model.encoder.layers.9.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="607">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="560" name="self.vision_model.encoder.layers.9.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="263040184" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.9.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="561" name="__module.vision_model.encoder.layers.9.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="562" name="Constant_3175538" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="265399480" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="563" name="__module.vision_model.encoder.layers.9.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="601,key_states.19">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="564" name="Constant_3175818" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="565" name="__module.vision_model.encoder.layers.9.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="609">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="566" name="__module.vision_model.encoder.layers.9.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="567" name="__module.vision_model.encoder.layers.9.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="610">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="568" name="self.vision_model.encoder.layers.9.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="265402552" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.9.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="569" name="__module.vision_model.encoder.layers.9.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="570" name="Constant_3175539" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="267761848" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="571" name="__module.vision_model.encoder.layers.9.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="604,value_states.19">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="572" name="Constant_3175819" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="573" name="__module.vision_model.encoder.layers.9.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="612">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="574" name="__module.vision_model.encoder.layers.9.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="575" name="__module.vision_model.encoder.layers.9.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="613">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="576" name="__module.vision_model.encoder.layers.9.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="614,attn_output.37">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="577" name="__module.vision_model.encoder.layers.9.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="578" name="__module.vision_model.encoder.layers.9.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="615">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="579" name="Constant_3175820" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="580" name="__module.vision_model.encoder.layers.9.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="618">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="581" name="self.vision_model.encoder.layers.9.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="267764920" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.9.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="582" name="__module.vision_model.encoder.layers.9.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="583" name="Constant_3175540" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="270124216" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="584" name="__module.vision_model.encoder.layers.9.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="621,hidden_states.57">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="585" name="__module.vision_model.encoder.layers.9/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="622,residual.39">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="586" name="__module.vision_model.encoder.layers.9.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="587" name="__module.vision_model.encoder.layers.9.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="588" name="Constant_3175541" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="270127288" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="589" name="__module.vision_model.encoder.layers.9.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="590" name="Constant_3175542" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="270130360" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="591" name="__module.vision_model.encoder.layers.9.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="626">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="592" name="self.vision_model.encoder.layers.9.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="270133432" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.9.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="593" name="__module.vision_model.encoder.layers.9.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="594" name="Constant_3175543" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="279570616" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="595" name="__module.vision_model.encoder.layers.9.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="631">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="596" name="__module.vision_model.encoder.layers.9.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="632">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="597" name="self.vision_model.encoder.layers.9.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="279582904" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.9.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="598" name="__module.vision_model.encoder.layers.9.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="599" name="Constant_3175544" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="289020088" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="600" name="__module.vision_model.encoder.layers.9.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="635,hidden_states.59">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="601" name="__module.vision_model.encoder.layers.9/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="636,residual.41">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="602" name="__module.vision_model.encoder.layers.10.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="603" name="__module.vision_model.encoder.layers.10.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="604" name="Constant_3175545" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="289023160" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="605" name="__module.vision_model.encoder.layers.10.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="606" name="Constant_3175546" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="289026232" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="607" name="__module.vision_model.encoder.layers.10.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="644,hidden_states.61">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="608" name="self.vision_model.encoder.layers.10.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="289029304" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.10.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="609" name="__module.vision_model.encoder.layers.10.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="610" name="Constant_3175547" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="291388600" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="611" name="__module.vision_model.encoder.layers.10.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="653,query_states.21">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="612" name="Constant_3175821" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="613" name="__module.vision_model.encoder.layers.10.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="661">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="614" name="__module.vision_model.encoder.layers.10.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="615" name="__module.vision_model.encoder.layers.10.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="662">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="616" name="self.vision_model.encoder.layers.10.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="291391672" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.10.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="617" name="__module.vision_model.encoder.layers.10.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="618" name="Constant_3175548" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="293750968" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="619" name="__module.vision_model.encoder.layers.10.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="656,key_states.21">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="620" name="Constant_3175822" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="621" name="__module.vision_model.encoder.layers.10.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="664">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="622" name="__module.vision_model.encoder.layers.10.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="623" name="__module.vision_model.encoder.layers.10.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="665">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="624" name="self.vision_model.encoder.layers.10.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="293754040" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.10.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="625" name="__module.vision_model.encoder.layers.10.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="626" name="Constant_3175549" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="296113336" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="627" name="__module.vision_model.encoder.layers.10.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="659,value_states.21">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="628" name="Constant_3175823" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="629" name="__module.vision_model.encoder.layers.10.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="667">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="630" name="__module.vision_model.encoder.layers.10.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="631" name="__module.vision_model.encoder.layers.10.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="668">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="632" name="__module.vision_model.encoder.layers.10.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="669,attn_output.41">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
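<!-- The ScaledDotProductAttention above takes query, key and value tensors of shape [batch, 12 heads, seq, 64], a broadcastable mask of shape [batch, 1, seq, seq] on port 3, and a scalar scale on port 4; the edges section shows that the mask and scale are produced once (layers 70 and 71) and shared by every encoder block, the scale presumably being 1/sqrt(64). -->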
<layer id="633" name="__module.vision_model.encoder.layers.10.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="634" name="__module.vision_model.encoder.layers.10.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="670">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="635" name="Constant_3175824" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="636" name="__module.vision_model.encoder.layers.10.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="673">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="637" name="self.vision_model.encoder.layers.10.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="296116408" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.10.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="638" name="__module.vision_model.encoder.layers.10.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="639" name="Constant_3175550" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="298475704" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="640" name="__module.vision_model.encoder.layers.10.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="676,hidden_states.63">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="641" name="__module.vision_model.encoder.layers.10/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="677,residual.43">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="642" name="__module.vision_model.encoder.layers.10.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="643" name="__module.vision_model.encoder.layers.10.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="644" name="Constant_3175551" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="298478776" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="645" name="__module.vision_model.encoder.layers.10.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="646" name="Constant_3175552" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="298481848" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="647" name="__module.vision_model.encoder.layers.10.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="681">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="648" name="self.vision_model.encoder.layers.10.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="298484920" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.10.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="649" name="__module.vision_model.encoder.layers.10.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="650" name="Constant_3175553" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="307922104" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="651" name="__module.vision_model.encoder.layers.10.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="686">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="652" name="__module.vision_model.encoder.layers.10.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="687">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="653" name="self.vision_model.encoder.layers.10.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="307934392" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.10.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="654" name="__module.vision_model.encoder.layers.10.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="655" name="Constant_3175554" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="317371576" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="656" name="__module.vision_model.encoder.layers.10.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="690,hidden_states.65">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="657" name="__module.vision_model.encoder.layers.10/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="691,residual.45">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
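<!-- vision_model.encoder.layers.11 below appears to be the final encoder block: its residual output (tensor 746) feeds vision_model.post_layernorm rather than another encoder layer. -->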
<layer id="658" name="__module.vision_model.encoder.layers.11.layer_norm1/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="659" name="__module.vision_model.encoder.layers.11.layer_norm1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="660" name="Constant_3175555" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="317374648" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="661" name="__module.vision_model.encoder.layers.11.layer_norm1/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="662" name="Constant_3175556" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="317377720" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="663" name="__module.vision_model.encoder.layers.11.layer_norm1/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="699,hidden_states.67">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="664" name="self.vision_model.encoder.layers.11.self_attn.q_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="317380792" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.11.self_attn.q_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="665" name="__module.vision_model.encoder.layers.11.self_attn.q_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="666" name="Constant_3175557" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="319740088" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="667" name="__module.vision_model.encoder.layers.11.self_attn.q_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="708,query_states">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="668" name="Constant_3175825" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="669" name="__module.vision_model.encoder.layers.11.self_attn/aten::view/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="716">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="670" name="__module.vision_model.encoder.layers.11.self_attn/aten::transpose/Constant" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="671" name="__module.vision_model.encoder.layers.11.self_attn/aten::transpose/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="717">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="672" name="self.vision_model.encoder.layers.11.self_attn.k_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="319743160" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.11.self_attn.k_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="673" name="__module.vision_model.encoder.layers.11.self_attn.k_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="674" name="Constant_3175558" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="322102456" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="675" name="__module.vision_model.encoder.layers.11.self_attn.k_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="711,key_states">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="676" name="Constant_3175826" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="677" name="__module.vision_model.encoder.layers.11.self_attn/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="719">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="678" name="__module.vision_model.encoder.layers.11.self_attn/aten::transpose/Constant_1" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="679" name="__module.vision_model.encoder.layers.11.self_attn/aten::transpose/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="720">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="680" name="self.vision_model.encoder.layers.11.self_attn.v_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="322105528" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.11.self_attn.v_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="681" name="__module.vision_model.encoder.layers.11.self_attn.v_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="682" name="Constant_3175559" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="324464824" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="683" name="__module.vision_model.encoder.layers.11.self_attn.v_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="714,value_states">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="684" name="Constant_3175827" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="7876652" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="685" name="__module.vision_model.encoder.layers.11.self_attn/aten::view/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="722">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="686" name="__module.vision_model.encoder.layers.11.self_attn/aten::transpose/Constant_2" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="687" name="__module.vision_model.encoder.layers.11.self_attn/aten::transpose/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="723">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="688" name="__module.vision_model.encoder.layers.11.self_attn/aten::scaled_dot_product_attention/ScaledDotProductAttention" type="ScaledDotProductAttention" version="opset13">
<data causal="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="4" precision="FP32" />
</input>
<output>
<port id="5" precision="FP32" names="724,attn_output.45">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="689" name="__module.vision_model.encoder.layers.11.self_attn/aten::transpose/Constant_3" type="Const" version="opset1">
<data element_type="i32" shape="4" offset="7876684" size="16" />
<output>
<port id="0" precision="I32">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="690" name="__module.vision_model.encoder.layers.11.self_attn/aten::transpose/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>12</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I32">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="725">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="691" name="Constant_3175828" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="12601504" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="692" name="__module.vision_model.encoder.layers.11.self_attn/aten::view/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>12</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="728">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="693" name="self.vision_model.encoder.layers.11.self_attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 768" offset="324467896" size="2359296" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.11.self_attn.out_proj.weight">
<dim>768</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="694" name="__module.vision_model.encoder.layers.11.self_attn.out_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="695" name="Constant_3175560" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="326827192" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="696" name="__module.vision_model.encoder.layers.11.self_attn.out_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="731,hidden_states.69">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="697" name="__module.vision_model.encoder.layers.11/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="732,residual">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="698" name="__module.vision_model.encoder.layers.11.layer_norm2/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="699" name="__module.vision_model.encoder.layers.11.layer_norm2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="700" name="Constant_3175561" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="326830264" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="701" name="__module.vision_model.encoder.layers.11.layer_norm2/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="702" name="Constant_3175562" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="326833336" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="703" name="__module.vision_model.encoder.layers.11.layer_norm2/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="736">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="704" name="self.vision_model.encoder.layers.11.mlp.fc1.weight" type="Const" version="opset1">
<data element_type="f32" shape="3072, 768" offset="326836408" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.11.mlp.fc1.weight">
<dim>3072</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="705" name="__module.vision_model.encoder.layers.11.mlp.fc1/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>3072</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="706" name="Constant_3175563" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 3072" offset="336273592" size="12288" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="707" name="__module.vision_model.encoder.layers.11.mlp.fc1/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="741">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="708" name="__module.vision_model.encoder.layers.11.mlp.activation_fn/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="TANH" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="742">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="709" name="self.vision_model.encoder.layers.11.mlp.fc2.weight" type="Const" version="opset1">
<data element_type="f32" shape="768, 3072" offset="336285880" size="9437184" />
<output>
<port id="0" precision="FP32" names="self.vision_model.encoder.layers.11.mlp.fc2.weight">
<dim>768</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="710" name="__module.vision_model.encoder.layers.11.mlp.fc2/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>3072</dim>
</port>
<port id="1" precision="FP32">
<dim>768</dim>
<dim>3072</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="711" name="Constant_3175564" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="345723064" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="712" name="__module.vision_model.encoder.layers.11.mlp.fc2/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="745,hidden_states">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="713" name="__module.vision_model.encoder.layers.11/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="746">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="714" name="__module.vision_model.post_layernorm/aten::layer_norm/Multiply" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="5508136" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="715" name="__module.vision_model.post_layernorm/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="716" name="Constant_3175565" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="345726136" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="717" name="__module.vision_model.post_layernorm/aten::layer_norm/Multiply_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="718" name="Constant_3175566" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 768" offset="345729208" size="3072" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</output>
</layer>
<layer id="719" name="__module.vision_model.post_layernorm/aten::layer_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>768</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="750,x.1">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
</output>
</layer>
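<!-- The connector below appears to implement a pixel-shuffle style token reduction: the post-layernorm sequence of 768-channel patch features is reshaped to [batch, 32, 8, 3072], permuted, reshaped to [batch, 8, 8, 12288] and flattened to 64 tokens of 12288 channels, so 16 neighbouring patch embeddings (16 x 768 = 12288) are concatenated per output token; modality_projection.proj then maps each 12288-channel token to the 960-channel last_hidden_state. -->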
<layer id="720" name="Constant_3175655" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="345732280" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="721" name="__module.connector/aten::view/Reshape_1" type="Reshape" version="opset1">
<data special_zero="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>768</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="770,x.5">
<dim>-1</dim>
<dim>32</dim>
<dim>8</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="722" name="Constant_3170708" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="345732312" size="32" />
<output>
<port id="0" precision="I64" names="771">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="723" name="__module.connector/aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>8</dim>
<dim>3072</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="772,x.7">
<dim>-1</dim>
<dim>8</dim>
<dim>32</dim>
<dim>3072</dim>
</port>
</output>
</layer>
<layer id="724" name="Constant_3175829" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="345732344" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="725" name="__module.connector/aten::reshape/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>32</dim>
<dim>3072</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="776,x.9">
<dim>-1</dim>
<dim>8</dim>
<dim>8</dim>
<dim>12288</dim>
</port>
</output>
</layer>
<layer id="726" name="Constant_3170740" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="345732312" size="32" />
<output>
<port id="0" precision="I64" names="777">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="727" name="__module.connector/aten::permute/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>8</dim>
<dim>12288</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="778,x">
<dim>-1</dim>
<dim>8</dim>
<dim>8</dim>
<dim>12288</dim>
</port>
</output>
</layer>
<layer id="728" name="Constant_3175830" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="345732376" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="729" name="__module.connector/aten::reshape/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>8</dim>
<dim>12288</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="782">
<dim>-1</dim>
<dim>64</dim>
<dim>12288</dim>
</port>
</output>
</layer>
<layer id="730" name="self.connector.modality_projection.proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="960, 12288" offset="345732400" size="47185920" />
<output>
<port id="0" precision="FP32" names="self.connector.modality_projection.proj.weight">
<dim>960</dim>
<dim>12288</dim>
</port>
</output>
</layer>
<layer id="731" name="__module.connector.modality_projection.proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>12288</dim>
</port>
<port id="1" precision="FP32">
<dim>960</dim>
<dim>12288</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="last_hidden_state">
<dim>-1</dim>
<dim>64</dim>
<dim>960</dim>
</port>
</output>
</layer>
<layer id="732" name="Result_3170768" type="Result" version="opset1" output_names="last_hidden_state">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>960</dim>
</port>
</input>
</layer>
</layers>
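<!-- The edges below define the dataflow of the graph: each entry connects an output port of a producer layer (from-layer, from-port) to an input port of a consumer layer (to-layer, to-port), using the layer ids declared above. -->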
<edges>
<edge from-layer="0" from-port="0" to-layer="12" to-port="0" />
<edge from-layer="1" from-port="0" to-layer="48" to-port="0" />
<edge from-layer="2" from-port="0" to-layer="4" to-port="0" />
<edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
<edge from-layer="4" from-port="2" to-layer="6" to-port="0" />
<edge from-layer="5" from-port="0" to-layer="6" to-port="1" />
<edge from-layer="6" from-port="2" to-layer="53" to-port="0" />
<edge from-layer="6" from-port="2" to-layer="8" to-port="0" />
<edge from-layer="7" from-port="0" to-layer="8" to-port="1" />
<edge from-layer="8" from-port="2" to-layer="10" to-port="0" />
<edge from-layer="9" from-port="0" to-layer="10" to-port="1" />
<edge from-layer="10" from-port="2" to-layer="15" to-port="0" />
<edge from-layer="11" from-port="0" to-layer="14" to-port="0" />
<edge from-layer="12" from-port="1" to-layer="14" to-port="1" />
<edge from-layer="13" from-port="0" to-layer="14" to-port="2" />
<edge from-layer="14" from-port="3" to-layer="15" to-port="1" />
<edge from-layer="15" from-port="2" to-layer="17" to-port="0" />
<edge from-layer="15" from-port="2" to-layer="81" to-port="0" />
<edge from-layer="16" from-port="0" to-layer="17" to-port="1" />
<edge from-layer="17" from-port="2" to-layer="19" to-port="0" />
<edge from-layer="18" from-port="0" to-layer="19" to-port="1" />
<edge from-layer="19" from-port="2" to-layer="21" to-port="0" />
<edge from-layer="20" from-port="0" to-layer="21" to-port="1" />
<edge from-layer="21" from-port="2" to-layer="39" to-port="0" />
<edge from-layer="21" from-port="2" to-layer="31" to-port="0" />
<edge from-layer="21" from-port="2" to-layer="23" to-port="0" />
<edge from-layer="22" from-port="0" to-layer="23" to-port="1" />
<edge from-layer="23" from-port="2" to-layer="25" to-port="0" />
<edge from-layer="24" from-port="0" to-layer="25" to-port="1" />
<edge from-layer="25" from-port="2" to-layer="27" to-port="0" />
<edge from-layer="26" from-port="0" to-layer="27" to-port="1" />
<edge from-layer="27" from-port="2" to-layer="29" to-port="0" />
<edge from-layer="28" from-port="0" to-layer="29" to-port="1" />
<edge from-layer="29" from-port="2" to-layer="72" to-port="0" />
<edge from-layer="30" from-port="0" to-layer="31" to-port="1" />
<edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
<edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
<edge from-layer="33" from-port="2" to-layer="35" to-port="0" />
<edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
<edge from-layer="35" from-port="2" to-layer="37" to-port="0" />
<edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
<edge from-layer="37" from-port="2" to-layer="72" to-port="1" />
<edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
<edge from-layer="39" from-port="2" to-layer="41" to-port="0" />
<edge from-layer="40" from-port="0" to-layer="41" to-port="1" />
<edge from-layer="41" from-port="2" to-layer="43" to-port="0" />
<edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
<edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
<edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
<edge from-layer="45" from-port="2" to-layer="72" to-port="2" />
<edge from-layer="46" from-port="0" to-layer="67" to-port="0" />
<edge from-layer="47" from-port="0" to-layer="48" to-port="1" />
<edge from-layer="48" from-port="2" to-layer="50" to-port="0" />
<edge from-layer="48" from-port="2" to-layer="58" to-port="0" />
<edge from-layer="49" from-port="0" to-layer="50" to-port="1" />
<edge from-layer="50" from-port="2" to-layer="52" to-port="0" />
<edge from-layer="51" from-port="0" to-layer="52" to-port="1" />
<edge from-layer="52" from-port="2" to-layer="63" to-port="0" />
<edge from-layer="53" from-port="1" to-layer="56" to-port="0" />
<edge from-layer="54" from-port="0" to-layer="56" to-port="1" />
<edge from-layer="55" from-port="0" to-layer="56" to-port="2" />
<edge from-layer="56" from-port="3" to-layer="62" to-port="0" />
<edge from-layer="57" from-port="0" to-layer="62" to-port="1" />
<edge from-layer="58" from-port="1" to-layer="61" to-port="0" />
<edge from-layer="59" from-port="0" to-layer="61" to-port="1" />
<edge from-layer="60" from-port="0" to-layer="61" to-port="2" />
<edge from-layer="61" from-port="3" to-layer="62" to-port="2" />
<edge from-layer="62" from-port="3" to-layer="63" to-port="1" />
<edge from-layer="63" from-port="2" to-layer="64" to-port="0" />
<edge from-layer="64" from-port="1" to-layer="66" to-port="0" />
<edge from-layer="65" from-port="0" to-layer="66" to-port="1" />
<edge from-layer="66" from-port="2" to-layer="67" to-port="1" />
<edge from-layer="67" from-port="2" to-layer="68" to-port="0" />
<edge from-layer="67" from-port="2" to-layer="70" to-port="2" />
<edge from-layer="68" from-port="1" to-layer="70" to-port="0" />
<edge from-layer="69" from-port="0" to-layer="70" to-port="1" />
<edge from-layer="70" from-port="3" to-layer="464" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="520" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="576" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="632" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="688" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="352" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="296" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="240" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="184" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="408" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="128" to-port="3" />
<edge from-layer="70" from-port="3" to-layer="72" to-port="3" />
<edge from-layer="71" from-port="0" to-layer="128" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="464" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="72" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="408" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="520" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="576" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="184" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="240" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="632" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="688" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="352" to-port="4" />
<edge from-layer="71" from-port="0" to-layer="296" to-port="4" />
<edge from-layer="72" from-port="5" to-layer="74" to-port="0" />
<edge from-layer="73" from-port="0" to-layer="74" to-port="1" />
<edge from-layer="74" from-port="2" to-layer="76" to-port="0" />
<edge from-layer="75" from-port="0" to-layer="76" to-port="1" />
<edge from-layer="76" from-port="2" to-layer="78" to-port="0" />
<edge from-layer="77" from-port="0" to-layer="78" to-port="1" />
<edge from-layer="78" from-port="2" to-layer="80" to-port="0" />
<edge from-layer="79" from-port="0" to-layer="80" to-port="1" />
<edge from-layer="80" from-port="2" to-layer="81" to-port="1" />
<edge from-layer="81" from-port="2" to-layer="97" to-port="0" />
<edge from-layer="81" from-port="2" to-layer="83" to-port="0" />
<edge from-layer="82" from-port="0" to-layer="83" to-port="1" />
<edge from-layer="83" from-port="2" to-layer="85" to-port="0" />
<edge from-layer="84" from-port="0" to-layer="85" to-port="1" />
<edge from-layer="85" from-port="2" to-layer="87" to-port="0" />
<edge from-layer="86" from-port="0" to-layer="87" to-port="1" />
<edge from-layer="87" from-port="2" to-layer="89" to-port="0" />
<edge from-layer="88" from-port="0" to-layer="89" to-port="1" />
<edge from-layer="89" from-port="2" to-layer="91" to-port="0" />
<edge from-layer="90" from-port="0" to-layer="91" to-port="1" />
<edge from-layer="91" from-port="2" to-layer="92" to-port="0" />
<edge from-layer="92" from-port="1" to-layer="94" to-port="0" />
<edge from-layer="93" from-port="0" to-layer="94" to-port="1" />
<edge from-layer="94" from-port="2" to-layer="96" to-port="0" />
<edge from-layer="95" from-port="0" to-layer="96" to-port="1" />
<edge from-layer="96" from-port="2" to-layer="97" to-port="1" />
<edge from-layer="97" from-port="2" to-layer="99" to-port="0" />
<edge from-layer="97" from-port="2" to-layer="137" to-port="0" />
<edge from-layer="98" from-port="0" to-layer="99" to-port="1" />
<edge from-layer="99" from-port="2" to-layer="101" to-port="0" />
<edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
<edge from-layer="101" from-port="2" to-layer="103" to-port="0" />
<edge from-layer="102" from-port="0" to-layer="103" to-port="1" />
<edge from-layer="103" from-port="2" to-layer="121" to-port="0" />
<edge from-layer="103" from-port="2" to-layer="113" to-port="0" />
<edge from-layer="103" from-port="2" to-layer="105" to-port="0" />
<edge from-layer="104" from-port="0" to-layer="105" to-port="1" />
<edge from-layer="105" from-port="2" to-layer="107" to-port="0" />
<edge from-layer="106" from-port="0" to-layer="107" to-port="1" />
<edge from-layer="107" from-port="2" to-layer="109" to-port="0" />
<edge from-layer="108" from-port="0" to-layer="109" to-port="1" />
<edge from-layer="109" from-port="2" to-layer="111" to-port="0" />
<edge from-layer="110" from-port="0" to-layer="111" to-port="1" />
<edge from-layer="111" from-port="2" to-layer="128" to-port="0" />
<edge from-layer="112" from-port="0" to-layer="113" to-port="1" />
<edge from-layer="113" from-port="2" to-layer="115" to-port="0" />
<edge from-layer="114" from-port="0" to-layer="115" to-port="1" />
<edge from-layer="115" from-port="2" to-layer="117" to-port="0" />
<edge from-layer="116" from-port="0" to-layer="117" to-port="1" />
<edge from-layer="117" from-port="2" to-layer="119" to-port="0" />
<edge from-layer="118" from-port="0" to-layer="119" to-port="1" />
<edge from-layer="119" from-port="2" to-layer="128" to-port="1" />
<edge from-layer="120" from-port="0" to-layer="121" to-port="1" />
<edge from-layer="121" from-port="2" to-layer="123" to-port="0" />
<edge from-layer="122" from-port="0" to-layer="123" to-port="1" />
<edge from-layer="123" from-port="2" to-layer="125" to-port="0" />
<edge from-layer="124" from-port="0" to-layer="125" to-port="1" />
<edge from-layer="125" from-port="2" to-layer="127" to-port="0" />
<edge from-layer="126" from-port="0" to-layer="127" to-port="1" />
<edge from-layer="127" from-port="2" to-layer="128" to-port="2" />
<edge from-layer="128" from-port="5" to-layer="130" to-port="0" />
<edge from-layer="129" from-port="0" to-layer="130" to-port="1" />
<edge from-layer="130" from-port="2" to-layer="132" to-port="0" />
<edge from-layer="131" from-port="0" to-layer="132" to-port="1" />
<edge from-layer="132" from-port="2" to-layer="134" to-port="0" />
<edge from-layer="133" from-port="0" to-layer="134" to-port="1" />
<edge from-layer="134" from-port="2" to-layer="136" to-port="0" />
<edge from-layer="135" from-port="0" to-layer="136" to-port="1" />
<edge from-layer="136" from-port="2" to-layer="137" to-port="1" />
<edge from-layer="137" from-port="2" to-layer="153" to-port="0" />
<edge from-layer="137" from-port="2" to-layer="139" to-port="0" />
<edge from-layer="138" from-port="0" to-layer="139" to-port="1" />
<edge from-layer="139" from-port="2" to-layer="141" to-port="0" />
<edge from-layer="140" from-port="0" to-layer="141" to-port="1" />
<edge from-layer="141" from-port="2" to-layer="143" to-port="0" />
<edge from-layer="142" from-port="0" to-layer="143" to-port="1" />
<edge from-layer="143" from-port="2" to-layer="145" to-port="0" />
<edge from-layer="144" from-port="0" to-layer="145" to-port="1" />
<edge from-layer="145" from-port="2" to-layer="147" to-port="0" />
<edge from-layer="146" from-port="0" to-layer="147" to-port="1" />
<edge from-layer="147" from-port="2" to-layer="148" to-port="0" />
<edge from-layer="148" from-port="1" to-layer="150" to-port="0" />
<edge from-layer="149" from-port="0" to-layer="150" to-port="1" />
<edge from-layer="150" from-port="2" to-layer="152" to-port="0" />
<edge from-layer="151" from-port="0" to-layer="152" to-port="1" />
<edge from-layer="152" from-port="2" to-layer="153" to-port="1" />
<edge from-layer="153" from-port="2" to-layer="193" to-port="0" />
<edge from-layer="153" from-port="2" to-layer="155" to-port="0" />
<edge from-layer="154" from-port="0" to-layer="155" to-port="1" />
<edge from-layer="155" from-port="2" to-layer="157" to-port="0" />
<edge from-layer="156" from-port="0" to-layer="157" to-port="1" />
<edge from-layer="157" from-port="2" to-layer="159" to-port="0" />
<edge from-layer="158" from-port="0" to-layer="159" to-port="1" />
<edge from-layer="159" from-port="2" to-layer="169" to-port="0" />
<edge from-layer="159" from-port="2" to-layer="177" to-port="0" />
<edge from-layer="159" from-port="2" to-layer="161" to-port="0" />
<edge from-layer="160" from-port="0" to-layer="161" to-port="1" />
<edge from-layer="161" from-port="2" to-layer="163" to-port="0" />
<edge from-layer="162" from-port="0" to-layer="163" to-port="1" />
<edge from-layer="163" from-port="2" to-layer="165" to-port="0" />
<edge from-layer="164" from-port="0" to-layer="165" to-port="1" />
<edge from-layer="165" from-port="2" to-layer="167" to-port="0" />
<edge from-layer="166" from-port="0" to-layer="167" to-port="1" />
<edge from-layer="167" from-port="2" to-layer="184" to-port="0" />
<edge from-layer="168" from-port="0" to-layer="169" to-port="1" />
<edge from-layer="169" from-port="2" to-layer="171" to-port="0" />
<edge from-layer="170" from-port="0" to-layer="171" to-port="1" />
<edge from-layer="171" from-port="2" to-layer="173" to-port="0" />
<edge from-layer="172" from-port="0" to-layer="173" to-port="1" />
<edge from-layer="173" from-port="2" to-layer="175" to-port="0" />
<edge from-layer="174" from-port="0" to-layer="175" to-port="1" />
<edge from-layer="175" from-port="2" to-layer="184" to-port="1" />
<edge from-layer="176" from-port="0" to-layer="177" to-port="1" />
<edge from-layer="177" from-port="2" to-layer="179" to-port="0" />
<edge from-layer="178" from-port="0" to-layer="179" to-port="1" />
<edge from-layer="179" from-port="2" to-layer="181" to-port="0" />
<edge from-layer="180" from-port="0" to-layer="181" to-port="1" />
<edge from-layer="181" from-port="2" to-layer="183" to-port="0" />
<edge from-layer="182" from-port="0" to-layer="183" to-port="1" />
<edge from-layer="183" from-port="2" to-layer="184" to-port="2" />
<edge from-layer="184" from-port="5" to-layer="186" to-port="0" />
<edge from-layer="185" from-port="0" to-layer="186" to-port="1" />
<edge from-layer="186" from-port="2" to-layer="188" to-port="0" />
<edge from-layer="187" from-port="0" to-layer="188" to-port="1" />
<edge from-layer="188" from-port="2" to-layer="190" to-port="0" />
<edge from-layer="189" from-port="0" to-layer="190" to-port="1" />
<edge from-layer="190" from-port="2" to-layer="192" to-port="0" />
<edge from-layer="191" from-port="0" to-layer="192" to-port="1" />
<edge from-layer="192" from-port="2" to-layer="193" to-port="1" />
<edge from-layer="193" from-port="2" to-layer="209" to-port="0" />
<edge from-layer="193" from-port="2" to-layer="195" to-port="0" />
<edge from-layer="194" from-port="0" to-layer="195" to-port="1" />
<edge from-layer="195" from-port="2" to-layer="197" to-port="0" />
<edge from-layer="196" from-port="0" to-layer="197" to-port="1" />
<edge from-layer="197" from-port="2" to-layer="199" to-port="0" />
<edge from-layer="198" from-port="0" to-layer="199" to-port="1" />
<edge from-layer="199" from-port="2" to-layer="201" to-port="0" />
<edge from-layer="200" from-port="0" to-layer="201" to-port="1" />
<edge from-layer="201" from-port="2" to-layer="203" to-port="0" />
<edge from-layer="202" from-port="0" to-layer="203" to-port="1" />
<edge from-layer="203" from-port="2" to-layer="204" to-port="0" />
<edge from-layer="204" from-port="1" to-layer="206" to-port="0" />
<edge from-layer="205" from-port="0" to-layer="206" to-port="1" />
<edge from-layer="206" from-port="2" to-layer="208" to-port="0" />
<edge from-layer="207" from-port="0" to-layer="208" to-port="1" />
<edge from-layer="208" from-port="2" to-layer="209" to-port="1" />
<edge from-layer="209" from-port="2" to-layer="249" to-port="0" />
<edge from-layer="209" from-port="2" to-layer="211" to-port="0" />
<edge from-layer="210" from-port="0" to-layer="211" to-port="1" />
<edge from-layer="211" from-port="2" to-layer="213" to-port="0" />
<edge from-layer="212" from-port="0" to-layer="213" to-port="1" />
<edge from-layer="213" from-port="2" to-layer="215" to-port="0" />
<edge from-layer="214" from-port="0" to-layer="215" to-port="1" />
<edge from-layer="215" from-port="2" to-layer="233" to-port="0" />
<edge from-layer="215" from-port="2" to-layer="217" to-port="0" />
<edge from-layer="215" from-port="2" to-layer="225" to-port="0" />
<edge from-layer="216" from-port="0" to-layer="217" to-port="1" />
<edge from-layer="217" from-port="2" to-layer="219" to-port="0" />
<edge from-layer="218" from-port="0" to-layer="219" to-port="1" />
<edge from-layer="219" from-port="2" to-layer="221" to-port="0" />
<edge from-layer="220" from-port="0" to-layer="221" to-port="1" />
<edge from-layer="221" from-port="2" to-layer="223" to-port="0" />
<edge from-layer="222" from-port="0" to-layer="223" to-port="1" />
<edge from-layer="223" from-port="2" to-layer="240" to-port="0" />
<edge from-layer="224" from-port="0" to-layer="225" to-port="1" />
<edge from-layer="225" from-port="2" to-layer="227" to-port="0" />
<edge from-layer="226" from-port="0" to-layer="227" to-port="1" />
<edge from-layer="227" from-port="2" to-layer="229" to-port="0" />
<edge from-layer="228" from-port="0" to-layer="229" to-port="1" />
<edge from-layer="229" from-port="2" to-layer="231" to-port="0" />
<edge from-layer="230" from-port="0" to-layer="231" to-port="1" />
<edge from-layer="231" from-port="2" to-layer="240" to-port="1" />
<edge from-layer="232" from-port="0" to-layer="233" to-port="1" />
<edge from-layer="233" from-port="2" to-layer="235" to-port="0" />
<edge from-layer="234" from-port="0" to-layer="235" to-port="1" />
<edge from-layer="235" from-port="2" to-layer="237" to-port="0" />
<edge from-layer="236" from-port="0" to-layer="237" to-port="1" />
<edge from-layer="237" from-port="2" to-layer="239" to-port="0" />
<edge from-layer="238" from-port="0" to-layer="239" to-port="1" />
<edge from-layer="239" from-port="2" to-layer="240" to-port="2" />
<edge from-layer="240" from-port="5" to-layer="242" to-port="0" />
<edge from-layer="241" from-port="0" to-layer="242" to-port="1" />
<edge from-layer="242" from-port="2" to-layer="244" to-port="0" />
<edge from-layer="243" from-port="0" to-layer="244" to-port="1" />
<edge from-layer="244" from-port="2" to-layer="246" to-port="0" />
<edge from-layer="245" from-port="0" to-layer="246" to-port="1" />
<edge from-layer="246" from-port="2" to-layer="248" to-port="0" />
<edge from-layer="247" from-port="0" to-layer="248" to-port="1" />
<edge from-layer="248" from-port="2" to-layer="249" to-port="1" />
<edge from-layer="249" from-port="2" to-layer="265" to-port="0" />
<edge from-layer="249" from-port="2" to-layer="251" to-port="0" />
<edge from-layer="250" from-port="0" to-layer="251" to-port="1" />
<edge from-layer="251" from-port="2" to-layer="253" to-port="0" />
<edge from-layer="252" from-port="0" to-layer="253" to-port="1" />
<edge from-layer="253" from-port="2" to-layer="255" to-port="0" />
<edge from-layer="254" from-port="0" to-layer="255" to-port="1" />
<edge from-layer="255" from-port="2" to-layer="257" to-port="0" />
<edge from-layer="256" from-port="0" to-layer="257" to-port="1" />
<edge from-layer="257" from-port="2" to-layer="259" to-port="0" />
<edge from-layer="258" from-port="0" to-layer="259" to-port="1" />
<edge from-layer="259" from-port="2" to-layer="260" to-port="0" />
<edge from-layer="260" from-port="1" to-layer="262" to-port="0" />
<edge from-layer="261" from-port="0" to-layer="262" to-port="1" />
<edge from-layer="262" from-port="2" to-layer="264" to-port="0" />
<edge from-layer="263" from-port="0" to-layer="264" to-port="1" />
<edge from-layer="264" from-port="2" to-layer="265" to-port="1" />
<edge from-layer="265" from-port="2" to-layer="305" to-port="0" />
<edge from-layer="265" from-port="2" to-layer="267" to-port="0" />
<edge from-layer="266" from-port="0" to-layer="267" to-port="1" />
<edge from-layer="267" from-port="2" to-layer="269" to-port="0" />
<edge from-layer="268" from-port="0" to-layer="269" to-port="1" />
<edge from-layer="269" from-port="2" to-layer="271" to-port="0" />
<edge from-layer="270" from-port="0" to-layer="271" to-port="1" />
<edge from-layer="271" from-port="2" to-layer="289" to-port="0" />
<edge from-layer="271" from-port="2" to-layer="273" to-port="0" />
<edge from-layer="271" from-port="2" to-layer="281" to-port="0" />
<edge from-layer="272" from-port="0" to-layer="273" to-port="1" />
<edge from-layer="273" from-port="2" to-layer="275" to-port="0" />
<edge from-layer="274" from-port="0" to-layer="275" to-port="1" />
<edge from-layer="275" from-port="2" to-layer="277" to-port="0" />
<edge from-layer="276" from-port="0" to-layer="277" to-port="1" />
<edge from-layer="277" from-port="2" to-layer="279" to-port="0" />
<edge from-layer="278" from-port="0" to-layer="279" to-port="1" />
<edge from-layer="279" from-port="2" to-layer="296" to-port="0" />
<edge from-layer="280" from-port="0" to-layer="281" to-port="1" />
<edge from-layer="281" from-port="2" to-layer="283" to-port="0" />
<edge from-layer="282" from-port="0" to-layer="283" to-port="1" />
<edge from-layer="283" from-port="2" to-layer="285" to-port="0" />
<edge from-layer="284" from-port="0" to-layer="285" to-port="1" />
<edge from-layer="285" from-port="2" to-layer="287" to-port="0" />
<edge from-layer="286" from-port="0" to-layer="287" to-port="1" />
<edge from-layer="287" from-port="2" to-layer="296" to-port="1" />
<edge from-layer="288" from-port="0" to-layer="289" to-port="1" />
<edge from-layer="289" from-port="2" to-layer="291" to-port="0" />
<edge from-layer="290" from-port="0" to-layer="291" to-port="1" />
<edge from-layer="291" from-port="2" to-layer="293" to-port="0" />
<edge from-layer="292" from-port="0" to-layer="293" to-port="1" />
<edge from-layer="293" from-port="2" to-layer="295" to-port="0" />
<edge from-layer="294" from-port="0" to-layer="295" to-port="1" />
<edge from-layer="295" from-port="2" to-layer="296" to-port="2" />
<edge from-layer="296" from-port="5" to-layer="298" to-port="0" />
<edge from-layer="297" from-port="0" to-layer="298" to-port="1" />
<edge from-layer="298" from-port="2" to-layer="300" to-port="0" />
<edge from-layer="299" from-port="0" to-layer="300" to-port="1" />
<edge from-layer="300" from-port="2" to-layer="302" to-port="0" />
<edge from-layer="301" from-port="0" to-layer="302" to-port="1" />
<edge from-layer="302" from-port="2" to-layer="304" to-port="0" />
<edge from-layer="303" from-port="0" to-layer="304" to-port="1" />
<edge from-layer="304" from-port="2" to-layer="305" to-port="1" />
<edge from-layer="305" from-port="2" to-layer="321" to-port="0" />
<edge from-layer="305" from-port="2" to-layer="307" to-port="0" />
<edge from-layer="306" from-port="0" to-layer="307" to-port="1" />
<edge from-layer="307" from-port="2" to-layer="309" to-port="0" />
<edge from-layer="308" from-port="0" to-layer="309" to-port="1" />
<edge from-layer="309" from-port="2" to-layer="311" to-port="0" />
<edge from-layer="310" from-port="0" to-layer="311" to-port="1" />
<edge from-layer="311" from-port="2" to-layer="313" to-port="0" />
<edge from-layer="312" from-port="0" to-layer="313" to-port="1" />
<edge from-layer="313" from-port="2" to-layer="315" to-port="0" />
<edge from-layer="314" from-port="0" to-layer="315" to-port="1" />
<edge from-layer="315" from-port="2" to-layer="316" to-port="0" />
<edge from-layer="316" from-port="1" to-layer="318" to-port="0" />
<edge from-layer="317" from-port="0" to-layer="318" to-port="1" />
<edge from-layer="318" from-port="2" to-layer="320" to-port="0" />
<edge from-layer="319" from-port="0" to-layer="320" to-port="1" />
<edge from-layer="320" from-port="2" to-layer="321" to-port="1" />
<edge from-layer="321" from-port="2" to-layer="323" to-port="0" />
<edge from-layer="321" from-port="2" to-layer="361" to-port="0" />
<edge from-layer="322" from-port="0" to-layer="323" to-port="1" />
<edge from-layer="323" from-port="2" to-layer="325" to-port="0" />
<edge from-layer="324" from-port="0" to-layer="325" to-port="1" />
<edge from-layer="325" from-port="2" to-layer="327" to-port="0" />
<edge from-layer="326" from-port="0" to-layer="327" to-port="1" />
<edge from-layer="327" from-port="2" to-layer="337" to-port="0" />
<edge from-layer="327" from-port="2" to-layer="345" to-port="0" />
<edge from-layer="327" from-port="2" to-layer="329" to-port="0" />
<edge from-layer="328" from-port="0" to-layer="329" to-port="1" />
<edge from-layer="329" from-port="2" to-layer="331" to-port="0" />
<edge from-layer="330" from-port="0" to-layer="331" to-port="1" />
<edge from-layer="331" from-port="2" to-layer="333" to-port="0" />
<edge from-layer="332" from-port="0" to-layer="333" to-port="1" />
<edge from-layer="333" from-port="2" to-layer="335" to-port="0" />
<edge from-layer="334" from-port="0" to-layer="335" to-port="1" />
<edge from-layer="335" from-port="2" to-layer="352" to-port="0" />
<edge from-layer="336" from-port="0" to-layer="337" to-port="1" />
<edge from-layer="337" from-port="2" to-layer="339" to-port="0" />
<edge from-layer="338" from-port="0" to-layer="339" to-port="1" />
<edge from-layer="339" from-port="2" to-layer="341" to-port="0" />
<edge from-layer="340" from-port="0" to-layer="341" to-port="1" />
<edge from-layer="341" from-port="2" to-layer="343" to-port="0" />
<edge from-layer="342" from-port="0" to-layer="343" to-port="1" />
<edge from-layer="343" from-port="2" to-layer="352" to-port="1" />
<edge from-layer="344" from-port="0" to-layer="345" to-port="1" />
<edge from-layer="345" from-port="2" to-layer="347" to-port="0" />
<edge from-layer="346" from-port="0" to-layer="347" to-port="1" />
<edge from-layer="347" from-port="2" to-layer="349" to-port="0" />
<edge from-layer="348" from-port="0" to-layer="349" to-port="1" />
<edge from-layer="349" from-port="2" to-layer="351" to-port="0" />
<edge from-layer="350" from-port="0" to-layer="351" to-port="1" />
<edge from-layer="351" from-port="2" to-layer="352" to-port="2" />
<edge from-layer="352" from-port="5" to-layer="354" to-port="0" />
<edge from-layer="353" from-port="0" to-layer="354" to-port="1" />
<edge from-layer="354" from-port="2" to-layer="356" to-port="0" />
<edge from-layer="355" from-port="0" to-layer="356" to-port="1" />
<edge from-layer="356" from-port="2" to-layer="358" to-port="0" />
<edge from-layer="357" from-port="0" to-layer="358" to-port="1" />
<edge from-layer="358" from-port="2" to-layer="360" to-port="0" />
<edge from-layer="359" from-port="0" to-layer="360" to-port="1" />
<edge from-layer="360" from-port="2" to-layer="361" to-port="1" />
<edge from-layer="361" from-port="2" to-layer="363" to-port="0" />
<edge from-layer="361" from-port="2" to-layer="377" to-port="0" />
<edge from-layer="362" from-port="0" to-layer="363" to-port="1" />
<edge from-layer="363" from-port="2" to-layer="365" to-port="0" />
<edge from-layer="364" from-port="0" to-layer="365" to-port="1" />
<edge from-layer="365" from-port="2" to-layer="367" to-port="0" />
<edge from-layer="366" from-port="0" to-layer="367" to-port="1" />
<edge from-layer="367" from-port="2" to-layer="369" to-port="0" />
<edge from-layer="368" from-port="0" to-layer="369" to-port="1" />
<edge from-layer="369" from-port="2" to-layer="371" to-port="0" />
<edge from-layer="370" from-port="0" to-layer="371" to-port="1" />
<edge from-layer="371" from-port="2" to-layer="372" to-port="0" />
<edge from-layer="372" from-port="1" to-layer="374" to-port="0" />
<edge from-layer="373" from-port="0" to-layer="374" to-port="1" />
<edge from-layer="374" from-port="2" to-layer="376" to-port="0" />
<edge from-layer="375" from-port="0" to-layer="376" to-port="1" />
<edge from-layer="376" from-port="2" to-layer="377" to-port="1" />
<edge from-layer="377" from-port="2" to-layer="417" to-port="0" />
<edge from-layer="377" from-port="2" to-layer="379" to-port="0" />
<edge from-layer="378" from-port="0" to-layer="379" to-port="1" />
<edge from-layer="379" from-port="2" to-layer="381" to-port="0" />
<edge from-layer="380" from-port="0" to-layer="381" to-port="1" />
<edge from-layer="381" from-port="2" to-layer="383" to-port="0" />
<edge from-layer="382" from-port="0" to-layer="383" to-port="1" />
<edge from-layer="383" from-port="2" to-layer="393" to-port="0" />
<edge from-layer="383" from-port="2" to-layer="401" to-port="0" />
<edge from-layer="383" from-port="2" to-layer="385" to-port="0" />
<edge from-layer="384" from-port="0" to-layer="385" to-port="1" />
<edge from-layer="385" from-port="2" to-layer="387" to-port="0" />
<edge from-layer="386" from-port="0" to-layer="387" to-port="1" />
<edge from-layer="387" from-port="2" to-layer="389" to-port="0" />
<edge from-layer="388" from-port="0" to-layer="389" to-port="1" />
<edge from-layer="389" from-port="2" to-layer="391" to-port="0" />
<edge from-layer="390" from-port="0" to-layer="391" to-port="1" />
<edge from-layer="391" from-port="2" to-layer="408" to-port="0" />
<edge from-layer="392" from-port="0" to-layer="393" to-port="1" />
<edge from-layer="393" from-port="2" to-layer="395" to-port="0" />
<edge from-layer="394" from-port="0" to-layer="395" to-port="1" />
<edge from-layer="395" from-port="2" to-layer="397" to-port="0" />
<edge from-layer="396" from-port="0" to-layer="397" to-port="1" />
<edge from-layer="397" from-port="2" to-layer="399" to-port="0" />
<edge from-layer="398" from-port="0" to-layer="399" to-port="1" />
<edge from-layer="399" from-port="2" to-layer="408" to-port="1" />
<edge from-layer="400" from-port="0" to-layer="401" to-port="1" />
<edge from-layer="401" from-port="2" to-layer="403" to-port="0" />
<edge from-layer="402" from-port="0" to-layer="403" to-port="1" />
<edge from-layer="403" from-port="2" to-layer="405" to-port="0" />
<edge from-layer="404" from-port="0" to-layer="405" to-port="1" />
<edge from-layer="405" from-port="2" to-layer="407" to-port="0" />
<edge from-layer="406" from-port="0" to-layer="407" to-port="1" />
<edge from-layer="407" from-port="2" to-layer="408" to-port="2" />
<edge from-layer="408" from-port="5" to-layer="410" to-port="0" />
<edge from-layer="409" from-port="0" to-layer="410" to-port="1" />
<edge from-layer="410" from-port="2" to-layer="412" to-port="0" />
<edge from-layer="411" from-port="0" to-layer="412" to-port="1" />
<edge from-layer="412" from-port="2" to-layer="414" to-port="0" />
<edge from-layer="413" from-port="0" to-layer="414" to-port="1" />
<edge from-layer="414" from-port="2" to-layer="416" to-port="0" />
<edge from-layer="415" from-port="0" to-layer="416" to-port="1" />
<edge from-layer="416" from-port="2" to-layer="417" to-port="1" />
<edge from-layer="417" from-port="2" to-layer="433" to-port="0" />
<edge from-layer="417" from-port="2" to-layer="419" to-port="0" />
<edge from-layer="418" from-port="0" to-layer="419" to-port="1" />
<edge from-layer="419" from-port="2" to-layer="421" to-port="0" />
<edge from-layer="420" from-port="0" to-layer="421" to-port="1" />
<edge from-layer="421" from-port="2" to-layer="423" to-port="0" />
<edge from-layer="422" from-port="0" to-layer="423" to-port="1" />
<edge from-layer="423" from-port="2" to-layer="425" to-port="0" />
<edge from-layer="424" from-port="0" to-layer="425" to-port="1" />
<edge from-layer="425" from-port="2" to-layer="427" to-port="0" />
<edge from-layer="426" from-port="0" to-layer="427" to-port="1" />
<edge from-layer="427" from-port="2" to-layer="428" to-port="0" />
<edge from-layer="428" from-port="1" to-layer="430" to-port="0" />
<edge from-layer="429" from-port="0" to-layer="430" to-port="1" />
<edge from-layer="430" from-port="2" to-layer="432" to-port="0" />
<edge from-layer="431" from-port="0" to-layer="432" to-port="1" />
<edge from-layer="432" from-port="2" to-layer="433" to-port="1" />
<edge from-layer="433" from-port="2" to-layer="435" to-port="0" />
<edge from-layer="433" from-port="2" to-layer="473" to-port="0" />
<edge from-layer="434" from-port="0" to-layer="435" to-port="1" />
<edge from-layer="435" from-port="2" to-layer="437" to-port="0" />
<edge from-layer="436" from-port="0" to-layer="437" to-port="1" />
<edge from-layer="437" from-port="2" to-layer="439" to-port="0" />
<edge from-layer="438" from-port="0" to-layer="439" to-port="1" />
<edge from-layer="439" from-port="2" to-layer="441" to-port="0" />
<edge from-layer="439" from-port="2" to-layer="457" to-port="0" />
<edge from-layer="439" from-port="2" to-layer="449" to-port="0" />
<edge from-layer="440" from-port="0" to-layer="441" to-port="1" />
<edge from-layer="441" from-port="2" to-layer="443" to-port="0" />
<edge from-layer="442" from-port="0" to-layer="443" to-port="1" />
<edge from-layer="443" from-port="2" to-layer="445" to-port="0" />
<edge from-layer="444" from-port="0" to-layer="445" to-port="1" />
<edge from-layer="445" from-port="2" to-layer="447" to-port="0" />
<edge from-layer="446" from-port="0" to-layer="447" to-port="1" />
<edge from-layer="447" from-port="2" to-layer="464" to-port="0" />
<edge from-layer="448" from-port="0" to-layer="449" to-port="1" />
<edge from-layer="449" from-port="2" to-layer="451" to-port="0" />
<edge from-layer="450" from-port="0" to-layer="451" to-port="1" />
<edge from-layer="451" from-port="2" to-layer="453" to-port="0" />
<edge from-layer="452" from-port="0" to-layer="453" to-port="1" />
<edge from-layer="453" from-port="2" to-layer="455" to-port="0" />
<edge from-layer="454" from-port="0" to-layer="455" to-port="1" />
<edge from-layer="455" from-port="2" to-layer="464" to-port="1" />
<edge from-layer="456" from-port="0" to-layer="457" to-port="1" />
<edge from-layer="457" from-port="2" to-layer="459" to-port="0" />
<edge from-layer="458" from-port="0" to-layer="459" to-port="1" />
<edge from-layer="459" from-port="2" to-layer="461" to-port="0" />
<edge from-layer="460" from-port="0" to-layer="461" to-port="1" />
<edge from-layer="461" from-port="2" to-layer="463" to-port="0" />
<edge from-layer="462" from-port="0" to-layer="463" to-port="1" />
<edge from-layer="463" from-port="2" to-layer="464" to-port="2" />
<edge from-layer="464" from-port="5" to-layer="466" to-port="0" />
<edge from-layer="465" from-port="0" to-layer="466" to-port="1" />
<edge from-layer="466" from-port="2" to-layer="468" to-port="0" />
<edge from-layer="467" from-port="0" to-layer="468" to-port="1" />
<edge from-layer="468" from-port="2" to-layer="470" to-port="0" />
<edge from-layer="469" from-port="0" to-layer="470" to-port="1" />
<edge from-layer="470" from-port="2" to-layer="472" to-port="0" />
<edge from-layer="471" from-port="0" to-layer="472" to-port="1" />
<edge from-layer="472" from-port="2" to-layer="473" to-port="1" />
<edge from-layer="473" from-port="2" to-layer="475" to-port="0" />
<edge from-layer="473" from-port="2" to-layer="489" to-port="0" />
<edge from-layer="474" from-port="0" to-layer="475" to-port="1" />
<edge from-layer="475" from-port="2" to-layer="477" to-port="0" />
<edge from-layer="476" from-port="0" to-layer="477" to-port="1" />
<edge from-layer="477" from-port="2" to-layer="479" to-port="0" />
<edge from-layer="478" from-port="0" to-layer="479" to-port="1" />
<edge from-layer="479" from-port="2" to-layer="481" to-port="0" />
<edge from-layer="480" from-port="0" to-layer="481" to-port="1" />
<edge from-layer="481" from-port="2" to-layer="483" to-port="0" />
<edge from-layer="482" from-port="0" to-layer="483" to-port="1" />
<edge from-layer="483" from-port="2" to-layer="484" to-port="0" />
<edge from-layer="484" from-port="1" to-layer="486" to-port="0" />
<edge from-layer="485" from-port="0" to-layer="486" to-port="1" />
<edge from-layer="486" from-port="2" to-layer="488" to-port="0" />
<edge from-layer="487" from-port="0" to-layer="488" to-port="1" />
<edge from-layer="488" from-port="2" to-layer="489" to-port="1" />
<edge from-layer="489" from-port="2" to-layer="529" to-port="0" />
<edge from-layer="489" from-port="2" to-layer="491" to-port="0" />
<edge from-layer="490" from-port="0" to-layer="491" to-port="1" />
<edge from-layer="491" from-port="2" to-layer="493" to-port="0" />
<edge from-layer="492" from-port="0" to-layer="493" to-port="1" />
<edge from-layer="493" from-port="2" to-layer="495" to-port="0" />
<edge from-layer="494" from-port="0" to-layer="495" to-port="1" />
<edge from-layer="495" from-port="2" to-layer="497" to-port="0" />
<edge from-layer="495" from-port="2" to-layer="505" to-port="0" />
<edge from-layer="495" from-port="2" to-layer="513" to-port="0" />
<edge from-layer="496" from-port="0" to-layer="497" to-port="1" />
<edge from-layer="497" from-port="2" to-layer="499" to-port="0" />
<edge from-layer="498" from-port="0" to-layer="499" to-port="1" />
<edge from-layer="499" from-port="2" to-layer="501" to-port="0" />
<edge from-layer="500" from-port="0" to-layer="501" to-port="1" />
<edge from-layer="501" from-port="2" to-layer="503" to-port="0" />
<edge from-layer="502" from-port="0" to-layer="503" to-port="1" />
<edge from-layer="503" from-port="2" to-layer="520" to-port="0" />
<edge from-layer="504" from-port="0" to-layer="505" to-port="1" />
<edge from-layer="505" from-port="2" to-layer="507" to-port="0" />
<edge from-layer="506" from-port="0" to-layer="507" to-port="1" />
<edge from-layer="507" from-port="2" to-layer="509" to-port="0" />
<edge from-layer="508" from-port="0" to-layer="509" to-port="1" />
<edge from-layer="509" from-port="2" to-layer="511" to-port="0" />
<edge from-layer="510" from-port="0" to-layer="511" to-port="1" />
<edge from-layer="511" from-port="2" to-layer="520" to-port="1" />
<edge from-layer="512" from-port="0" to-layer="513" to-port="1" />
<edge from-layer="513" from-port="2" to-layer="515" to-port="0" />
<edge from-layer="514" from-port="0" to-layer="515" to-port="1" />
<edge from-layer="515" from-port="2" to-layer="517" to-port="0" />
<edge from-layer="516" from-port="0" to-layer="517" to-port="1" />
<edge from-layer="517" from-port="2" to-layer="519" to-port="0" />
<edge from-layer="518" from-port="0" to-layer="519" to-port="1" />
<edge from-layer="519" from-port="2" to-layer="520" to-port="2" />
<edge from-layer="520" from-port="5" to-layer="522" to-port="0" />
<edge from-layer="521" from-port="0" to-layer="522" to-port="1" />
<edge from-layer="522" from-port="2" to-layer="524" to-port="0" />
<edge from-layer="523" from-port="0" to-layer="524" to-port="1" />
<edge from-layer="524" from-port="2" to-layer="526" to-port="0" />
<edge from-layer="525" from-port="0" to-layer="526" to-port="1" />
<edge from-layer="526" from-port="2" to-layer="528" to-port="0" />
<edge from-layer="527" from-port="0" to-layer="528" to-port="1" />
<edge from-layer="528" from-port="2" to-layer="529" to-port="1" />
<edge from-layer="529" from-port="2" to-layer="531" to-port="0" />
<edge from-layer="529" from-port="2" to-layer="545" to-port="0" />
<edge from-layer="530" from-port="0" to-layer="531" to-port="1" />
<edge from-layer="531" from-port="2" to-layer="533" to-port="0" />
<edge from-layer="532" from-port="0" to-layer="533" to-port="1" />
<edge from-layer="533" from-port="2" to-layer="535" to-port="0" />
<edge from-layer="534" from-port="0" to-layer="535" to-port="1" />
<edge from-layer="535" from-port="2" to-layer="537" to-port="0" />
<edge from-layer="536" from-port="0" to-layer="537" to-port="1" />
<edge from-layer="537" from-port="2" to-layer="539" to-port="0" />
<edge from-layer="538" from-port="0" to-layer="539" to-port="1" />
<edge from-layer="539" from-port="2" to-layer="540" to-port="0" />
<edge from-layer="540" from-port="1" to-layer="542" to-port="0" />
<edge from-layer="541" from-port="0" to-layer="542" to-port="1" />
<edge from-layer="542" from-port="2" to-layer="544" to-port="0" />
<edge from-layer="543" from-port="0" to-layer="544" to-port="1" />
<edge from-layer="544" from-port="2" to-layer="545" to-port="1" />
<edge from-layer="545" from-port="2" to-layer="547" to-port="0" />
<edge from-layer="545" from-port="2" to-layer="585" to-port="0" />
<edge from-layer="546" from-port="0" to-layer="547" to-port="1" />
<edge from-layer="547" from-port="2" to-layer="549" to-port="0" />
<edge from-layer="548" from-port="0" to-layer="549" to-port="1" />
<edge from-layer="549" from-port="2" to-layer="551" to-port="0" />
<edge from-layer="550" from-port="0" to-layer="551" to-port="1" />
<edge from-layer="551" from-port="2" to-layer="553" to-port="0" />
<edge from-layer="551" from-port="2" to-layer="561" to-port="0" />
<edge from-layer="551" from-port="2" to-layer="569" to-port="0" />
<edge from-layer="552" from-port="0" to-layer="553" to-port="1" />
<edge from-layer="553" from-port="2" to-layer="555" to-port="0" />
<edge from-layer="554" from-port="0" to-layer="555" to-port="1" />
<edge from-layer="555" from-port="2" to-layer="557" to-port="0" />
<edge from-layer="556" from-port="0" to-layer="557" to-port="1" />
<edge from-layer="557" from-port="2" to-layer="559" to-port="0" />
<edge from-layer="558" from-port="0" to-layer="559" to-port="1" />
<edge from-layer="559" from-port="2" to-layer="576" to-port="0" />
<edge from-layer="560" from-port="0" to-layer="561" to-port="1" />
<edge from-layer="561" from-port="2" to-layer="563" to-port="0" />
<edge from-layer="562" from-port="0" to-layer="563" to-port="1" />
<edge from-layer="563" from-port="2" to-layer="565" to-port="0" />
<edge from-layer="564" from-port="0" to-layer="565" to-port="1" />
<edge from-layer="565" from-port="2" to-layer="567" to-port="0" />
<edge from-layer="566" from-port="0" to-layer="567" to-port="1" />
<edge from-layer="567" from-port="2" to-layer="576" to-port="1" />
<edge from-layer="568" from-port="0" to-layer="569" to-port="1" />
<edge from-layer="569" from-port="2" to-layer="571" to-port="0" />
<edge from-layer="570" from-port="0" to-layer="571" to-port="1" />
<edge from-layer="571" from-port="2" to-layer="573" to-port="0" />
<edge from-layer="572" from-port="0" to-layer="573" to-port="1" />
<edge from-layer="573" from-port="2" to-layer="575" to-port="0" />
<edge from-layer="574" from-port="0" to-layer="575" to-port="1" />
<edge from-layer="575" from-port="2" to-layer="576" to-port="2" />
<edge from-layer="576" from-port="5" to-layer="578" to-port="0" />
<edge from-layer="577" from-port="0" to-layer="578" to-port="1" />
<edge from-layer="578" from-port="2" to-layer="580" to-port="0" />
<edge from-layer="579" from-port="0" to-layer="580" to-port="1" />
<edge from-layer="580" from-port="2" to-layer="582" to-port="0" />
<edge from-layer="581" from-port="0" to-layer="582" to-port="1" />
<edge from-layer="582" from-port="2" to-layer="584" to-port="0" />
<edge from-layer="583" from-port="0" to-layer="584" to-port="1" />
<edge from-layer="584" from-port="2" to-layer="585" to-port="1" />
<edge from-layer="585" from-port="2" to-layer="587" to-port="0" />
<edge from-layer="585" from-port="2" to-layer="601" to-port="0" />
<edge from-layer="586" from-port="0" to-layer="587" to-port="1" />
<edge from-layer="587" from-port="2" to-layer="589" to-port="0" />
<edge from-layer="588" from-port="0" to-layer="589" to-port="1" />
<edge from-layer="589" from-port="2" to-layer="591" to-port="0" />
<edge from-layer="590" from-port="0" to-layer="591" to-port="1" />
<edge from-layer="591" from-port="2" to-layer="593" to-port="0" />
<edge from-layer="592" from-port="0" to-layer="593" to-port="1" />
<edge from-layer="593" from-port="2" to-layer="595" to-port="0" />
<edge from-layer="594" from-port="0" to-layer="595" to-port="1" />
<edge from-layer="595" from-port="2" to-layer="596" to-port="0" />
<edge from-layer="596" from-port="1" to-layer="598" to-port="0" />
<edge from-layer="597" from-port="0" to-layer="598" to-port="1" />
<edge from-layer="598" from-port="2" to-layer="600" to-port="0" />
<edge from-layer="599" from-port="0" to-layer="600" to-port="1" />
<edge from-layer="600" from-port="2" to-layer="601" to-port="1" />
<edge from-layer="601" from-port="2" to-layer="603" to-port="0" />
<edge from-layer="601" from-port="2" to-layer="641" to-port="0" />
<edge from-layer="602" from-port="0" to-layer="603" to-port="1" />
<edge from-layer="603" from-port="2" to-layer="605" to-port="0" />
<edge from-layer="604" from-port="0" to-layer="605" to-port="1" />
<edge from-layer="605" from-port="2" to-layer="607" to-port="0" />
<edge from-layer="606" from-port="0" to-layer="607" to-port="1" />
<edge from-layer="607" from-port="2" to-layer="609" to-port="0" />
<edge from-layer="607" from-port="2" to-layer="625" to-port="0" />
<edge from-layer="607" from-port="2" to-layer="617" to-port="0" />
<edge from-layer="608" from-port="0" to-layer="609" to-port="1" />
<edge from-layer="609" from-port="2" to-layer="611" to-port="0" />
<edge from-layer="610" from-port="0" to-layer="611" to-port="1" />
<edge from-layer="611" from-port="2" to-layer="613" to-port="0" />
<edge from-layer="612" from-port="0" to-layer="613" to-port="1" />
<edge from-layer="613" from-port="2" to-layer="615" to-port="0" />
<edge from-layer="614" from-port="0" to-layer="615" to-port="1" />
<edge from-layer="615" from-port="2" to-layer="632" to-port="0" />
<edge from-layer="616" from-port="0" to-layer="617" to-port="1" />
<edge from-layer="617" from-port="2" to-layer="619" to-port="0" />
<edge from-layer="618" from-port="0" to-layer="619" to-port="1" />
<edge from-layer="619" from-port="2" to-layer="621" to-port="0" />
<edge from-layer="620" from-port="0" to-layer="621" to-port="1" />
<edge from-layer="621" from-port="2" to-layer="623" to-port="0" />
<edge from-layer="622" from-port="0" to-layer="623" to-port="1" />
<edge from-layer="623" from-port="2" to-layer="632" to-port="1" />
<edge from-layer="624" from-port="0" to-layer="625" to-port="1" />
<edge from-layer="625" from-port="2" to-layer="627" to-port="0" />
<edge from-layer="626" from-port="0" to-layer="627" to-port="1" />
<edge from-layer="627" from-port="2" to-layer="629" to-port="0" />
<edge from-layer="628" from-port="0" to-layer="629" to-port="1" />
<edge from-layer="629" from-port="2" to-layer="631" to-port="0" />
<edge from-layer="630" from-port="0" to-layer="631" to-port="1" />
<edge from-layer="631" from-port="2" to-layer="632" to-port="2" />
<edge from-layer="632" from-port="5" to-layer="634" to-port="0" />
<edge from-layer="633" from-port="0" to-layer="634" to-port="1" />
<edge from-layer="634" from-port="2" to-layer="636" to-port="0" />
<edge from-layer="635" from-port="0" to-layer="636" to-port="1" />
<edge from-layer="636" from-port="2" to-layer="638" to-port="0" />
<edge from-layer="637" from-port="0" to-layer="638" to-port="1" />
<edge from-layer="638" from-port="2" to-layer="640" to-port="0" />
<edge from-layer="639" from-port="0" to-layer="640" to-port="1" />
<edge from-layer="640" from-port="2" to-layer="641" to-port="1" />
<edge from-layer="641" from-port="2" to-layer="643" to-port="0" />
<edge from-layer="641" from-port="2" to-layer="657" to-port="0" />
<edge from-layer="642" from-port="0" to-layer="643" to-port="1" />
<edge from-layer="643" from-port="2" to-layer="645" to-port="0" />
<edge from-layer="644" from-port="0" to-layer="645" to-port="1" />
<edge from-layer="645" from-port="2" to-layer="647" to-port="0" />
<edge from-layer="646" from-port="0" to-layer="647" to-port="1" />
<edge from-layer="647" from-port="2" to-layer="649" to-port="0" />
<edge from-layer="648" from-port="0" to-layer="649" to-port="1" />
<edge from-layer="649" from-port="2" to-layer="651" to-port="0" />
<edge from-layer="650" from-port="0" to-layer="651" to-port="1" />
<edge from-layer="651" from-port="2" to-layer="652" to-port="0" />
<edge from-layer="652" from-port="1" to-layer="654" to-port="0" />
<edge from-layer="653" from-port="0" to-layer="654" to-port="1" />
<edge from-layer="654" from-port="2" to-layer="656" to-port="0" />
<edge from-layer="655" from-port="0" to-layer="656" to-port="1" />
<edge from-layer="656" from-port="2" to-layer="657" to-port="1" />
<edge from-layer="657" from-port="2" to-layer="697" to-port="0" />
<edge from-layer="657" from-port="2" to-layer="659" to-port="0" />
<edge from-layer="658" from-port="0" to-layer="659" to-port="1" />
<edge from-layer="659" from-port="2" to-layer="661" to-port="0" />
<edge from-layer="660" from-port="0" to-layer="661" to-port="1" />
<edge from-layer="661" from-port="2" to-layer="663" to-port="0" />
<edge from-layer="662" from-port="0" to-layer="663" to-port="1" />
<edge from-layer="663" from-port="2" to-layer="673" to-port="0" />
<edge from-layer="663" from-port="2" to-layer="681" to-port="0" />
<edge from-layer="663" from-port="2" to-layer="665" to-port="0" />
<edge from-layer="664" from-port="0" to-layer="665" to-port="1" />
<edge from-layer="665" from-port="2" to-layer="667" to-port="0" />
<edge from-layer="666" from-port="0" to-layer="667" to-port="1" />
<edge from-layer="667" from-port="2" to-layer="669" to-port="0" />
<edge from-layer="668" from-port="0" to-layer="669" to-port="1" />
<edge from-layer="669" from-port="2" to-layer="671" to-port="0" />
<edge from-layer="670" from-port="0" to-layer="671" to-port="1" />
<edge from-layer="671" from-port="2" to-layer="688" to-port="0" />
<edge from-layer="672" from-port="0" to-layer="673" to-port="1" />
<edge from-layer="673" from-port="2" to-layer="675" to-port="0" />
<edge from-layer="674" from-port="0" to-layer="675" to-port="1" />
<edge from-layer="675" from-port="2" to-layer="677" to-port="0" />
<edge from-layer="676" from-port="0" to-layer="677" to-port="1" />
<edge from-layer="677" from-port="2" to-layer="679" to-port="0" />
<edge from-layer="678" from-port="0" to-layer="679" to-port="1" />
<edge from-layer="679" from-port="2" to-layer="688" to-port="1" />
<edge from-layer="680" from-port="0" to-layer="681" to-port="1" />
<edge from-layer="681" from-port="2" to-layer="683" to-port="0" />
<edge from-layer="682" from-port="0" to-layer="683" to-port="1" />
<edge from-layer="683" from-port="2" to-layer="685" to-port="0" />
<edge from-layer="684" from-port="0" to-layer="685" to-port="1" />
<edge from-layer="685" from-port="2" to-layer="687" to-port="0" />
<edge from-layer="686" from-port="0" to-layer="687" to-port="1" />
<edge from-layer="687" from-port="2" to-layer="688" to-port="2" />
<edge from-layer="688" from-port="5" to-layer="690" to-port="0" />
<edge from-layer="689" from-port="0" to-layer="690" to-port="1" />
<edge from-layer="690" from-port="2" to-layer="692" to-port="0" />
<edge from-layer="691" from-port="0" to-layer="692" to-port="1" />
<edge from-layer="692" from-port="2" to-layer="694" to-port="0" />
<edge from-layer="693" from-port="0" to-layer="694" to-port="1" />
<edge from-layer="694" from-port="2" to-layer="696" to-port="0" />
<edge from-layer="695" from-port="0" to-layer="696" to-port="1" />
<edge from-layer="696" from-port="2" to-layer="697" to-port="1" />
<edge from-layer="697" from-port="2" to-layer="713" to-port="0" />
<edge from-layer="697" from-port="2" to-layer="699" to-port="0" />
<edge from-layer="698" from-port="0" to-layer="699" to-port="1" />
<edge from-layer="699" from-port="2" to-layer="701" to-port="0" />
<edge from-layer="700" from-port="0" to-layer="701" to-port="1" />
<edge from-layer="701" from-port="2" to-layer="703" to-port="0" />
<edge from-layer="702" from-port="0" to-layer="703" to-port="1" />
<edge from-layer="703" from-port="2" to-layer="705" to-port="0" />
<edge from-layer="704" from-port="0" to-layer="705" to-port="1" />
<edge from-layer="705" from-port="2" to-layer="707" to-port="0" />
<edge from-layer="706" from-port="0" to-layer="707" to-port="1" />
<edge from-layer="707" from-port="2" to-layer="708" to-port="0" />
<edge from-layer="708" from-port="1" to-layer="710" to-port="0" />
<edge from-layer="709" from-port="0" to-layer="710" to-port="1" />
<edge from-layer="710" from-port="2" to-layer="712" to-port="0" />
<edge from-layer="711" from-port="0" to-layer="712" to-port="1" />
<edge from-layer="712" from-port="2" to-layer="713" to-port="1" />
<edge from-layer="713" from-port="2" to-layer="715" to-port="0" />
<edge from-layer="714" from-port="0" to-layer="715" to-port="1" />
<edge from-layer="715" from-port="2" to-layer="717" to-port="0" />
<edge from-layer="716" from-port="0" to-layer="717" to-port="1" />
<edge from-layer="717" from-port="2" to-layer="719" to-port="0" />
<edge from-layer="718" from-port="0" to-layer="719" to-port="1" />
<edge from-layer="719" from-port="2" to-layer="721" to-port="0" />
<edge from-layer="720" from-port="0" to-layer="721" to-port="1" />
<edge from-layer="721" from-port="2" to-layer="723" to-port="0" />
<edge from-layer="722" from-port="0" to-layer="723" to-port="1" />
<edge from-layer="723" from-port="2" to-layer="725" to-port="0" />
<edge from-layer="724" from-port="0" to-layer="725" to-port="1" />
<edge from-layer="725" from-port="2" to-layer="727" to-port="0" />
<edge from-layer="726" from-port="0" to-layer="727" to-port="1" />
<edge from-layer="727" from-port="2" to-layer="729" to-port="0" />
<edge from-layer="728" from-port="0" to-layer="729" to-port="1" />
<edge from-layer="729" from-port="2" to-layer="731" to-port="0" />
<edge from-layer="730" from-port="0" to-layer="731" to-port="1" />
<edge from-layer="731" from-port="2" to-layer="732" to-port="0" />
</edges>
<rt_info>
<Runtime_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
<conversion_parameters>
<framework value="pytorch" />
<is_python_object value="True" />
</conversion_parameters>
<optimum>
<optimum_intel_version value="1.23.0" />
<optimum_version value="1.25.1" />
<pytorch_version value="2.7.0" />
<transformers_version value="4.51.3" />
</optimum>
</rt_info>
</net>