tiny-open-clip-model / openvino_model_vision.xml
<?xml version="1.0"?>
<net name="Model0" version="11">
<layers>
<layer id="0" name="x" type="Parameter" version="opset1">
<data shape="?,?,?,?" element_type="f32" />
<output>
<port id="0" precision="FP32" names="x">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1" name="aten::view/Reshape" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 128" offset="0" size="512" />
<output>
<port id="0" precision="FP32" names="35">
<dim>1</dim>
<dim>1</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="2" name="ShapeOf_2611" type="ShapeOf" version="opset3">
<data output_type="i64" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="3" name="Constant_2718" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="512" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="4" name="Constant_2613" type="Const" version="opset1">
<data element_type="i64" shape="" offset="512" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="5" name="Gather_2614" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="12,14,28,30">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="6" name="Constant_2454" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="520" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="7" name="Constant_2456" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="520" size="8" />
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="8" name="prim::ListConstruct/Concat_1" type="Concat" version="opset1">
<data axis="0" />
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="38">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="9" name="aten::expand/Broadcast" type="Broadcast" version="opset3">
<data mode="bidirectional" />
<input>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="40,45">
<dim>-1</dim>
<dim>1</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="10" name="self.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 3, 2, 2" offset="528" size="6144" />
<output>
<port id="0" precision="FP32" names="self.conv1.weight">
<dim>128</dim>
<dim>3</dim>
<dim>2</dim>
<dim>2</dim>
</port>
</output>
</layer>
<layer id="11" name="__module.conv1/aten::_convolution/Convolution" type="Convolution" version="opset1">
<data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>3</dim>
<dim>2</dim>
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="81,x.3">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="12" name="Constant_2628" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="6672" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="13" name="aten::reshape/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="21,x.5">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="14" name="Constant_75" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="6696" size="24" />
<output>
<port id="0" precision="I64" names="25">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="15" name="aten::permute/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="26,x.7">
<dim>-1</dim>
<dim>-1</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="16" name="aten::cat/Concat" type="Concat" version="opset1">
<data axis="1" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="48,x.9">
<dim>-1</dim>
<dim>-1</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="17" name="Constant_2572" type="Const" version="opset1">
<data element_type="f32" shape="1, 17, 128" offset="6720" size="8704" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="18" name="aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="55">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="19" name="__module.ln_pre/aten::layer_norm/Constant" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="15424" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="20" name="__module.ln_pre/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="92,93,q_x.1,x.11">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
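		<!-- Residual attention block 0: ln_1 -> multi-head attention (2 heads, head dim 64) -> residual add -> ln_2 -> MLP (c_fc 128->512, GELU, c_proj 512->128) -> residual add. -->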
<layer id="21" name="__module.transformer.resblocks.0.ln_1/aten::layer_norm/Constant" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="15424" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="22" name="__module.transformer.resblocks.0.ln_1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="116,117,query.1,x.13">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="23" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Slice" type="Const" version="opset1">
<data element_type="f32" shape="128, 128" offset="15428" size="65536" />
<output>
<port id="0" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="24" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="25" name="Constant_2629" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="80964" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="26" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="27" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="80996" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="28" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="29" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Slice_2" type="Const" version="opset1">
<data element_type="f32" shape="128, 128" offset="81028" size="65536" />
<output>
<port id="0" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="30" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/MatMul_1" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="31" name="Constant_2630" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="80964" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="32" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="33" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="146564" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="34" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>64</dim>
<dim>17</dim>
</port>
</output>
</layer>
<layer id="35" name="Constant_2573" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1, 1" offset="146596" size="4" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="36" name="Multiply_2535" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>64</dim>
<dim>17</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>64</dim>
<dim>17</dim>
</port>
</output>
</layer>
<layer id="37" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Multiply" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>64</dim>
<dim>17</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>17</dim>
</port>
</output>
</layer>
<layer id="38" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Softmax" type="SoftMax" version="opset8">
<data axis="-1" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>17</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>17</dim>
</port>
</output>
</layer>
<layer id="39" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Slice_4" type="Const" version="opset1">
<data element_type="f32" shape="128, 128" offset="146600" size="65536" />
<output>
<port id="0" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="40" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/MatMul_3" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="41" name="Constant_2631" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="80964" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="42" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="43" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="44" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/MatMul_4" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>17</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="45" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="46" name="Constant_2632" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="212136" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="47" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="48" name="self.transformer.resblocks.0.attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 128" offset="212160" size="65536" />
<output>
<port id="0" precision="FP32" names="self.transformer.resblocks.0.attn.out_proj.weight">
<dim>128</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="49" name="__module.transformer.resblocks.0.attn/aten::_native_multi_head_attention/MatMul_5" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="124">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="50" name="__module.transformer.resblocks.0/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="126,x.17">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="51" name="__module.transformer.resblocks.0.ln_2/aten::layer_norm/Constant" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="15424" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="52" name="__module.transformer.resblocks.0.ln_2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="130,131,x.15">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="53" name="self.transformer.resblocks.0.mlp.c_fc.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 128" offset="277696" size="262144" />
<output>
<port id="0" precision="FP32" names="self.transformer.resblocks.0.mlp.c_fc.weight">
<dim>512</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="54" name="__module.transformer.resblocks.0.mlp.c_fc/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="55" name="Constant_2574" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 512" offset="539840" size="2048" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="56" name="__module.transformer.resblocks.0.mlp.c_fc/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="137">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="57" name="__module.transformer.resblocks.0.mlp.gelu/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="ERF" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="138">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="58" name="self.transformer.resblocks.0.mlp.c_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 512" offset="541888" size="262144" />
<output>
<port id="0" precision="FP32" names="self.transformer.resblocks.0.mlp.c_proj.weight">
<dim>128</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="59" name="__module.transformer.resblocks.0.mlp.c_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="60" name="Constant_2575" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 128" offset="804032" size="512" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="61" name="__module.transformer.resblocks.0.mlp.c_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="141">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="62" name="__module.transformer.resblocks.0/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="142,q_x">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
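		<!-- Residual attention block 1: same layout as block 0, with its own weights. -->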
<layer id="63" name="__module.transformer.resblocks.1.ln_1/aten::layer_norm/Constant" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="15424" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="64" name="__module.transformer.resblocks.1.ln_1/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="152,153,query,x.19">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="65" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Slice" type="Const" version="opset1">
<data element_type="f32" shape="128, 128" offset="804544" size="65536" />
<output>
<port id="0" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="66" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="67" name="Constant_2633" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="80964" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="68" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="69" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="80996" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="70" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Transpose" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="71" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Slice_2" type="Const" version="opset1">
<data element_type="f32" shape="128, 128" offset="870080" size="65536" />
<output>
<port id="0" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="72" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/MatMul_1" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="73" name="Constant_2634" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="80964" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="74" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="75" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="146564" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="76" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Transpose_1" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>64</dim>
<dim>17</dim>
</port>
</output>
</layer>
<layer id="77" name="Constant_2576" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 1, 1" offset="146596" size="4" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="78" name="Multiply_2537" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>64</dim>
<dim>17</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>64</dim>
<dim>17</dim>
</port>
</output>
</layer>
<layer id="79" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Multiply" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>64</dim>
<dim>17</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>17</dim>
</port>
</output>
</layer>
<layer id="80" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Softmax" type="SoftMax" version="opset8">
<data axis="-1" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>17</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>17</dim>
</port>
</output>
</layer>
<layer id="81" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Slice_4" type="Const" version="opset1">
<data element_type="f32" shape="128, 128" offset="935616" size="65536" />
<output>
<port id="0" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="82" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/MatMul_3" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="83" name="Constant_2635" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="80964" size="32" />
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="84" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="85" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Transpose_2" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="86" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/MatMul_4" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="false" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>17</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="87" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Transpose_3" type="Transpose" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>2</dim>
<dim>17</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="88" name="Constant_2636" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="212136" size="24" />
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="89" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>2</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="90" name="self.transformer.resblocks.1.attn.out_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 128" offset="1001152" size="65536" />
<output>
<port id="0" precision="FP32" names="self.transformer.resblocks.1.attn.out_proj.weight">
<dim>128</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="91" name="__module.transformer.resblocks.1.attn/aten::_native_multi_head_attention/MatMul_5" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="160">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="92" name="__module.transformer.resblocks.1/aten::add/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="162,x.23">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="93" name="__module.transformer.resblocks.1.ln_2/aten::layer_norm/Constant" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="15424" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="94" name="__module.transformer.resblocks.1.ln_2/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="166,167,x.21">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="95" name="self.transformer.resblocks.1.mlp.c_fc.weight" type="Const" version="opset1">
<data element_type="f32" shape="512, 128" offset="1066688" size="262144" />
<output>
<port id="0" precision="FP32" names="self.transformer.resblocks.1.mlp.c_fc.weight">
<dim>512</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="96" name="__module.transformer.resblocks.1.mlp.c_fc/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="97" name="Constant_2577" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 512" offset="1328832" size="2048" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="98" name="__module.transformer.resblocks.1.mlp.c_fc/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="173">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="99" name="__module.transformer.resblocks.1.mlp.gelu/aten::gelu/Gelu" type="Gelu" version="opset7">
<data approximation_mode="ERF" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="174">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="100" name="self.transformer.resblocks.1.mlp.c_proj.weight" type="Const" version="opset1">
<data element_type="f32" shape="128, 512" offset="1330880" size="262144" />
<output>
<port id="0" precision="FP32" names="self.transformer.resblocks.1.mlp.c_proj.weight">
<dim>128</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="101" name="__module.transformer.resblocks.1.mlp.c_proj/aten::linear/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>512</dim>
</port>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="102" name="Constant_2578" type="Const" version="opset1">
<data element_type="f32" shape="1, 1, 128" offset="1593024" size="512" />
<output>
<port id="0" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="103" name="__module.transformer.resblocks.1.mlp.c_proj/aten::linear/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="177">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="104" name="__module.transformer.resblocks.1/aten::add/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="178">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
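		<!-- Output head: ln_post, class-token pooling via Gather, and a 128 -> 8 projection producing "image_features". -->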
<layer id="105" name="__module.ln_post/aten::layer_norm/Constant" type="Const" version="opset1">
<data element_type="i32" shape="1" offset="15424" size="4" />
<output>
<port id="0" precision="I32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="106" name="__module.ln_post/aten::layer_norm/MVN" type="MVN" version="opset6">
<data eps="9.9999997473787516e-06" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="188,189,64,x,x.25">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="107" name="66" type="Const" version="opset1">
<data element_type="i64" shape="" offset="512" size="8" />
<output>
<port id="0" precision="I64" names="66" />
</output>
</layer>
<layer id="108" name="65" type="Const" version="opset1">
<data element_type="i64" shape="" offset="520" size="8" />
<output>
<port id="0" precision="I64" names="65" />
</output>
</layer>
<layer id="109" name="aten::select/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>17</dim>
<dim>128</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="FP32" names="67,pooled">
<dim>-1</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="110" name="Transpose_2533" type="Const" version="opset1">
<data element_type="f32" shape="8, 128" offset="1593536" size="4096" />
<output>
<port id="0" precision="FP32">
<dim>8</dim>
<dim>128</dim>
</port>
</output>
</layer>
<layer id="111" name="aten::matmul/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>128</dim>
</port>
<port id="1" precision="FP32">
<dim>8</dim>
<dim>128</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="image_features">
<dim>-1</dim>
<dim>8</dim>
</port>
</output>
</layer>
<layer id="112" name="Result_840" type="Result" version="opset1">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
</port>
</input>
</layer>
</layers>
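	<!-- Each edge connects an output port of one layer (from-layer/from-port) to an input port of another (to-layer/to-port), defining the dataflow of the graph above. -->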
<edges>
<edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
<edge from-layer="0" from-port="0" to-layer="11" to-port="0" />
<edge from-layer="1" from-port="0" to-layer="9" to-port="0" />
<edge from-layer="2" from-port="1" to-layer="5" to-port="0" />
<edge from-layer="3" from-port="0" to-layer="5" to-port="1" />
<edge from-layer="4" from-port="0" to-layer="5" to-port="2" />
<edge from-layer="5" from-port="3" to-layer="8" to-port="0" />
<edge from-layer="6" from-port="0" to-layer="8" to-port="1" />
<edge from-layer="7" from-port="0" to-layer="8" to-port="2" />
<edge from-layer="8" from-port="3" to-layer="9" to-port="1" />
<edge from-layer="9" from-port="2" to-layer="16" to-port="0" />
<edge from-layer="10" from-port="0" to-layer="11" to-port="1" />
<edge from-layer="11" from-port="2" to-layer="13" to-port="0" />
<edge from-layer="12" from-port="0" to-layer="13" to-port="1" />
<edge from-layer="13" from-port="2" to-layer="15" to-port="0" />
<edge from-layer="14" from-port="0" to-layer="15" to-port="1" />
<edge from-layer="15" from-port="2" to-layer="16" to-port="1" />
<edge from-layer="16" from-port="2" to-layer="18" to-port="0" />
<edge from-layer="17" from-port="0" to-layer="18" to-port="1" />
<edge from-layer="18" from-port="2" to-layer="20" to-port="0" />
<edge from-layer="19" from-port="0" to-layer="20" to-port="1" />
<edge from-layer="20" from-port="2" to-layer="22" to-port="0" />
<edge from-layer="20" from-port="2" to-layer="50" to-port="0" />
<edge from-layer="21" from-port="0" to-layer="22" to-port="1" />
<edge from-layer="22" from-port="2" to-layer="30" to-port="0" />
<edge from-layer="22" from-port="2" to-layer="40" to-port="0" />
<edge from-layer="22" from-port="2" to-layer="24" to-port="0" />
<edge from-layer="23" from-port="0" to-layer="24" to-port="1" />
<edge from-layer="24" from-port="2" to-layer="26" to-port="0" />
<edge from-layer="25" from-port="0" to-layer="26" to-port="1" />
<edge from-layer="26" from-port="2" to-layer="28" to-port="0" />
<edge from-layer="27" from-port="0" to-layer="28" to-port="1" />
<edge from-layer="27" from-port="0" to-layer="45" to-port="1" />
<edge from-layer="27" from-port="0" to-layer="43" to-port="1" />
<edge from-layer="28" from-port="2" to-layer="37" to-port="0" />
<edge from-layer="29" from-port="0" to-layer="30" to-port="1" />
<edge from-layer="30" from-port="2" to-layer="32" to-port="0" />
<edge from-layer="31" from-port="0" to-layer="32" to-port="1" />
<edge from-layer="32" from-port="2" to-layer="34" to-port="0" />
<edge from-layer="33" from-port="0" to-layer="34" to-port="1" />
<edge from-layer="34" from-port="2" to-layer="36" to-port="0" />
<edge from-layer="35" from-port="0" to-layer="36" to-port="1" />
<edge from-layer="36" from-port="2" to-layer="37" to-port="1" />
<edge from-layer="37" from-port="2" to-layer="38" to-port="0" />
<edge from-layer="38" from-port="1" to-layer="44" to-port="0" />
<edge from-layer="39" from-port="0" to-layer="40" to-port="1" />
<edge from-layer="40" from-port="2" to-layer="42" to-port="0" />
<edge from-layer="41" from-port="0" to-layer="42" to-port="1" />
<edge from-layer="42" from-port="2" to-layer="43" to-port="0" />
<edge from-layer="43" from-port="2" to-layer="44" to-port="1" />
<edge from-layer="44" from-port="2" to-layer="45" to-port="0" />
<edge from-layer="45" from-port="2" to-layer="47" to-port="0" />
<edge from-layer="46" from-port="0" to-layer="47" to-port="1" />
<edge from-layer="47" from-port="2" to-layer="49" to-port="0" />
<edge from-layer="48" from-port="0" to-layer="49" to-port="1" />
<edge from-layer="49" from-port="2" to-layer="50" to-port="1" />
<edge from-layer="50" from-port="2" to-layer="52" to-port="0" />
<edge from-layer="50" from-port="2" to-layer="62" to-port="0" />
<edge from-layer="51" from-port="0" to-layer="52" to-port="1" />
<edge from-layer="52" from-port="2" to-layer="54" to-port="0" />
<edge from-layer="53" from-port="0" to-layer="54" to-port="1" />
<edge from-layer="54" from-port="2" to-layer="56" to-port="0" />
<edge from-layer="55" from-port="0" to-layer="56" to-port="1" />
<edge from-layer="56" from-port="2" to-layer="57" to-port="0" />
<edge from-layer="57" from-port="1" to-layer="59" to-port="0" />
<edge from-layer="58" from-port="0" to-layer="59" to-port="1" />
<edge from-layer="59" from-port="2" to-layer="61" to-port="0" />
<edge from-layer="60" from-port="0" to-layer="61" to-port="1" />
<edge from-layer="61" from-port="2" to-layer="62" to-port="1" />
<edge from-layer="62" from-port="2" to-layer="64" to-port="0" />
<edge from-layer="62" from-port="2" to-layer="92" to-port="0" />
<edge from-layer="63" from-port="0" to-layer="64" to-port="1" />
<edge from-layer="64" from-port="2" to-layer="82" to-port="0" />
<edge from-layer="64" from-port="2" to-layer="72" to-port="0" />
<edge from-layer="64" from-port="2" to-layer="66" to-port="0" />
<edge from-layer="65" from-port="0" to-layer="66" to-port="1" />
<edge from-layer="66" from-port="2" to-layer="68" to-port="0" />
<edge from-layer="67" from-port="0" to-layer="68" to-port="1" />
<edge from-layer="68" from-port="2" to-layer="70" to-port="0" />
<edge from-layer="69" from-port="0" to-layer="87" to-port="1" />
<edge from-layer="69" from-port="0" to-layer="85" to-port="1" />
<edge from-layer="69" from-port="0" to-layer="70" to-port="1" />
<edge from-layer="70" from-port="2" to-layer="79" to-port="0" />
<edge from-layer="71" from-port="0" to-layer="72" to-port="1" />
<edge from-layer="72" from-port="2" to-layer="74" to-port="0" />
<edge from-layer="73" from-port="0" to-layer="74" to-port="1" />
<edge from-layer="74" from-port="2" to-layer="76" to-port="0" />
<edge from-layer="75" from-port="0" to-layer="76" to-port="1" />
<edge from-layer="76" from-port="2" to-layer="78" to-port="0" />
<edge from-layer="77" from-port="0" to-layer="78" to-port="1" />
<edge from-layer="78" from-port="2" to-layer="79" to-port="1" />
<edge from-layer="79" from-port="2" to-layer="80" to-port="0" />
<edge from-layer="80" from-port="1" to-layer="86" to-port="0" />
<edge from-layer="81" from-port="0" to-layer="82" to-port="1" />
<edge from-layer="82" from-port="2" to-layer="84" to-port="0" />
<edge from-layer="83" from-port="0" to-layer="84" to-port="1" />
<edge from-layer="84" from-port="2" to-layer="85" to-port="0" />
<edge from-layer="85" from-port="2" to-layer="86" to-port="1" />
<edge from-layer="86" from-port="2" to-layer="87" to-port="0" />
<edge from-layer="87" from-port="2" to-layer="89" to-port="0" />
<edge from-layer="88" from-port="0" to-layer="89" to-port="1" />
<edge from-layer="89" from-port="2" to-layer="91" to-port="0" />
<edge from-layer="90" from-port="0" to-layer="91" to-port="1" />
<edge from-layer="91" from-port="2" to-layer="92" to-port="1" />
<edge from-layer="92" from-port="2" to-layer="94" to-port="0" />
<edge from-layer="92" from-port="2" to-layer="104" to-port="0" />
<edge from-layer="93" from-port="0" to-layer="94" to-port="1" />
<edge from-layer="94" from-port="2" to-layer="96" to-port="0" />
<edge from-layer="95" from-port="0" to-layer="96" to-port="1" />
<edge from-layer="96" from-port="2" to-layer="98" to-port="0" />
<edge from-layer="97" from-port="0" to-layer="98" to-port="1" />
<edge from-layer="98" from-port="2" to-layer="99" to-port="0" />
<edge from-layer="99" from-port="1" to-layer="101" to-port="0" />
<edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
<edge from-layer="101" from-port="2" to-layer="103" to-port="0" />
<edge from-layer="102" from-port="0" to-layer="103" to-port="1" />
<edge from-layer="103" from-port="2" to-layer="104" to-port="1" />
<edge from-layer="104" from-port="2" to-layer="106" to-port="0" />
<edge from-layer="105" from-port="0" to-layer="106" to-port="1" />
<edge from-layer="106" from-port="2" to-layer="109" to-port="0" />
<edge from-layer="107" from-port="0" to-layer="109" to-port="1" />
<edge from-layer="108" from-port="0" to-layer="109" to-port="2" />
<edge from-layer="109" from-port="3" to-layer="111" to-port="0" />
<edge from-layer="110" from-port="0" to-layer="111" to-port="1" />
<edge from-layer="111" from-port="2" to-layer="112" to-port="0" />
</edges>
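	<!-- Runtime info recorded at export time: the OpenVINO version used for serialization and the conversion toolchain (PyTorch model converted via optimum-intel / open_clip). -->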
<rt_info>
<Runtime_version value="2024.2.0-15519-5c0f38f83f6-releases/2024/2" />
<conversion_parameters>
<framework value="pytorch" />
<is_python_object value="True" />
</conversion_parameters>
<optimum>
<open_clip_version value="2.26.1" />
<optimum_intel_version value="1.19.0.dev0+8f5f82e" />
<optimum_version value="1.22.0.dev0" />
<pytorch_version value="2.4.0" />
<transformers_version value="4.41.2" />
</optimum>
</rt_info>
</net>