niobures commited on
Commit
076305e
·
verified ·
1 Parent(s): d3b8d38

Transformer CNN Emotion Recognition (code, models)

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ models/ailia-models/code/03-01-01-01-01-01-01.wav filter=lfs diff=lfs merge=lfs -text
code/transformer-cnn-emotion-recognition.zip ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3fcb6f514212fa50eeb1dff06a8a8b8e47c8017d913de0e5f1ecb9f0857fdca5
3
+ size 20363810
colab/Parallel_is_All_You_Want.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
models/ailia-models/code/03-01-01-01-01-01-01.wav ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c467aad2ca80b089dbd4c2ba0dd16be508e4ce08230f46d36a208b859dc7fbac
3
+ size 375720
models/ailia-models/code/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2020 Ilia Zenkov
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
models/ailia-models/code/README.md ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # transformer-cnn-emotion-recognition
2
+
3
+ ## Input
4
+
5
+ audio file
6
+
7
+ ```
8
+ 03-01-01-01-01-01-01.wav
9
+ RAVDESS Dataset
10
+ https://smartlaboratory.org/ravdess/
11
+ ```
12
+
13
+ ## Output
14
+
15
+ emotion label
16
+
17
+ ```
18
+ Emotion: neutral
19
+ Confidence: 0.99993193
20
+ ```
21
+
22
+ ## Labels
23
+
24
+ ```
25
+ "surprised", "neutral", "calm", "happy",
26
+ "sad", "angry", "fearful", "disgust"
27
+ ```
28
+
29
+ ## Requirements
30
+ This model requires an additional module.
31
+
32
+ ```
33
+ pip3 install librosa
34
+ ```
35
+
36
+ ## Usage
37
+
38
+ ```bash
39
+ $ python3 transformer-cnn-emotion-recognition.py -i input.wav
40
+ ```
41
+
42
+ ## Reference
43
+
44
+ [Combining Spatial and Temporal Feature Representations of Speech Emotion by Parallelizing CNNs and Transformer-Encoders](https://github.com/IliaZenkov/transformer-cnn-emotion-recognition)
45
+
46
+ ## Framework
47
+
48
+ PyTorch 1.6.0
49
+
50
+ ## Model Format
51
+
52
+ ONNX opset = 11
53
+
54
+ ## Netron
55
+
56
+ [parallel_is_all_you_want_ep428.onnx.prototxt](https://netron.app/?url=https://storage.googleapis.com/ailia-models/parallel_is_all_you_want/parallel_is_all_you_want_ep428.onnx.prototxt)
models/ailia-models/code/std_mean.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d216d69500fa7e18652821e466e7c6e6a2dfcfc49a37495966e1f63eb191ceaf
3
+ size 90368
models/ailia-models/code/std_var.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0f61153aa67beca6da05315e5d8eda4ede63fb3095a469cfe85dbe70cb47f042
3
+ size 90368
models/ailia-models/code/transformer-cnn-emotion-recognition.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import time
2
+ import sys
3
+
4
+ import numpy as np
5
+ import librosa
6
+
7
+ import ailia
8
+
9
+ # import original modules
10
+ sys.path.append('../../util')
11
+ from arg_utils import get_base_parser, update_parser # noqa: E402
12
+ from model_utils import check_and_download_models # noqa: E402
13
+
14
+ # logger
15
+ from logging import getLogger # noqa: E402
16
+
17
+ logger = getLogger(__name__)
18
+
19
+ # ======================
20
+ # PARAMETERS
21
+ # ======================
22
+ # https://smartlaboratory.org/ravdess/
23
+ WAVE_PATH = "03-01-01-01-01-01-01.wav"
24
+
25
+ WEIGHT_PATH = "parallel_is_all_you_want_ep428.onnx"
26
+ MODEL_PATH = "parallel_is_all_you_want_ep428.onnx.prototxt"
27
+ REMOTE_PATH = "https://storage.googleapis.com/ailia-models/parallel_is_all_you_want/"
28
+
29
+ LABELS = [
30
+ "surprised", "neutral", "calm", "happy",
31
+ "sad", "angry", "fearful", "disgust",
32
+ ]
33
+
34
+ # ======================
35
+ # Arguemnt Parser Config
36
+ # ======================
37
+ parser = get_base_parser(
38
+ 'Parallel_is_All_You_Want.', WAVE_PATH, None, input_ftype='audio')
39
+ args = update_parser(parser)
40
+
41
+
42
+ # ======================
43
+ # Utils
44
+ # ======================
45
+
46
def get_waveforms(file, sample_rate=48000):
    """Load one audio file and return a fixed-length 3-second waveform.

    Reads up to 3 seconds of audio starting 0.5 s in (skipping leading
    silence) at the given sample rate, then right-pads with zeros to
    exactly ``sample_rate * 3`` samples so every clip is homogeneous.
    """
    # librosa.load also returns the sample rate; we already know it
    waveform, _ = librosa.load(file, duration=3, offset=0.5, sr=sample_rate)

    # fixed-size buffer: shorter clips are zero-padded at the end
    n_samples = int(sample_rate * 3)
    waveform_homo = np.zeros(n_samples)
    waveform_homo[:len(waveform)] = waveform

    return waveform_homo
58
+
59
+
60
def feature_mfcc(
        waveform,
        sample_rate,
        n_mfcc=40,
        fft=1024,
        winlen=512,
        window='hamming',
        # hop=256, # increases # of time steps; was not helpful
        mels=128):
    """Compute MFCCs for all STFT frames of *waveform*.

    With ``n_mfcc=40`` mel filterbanks this yields 40 coefficients per
    frame. Returns librosa's coefficient matrix (n_mfcc x n_frames).
    """
    return librosa.feature.mfcc(
        y=waveform,
        sr=sample_rate,
        n_mfcc=n_mfcc,
        n_fft=fft,
        win_length=winlen,
        window=window,
        # hop_length=hop,
        n_mels=mels,
        fmax=sample_rate / 2,
    )
84
+
85
+
86
def preprocess(x, mean_path='std_mean.npy', var_path='std_var.npy'):
    """Standardize features with precomputed training statistics.

    Flattens *x*, applies ``(x - mean) / sqrt(var)`` elementwise using
    the per-element statistics stored on disk, then restores the
    original ``(c, h, w)`` shape.

    Args:
        x: feature array of shape (c, h, w); the flattened length must
           match the stored statistics arrays.
        mean_path: .npy file with the per-element training mean
                   (default matches the original hard-coded path).
        var_path: .npy file with the per-element training variance.

    Returns:
        Standardized array with the same shape as *x*.
    """
    c, h, w = x.shape
    flat = x.reshape(-1)

    mean = np.load(mean_path)
    var = np.load(var_path)
    flat = (flat - mean) / np.sqrt(var)

    return flat.reshape((c, h, w))
97
+
98
+
99
+ # ======================
100
+ # Main function
101
+ # ======================
102
+
103
def predict(net, waveform):
    """Run the network on one waveform; return (label_index, confidence)."""
    sample_rate = 48000  # RAVDESS native sample rate

    # MFCC features, standardized, with a channel axis prepended
    mfcc = feature_mfcc(waveform, sample_rate)
    normalized = preprocess(np.expand_dims(mfcc, axis=0))

    # feedforward with a batch axis; the model emits (logits, softmax)
    batch = np.expand_dims(normalized, axis=0)
    _, output_softmax = net.predict([batch])
    probs = output_softmax[0]

    label = np.argmax(probs)
    return label, probs[label]
121
+
122
+
123
def recognize_from_audio(net):
    """Classify the emotion of each input audio file and log the result.

    Iterates over ``args.input``. In benchmark mode the inference is run
    ``args.benchmark_count`` times and per-run latencies are logged; the
    first (warm-up) run is excluded from the accumulated total.
    """
    for input_path in args.input:
        logger.info(f'input: {input_path}')

        # load audio
        waveform = get_waveforms(input_path)

        # inference
        logger.info('Start inference...')
        if args.benchmark:
            logger.info('BENCHMARK mode')
            total_time_estimation = 0
            for i in range(args.benchmark_count):
                start = int(round(time.time() * 1000))
                label, conf = predict(net, waveform)
                end = int(round(time.time() * 1000))
                estimation_time = (end - start)

                # Logging
                logger.info(f'\tailia processing estimation time {estimation_time} ms')
                if i != 0:
                    total_time_estimation = total_time_estimation + estimation_time

            # FIX: the accumulated total was computed but never reported;
            # log the average over the timed (non-warm-up) runs.
            if args.benchmark_count > 1:
                logger.info(
                    f'\taverage time estimation '
                    f'{total_time_estimation / (args.benchmark_count - 1)} ms')
        else:
            label, conf = predict(net, waveform)

        label = LABELS[label]
        logger.info("Emotion: %s" % label)
        logger.info("Confidence: %s" % conf)

    logger.info('Script finished successfully.')
154
+
155
+
156
def main():
    """Entry point: fetch model files if needed, then run recognition."""
    # model files check and download
    check_and_download_models(WEIGHT_PATH, MODEL_PATH, REMOTE_PATH)

    # build the inference network and classify the input audio
    net = ailia.Net(MODEL_PATH, WEIGHT_PATH, env_id=args.env_id)
    recognize_from_audio(net)


if __name__ == "__main__":
    main()
models/ailia-models/parallel_is_all_you_want_ep428.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ea26e8632d8db39edd80118981ea26e4ab7bad459b2e774d47d90116c2eac8e3
3
+ size 1018228
models/ailia-models/parallel_is_all_you_want_ep428.onnx.prototxt ADDED
@@ -0,0 +1,4341 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ir_version: 6
2
+ producer_name: "pytorch"
3
+ producer_version: "1.6"
4
+ model_version: 0
5
+ graph {
6
+ name: "torch-jit-export"
7
+ node {
8
+ input: "features"
9
+ input: "conv2Dblock1.0.weight"
10
+ input: "conv2Dblock1.0.bias"
11
+ output: "93"
12
+ name: "Conv_0"
13
+ op_type: "Conv"
14
+ attribute {
15
+ name: "dilations"
16
+ ints: 1
17
+ ints: 1
18
+ type: INTS
19
+ }
20
+ attribute {
21
+ name: "group"
22
+ i: 1
23
+ type: INT
24
+ }
25
+ attribute {
26
+ name: "kernel_shape"
27
+ ints: 3
28
+ ints: 3
29
+ type: INTS
30
+ }
31
+ attribute {
32
+ name: "pads"
33
+ ints: 1
34
+ ints: 1
35
+ ints: 1
36
+ ints: 1
37
+ type: INTS
38
+ }
39
+ attribute {
40
+ name: "strides"
41
+ ints: 1
42
+ ints: 1
43
+ type: INTS
44
+ }
45
+ }
46
+ node {
47
+ input: "93"
48
+ input: "conv2Dblock1.1.weight"
49
+ input: "conv2Dblock1.1.bias"
50
+ input: "conv2Dblock1.1.running_mean"
51
+ input: "conv2Dblock1.1.running_var"
52
+ output: "94"
53
+ name: "BatchNormalization_1"
54
+ op_type: "BatchNormalization"
55
+ attribute {
56
+ name: "epsilon"
57
+ f: 9.999999747378752e-06
58
+ type: FLOAT
59
+ }
60
+ attribute {
61
+ name: "momentum"
62
+ f: 0.8999999761581421
63
+ type: FLOAT
64
+ }
65
+ }
66
+ node {
67
+ input: "94"
68
+ output: "95"
69
+ name: "Relu_2"
70
+ op_type: "Relu"
71
+ }
72
+ node {
73
+ input: "95"
74
+ output: "96"
75
+ name: "MaxPool_3"
76
+ op_type: "MaxPool"
77
+ attribute {
78
+ name: "ceil_mode"
79
+ i: 0
80
+ type: INT
81
+ }
82
+ attribute {
83
+ name: "kernel_shape"
84
+ ints: 2
85
+ ints: 2
86
+ type: INTS
87
+ }
88
+ attribute {
89
+ name: "pads"
90
+ ints: 0
91
+ ints: 0
92
+ ints: 0
93
+ ints: 0
94
+ type: INTS
95
+ }
96
+ attribute {
97
+ name: "strides"
98
+ ints: 2
99
+ ints: 2
100
+ type: INTS
101
+ }
102
+ }
103
+ node {
104
+ input: "96"
105
+ input: "conv2Dblock1.5.weight"
106
+ input: "conv2Dblock1.5.bias"
107
+ output: "97"
108
+ name: "Conv_4"
109
+ op_type: "Conv"
110
+ attribute {
111
+ name: "dilations"
112
+ ints: 1
113
+ ints: 1
114
+ type: INTS
115
+ }
116
+ attribute {
117
+ name: "group"
118
+ i: 1
119
+ type: INT
120
+ }
121
+ attribute {
122
+ name: "kernel_shape"
123
+ ints: 3
124
+ ints: 3
125
+ type: INTS
126
+ }
127
+ attribute {
128
+ name: "pads"
129
+ ints: 1
130
+ ints: 1
131
+ ints: 1
132
+ ints: 1
133
+ type: INTS
134
+ }
135
+ attribute {
136
+ name: "strides"
137
+ ints: 1
138
+ ints: 1
139
+ type: INTS
140
+ }
141
+ }
142
+ node {
143
+ input: "97"
144
+ input: "conv2Dblock1.6.weight"
145
+ input: "conv2Dblock1.6.bias"
146
+ input: "conv2Dblock1.6.running_mean"
147
+ input: "conv2Dblock1.6.running_var"
148
+ output: "98"
149
+ name: "BatchNormalization_5"
150
+ op_type: "BatchNormalization"
151
+ attribute {
152
+ name: "epsilon"
153
+ f: 9.999999747378752e-06
154
+ type: FLOAT
155
+ }
156
+ attribute {
157
+ name: "momentum"
158
+ f: 0.8999999761581421
159
+ type: FLOAT
160
+ }
161
+ }
162
+ node {
163
+ input: "98"
164
+ output: "99"
165
+ name: "Relu_6"
166
+ op_type: "Relu"
167
+ }
168
+ node {
169
+ input: "99"
170
+ output: "100"
171
+ name: "MaxPool_7"
172
+ op_type: "MaxPool"
173
+ attribute {
174
+ name: "ceil_mode"
175
+ i: 0
176
+ type: INT
177
+ }
178
+ attribute {
179
+ name: "kernel_shape"
180
+ ints: 4
181
+ ints: 4
182
+ type: INTS
183
+ }
184
+ attribute {
185
+ name: "pads"
186
+ ints: 0
187
+ ints: 0
188
+ ints: 0
189
+ ints: 0
190
+ type: INTS
191
+ }
192
+ attribute {
193
+ name: "strides"
194
+ ints: 4
195
+ ints: 4
196
+ type: INTS
197
+ }
198
+ }
199
+ node {
200
+ input: "100"
201
+ input: "conv2Dblock1.10.weight"
202
+ input: "conv2Dblock1.10.bias"
203
+ output: "101"
204
+ name: "Conv_8"
205
+ op_type: "Conv"
206
+ attribute {
207
+ name: "dilations"
208
+ ints: 1
209
+ ints: 1
210
+ type: INTS
211
+ }
212
+ attribute {
213
+ name: "group"
214
+ i: 1
215
+ type: INT
216
+ }
217
+ attribute {
218
+ name: "kernel_shape"
219
+ ints: 3
220
+ ints: 3
221
+ type: INTS
222
+ }
223
+ attribute {
224
+ name: "pads"
225
+ ints: 1
226
+ ints: 1
227
+ ints: 1
228
+ ints: 1
229
+ type: INTS
230
+ }
231
+ attribute {
232
+ name: "strides"
233
+ ints: 1
234
+ ints: 1
235
+ type: INTS
236
+ }
237
+ }
238
+ node {
239
+ input: "101"
240
+ input: "conv2Dblock1.11.weight"
241
+ input: "conv2Dblock1.11.bias"
242
+ input: "conv2Dblock1.11.running_mean"
243
+ input: "conv2Dblock1.11.running_var"
244
+ output: "102"
245
+ name: "BatchNormalization_9"
246
+ op_type: "BatchNormalization"
247
+ attribute {
248
+ name: "epsilon"
249
+ f: 9.999999747378752e-06
250
+ type: FLOAT
251
+ }
252
+ attribute {
253
+ name: "momentum"
254
+ f: 0.8999999761581421
255
+ type: FLOAT
256
+ }
257
+ }
258
+ node {
259
+ input: "102"
260
+ output: "103"
261
+ name: "Relu_10"
262
+ op_type: "Relu"
263
+ }
264
+ node {
265
+ input: "103"
266
+ output: "104"
267
+ name: "MaxPool_11"
268
+ op_type: "MaxPool"
269
+ attribute {
270
+ name: "ceil_mode"
271
+ i: 0
272
+ type: INT
273
+ }
274
+ attribute {
275
+ name: "kernel_shape"
276
+ ints: 4
277
+ ints: 4
278
+ type: INTS
279
+ }
280
+ attribute {
281
+ name: "pads"
282
+ ints: 0
283
+ ints: 0
284
+ ints: 0
285
+ ints: 0
286
+ type: INTS
287
+ }
288
+ attribute {
289
+ name: "strides"
290
+ ints: 4
291
+ ints: 4
292
+ type: INTS
293
+ }
294
+ }
295
+ node {
296
+ input: "104"
297
+ output: "105"
298
+ name: "Flatten_12"
299
+ op_type: "Flatten"
300
+ attribute {
301
+ name: "axis"
302
+ i: 1
303
+ type: INT
304
+ }
305
+ }
306
+ node {
307
+ input: "features"
308
+ input: "conv2Dblock2.0.weight"
309
+ input: "conv2Dblock2.0.bias"
310
+ output: "106"
311
+ name: "Conv_13"
312
+ op_type: "Conv"
313
+ attribute {
314
+ name: "dilations"
315
+ ints: 1
316
+ ints: 1
317
+ type: INTS
318
+ }
319
+ attribute {
320
+ name: "group"
321
+ i: 1
322
+ type: INT
323
+ }
324
+ attribute {
325
+ name: "kernel_shape"
326
+ ints: 3
327
+ ints: 3
328
+ type: INTS
329
+ }
330
+ attribute {
331
+ name: "pads"
332
+ ints: 1
333
+ ints: 1
334
+ ints: 1
335
+ ints: 1
336
+ type: INTS
337
+ }
338
+ attribute {
339
+ name: "strides"
340
+ ints: 1
341
+ ints: 1
342
+ type: INTS
343
+ }
344
+ }
345
+ node {
346
+ input: "106"
347
+ input: "conv2Dblock2.1.weight"
348
+ input: "conv2Dblock2.1.bias"
349
+ input: "conv2Dblock2.1.running_mean"
350
+ input: "conv2Dblock2.1.running_var"
351
+ output: "107"
352
+ name: "BatchNormalization_14"
353
+ op_type: "BatchNormalization"
354
+ attribute {
355
+ name: "epsilon"
356
+ f: 9.999999747378752e-06
357
+ type: FLOAT
358
+ }
359
+ attribute {
360
+ name: "momentum"
361
+ f: 0.8999999761581421
362
+ type: FLOAT
363
+ }
364
+ }
365
+ node {
366
+ input: "107"
367
+ output: "108"
368
+ name: "Relu_15"
369
+ op_type: "Relu"
370
+ }
371
+ node {
372
+ input: "108"
373
+ output: "109"
374
+ name: "MaxPool_16"
375
+ op_type: "MaxPool"
376
+ attribute {
377
+ name: "ceil_mode"
378
+ i: 0
379
+ type: INT
380
+ }
381
+ attribute {
382
+ name: "kernel_shape"
383
+ ints: 2
384
+ ints: 2
385
+ type: INTS
386
+ }
387
+ attribute {
388
+ name: "pads"
389
+ ints: 0
390
+ ints: 0
391
+ ints: 0
392
+ ints: 0
393
+ type: INTS
394
+ }
395
+ attribute {
396
+ name: "strides"
397
+ ints: 2
398
+ ints: 2
399
+ type: INTS
400
+ }
401
+ }
402
+ node {
403
+ input: "109"
404
+ input: "conv2Dblock2.5.weight"
405
+ input: "conv2Dblock2.5.bias"
406
+ output: "110"
407
+ name: "Conv_17"
408
+ op_type: "Conv"
409
+ attribute {
410
+ name: "dilations"
411
+ ints: 1
412
+ ints: 1
413
+ type: INTS
414
+ }
415
+ attribute {
416
+ name: "group"
417
+ i: 1
418
+ type: INT
419
+ }
420
+ attribute {
421
+ name: "kernel_shape"
422
+ ints: 3
423
+ ints: 3
424
+ type: INTS
425
+ }
426
+ attribute {
427
+ name: "pads"
428
+ ints: 1
429
+ ints: 1
430
+ ints: 1
431
+ ints: 1
432
+ type: INTS
433
+ }
434
+ attribute {
435
+ name: "strides"
436
+ ints: 1
437
+ ints: 1
438
+ type: INTS
439
+ }
440
+ }
441
+ node {
442
+ input: "110"
443
+ input: "conv2Dblock2.6.weight"
444
+ input: "conv2Dblock2.6.bias"
445
+ input: "conv2Dblock2.6.running_mean"
446
+ input: "conv2Dblock2.6.running_var"
447
+ output: "111"
448
+ name: "BatchNormalization_18"
449
+ op_type: "BatchNormalization"
450
+ attribute {
451
+ name: "epsilon"
452
+ f: 9.999999747378752e-06
453
+ type: FLOAT
454
+ }
455
+ attribute {
456
+ name: "momentum"
457
+ f: 0.8999999761581421
458
+ type: FLOAT
459
+ }
460
+ }
461
+ node {
462
+ input: "111"
463
+ output: "112"
464
+ name: "Relu_19"
465
+ op_type: "Relu"
466
+ }
467
+ node {
468
+ input: "112"
469
+ output: "113"
470
+ name: "MaxPool_20"
471
+ op_type: "MaxPool"
472
+ attribute {
473
+ name: "ceil_mode"
474
+ i: 0
475
+ type: INT
476
+ }
477
+ attribute {
478
+ name: "kernel_shape"
479
+ ints: 4
480
+ ints: 4
481
+ type: INTS
482
+ }
483
+ attribute {
484
+ name: "pads"
485
+ ints: 0
486
+ ints: 0
487
+ ints: 0
488
+ ints: 0
489
+ type: INTS
490
+ }
491
+ attribute {
492
+ name: "strides"
493
+ ints: 4
494
+ ints: 4
495
+ type: INTS
496
+ }
497
+ }
498
+ node {
499
+ input: "113"
500
+ input: "conv2Dblock2.10.weight"
501
+ input: "conv2Dblock2.10.bias"
502
+ output: "114"
503
+ name: "Conv_21"
504
+ op_type: "Conv"
505
+ attribute {
506
+ name: "dilations"
507
+ ints: 1
508
+ ints: 1
509
+ type: INTS
510
+ }
511
+ attribute {
512
+ name: "group"
513
+ i: 1
514
+ type: INT
515
+ }
516
+ attribute {
517
+ name: "kernel_shape"
518
+ ints: 3
519
+ ints: 3
520
+ type: INTS
521
+ }
522
+ attribute {
523
+ name: "pads"
524
+ ints: 1
525
+ ints: 1
526
+ ints: 1
527
+ ints: 1
528
+ type: INTS
529
+ }
530
+ attribute {
531
+ name: "strides"
532
+ ints: 1
533
+ ints: 1
534
+ type: INTS
535
+ }
536
+ }
537
+ node {
538
+ input: "114"
539
+ input: "conv2Dblock2.11.weight"
540
+ input: "conv2Dblock2.11.bias"
541
+ input: "conv2Dblock2.11.running_mean"
542
+ input: "conv2Dblock2.11.running_var"
543
+ output: "115"
544
+ name: "BatchNormalization_22"
545
+ op_type: "BatchNormalization"
546
+ attribute {
547
+ name: "epsilon"
548
+ f: 9.999999747378752e-06
549
+ type: FLOAT
550
+ }
551
+ attribute {
552
+ name: "momentum"
553
+ f: 0.8999999761581421
554
+ type: FLOAT
555
+ }
556
+ }
557
+ node {
558
+ input: "115"
559
+ output: "116"
560
+ name: "Relu_23"
561
+ op_type: "Relu"
562
+ }
563
+ node {
564
+ input: "116"
565
+ output: "117"
566
+ name: "MaxPool_24"
567
+ op_type: "MaxPool"
568
+ attribute {
569
+ name: "ceil_mode"
570
+ i: 0
571
+ type: INT
572
+ }
573
+ attribute {
574
+ name: "kernel_shape"
575
+ ints: 4
576
+ ints: 4
577
+ type: INTS
578
+ }
579
+ attribute {
580
+ name: "pads"
581
+ ints: 0
582
+ ints: 0
583
+ ints: 0
584
+ ints: 0
585
+ type: INTS
586
+ }
587
+ attribute {
588
+ name: "strides"
589
+ ints: 4
590
+ ints: 4
591
+ type: INTS
592
+ }
593
+ }
594
+ node {
595
+ input: "117"
596
+ output: "118"
597
+ name: "Flatten_25"
598
+ op_type: "Flatten"
599
+ attribute {
600
+ name: "axis"
601
+ i: 1
602
+ type: INT
603
+ }
604
+ }
605
+ node {
606
+ input: "features"
607
+ output: "119"
608
+ name: "MaxPool_26"
609
+ op_type: "MaxPool"
610
+ attribute {
611
+ name: "ceil_mode"
612
+ i: 0
613
+ type: INT
614
+ }
615
+ attribute {
616
+ name: "kernel_shape"
617
+ ints: 1
618
+ ints: 4
619
+ type: INTS
620
+ }
621
+ attribute {
622
+ name: "pads"
623
+ ints: 0
624
+ ints: 0
625
+ ints: 0
626
+ ints: 0
627
+ type: INTS
628
+ }
629
+ attribute {
630
+ name: "strides"
631
+ ints: 1
632
+ ints: 4
633
+ type: INTS
634
+ }
635
+ }
636
+ node {
637
+ input: "119"
638
+ output: "120"
639
+ name: "Squeeze_27"
640
+ op_type: "Squeeze"
641
+ attribute {
642
+ name: "axes"
643
+ ints: 1
644
+ type: INTS
645
+ }
646
+ }
647
+ node {
648
+ input: "120"
649
+ output: "121"
650
+ name: "Transpose_28"
651
+ op_type: "Transpose"
652
+ attribute {
653
+ name: "perm"
654
+ ints: 2
655
+ ints: 0
656
+ ints: 1
657
+ type: INTS
658
+ }
659
+ }
660
+ node {
661
+ input: "121"
662
+ output: "122"
663
+ name: "Shape_29"
664
+ op_type: "Shape"
665
+ }
666
+ node {
667
+ output: "123"
668
+ name: "Constant_30"
669
+ op_type: "Constant"
670
+ attribute {
671
+ name: "value"
672
+ t {
673
+ data_type: 7
674
+ }
675
+ type: TENSOR
676
+ }
677
+ }
678
+ node {
679
+ input: "122"
680
+ input: "123"
681
+ output: "124"
682
+ name: "Gather_31"
683
+ op_type: "Gather"
684
+ attribute {
685
+ name: "axis"
686
+ i: 0
687
+ type: INT
688
+ }
689
+ }
690
+ node {
691
+ input: "121"
692
+ output: "125"
693
+ name: "Shape_32"
694
+ op_type: "Shape"
695
+ }
696
+ node {
697
+ output: "126"
698
+ name: "Constant_33"
699
+ op_type: "Constant"
700
+ attribute {
701
+ name: "value"
702
+ t {
703
+ data_type: 7
704
+ }
705
+ type: TENSOR
706
+ }
707
+ }
708
+ node {
709
+ input: "125"
710
+ input: "126"
711
+ output: "127"
712
+ name: "Gather_34"
713
+ op_type: "Gather"
714
+ attribute {
715
+ name: "axis"
716
+ i: 0
717
+ type: INT
718
+ }
719
+ }
720
+ node {
721
+ input: "121"
722
+ output: "128"
723
+ name: "Shape_35"
724
+ op_type: "Shape"
725
+ }
726
+ node {
727
+ output: "129"
728
+ name: "Constant_36"
729
+ op_type: "Constant"
730
+ attribute {
731
+ name: "value"
732
+ t {
733
+ data_type: 7
734
+ }
735
+ type: TENSOR
736
+ }
737
+ }
738
+ node {
739
+ input: "128"
740
+ input: "129"
741
+ output: "130"
742
+ name: "Gather_37"
743
+ op_type: "Gather"
744
+ attribute {
745
+ name: "axis"
746
+ i: 0
747
+ type: INT
748
+ }
749
+ }
750
+ node {
751
+ output: "131"
752
+ name: "Constant_38"
753
+ op_type: "Constant"
754
+ attribute {
755
+ name: "value"
756
+ t {
757
+ data_type: 7
758
+ }
759
+ type: TENSOR
760
+ }
761
+ }
762
+ node {
763
+ input: "130"
764
+ input: "131"
765
+ output: "132"
766
+ name: "Div_39"
767
+ op_type: "Div"
768
+ }
769
+ node {
770
+ input: "132"
771
+ output: "133"
772
+ name: "Cast_40"
773
+ op_type: "Cast"
774
+ attribute {
775
+ name: "to"
776
+ i: 7
777
+ type: INT
778
+ }
779
+ }
780
+ node {
781
+ input: "133"
782
+ output: "134"
783
+ name: "Cast_41"
784
+ op_type: "Cast"
785
+ attribute {
786
+ name: "to"
787
+ i: 7
788
+ type: INT
789
+ }
790
+ }
791
+ node {
792
+ input: "121"
793
+ input: "502"
794
+ output: "136"
795
+ name: "MatMul_42"
796
+ op_type: "MatMul"
797
+ }
798
+ node {
799
+ input: "136"
800
+ input: "transformer_encoder.layers.0.self_attn.in_proj_bias"
801
+ output: "137"
802
+ name: "Add_43"
803
+ op_type: "Add"
804
+ }
805
+ node {
806
+ input: "137"
807
+ output: "138"
808
+ output: "139"
809
+ output: "140"
810
+ name: "Split_44"
811
+ op_type: "Split"
812
+ attribute {
813
+ name: "axis"
814
+ i: -1
815
+ type: INT
816
+ }
817
+ attribute {
818
+ name: "split"
819
+ ints: 40
820
+ ints: 40
821
+ ints: 40
822
+ type: INTS
823
+ }
824
+ }
825
+ node {
826
+ output: "141"
827
+ name: "Constant_45"
828
+ op_type: "Constant"
829
+ attribute {
830
+ name: "value"
831
+ t {
832
+ data_type: 1
833
+ }
834
+ type: TENSOR
835
+ }
836
+ }
837
+ node {
838
+ input: "138"
839
+ input: "141"
840
+ output: "142"
841
+ name: "Mul_46"
842
+ op_type: "Mul"
843
+ }
844
+ node {
845
+ output: "143"
846
+ name: "Constant_47"
847
+ op_type: "Constant"
848
+ attribute {
849
+ name: "value"
850
+ t {
851
+ data_type: 7
852
+ }
853
+ type: TENSOR
854
+ }
855
+ }
856
+ node {
857
+ input: "127"
858
+ input: "143"
859
+ output: "144"
860
+ name: "Mul_48"
861
+ op_type: "Mul"
862
+ }
863
+ node {
864
+ input: "124"
865
+ output: "145"
866
+ name: "Unsqueeze_49"
867
+ op_type: "Unsqueeze"
868
+ attribute {
869
+ name: "axes"
870
+ ints: 0
871
+ type: INTS
872
+ }
873
+ }
874
+ node {
875
+ input: "144"
876
+ output: "146"
877
+ name: "Unsqueeze_50"
878
+ op_type: "Unsqueeze"
879
+ attribute {
880
+ name: "axes"
881
+ ints: 0
882
+ type: INTS
883
+ }
884
+ }
885
+ node {
886
+ input: "134"
887
+ output: "147"
888
+ name: "Unsqueeze_51"
889
+ op_type: "Unsqueeze"
890
+ attribute {
891
+ name: "axes"
892
+ ints: 0
893
+ type: INTS
894
+ }
895
+ }
896
+ node {
897
+ input: "145"
898
+ input: "146"
899
+ input: "147"
900
+ output: "148"
901
+ name: "Concat_52"
902
+ op_type: "Concat"
903
+ attribute {
904
+ name: "axis"
905
+ i: 0
906
+ type: INT
907
+ }
908
+ }
909
+ node {
910
+ input: "142"
911
+ input: "148"
912
+ output: "149"
913
+ name: "Reshape_53"
914
+ op_type: "Reshape"
915
+ }
916
+ node {
917
+ input: "149"
918
+ output: "150"
919
+ name: "Transpose_54"
920
+ op_type: "Transpose"
921
+ attribute {
922
+ name: "perm"
923
+ ints: 1
924
+ ints: 0
925
+ ints: 2
926
+ type: INTS
927
+ }
928
+ }
929
+ node {
930
+ output: "151"
931
+ name: "Constant_55"
932
+ op_type: "Constant"
933
+ attribute {
934
+ name: "value"
935
+ t {
936
+ data_type: 7
937
+ }
938
+ type: TENSOR
939
+ }
940
+ }
941
+ node {
942
+ input: "127"
943
+ input: "151"
944
+ output: "152"
945
+ name: "Mul_56"
946
+ op_type: "Mul"
947
+ }
948
+ node {
949
+ input: "152"
950
+ output: "155"
951
+ name: "Unsqueeze_57"
952
+ op_type: "Unsqueeze"
953
+ attribute {
954
+ name: "axes"
955
+ ints: 0
956
+ type: INTS
957
+ }
958
+ }
959
+ node {
960
+ input: "134"
961
+ output: "156"
962
+ name: "Unsqueeze_58"
963
+ op_type: "Unsqueeze"
964
+ attribute {
965
+ name: "axes"
966
+ ints: 0
967
+ type: INTS
968
+ }
969
+ }
970
+ node {
971
+ input: "503"
972
+ input: "155"
973
+ input: "156"
974
+ output: "157"
975
+ name: "Concat_59"
976
+ op_type: "Concat"
977
+ attribute {
978
+ name: "axis"
979
+ i: 0
980
+ type: INT
981
+ }
982
+ }
983
+ node {
984
+ input: "139"
985
+ input: "157"
986
+ output: "158"
987
+ name: "Reshape_60"
988
+ op_type: "Reshape"
989
+ }
990
+ node {
991
+ output: "159"
992
+ name: "Constant_61"
993
+ op_type: "Constant"
994
+ attribute {
995
+ name: "value"
996
+ t {
997
+ data_type: 7
998
+ }
999
+ type: TENSOR
1000
+ }
1001
+ }
1002
+ node {
1003
+ input: "127"
1004
+ input: "159"
1005
+ output: "160"
1006
+ name: "Mul_62"
1007
+ op_type: "Mul"
1008
+ }
1009
+ node {
1010
+ input: "160"
1011
+ output: "163"
1012
+ name: "Unsqueeze_63"
1013
+ op_type: "Unsqueeze"
1014
+ attribute {
1015
+ name: "axes"
1016
+ ints: 0
1017
+ type: INTS
1018
+ }
1019
+ }
1020
+ node {
1021
+ input: "134"
1022
+ output: "164"
1023
+ name: "Unsqueeze_64"
1024
+ op_type: "Unsqueeze"
1025
+ attribute {
1026
+ name: "axes"
1027
+ ints: 0
1028
+ type: INTS
1029
+ }
1030
+ }
1031
+ node {
1032
+ input: "504"
1033
+ input: "163"
1034
+ input: "164"
1035
+ output: "165"
1036
+ name: "Concat_65"
1037
+ op_type: "Concat"
1038
+ attribute {
1039
+ name: "axis"
1040
+ i: 0
1041
+ type: INT
1042
+ }
1043
+ }
1044
+ node {
1045
+ input: "140"
1046
+ input: "165"
1047
+ output: "166"
1048
+ name: "Reshape_66"
1049
+ op_type: "Reshape"
1050
+ }
1051
+ node {
1052
+ input: "166"
1053
+ output: "167"
1054
+ name: "Transpose_67"
1055
+ op_type: "Transpose"
1056
+ attribute {
1057
+ name: "perm"
1058
+ ints: 1
1059
+ ints: 0
1060
+ ints: 2
1061
+ type: INTS
1062
+ }
1063
+ }
1064
+ node {
1065
+ input: "158"
1066
+ output: "168"
1067
+ name: "Transpose_68"
1068
+ op_type: "Transpose"
1069
+ attribute {
1070
+ name: "perm"
1071
+ ints: 1
1072
+ ints: 2
1073
+ ints: 0
1074
+ type: INTS
1075
+ }
1076
+ }
1077
+ node {
1078
+ input: "150"
1079
+ input: "168"
1080
+ output: "169"
1081
+ name: "MatMul_69"
1082
+ op_type: "MatMul"
1083
+ }
1084
+ node {
1085
+ input: "169"
1086
+ output: "170"
1087
+ name: "Softmax_70"
1088
+ op_type: "Softmax"
1089
+ attribute {
1090
+ name: "axis"
1091
+ i: 2
1092
+ type: INT
1093
+ }
1094
+ }
1095
+ node {
1096
+ input: "170"
1097
+ input: "167"
1098
+ output: "171"
1099
+ name: "MatMul_71"
1100
+ op_type: "MatMul"
1101
+ }
1102
+ node {
1103
+ input: "171"
1104
+ output: "172"
1105
+ name: "Transpose_72"
1106
+ op_type: "Transpose"
1107
+ attribute {
1108
+ name: "perm"
1109
+ ints: 1
1110
+ ints: 0
1111
+ ints: 2
1112
+ type: INTS
1113
+ }
1114
+ }
1115
+ node {
1116
+ input: "124"
1117
+ output: "173"
1118
+ name: "Unsqueeze_73"
1119
+ op_type: "Unsqueeze"
1120
+ attribute {
1121
+ name: "axes"
1122
+ ints: 0
1123
+ type: INTS
1124
+ }
1125
+ }
1126
+ node {
1127
+ input: "127"
1128
+ output: "174"
1129
+ name: "Unsqueeze_74"
1130
+ op_type: "Unsqueeze"
1131
+ attribute {
1132
+ name: "axes"
1133
+ ints: 0
1134
+ type: INTS
1135
+ }
1136
+ }
1137
+ node {
1138
+ input: "130"
1139
+ output: "175"
1140
+ name: "Unsqueeze_75"
1141
+ op_type: "Unsqueeze"
1142
+ attribute {
1143
+ name: "axes"
1144
+ ints: 0
1145
+ type: INTS
1146
+ }
1147
+ }
1148
+ node {
1149
+ input: "173"
1150
+ input: "174"
1151
+ input: "175"
1152
+ output: "176"
1153
+ name: "Concat_76"
1154
+ op_type: "Concat"
1155
+ attribute {
1156
+ name: "axis"
1157
+ i: 0
1158
+ type: INT
1159
+ }
1160
+ }
1161
+ node {
1162
+ input: "172"
1163
+ input: "176"
1164
+ output: "177"
1165
+ name: "Reshape_77"
1166
+ op_type: "Reshape"
1167
+ }
1168
+ node {
1169
+ input: "177"
1170
+ input: "505"
1171
+ output: "179"
1172
+ name: "MatMul_78"
1173
+ op_type: "MatMul"
1174
+ }
1175
+ node {
1176
+ input: "179"
1177
+ input: "transformer_encoder.layers.0.self_attn.out_proj.bias"
1178
+ output: "180"
1179
+ name: "Add_79"
1180
+ op_type: "Add"
1181
+ }
1182
+ node {
1183
+ input: "121"
1184
+ input: "180"
1185
+ output: "181"
1186
+ name: "Add_80"
1187
+ op_type: "Add"
1188
+ }
1189
+ node {
1190
+ input: "181"
1191
+ output: "183"
1192
+ name: "ReduceMean_81"
1193
+ op_type: "ReduceMean"
1194
+ attribute {
1195
+ name: "axes"
1196
+ ints: -1
1197
+ type: INTS
1198
+ }
1199
+ }
1200
+ node {
1201
+ input: "181"
1202
+ input: "183"
1203
+ output: "184"
1204
+ name: "Sub_82"
1205
+ op_type: "Sub"
1206
+ }
1207
+ node {
1208
+ input: "184"
1209
+ output: "185"
1210
+ name: "Cast_83"
1211
+ op_type: "Cast"
1212
+ attribute {
1213
+ name: "to"
1214
+ i: 1
1215
+ type: INT
1216
+ }
1217
+ }
1218
+ node {
1219
+ input: "185"
1220
+ input: "506"
1221
+ output: "187"
1222
+ name: "Pow_84"
1223
+ op_type: "Pow"
1224
+ }
1225
+ node {
1226
+ input: "187"
1227
+ output: "188"
1228
+ name: "ReduceMean_85"
1229
+ op_type: "ReduceMean"
1230
+ attribute {
1231
+ name: "axes"
1232
+ ints: -1
1233
+ type: INTS
1234
+ }
1235
+ }
1236
+ node {
1237
+ output: "189"
1238
+ name: "Constant_86"
1239
+ op_type: "Constant"
1240
+ attribute {
1241
+ name: "value"
1242
+ t {
1243
+ data_type: 1
1244
+ }
1245
+ type: TENSOR
1246
+ }
1247
+ }
1248
+ node {
1249
+ input: "188"
1250
+ input: "189"
1251
+ output: "190"
1252
+ name: "Add_87"
1253
+ op_type: "Add"
1254
+ }
1255
+ node {
1256
+ input: "190"
1257
+ output: "191"
1258
+ name: "Sqrt_88"
1259
+ op_type: "Sqrt"
1260
+ }
1261
+ node {
1262
+ input: "184"
1263
+ input: "191"
1264
+ output: "192"
1265
+ name: "Div_89"
1266
+ op_type: "Div"
1267
+ }
1268
+ node {
1269
+ input: "192"
1270
+ input: "transformer_encoder.layers.0.norm1.weight"
1271
+ output: "193"
1272
+ name: "Mul_90"
1273
+ op_type: "Mul"
1274
+ }
1275
+ node {
1276
+ input: "193"
1277
+ input: "transformer_encoder.layers.0.norm1.bias"
1278
+ output: "194"
1279
+ name: "Add_91"
1280
+ op_type: "Add"
1281
+ }
1282
+ node {
1283
+ input: "194"
1284
+ input: "507"
1285
+ output: "196"
1286
+ name: "MatMul_92"
1287
+ op_type: "MatMul"
1288
+ }
1289
+ node {
1290
+ input: "196"
1291
+ input: "transformer_encoder.layers.0.linear1.bias"
1292
+ output: "197"
1293
+ name: "Add_93"
1294
+ op_type: "Add"
1295
+ }
1296
+ node {
1297
+ input: "197"
1298
+ output: "198"
1299
+ name: "Relu_94"
1300
+ op_type: "Relu"
1301
+ }
1302
+ node {
1303
+ input: "198"
1304
+ input: "508"
1305
+ output: "200"
1306
+ name: "MatMul_95"
1307
+ op_type: "MatMul"
1308
+ }
1309
+ node {
1310
+ input: "200"
1311
+ input: "transformer_encoder.layers.0.linear2.bias"
1312
+ output: "201"
1313
+ name: "Add_96"
1314
+ op_type: "Add"
1315
+ }
1316
+ node {
1317
+ input: "194"
1318
+ input: "201"
1319
+ output: "202"
1320
+ name: "Add_97"
1321
+ op_type: "Add"
1322
+ }
1323
+ node {
1324
+ input: "202"
1325
+ output: "204"
1326
+ name: "ReduceMean_98"
1327
+ op_type: "ReduceMean"
1328
+ attribute {
1329
+ name: "axes"
1330
+ ints: -1
1331
+ type: INTS
1332
+ }
1333
+ }
1334
+ node {
1335
+ input: "202"
1336
+ input: "204"
1337
+ output: "205"
1338
+ name: "Sub_99"
1339
+ op_type: "Sub"
1340
+ }
1341
+ node {
1342
+ input: "205"
1343
+ output: "206"
1344
+ name: "Cast_100"
1345
+ op_type: "Cast"
1346
+ attribute {
1347
+ name: "to"
1348
+ i: 1
1349
+ type: INT
1350
+ }
1351
+ }
1352
+ node {
1353
+ input: "206"
1354
+ input: "509"
1355
+ output: "208"
1356
+ name: "Pow_101"
1357
+ op_type: "Pow"
1358
+ }
1359
+ node {
1360
+ input: "208"
1361
+ output: "209"
1362
+ name: "ReduceMean_102"
1363
+ op_type: "ReduceMean"
1364
+ attribute {
1365
+ name: "axes"
1366
+ ints: -1
1367
+ type: INTS
1368
+ }
1369
+ }
1370
+ node {
1371
+ output: "210"
1372
+ name: "Constant_103"
1373
+ op_type: "Constant"
1374
+ attribute {
1375
+ name: "value"
1376
+ t {
1377
+ data_type: 1
1378
+ }
1379
+ type: TENSOR
1380
+ }
1381
+ }
1382
+ node {
1383
+ input: "209"
1384
+ input: "210"
1385
+ output: "211"
1386
+ name: "Add_104"
1387
+ op_type: "Add"
1388
+ }
1389
+ node {
1390
+ input: "211"
1391
+ output: "212"
1392
+ name: "Sqrt_105"
1393
+ op_type: "Sqrt"
1394
+ }
1395
+ node {
1396
+ input: "205"
1397
+ input: "212"
1398
+ output: "213"
1399
+ name: "Div_106"
1400
+ op_type: "Div"
1401
+ }
1402
+ node {
1403
+ input: "213"
1404
+ input: "transformer_encoder.layers.0.norm2.weight"
1405
+ output: "214"
1406
+ name: "Mul_107"
1407
+ op_type: "Mul"
1408
+ }
1409
+ node {
1410
+ input: "214"
1411
+ input: "transformer_encoder.layers.0.norm2.bias"
1412
+ output: "215"
1413
+ name: "Add_108"
1414
+ op_type: "Add"
1415
+ }
1416
+ node {
1417
+ input: "215"
1418
+ output: "216"
1419
+ name: "Shape_109"
1420
+ op_type: "Shape"
1421
+ }
1422
+ node {
1423
+ output: "217"
1424
+ name: "Constant_110"
1425
+ op_type: "Constant"
1426
+ attribute {
1427
+ name: "value"
1428
+ t {
1429
+ data_type: 7
1430
+ }
1431
+ type: TENSOR
1432
+ }
1433
+ }
1434
+ node {
1435
+ input: "216"
1436
+ input: "217"
1437
+ output: "218"
1438
+ name: "Gather_111"
1439
+ op_type: "Gather"
1440
+ attribute {
1441
+ name: "axis"
1442
+ i: 0
1443
+ type: INT
1444
+ }
1445
+ }
1446
+ node {
1447
+ input: "215"
1448
+ output: "219"
1449
+ name: "Shape_112"
1450
+ op_type: "Shape"
1451
+ }
1452
+ node {
1453
+ output: "220"
1454
+ name: "Constant_113"
1455
+ op_type: "Constant"
1456
+ attribute {
1457
+ name: "value"
1458
+ t {
1459
+ data_type: 7
1460
+ }
1461
+ type: TENSOR
1462
+ }
1463
+ }
1464
+ node {
1465
+ input: "219"
1466
+ input: "220"
1467
+ output: "221"
1468
+ name: "Gather_114"
1469
+ op_type: "Gather"
1470
+ attribute {
1471
+ name: "axis"
1472
+ i: 0
1473
+ type: INT
1474
+ }
1475
+ }
1476
+ node {
1477
+ input: "215"
1478
+ output: "222"
1479
+ name: "Shape_115"
1480
+ op_type: "Shape"
1481
+ }
1482
+ node {
1483
+ output: "223"
1484
+ name: "Constant_116"
1485
+ op_type: "Constant"
1486
+ attribute {
1487
+ name: "value"
1488
+ t {
1489
+ data_type: 7
1490
+ }
1491
+ type: TENSOR
1492
+ }
1493
+ }
1494
+ node {
1495
+ input: "222"
1496
+ input: "223"
1497
+ output: "224"
1498
+ name: "Gather_117"
1499
+ op_type: "Gather"
1500
+ attribute {
1501
+ name: "axis"
1502
+ i: 0
1503
+ type: INT
1504
+ }
1505
+ }
1506
+ node {
1507
+ output: "225"
1508
+ name: "Constant_118"
1509
+ op_type: "Constant"
1510
+ attribute {
1511
+ name: "value"
1512
+ t {
1513
+ data_type: 7
1514
+ }
1515
+ type: TENSOR
1516
+ }
1517
+ }
1518
+ node {
1519
+ input: "224"
1520
+ input: "225"
1521
+ output: "226"
1522
+ name: "Div_119"
1523
+ op_type: "Div"
1524
+ }
1525
+ node {
1526
+ input: "226"
1527
+ output: "227"
1528
+ name: "Cast_120"
1529
+ op_type: "Cast"
1530
+ attribute {
1531
+ name: "to"
1532
+ i: 7
1533
+ type: INT
1534
+ }
1535
+ }
1536
+ node {
1537
+ input: "227"
1538
+ output: "228"
1539
+ name: "Cast_121"
1540
+ op_type: "Cast"
1541
+ attribute {
1542
+ name: "to"
1543
+ i: 7
1544
+ type: INT
1545
+ }
1546
+ }
1547
+ node {
1548
+ input: "215"
1549
+ input: "510"
1550
+ output: "230"
1551
+ name: "MatMul_122"
1552
+ op_type: "MatMul"
1553
+ }
1554
+ node {
1555
+ input: "230"
1556
+ input: "transformer_encoder.layers.1.self_attn.in_proj_bias"
1557
+ output: "231"
1558
+ name: "Add_123"
1559
+ op_type: "Add"
1560
+ }
1561
+ node {
1562
+ input: "231"
1563
+ output: "232"
1564
+ output: "233"
1565
+ output: "234"
1566
+ name: "Split_124"
1567
+ op_type: "Split"
1568
+ attribute {
1569
+ name: "axis"
1570
+ i: -1
1571
+ type: INT
1572
+ }
1573
+ attribute {
1574
+ name: "split"
1575
+ ints: 40
1576
+ ints: 40
1577
+ ints: 40
1578
+ type: INTS
1579
+ }
1580
+ }
1581
+ node {
1582
+ output: "235"
1583
+ name: "Constant_125"
1584
+ op_type: "Constant"
1585
+ attribute {
1586
+ name: "value"
1587
+ t {
1588
+ data_type: 1
1589
+ }
1590
+ type: TENSOR
1591
+ }
1592
+ }
1593
+ node {
1594
+ input: "232"
1595
+ input: "235"
1596
+ output: "236"
1597
+ name: "Mul_126"
1598
+ op_type: "Mul"
1599
+ }
1600
+ node {
1601
+ output: "237"
1602
+ name: "Constant_127"
1603
+ op_type: "Constant"
1604
+ attribute {
1605
+ name: "value"
1606
+ t {
1607
+ data_type: 7
1608
+ }
1609
+ type: TENSOR
1610
+ }
1611
+ }
1612
+ node {
1613
+ input: "221"
1614
+ input: "237"
1615
+ output: "238"
1616
+ name: "Mul_128"
1617
+ op_type: "Mul"
1618
+ }
1619
+ node {
1620
+ input: "218"
1621
+ output: "239"
1622
+ name: "Unsqueeze_129"
1623
+ op_type: "Unsqueeze"
1624
+ attribute {
1625
+ name: "axes"
1626
+ ints: 0
1627
+ type: INTS
1628
+ }
1629
+ }
1630
+ node {
1631
+ input: "238"
1632
+ output: "240"
1633
+ name: "Unsqueeze_130"
1634
+ op_type: "Unsqueeze"
1635
+ attribute {
1636
+ name: "axes"
1637
+ ints: 0
1638
+ type: INTS
1639
+ }
1640
+ }
1641
+ node {
1642
+ input: "228"
1643
+ output: "241"
1644
+ name: "Unsqueeze_131"
1645
+ op_type: "Unsqueeze"
1646
+ attribute {
1647
+ name: "axes"
1648
+ ints: 0
1649
+ type: INTS
1650
+ }
1651
+ }
1652
+ node {
1653
+ input: "239"
1654
+ input: "240"
1655
+ input: "241"
1656
+ output: "242"
1657
+ name: "Concat_132"
1658
+ op_type: "Concat"
1659
+ attribute {
1660
+ name: "axis"
1661
+ i: 0
1662
+ type: INT
1663
+ }
1664
+ }
1665
+ node {
1666
+ input: "236"
1667
+ input: "242"
1668
+ output: "243"
1669
+ name: "Reshape_133"
1670
+ op_type: "Reshape"
1671
+ }
1672
+ node {
1673
+ input: "243"
1674
+ output: "244"
1675
+ name: "Transpose_134"
1676
+ op_type: "Transpose"
1677
+ attribute {
1678
+ name: "perm"
1679
+ ints: 1
1680
+ ints: 0
1681
+ ints: 2
1682
+ type: INTS
1683
+ }
1684
+ }
1685
+ node {
1686
+ output: "245"
1687
+ name: "Constant_135"
1688
+ op_type: "Constant"
1689
+ attribute {
1690
+ name: "value"
1691
+ t {
1692
+ data_type: 7
1693
+ }
1694
+ type: TENSOR
1695
+ }
1696
+ }
1697
+ node {
1698
+ input: "221"
1699
+ input: "245"
1700
+ output: "246"
1701
+ name: "Mul_136"
1702
+ op_type: "Mul"
1703
+ }
1704
+ node {
1705
+ input: "246"
1706
+ output: "249"
1707
+ name: "Unsqueeze_137"
1708
+ op_type: "Unsqueeze"
1709
+ attribute {
1710
+ name: "axes"
1711
+ ints: 0
1712
+ type: INTS
1713
+ }
1714
+ }
1715
+ node {
1716
+ input: "228"
1717
+ output: "250"
1718
+ name: "Unsqueeze_138"
1719
+ op_type: "Unsqueeze"
1720
+ attribute {
1721
+ name: "axes"
1722
+ ints: 0
1723
+ type: INTS
1724
+ }
1725
+ }
1726
+ node {
1727
+ input: "511"
1728
+ input: "249"
1729
+ input: "250"
1730
+ output: "251"
1731
+ name: "Concat_139"
1732
+ op_type: "Concat"
1733
+ attribute {
1734
+ name: "axis"
1735
+ i: 0
1736
+ type: INT
1737
+ }
1738
+ }
1739
+ node {
1740
+ input: "233"
1741
+ input: "251"
1742
+ output: "252"
1743
+ name: "Reshape_140"
1744
+ op_type: "Reshape"
1745
+ }
1746
+ node {
1747
+ output: "253"
1748
+ name: "Constant_141"
1749
+ op_type: "Constant"
1750
+ attribute {
1751
+ name: "value"
1752
+ t {
1753
+ data_type: 7
1754
+ }
1755
+ type: TENSOR
1756
+ }
1757
+ }
1758
+ node {
1759
+ input: "221"
1760
+ input: "253"
1761
+ output: "254"
1762
+ name: "Mul_142"
1763
+ op_type: "Mul"
1764
+ }
1765
+ node {
1766
+ input: "254"
1767
+ output: "257"
1768
+ name: "Unsqueeze_143"
1769
+ op_type: "Unsqueeze"
1770
+ attribute {
1771
+ name: "axes"
1772
+ ints: 0
1773
+ type: INTS
1774
+ }
1775
+ }
1776
+ node {
1777
+ input: "228"
1778
+ output: "258"
1779
+ name: "Unsqueeze_144"
1780
+ op_type: "Unsqueeze"
1781
+ attribute {
1782
+ name: "axes"
1783
+ ints: 0
1784
+ type: INTS
1785
+ }
1786
+ }
1787
+ node {
1788
+ input: "512"
1789
+ input: "257"
1790
+ input: "258"
1791
+ output: "259"
1792
+ name: "Concat_145"
1793
+ op_type: "Concat"
1794
+ attribute {
1795
+ name: "axis"
1796
+ i: 0
1797
+ type: INT
1798
+ }
1799
+ }
1800
+ node {
1801
+ input: "234"
1802
+ input: "259"
1803
+ output: "260"
1804
+ name: "Reshape_146"
1805
+ op_type: "Reshape"
1806
+ }
1807
+ node {
1808
+ input: "260"
1809
+ output: "261"
1810
+ name: "Transpose_147"
1811
+ op_type: "Transpose"
1812
+ attribute {
1813
+ name: "perm"
1814
+ ints: 1
1815
+ ints: 0
1816
+ ints: 2
1817
+ type: INTS
1818
+ }
1819
+ }
1820
+ node {
1821
+ input: "252"
1822
+ output: "262"
1823
+ name: "Transpose_148"
1824
+ op_type: "Transpose"
1825
+ attribute {
1826
+ name: "perm"
1827
+ ints: 1
1828
+ ints: 2
1829
+ ints: 0
1830
+ type: INTS
1831
+ }
1832
+ }
1833
+ node {
1834
+ input: "244"
1835
+ input: "262"
1836
+ output: "263"
1837
+ name: "MatMul_149"
1838
+ op_type: "MatMul"
1839
+ }
1840
+ node {
1841
+ input: "263"
1842
+ output: "264"
1843
+ name: "Softmax_150"
1844
+ op_type: "Softmax"
1845
+ attribute {
1846
+ name: "axis"
1847
+ i: 2
1848
+ type: INT
1849
+ }
1850
+ }
1851
+ node {
1852
+ input: "264"
1853
+ input: "261"
1854
+ output: "265"
1855
+ name: "MatMul_151"
1856
+ op_type: "MatMul"
1857
+ }
1858
+ node {
1859
+ input: "265"
1860
+ output: "266"
1861
+ name: "Transpose_152"
1862
+ op_type: "Transpose"
1863
+ attribute {
1864
+ name: "perm"
1865
+ ints: 1
1866
+ ints: 0
1867
+ ints: 2
1868
+ type: INTS
1869
+ }
1870
+ }
1871
+ node {
1872
+ input: "218"
1873
+ output: "267"
1874
+ name: "Unsqueeze_153"
1875
+ op_type: "Unsqueeze"
1876
+ attribute {
1877
+ name: "axes"
1878
+ ints: 0
1879
+ type: INTS
1880
+ }
1881
+ }
1882
+ node {
1883
+ input: "221"
1884
+ output: "268"
1885
+ name: "Unsqueeze_154"
1886
+ op_type: "Unsqueeze"
1887
+ attribute {
1888
+ name: "axes"
1889
+ ints: 0
1890
+ type: INTS
1891
+ }
1892
+ }
1893
+ node {
1894
+ input: "224"
1895
+ output: "269"
1896
+ name: "Unsqueeze_155"
1897
+ op_type: "Unsqueeze"
1898
+ attribute {
1899
+ name: "axes"
1900
+ ints: 0
1901
+ type: INTS
1902
+ }
1903
+ }
1904
+ node {
1905
+ input: "267"
1906
+ input: "268"
1907
+ input: "269"
1908
+ output: "270"
1909
+ name: "Concat_156"
1910
+ op_type: "Concat"
1911
+ attribute {
1912
+ name: "axis"
1913
+ i: 0
1914
+ type: INT
1915
+ }
1916
+ }
1917
+ node {
1918
+ input: "266"
1919
+ input: "270"
1920
+ output: "271"
1921
+ name: "Reshape_157"
1922
+ op_type: "Reshape"
1923
+ }
1924
+ node {
1925
+ input: "271"
1926
+ input: "513"
1927
+ output: "273"
1928
+ name: "MatMul_158"
1929
+ op_type: "MatMul"
1930
+ }
1931
+ node {
1932
+ input: "273"
1933
+ input: "transformer_encoder.layers.1.self_attn.out_proj.bias"
1934
+ output: "274"
1935
+ name: "Add_159"
1936
+ op_type: "Add"
1937
+ }
1938
+ node {
1939
+ input: "215"
1940
+ input: "274"
1941
+ output: "275"
1942
+ name: "Add_160"
1943
+ op_type: "Add"
1944
+ }
1945
+ node {
1946
+ input: "275"
1947
+ output: "277"
1948
+ name: "ReduceMean_161"
1949
+ op_type: "ReduceMean"
1950
+ attribute {
1951
+ name: "axes"
1952
+ ints: -1
1953
+ type: INTS
1954
+ }
1955
+ }
1956
+ node {
1957
+ input: "275"
1958
+ input: "277"
1959
+ output: "278"
1960
+ name: "Sub_162"
1961
+ op_type: "Sub"
1962
+ }
1963
+ node {
1964
+ input: "278"
1965
+ output: "279"
1966
+ name: "Cast_163"
1967
+ op_type: "Cast"
1968
+ attribute {
1969
+ name: "to"
1970
+ i: 1
1971
+ type: INT
1972
+ }
1973
+ }
1974
+ node {
1975
+ input: "279"
1976
+ input: "514"
1977
+ output: "281"
1978
+ name: "Pow_164"
1979
+ op_type: "Pow"
1980
+ }
1981
+ node {
1982
+ input: "281"
1983
+ output: "282"
1984
+ name: "ReduceMean_165"
1985
+ op_type: "ReduceMean"
1986
+ attribute {
1987
+ name: "axes"
1988
+ ints: -1
1989
+ type: INTS
1990
+ }
1991
+ }
1992
+ node {
1993
+ output: "283"
1994
+ name: "Constant_166"
1995
+ op_type: "Constant"
1996
+ attribute {
1997
+ name: "value"
1998
+ t {
1999
+ data_type: 1
2000
+ }
2001
+ type: TENSOR
2002
+ }
2003
+ }
2004
+ node {
2005
+ input: "282"
2006
+ input: "283"
2007
+ output: "284"
2008
+ name: "Add_167"
2009
+ op_type: "Add"
2010
+ }
2011
+ node {
2012
+ input: "284"
2013
+ output: "285"
2014
+ name: "Sqrt_168"
2015
+ op_type: "Sqrt"
2016
+ }
2017
+ node {
2018
+ input: "278"
2019
+ input: "285"
2020
+ output: "286"
2021
+ name: "Div_169"
2022
+ op_type: "Div"
2023
+ }
2024
+ node {
2025
+ input: "286"
2026
+ input: "transformer_encoder.layers.1.norm1.weight"
2027
+ output: "287"
2028
+ name: "Mul_170"
2029
+ op_type: "Mul"
2030
+ }
2031
+ node {
2032
+ input: "287"
2033
+ input: "transformer_encoder.layers.1.norm1.bias"
2034
+ output: "288"
2035
+ name: "Add_171"
2036
+ op_type: "Add"
2037
+ }
2038
+ node {
2039
+ input: "288"
2040
+ input: "515"
2041
+ output: "290"
2042
+ name: "MatMul_172"
2043
+ op_type: "MatMul"
2044
+ }
2045
+ node {
2046
+ input: "290"
2047
+ input: "transformer_encoder.layers.1.linear1.bias"
2048
+ output: "291"
2049
+ name: "Add_173"
2050
+ op_type: "Add"
2051
+ }
2052
+ node {
2053
+ input: "291"
2054
+ output: "292"
2055
+ name: "Relu_174"
2056
+ op_type: "Relu"
2057
+ }
2058
+ node {
2059
+ input: "292"
2060
+ input: "516"
2061
+ output: "294"
2062
+ name: "MatMul_175"
2063
+ op_type: "MatMul"
2064
+ }
2065
+ node {
2066
+ input: "294"
2067
+ input: "transformer_encoder.layers.1.linear2.bias"
2068
+ output: "295"
2069
+ name: "Add_176"
2070
+ op_type: "Add"
2071
+ }
2072
+ node {
2073
+ input: "288"
2074
+ input: "295"
2075
+ output: "296"
2076
+ name: "Add_177"
2077
+ op_type: "Add"
2078
+ }
2079
+ node {
2080
+ input: "296"
2081
+ output: "298"
2082
+ name: "ReduceMean_178"
2083
+ op_type: "ReduceMean"
2084
+ attribute {
2085
+ name: "axes"
2086
+ ints: -1
2087
+ type: INTS
2088
+ }
2089
+ }
2090
+ node {
2091
+ input: "296"
2092
+ input: "298"
2093
+ output: "299"
2094
+ name: "Sub_179"
2095
+ op_type: "Sub"
2096
+ }
2097
+ node {
2098
+ input: "299"
2099
+ output: "300"
2100
+ name: "Cast_180"
2101
+ op_type: "Cast"
2102
+ attribute {
2103
+ name: "to"
2104
+ i: 1
2105
+ type: INT
2106
+ }
2107
+ }
2108
+ node {
2109
+ input: "300"
2110
+ input: "517"
2111
+ output: "302"
2112
+ name: "Pow_181"
2113
+ op_type: "Pow"
2114
+ }
2115
+ node {
2116
+ input: "302"
2117
+ output: "303"
2118
+ name: "ReduceMean_182"
2119
+ op_type: "ReduceMean"
2120
+ attribute {
2121
+ name: "axes"
2122
+ ints: -1
2123
+ type: INTS
2124
+ }
2125
+ }
2126
+ node {
2127
+ output: "304"
2128
+ name: "Constant_183"
2129
+ op_type: "Constant"
2130
+ attribute {
2131
+ name: "value"
2132
+ t {
2133
+ data_type: 1
2134
+ }
2135
+ type: TENSOR
2136
+ }
2137
+ }
2138
+ node {
2139
+ input: "303"
2140
+ input: "304"
2141
+ output: "305"
2142
+ name: "Add_184"
2143
+ op_type: "Add"
2144
+ }
2145
+ node {
2146
+ input: "305"
2147
+ output: "306"
2148
+ name: "Sqrt_185"
2149
+ op_type: "Sqrt"
2150
+ }
2151
+ node {
2152
+ input: "299"
2153
+ input: "306"
2154
+ output: "307"
2155
+ name: "Div_186"
2156
+ op_type: "Div"
2157
+ }
2158
+ node {
2159
+ input: "307"
2160
+ input: "transformer_encoder.layers.1.norm2.weight"
2161
+ output: "308"
2162
+ name: "Mul_187"
2163
+ op_type: "Mul"
2164
+ }
2165
+ node {
2166
+ input: "308"
2167
+ input: "transformer_encoder.layers.1.norm2.bias"
2168
+ output: "309"
2169
+ name: "Add_188"
2170
+ op_type: "Add"
2171
+ }
2172
+ node {
2173
+ input: "309"
2174
+ output: "310"
2175
+ name: "Shape_189"
2176
+ op_type: "Shape"
2177
+ }
2178
+ node {
2179
+ output: "311"
2180
+ name: "Constant_190"
2181
+ op_type: "Constant"
2182
+ attribute {
2183
+ name: "value"
2184
+ t {
2185
+ data_type: 7
2186
+ }
2187
+ type: TENSOR
2188
+ }
2189
+ }
2190
+ node {
2191
+ input: "310"
2192
+ input: "311"
2193
+ output: "312"
2194
+ name: "Gather_191"
2195
+ op_type: "Gather"
2196
+ attribute {
2197
+ name: "axis"
2198
+ i: 0
2199
+ type: INT
2200
+ }
2201
+ }
2202
+ node {
2203
+ input: "309"
2204
+ output: "313"
2205
+ name: "Shape_192"
2206
+ op_type: "Shape"
2207
+ }
2208
+ node {
2209
+ output: "314"
2210
+ name: "Constant_193"
2211
+ op_type: "Constant"
2212
+ attribute {
2213
+ name: "value"
2214
+ t {
2215
+ data_type: 7
2216
+ }
2217
+ type: TENSOR
2218
+ }
2219
+ }
2220
+ node {
2221
+ input: "313"
2222
+ input: "314"
2223
+ output: "315"
2224
+ name: "Gather_194"
2225
+ op_type: "Gather"
2226
+ attribute {
2227
+ name: "axis"
2228
+ i: 0
2229
+ type: INT
2230
+ }
2231
+ }
2232
+ node {
2233
+ input: "309"
2234
+ output: "316"
2235
+ name: "Shape_195"
2236
+ op_type: "Shape"
2237
+ }
2238
+ node {
2239
+ output: "317"
2240
+ name: "Constant_196"
2241
+ op_type: "Constant"
2242
+ attribute {
2243
+ name: "value"
2244
+ t {
2245
+ data_type: 7
2246
+ }
2247
+ type: TENSOR
2248
+ }
2249
+ }
2250
+ node {
2251
+ input: "316"
2252
+ input: "317"
2253
+ output: "318"
2254
+ name: "Gather_197"
2255
+ op_type: "Gather"
2256
+ attribute {
2257
+ name: "axis"
2258
+ i: 0
2259
+ type: INT
2260
+ }
2261
+ }
2262
+ node {
2263
+ output: "319"
2264
+ name: "Constant_198"
2265
+ op_type: "Constant"
2266
+ attribute {
2267
+ name: "value"
2268
+ t {
2269
+ data_type: 7
2270
+ }
2271
+ type: TENSOR
2272
+ }
2273
+ }
2274
+ node {
2275
+ input: "318"
2276
+ input: "319"
2277
+ output: "320"
2278
+ name: "Div_199"
2279
+ op_type: "Div"
2280
+ }
2281
+ node {
2282
+ input: "320"
2283
+ output: "321"
2284
+ name: "Cast_200"
2285
+ op_type: "Cast"
2286
+ attribute {
2287
+ name: "to"
2288
+ i: 7
2289
+ type: INT
2290
+ }
2291
+ }
2292
+ node {
2293
+ input: "321"
2294
+ output: "322"
2295
+ name: "Cast_201"
2296
+ op_type: "Cast"
2297
+ attribute {
2298
+ name: "to"
2299
+ i: 7
2300
+ type: INT
2301
+ }
2302
+ }
2303
+ node {
2304
+ input: "309"
2305
+ input: "518"
2306
+ output: "324"
2307
+ name: "MatMul_202"
2308
+ op_type: "MatMul"
2309
+ }
2310
+ node {
2311
+ input: "324"
2312
+ input: "transformer_encoder.layers.2.self_attn.in_proj_bias"
2313
+ output: "325"
2314
+ name: "Add_203"
2315
+ op_type: "Add"
2316
+ }
2317
+ node {
2318
+ input: "325"
2319
+ output: "326"
2320
+ output: "327"
2321
+ output: "328"
2322
+ name: "Split_204"
2323
+ op_type: "Split"
2324
+ attribute {
2325
+ name: "axis"
2326
+ i: -1
2327
+ type: INT
2328
+ }
2329
+ attribute {
2330
+ name: "split"
2331
+ ints: 40
2332
+ ints: 40
2333
+ ints: 40
2334
+ type: INTS
2335
+ }
2336
+ }
2337
+ node {
2338
+ output: "329"
2339
+ name: "Constant_205"
2340
+ op_type: "Constant"
2341
+ attribute {
2342
+ name: "value"
2343
+ t {
2344
+ data_type: 1
2345
+ }
2346
+ type: TENSOR
2347
+ }
2348
+ }
2349
+ node {
2350
+ input: "326"
2351
+ input: "329"
2352
+ output: "330"
2353
+ name: "Mul_206"
2354
+ op_type: "Mul"
2355
+ }
2356
+ node {
2357
+ output: "331"
2358
+ name: "Constant_207"
2359
+ op_type: "Constant"
2360
+ attribute {
2361
+ name: "value"
2362
+ t {
2363
+ data_type: 7
2364
+ }
2365
+ type: TENSOR
2366
+ }
2367
+ }
2368
+ node {
2369
+ input: "315"
2370
+ input: "331"
2371
+ output: "332"
2372
+ name: "Mul_208"
2373
+ op_type: "Mul"
2374
+ }
2375
+ node {
2376
+ input: "312"
2377
+ output: "333"
2378
+ name: "Unsqueeze_209"
2379
+ op_type: "Unsqueeze"
2380
+ attribute {
2381
+ name: "axes"
2382
+ ints: 0
2383
+ type: INTS
2384
+ }
2385
+ }
2386
+ node {
2387
+ input: "332"
2388
+ output: "334"
2389
+ name: "Unsqueeze_210"
2390
+ op_type: "Unsqueeze"
2391
+ attribute {
2392
+ name: "axes"
2393
+ ints: 0
2394
+ type: INTS
2395
+ }
2396
+ }
2397
+ node {
2398
+ input: "322"
2399
+ output: "335"
2400
+ name: "Unsqueeze_211"
2401
+ op_type: "Unsqueeze"
2402
+ attribute {
2403
+ name: "axes"
2404
+ ints: 0
2405
+ type: INTS
2406
+ }
2407
+ }
2408
+ node {
2409
+ input: "333"
2410
+ input: "334"
2411
+ input: "335"
2412
+ output: "336"
2413
+ name: "Concat_212"
2414
+ op_type: "Concat"
2415
+ attribute {
2416
+ name: "axis"
2417
+ i: 0
2418
+ type: INT
2419
+ }
2420
+ }
2421
+ node {
2422
+ input: "330"
2423
+ input: "336"
2424
+ output: "337"
2425
+ name: "Reshape_213"
2426
+ op_type: "Reshape"
2427
+ }
2428
+ node {
2429
+ input: "337"
2430
+ output: "338"
2431
+ name: "Transpose_214"
2432
+ op_type: "Transpose"
2433
+ attribute {
2434
+ name: "perm"
2435
+ ints: 1
2436
+ ints: 0
2437
+ ints: 2
2438
+ type: INTS
2439
+ }
2440
+ }
2441
+ node {
2442
+ output: "339"
2443
+ name: "Constant_215"
2444
+ op_type: "Constant"
2445
+ attribute {
2446
+ name: "value"
2447
+ t {
2448
+ data_type: 7
2449
+ }
2450
+ type: TENSOR
2451
+ }
2452
+ }
2453
+ node {
2454
+ input: "315"
2455
+ input: "339"
2456
+ output: "340"
2457
+ name: "Mul_216"
2458
+ op_type: "Mul"
2459
+ }
2460
+ node {
2461
+ input: "340"
2462
+ output: "343"
2463
+ name: "Unsqueeze_217"
2464
+ op_type: "Unsqueeze"
2465
+ attribute {
2466
+ name: "axes"
2467
+ ints: 0
2468
+ type: INTS
2469
+ }
2470
+ }
2471
+ node {
2472
+ input: "322"
2473
+ output: "344"
2474
+ name: "Unsqueeze_218"
2475
+ op_type: "Unsqueeze"
2476
+ attribute {
2477
+ name: "axes"
2478
+ ints: 0
2479
+ type: INTS
2480
+ }
2481
+ }
2482
+ node {
2483
+ input: "519"
2484
+ input: "343"
2485
+ input: "344"
2486
+ output: "345"
2487
+ name: "Concat_219"
2488
+ op_type: "Concat"
2489
+ attribute {
2490
+ name: "axis"
2491
+ i: 0
2492
+ type: INT
2493
+ }
2494
+ }
2495
+ node {
2496
+ input: "327"
2497
+ input: "345"
2498
+ output: "346"
2499
+ name: "Reshape_220"
2500
+ op_type: "Reshape"
2501
+ }
2502
+ node {
2503
+ output: "347"
2504
+ name: "Constant_221"
2505
+ op_type: "Constant"
2506
+ attribute {
2507
+ name: "value"
2508
+ t {
2509
+ data_type: 7
2510
+ }
2511
+ type: TENSOR
2512
+ }
2513
+ }
2514
+ node {
2515
+ input: "315"
2516
+ input: "347"
2517
+ output: "348"
2518
+ name: "Mul_222"
2519
+ op_type: "Mul"
2520
+ }
2521
+ node {
2522
+ input: "348"
2523
+ output: "351"
2524
+ name: "Unsqueeze_223"
2525
+ op_type: "Unsqueeze"
2526
+ attribute {
2527
+ name: "axes"
2528
+ ints: 0
2529
+ type: INTS
2530
+ }
2531
+ }
2532
+ node {
2533
+ input: "322"
2534
+ output: "352"
2535
+ name: "Unsqueeze_224"
2536
+ op_type: "Unsqueeze"
2537
+ attribute {
2538
+ name: "axes"
2539
+ ints: 0
2540
+ type: INTS
2541
+ }
2542
+ }
2543
+ node {
2544
+ input: "520"
2545
+ input: "351"
2546
+ input: "352"
2547
+ output: "353"
2548
+ name: "Concat_225"
2549
+ op_type: "Concat"
2550
+ attribute {
2551
+ name: "axis"
2552
+ i: 0
2553
+ type: INT
2554
+ }
2555
+ }
2556
+ node {
2557
+ input: "328"
2558
+ input: "353"
2559
+ output: "354"
2560
+ name: "Reshape_226"
2561
+ op_type: "Reshape"
2562
+ }
2563
+ node {
2564
+ input: "354"
2565
+ output: "355"
2566
+ name: "Transpose_227"
2567
+ op_type: "Transpose"
2568
+ attribute {
2569
+ name: "perm"
2570
+ ints: 1
2571
+ ints: 0
2572
+ ints: 2
2573
+ type: INTS
2574
+ }
2575
+ }
2576
+ node {
2577
+ input: "346"
2578
+ output: "356"
2579
+ name: "Transpose_228"
2580
+ op_type: "Transpose"
2581
+ attribute {
2582
+ name: "perm"
2583
+ ints: 1
2584
+ ints: 2
2585
+ ints: 0
2586
+ type: INTS
2587
+ }
2588
+ }
2589
+ node {
2590
+ input: "338"
2591
+ input: "356"
2592
+ output: "357"
2593
+ name: "MatMul_229"
2594
+ op_type: "MatMul"
2595
+ }
2596
+ node {
2597
+ input: "357"
2598
+ output: "358"
2599
+ name: "Softmax_230"
2600
+ op_type: "Softmax"
2601
+ attribute {
2602
+ name: "axis"
2603
+ i: 2
2604
+ type: INT
2605
+ }
2606
+ }
2607
+ node {
2608
+ input: "358"
2609
+ input: "355"
2610
+ output: "359"
2611
+ name: "MatMul_231"
2612
+ op_type: "MatMul"
2613
+ }
2614
+ node {
2615
+ input: "359"
2616
+ output: "360"
2617
+ name: "Transpose_232"
2618
+ op_type: "Transpose"
2619
+ attribute {
2620
+ name: "perm"
2621
+ ints: 1
2622
+ ints: 0
2623
+ ints: 2
2624
+ type: INTS
2625
+ }
2626
+ }
2627
+ node {
2628
+ input: "312"
2629
+ output: "361"
2630
+ name: "Unsqueeze_233"
2631
+ op_type: "Unsqueeze"
2632
+ attribute {
2633
+ name: "axes"
2634
+ ints: 0
2635
+ type: INTS
2636
+ }
2637
+ }
2638
+ node {
2639
+ input: "315"
2640
+ output: "362"
2641
+ name: "Unsqueeze_234"
2642
+ op_type: "Unsqueeze"
2643
+ attribute {
2644
+ name: "axes"
2645
+ ints: 0
2646
+ type: INTS
2647
+ }
2648
+ }
2649
+ node {
2650
+ input: "318"
2651
+ output: "363"
2652
+ name: "Unsqueeze_235"
2653
+ op_type: "Unsqueeze"
2654
+ attribute {
2655
+ name: "axes"
2656
+ ints: 0
2657
+ type: INTS
2658
+ }
2659
+ }
2660
+ node {
2661
+ input: "361"
2662
+ input: "362"
2663
+ input: "363"
2664
+ output: "364"
2665
+ name: "Concat_236"
2666
+ op_type: "Concat"
2667
+ attribute {
2668
+ name: "axis"
2669
+ i: 0
2670
+ type: INT
2671
+ }
2672
+ }
2673
+ node {
2674
+ input: "360"
2675
+ input: "364"
2676
+ output: "365"
2677
+ name: "Reshape_237"
2678
+ op_type: "Reshape"
2679
+ }
2680
+ node {
2681
+ input: "365"
2682
+ input: "521"
2683
+ output: "367"
2684
+ name: "MatMul_238"
2685
+ op_type: "MatMul"
2686
+ }
2687
+ node {
2688
+ input: "367"
2689
+ input: "transformer_encoder.layers.2.self_attn.out_proj.bias"
2690
+ output: "368"
2691
+ name: "Add_239"
2692
+ op_type: "Add"
2693
+ }
2694
+ node {
2695
+ input: "309"
2696
+ input: "368"
2697
+ output: "369"
2698
+ name: "Add_240"
2699
+ op_type: "Add"
2700
+ }
2701
+ node {
2702
+ input: "369"
2703
+ output: "371"
2704
+ name: "ReduceMean_241"
2705
+ op_type: "ReduceMean"
2706
+ attribute {
2707
+ name: "axes"
2708
+ ints: -1
2709
+ type: INTS
2710
+ }
2711
+ }
2712
+ node {
2713
+ input: "369"
2714
+ input: "371"
2715
+ output: "372"
2716
+ name: "Sub_242"
2717
+ op_type: "Sub"
2718
+ }
2719
+ node {
2720
+ input: "372"
2721
+ output: "373"
2722
+ name: "Cast_243"
2723
+ op_type: "Cast"
2724
+ attribute {
2725
+ name: "to"
2726
+ i: 1
2727
+ type: INT
2728
+ }
2729
+ }
2730
+ node {
2731
+ input: "373"
2732
+ input: "522"
2733
+ output: "375"
2734
+ name: "Pow_244"
2735
+ op_type: "Pow"
2736
+ }
2737
+ node {
2738
+ input: "375"
2739
+ output: "376"
2740
+ name: "ReduceMean_245"
2741
+ op_type: "ReduceMean"
2742
+ attribute {
2743
+ name: "axes"
2744
+ ints: -1
2745
+ type: INTS
2746
+ }
2747
+ }
2748
+ node {
2749
+ output: "377"
2750
+ name: "Constant_246"
2751
+ op_type: "Constant"
2752
+ attribute {
2753
+ name: "value"
2754
+ t {
2755
+ data_type: 1
2756
+ }
2757
+ type: TENSOR
2758
+ }
2759
+ }
2760
+ node {
2761
+ input: "376"
2762
+ input: "377"
2763
+ output: "378"
2764
+ name: "Add_247"
2765
+ op_type: "Add"
2766
+ }
2767
+ node {
2768
+ input: "378"
2769
+ output: "379"
2770
+ name: "Sqrt_248"
2771
+ op_type: "Sqrt"
2772
+ }
2773
+ node {
2774
+ input: "372"
2775
+ input: "379"
2776
+ output: "380"
2777
+ name: "Div_249"
2778
+ op_type: "Div"
2779
+ }
2780
+ node {
2781
+ input: "380"
2782
+ input: "transformer_encoder.layers.2.norm1.weight"
2783
+ output: "381"
2784
+ name: "Mul_250"
2785
+ op_type: "Mul"
2786
+ }
2787
+ node {
2788
+ input: "381"
2789
+ input: "transformer_encoder.layers.2.norm1.bias"
2790
+ output: "382"
2791
+ name: "Add_251"
2792
+ op_type: "Add"
2793
+ }
2794
+ node {
2795
+ input: "382"
2796
+ input: "523"
2797
+ output: "384"
2798
+ name: "MatMul_252"
2799
+ op_type: "MatMul"
2800
+ }
2801
+ node {
2802
+ input: "384"
2803
+ input: "transformer_encoder.layers.2.linear1.bias"
2804
+ output: "385"
2805
+ name: "Add_253"
2806
+ op_type: "Add"
2807
+ }
2808
+ node {
2809
+ input: "385"
2810
+ output: "386"
2811
+ name: "Relu_254"
2812
+ op_type: "Relu"
2813
+ }
2814
+ node {
2815
+ input: "386"
2816
+ input: "524"
2817
+ output: "388"
2818
+ name: "MatMul_255"
2819
+ op_type: "MatMul"
2820
+ }
2821
+ node {
2822
+ input: "388"
2823
+ input: "transformer_encoder.layers.2.linear2.bias"
2824
+ output: "389"
2825
+ name: "Add_256"
2826
+ op_type: "Add"
2827
+ }
2828
+ node {
2829
+ input: "382"
2830
+ input: "389"
2831
+ output: "390"
2832
+ name: "Add_257"
2833
+ op_type: "Add"
2834
+ }
2835
+ node {
2836
+ input: "390"
2837
+ output: "392"
2838
+ name: "ReduceMean_258"
2839
+ op_type: "ReduceMean"
2840
+ attribute {
2841
+ name: "axes"
2842
+ ints: -1
2843
+ type: INTS
2844
+ }
2845
+ }
2846
+ node {
2847
+ input: "390"
2848
+ input: "392"
2849
+ output: "393"
2850
+ name: "Sub_259"
2851
+ op_type: "Sub"
2852
+ }
2853
+ node {
2854
+ input: "393"
2855
+ output: "394"
2856
+ name: "Cast_260"
2857
+ op_type: "Cast"
2858
+ attribute {
2859
+ name: "to"
2860
+ i: 1
2861
+ type: INT
2862
+ }
2863
+ }
2864
+ node {
2865
+ input: "394"
2866
+ input: "525"
2867
+ output: "396"
2868
+ name: "Pow_261"
2869
+ op_type: "Pow"
2870
+ }
2871
+ node {
2872
+ input: "396"
2873
+ output: "397"
2874
+ name: "ReduceMean_262"
2875
+ op_type: "ReduceMean"
2876
+ attribute {
2877
+ name: "axes"
2878
+ ints: -1
2879
+ type: INTS
2880
+ }
2881
+ }
2882
+ node {
2883
+ output: "398"
2884
+ name: "Constant_263"
2885
+ op_type: "Constant"
2886
+ attribute {
2887
+ name: "value"
2888
+ t {
2889
+ data_type: 1
2890
+ }
2891
+ type: TENSOR
2892
+ }
2893
+ }
2894
+ node {
2895
+ input: "397"
2896
+ input: "398"
2897
+ output: "399"
2898
+ name: "Add_264"
2899
+ op_type: "Add"
2900
+ }
2901
+ node {
2902
+ input: "399"
2903
+ output: "400"
2904
+ name: "Sqrt_265"
2905
+ op_type: "Sqrt"
2906
+ }
2907
+ node {
2908
+ input: "393"
2909
+ input: "400"
2910
+ output: "401"
2911
+ name: "Div_266"
2912
+ op_type: "Div"
2913
+ }
2914
+ node {
2915
+ input: "401"
2916
+ input: "transformer_encoder.layers.2.norm2.weight"
2917
+ output: "402"
2918
+ name: "Mul_267"
2919
+ op_type: "Mul"
2920
+ }
2921
+ node {
2922
+ input: "402"
2923
+ input: "transformer_encoder.layers.2.norm2.bias"
2924
+ output: "403"
2925
+ name: "Add_268"
2926
+ op_type: "Add"
2927
+ }
2928
+ node {
2929
+ input: "403"
2930
+ output: "404"
2931
+ name: "Shape_269"
2932
+ op_type: "Shape"
2933
+ }
2934
+ node {
2935
+ output: "405"
2936
+ name: "Constant_270"
2937
+ op_type: "Constant"
2938
+ attribute {
2939
+ name: "value"
2940
+ t {
2941
+ data_type: 7
2942
+ }
2943
+ type: TENSOR
2944
+ }
2945
+ }
2946
+ node {
2947
+ input: "404"
2948
+ input: "405"
2949
+ output: "406"
2950
+ name: "Gather_271"
2951
+ op_type: "Gather"
2952
+ attribute {
2953
+ name: "axis"
2954
+ i: 0
2955
+ type: INT
2956
+ }
2957
+ }
2958
+ node {
2959
+ input: "403"
2960
+ output: "407"
2961
+ name: "Shape_272"
2962
+ op_type: "Shape"
2963
+ }
2964
+ node {
2965
+ output: "408"
2966
+ name: "Constant_273"
2967
+ op_type: "Constant"
2968
+ attribute {
2969
+ name: "value"
2970
+ t {
2971
+ data_type: 7
2972
+ }
2973
+ type: TENSOR
2974
+ }
2975
+ }
2976
+ node {
2977
+ input: "407"
2978
+ input: "408"
2979
+ output: "409"
2980
+ name: "Gather_274"
2981
+ op_type: "Gather"
2982
+ attribute {
2983
+ name: "axis"
2984
+ i: 0
2985
+ type: INT
2986
+ }
2987
+ }
2988
+ node {
2989
+ input: "403"
2990
+ output: "410"
2991
+ name: "Shape_275"
2992
+ op_type: "Shape"
2993
+ }
2994
+ node {
2995
+ output: "411"
2996
+ name: "Constant_276"
2997
+ op_type: "Constant"
2998
+ attribute {
2999
+ name: "value"
3000
+ t {
3001
+ data_type: 7
3002
+ }
3003
+ type: TENSOR
3004
+ }
3005
+ }
3006
+ node {
3007
+ input: "410"
3008
+ input: "411"
3009
+ output: "412"
3010
+ name: "Gather_277"
3011
+ op_type: "Gather"
3012
+ attribute {
3013
+ name: "axis"
3014
+ i: 0
3015
+ type: INT
3016
+ }
3017
+ }
3018
+ node {
3019
+ output: "413"
3020
+ name: "Constant_278"
3021
+ op_type: "Constant"
3022
+ attribute {
3023
+ name: "value"
3024
+ t {
3025
+ data_type: 7
3026
+ }
3027
+ type: TENSOR
3028
+ }
3029
+ }
3030
+ node {
3031
+ input: "412"
3032
+ input: "413"
3033
+ output: "414"
3034
+ name: "Div_279"
3035
+ op_type: "Div"
3036
+ }
3037
+ node {
3038
+ input: "414"
3039
+ output: "415"
3040
+ name: "Cast_280"
3041
+ op_type: "Cast"
3042
+ attribute {
3043
+ name: "to"
3044
+ i: 7
3045
+ type: INT
3046
+ }
3047
+ }
3048
+ node {
3049
+ input: "415"
3050
+ output: "416"
3051
+ name: "Cast_281"
3052
+ op_type: "Cast"
3053
+ attribute {
3054
+ name: "to"
3055
+ i: 7
3056
+ type: INT
3057
+ }
3058
+ }
3059
+ node {
3060
+ input: "403"
3061
+ input: "526"
3062
+ output: "418"
3063
+ name: "MatMul_282"
3064
+ op_type: "MatMul"
3065
+ }
3066
+ node {
3067
+ input: "418"
3068
+ input: "transformer_encoder.layers.3.self_attn.in_proj_bias"
3069
+ output: "419"
3070
+ name: "Add_283"
3071
+ op_type: "Add"
3072
+ }
3073
+ node {
3074
+ input: "419"
3075
+ output: "420"
3076
+ output: "421"
3077
+ output: "422"
3078
+ name: "Split_284"
3079
+ op_type: "Split"
3080
+ attribute {
3081
+ name: "axis"
3082
+ i: -1
3083
+ type: INT
3084
+ }
3085
+ attribute {
3086
+ name: "split"
3087
+ ints: 40
3088
+ ints: 40
3089
+ ints: 40
3090
+ type: INTS
3091
+ }
3092
+ }
3093
+ node {
3094
+ output: "423"
3095
+ name: "Constant_285"
3096
+ op_type: "Constant"
3097
+ attribute {
3098
+ name: "value"
3099
+ t {
3100
+ data_type: 1
3101
+ }
3102
+ type: TENSOR
3103
+ }
3104
+ }
3105
+ node {
3106
+ input: "420"
3107
+ input: "423"
3108
+ output: "424"
3109
+ name: "Mul_286"
3110
+ op_type: "Mul"
3111
+ }
3112
+ node {
3113
+ output: "425"
3114
+ name: "Constant_287"
3115
+ op_type: "Constant"
3116
+ attribute {
3117
+ name: "value"
3118
+ t {
3119
+ data_type: 7
3120
+ }
3121
+ type: TENSOR
3122
+ }
3123
+ }
3124
+ node {
3125
+ input: "409"
3126
+ input: "425"
3127
+ output: "426"
3128
+ name: "Mul_288"
3129
+ op_type: "Mul"
3130
+ }
3131
+ node {
3132
+ input: "406"
3133
+ output: "427"
3134
+ name: "Unsqueeze_289"
3135
+ op_type: "Unsqueeze"
3136
+ attribute {
3137
+ name: "axes"
3138
+ ints: 0
3139
+ type: INTS
3140
+ }
3141
+ }
3142
+ node {
3143
+ input: "426"
3144
+ output: "428"
3145
+ name: "Unsqueeze_290"
3146
+ op_type: "Unsqueeze"
3147
+ attribute {
3148
+ name: "axes"
3149
+ ints: 0
3150
+ type: INTS
3151
+ }
3152
+ }
3153
+ node {
3154
+ input: "416"
3155
+ output: "429"
3156
+ name: "Unsqueeze_291"
3157
+ op_type: "Unsqueeze"
3158
+ attribute {
3159
+ name: "axes"
3160
+ ints: 0
3161
+ type: INTS
3162
+ }
3163
+ }
3164
+ node {
3165
+ input: "427"
3166
+ input: "428"
3167
+ input: "429"
3168
+ output: "430"
3169
+ name: "Concat_292"
3170
+ op_type: "Concat"
3171
+ attribute {
3172
+ name: "axis"
3173
+ i: 0
3174
+ type: INT
3175
+ }
3176
+ }
3177
+ node {
3178
+ input: "424"
3179
+ input: "430"
3180
+ output: "431"
3181
+ name: "Reshape_293"
3182
+ op_type: "Reshape"
3183
+ }
3184
+ node {
3185
+ input: "431"
3186
+ output: "432"
3187
+ name: "Transpose_294"
3188
+ op_type: "Transpose"
3189
+ attribute {
3190
+ name: "perm"
3191
+ ints: 1
3192
+ ints: 0
3193
+ ints: 2
3194
+ type: INTS
3195
+ }
3196
+ }
3197
+ node {
3198
+ output: "433"
3199
+ name: "Constant_295"
3200
+ op_type: "Constant"
3201
+ attribute {
3202
+ name: "value"
3203
+ t {
3204
+ data_type: 7
3205
+ }
3206
+ type: TENSOR
3207
+ }
3208
+ }
3209
+ node {
3210
+ input: "409"
3211
+ input: "433"
3212
+ output: "434"
3213
+ name: "Mul_296"
3214
+ op_type: "Mul"
3215
+ }
3216
+ node {
3217
+ input: "434"
3218
+ output: "437"
3219
+ name: "Unsqueeze_297"
3220
+ op_type: "Unsqueeze"
3221
+ attribute {
3222
+ name: "axes"
3223
+ ints: 0
3224
+ type: INTS
3225
+ }
3226
+ }
3227
+ node {
3228
+ input: "416"
3229
+ output: "438"
3230
+ name: "Unsqueeze_298"
3231
+ op_type: "Unsqueeze"
3232
+ attribute {
3233
+ name: "axes"
3234
+ ints: 0
3235
+ type: INTS
3236
+ }
3237
+ }
3238
+ node {
3239
+ input: "527"
3240
+ input: "437"
3241
+ input: "438"
3242
+ output: "439"
3243
+ name: "Concat_299"
3244
+ op_type: "Concat"
3245
+ attribute {
3246
+ name: "axis"
3247
+ i: 0
3248
+ type: INT
3249
+ }
3250
+ }
3251
+ node {
3252
+ input: "421"
3253
+ input: "439"
3254
+ output: "440"
3255
+ name: "Reshape_300"
3256
+ op_type: "Reshape"
3257
+ }
3258
+ node {
3259
+ output: "441"
3260
+ name: "Constant_301"
3261
+ op_type: "Constant"
3262
+ attribute {
3263
+ name: "value"
3264
+ t {
3265
+ data_type: 7
3266
+ }
3267
+ type: TENSOR
3268
+ }
3269
+ }
3270
+ node {
3271
+ input: "409"
3272
+ input: "441"
3273
+ output: "442"
3274
+ name: "Mul_302"
3275
+ op_type: "Mul"
3276
+ }
3277
+ node {
3278
+ input: "442"
3279
+ output: "445"
3280
+ name: "Unsqueeze_303"
3281
+ op_type: "Unsqueeze"
3282
+ attribute {
3283
+ name: "axes"
3284
+ ints: 0
3285
+ type: INTS
3286
+ }
3287
+ }
3288
+ node {
3289
+ input: "416"
3290
+ output: "446"
3291
+ name: "Unsqueeze_304"
3292
+ op_type: "Unsqueeze"
3293
+ attribute {
3294
+ name: "axes"
3295
+ ints: 0
3296
+ type: INTS
3297
+ }
3298
+ }
3299
+ node {
3300
+ input: "528"
3301
+ input: "445"
3302
+ input: "446"
3303
+ output: "447"
3304
+ name: "Concat_305"
3305
+ op_type: "Concat"
3306
+ attribute {
3307
+ name: "axis"
3308
+ i: 0
3309
+ type: INT
3310
+ }
3311
+ }
3312
+ node {
3313
+ input: "422"
3314
+ input: "447"
3315
+ output: "448"
3316
+ name: "Reshape_306"
3317
+ op_type: "Reshape"
3318
+ }
3319
+ node {
3320
+ input: "448"
3321
+ output: "449"
3322
+ name: "Transpose_307"
3323
+ op_type: "Transpose"
3324
+ attribute {
3325
+ name: "perm"
3326
+ ints: 1
3327
+ ints: 0
3328
+ ints: 2
3329
+ type: INTS
3330
+ }
3331
+ }
3332
+ node {
3333
+ input: "440"
3334
+ output: "450"
3335
+ name: "Transpose_308"
3336
+ op_type: "Transpose"
3337
+ attribute {
3338
+ name: "perm"
3339
+ ints: 1
3340
+ ints: 2
3341
+ ints: 0
3342
+ type: INTS
3343
+ }
3344
+ }
3345
+ node {
3346
+ input: "432"
3347
+ input: "450"
3348
+ output: "451"
3349
+ name: "MatMul_309"
3350
+ op_type: "MatMul"
3351
+ }
3352
+ node {
3353
+ input: "451"
3354
+ output: "452"
3355
+ name: "Softmax_310"
3356
+ op_type: "Softmax"
3357
+ attribute {
3358
+ name: "axis"
3359
+ i: 2
3360
+ type: INT
3361
+ }
3362
+ }
3363
+ node {
3364
+ input: "452"
3365
+ input: "449"
3366
+ output: "453"
3367
+ name: "MatMul_311"
3368
+ op_type: "MatMul"
3369
+ }
3370
+ node {
3371
+ input: "453"
3372
+ output: "454"
3373
+ name: "Transpose_312"
3374
+ op_type: "Transpose"
3375
+ attribute {
3376
+ name: "perm"
3377
+ ints: 1
3378
+ ints: 0
3379
+ ints: 2
3380
+ type: INTS
3381
+ }
3382
+ }
3383
+ node {
3384
+ input: "406"
3385
+ output: "455"
3386
+ name: "Unsqueeze_313"
3387
+ op_type: "Unsqueeze"
3388
+ attribute {
3389
+ name: "axes"
3390
+ ints: 0
3391
+ type: INTS
3392
+ }
3393
+ }
3394
+ node {
3395
+ input: "409"
3396
+ output: "456"
3397
+ name: "Unsqueeze_314"
3398
+ op_type: "Unsqueeze"
3399
+ attribute {
3400
+ name: "axes"
3401
+ ints: 0
3402
+ type: INTS
3403
+ }
3404
+ }
3405
+ node {
3406
+ input: "412"
3407
+ output: "457"
3408
+ name: "Unsqueeze_315"
3409
+ op_type: "Unsqueeze"
3410
+ attribute {
3411
+ name: "axes"
3412
+ ints: 0
3413
+ type: INTS
3414
+ }
3415
+ }
3416
+ node {
3417
+ input: "455"
3418
+ input: "456"
3419
+ input: "457"
3420
+ output: "458"
3421
+ name: "Concat_316"
3422
+ op_type: "Concat"
3423
+ attribute {
3424
+ name: "axis"
3425
+ i: 0
3426
+ type: INT
3427
+ }
3428
+ }
3429
+ node {
3430
+ input: "454"
3431
+ input: "458"
3432
+ output: "459"
3433
+ name: "Reshape_317"
3434
+ op_type: "Reshape"
3435
+ }
3436
+ node {
3437
+ input: "459"
3438
+ input: "529"
3439
+ output: "461"
3440
+ name: "MatMul_318"
3441
+ op_type: "MatMul"
3442
+ }
3443
+ node {
3444
+ input: "461"
3445
+ input: "transformer_encoder.layers.3.self_attn.out_proj.bias"
3446
+ output: "462"
3447
+ name: "Add_319"
3448
+ op_type: "Add"
3449
+ }
3450
+ node {
3451
+ input: "403"
3452
+ input: "462"
3453
+ output: "463"
3454
+ name: "Add_320"
3455
+ op_type: "Add"
3456
+ }
3457
+ node {
3458
+ input: "463"
3459
+ output: "465"
3460
+ name: "ReduceMean_321"
3461
+ op_type: "ReduceMean"
3462
+ attribute {
3463
+ name: "axes"
3464
+ ints: -1
3465
+ type: INTS
3466
+ }
3467
+ }
3468
+ node {
3469
+ input: "463"
3470
+ input: "465"
3471
+ output: "466"
3472
+ name: "Sub_322"
3473
+ op_type: "Sub"
3474
+ }
3475
+ node {
3476
+ input: "466"
3477
+ output: "467"
3478
+ name: "Cast_323"
3479
+ op_type: "Cast"
3480
+ attribute {
3481
+ name: "to"
3482
+ i: 1
3483
+ type: INT
3484
+ }
3485
+ }
3486
+ node {
3487
+ input: "467"
3488
+ input: "530"
3489
+ output: "469"
3490
+ name: "Pow_324"
3491
+ op_type: "Pow"
3492
+ }
3493
+ node {
3494
+ input: "469"
3495
+ output: "470"
3496
+ name: "ReduceMean_325"
3497
+ op_type: "ReduceMean"
3498
+ attribute {
3499
+ name: "axes"
3500
+ ints: -1
3501
+ type: INTS
3502
+ }
3503
+ }
3504
+ node {
3505
+ output: "471"
3506
+ name: "Constant_326"
3507
+ op_type: "Constant"
3508
+ attribute {
3509
+ name: "value"
3510
+ t {
3511
+ data_type: 1
3512
+ }
3513
+ type: TENSOR
3514
+ }
3515
+ }
3516
+ node {
3517
+ input: "470"
3518
+ input: "471"
3519
+ output: "472"
3520
+ name: "Add_327"
3521
+ op_type: "Add"
3522
+ }
3523
+ node {
3524
+ input: "472"
3525
+ output: "473"
3526
+ name: "Sqrt_328"
3527
+ op_type: "Sqrt"
3528
+ }
3529
+ node {
3530
+ input: "466"
3531
+ input: "473"
3532
+ output: "474"
3533
+ name: "Div_329"
3534
+ op_type: "Div"
3535
+ }
3536
+ node {
3537
+ input: "474"
3538
+ input: "transformer_encoder.layers.3.norm1.weight"
3539
+ output: "475"
3540
+ name: "Mul_330"
3541
+ op_type: "Mul"
3542
+ }
3543
+ node {
3544
+ input: "475"
3545
+ input: "transformer_encoder.layers.3.norm1.bias"
3546
+ output: "476"
3547
+ name: "Add_331"
3548
+ op_type: "Add"
3549
+ }
3550
+ node {
3551
+ input: "476"
3552
+ input: "531"
3553
+ output: "478"
3554
+ name: "MatMul_332"
3555
+ op_type: "MatMul"
3556
+ }
3557
+ node {
3558
+ input: "478"
3559
+ input: "transformer_encoder.layers.3.linear1.bias"
3560
+ output: "479"
3561
+ name: "Add_333"
3562
+ op_type: "Add"
3563
+ }
3564
+ node {
3565
+ input: "479"
3566
+ output: "480"
3567
+ name: "Relu_334"
3568
+ op_type: "Relu"
3569
+ }
3570
+ node {
3571
+ input: "480"
3572
+ input: "532"
3573
+ output: "482"
3574
+ name: "MatMul_335"
3575
+ op_type: "MatMul"
3576
+ }
3577
+ node {
3578
+ input: "482"
3579
+ input: "transformer_encoder.layers.3.linear2.bias"
3580
+ output: "483"
3581
+ name: "Add_336"
3582
+ op_type: "Add"
3583
+ }
3584
+ node {
3585
+ input: "476"
3586
+ input: "483"
3587
+ output: "484"
3588
+ name: "Add_337"
3589
+ op_type: "Add"
3590
+ }
3591
+ node {
3592
+ input: "484"
3593
+ output: "486"
3594
+ name: "ReduceMean_338"
3595
+ op_type: "ReduceMean"
3596
+ attribute {
3597
+ name: "axes"
3598
+ ints: -1
3599
+ type: INTS
3600
+ }
3601
+ }
3602
+ node {
3603
+ input: "484"
3604
+ input: "486"
3605
+ output: "487"
3606
+ name: "Sub_339"
3607
+ op_type: "Sub"
3608
+ }
3609
+ node {
3610
+ input: "487"
3611
+ output: "488"
3612
+ name: "Cast_340"
3613
+ op_type: "Cast"
3614
+ attribute {
3615
+ name: "to"
3616
+ i: 1
3617
+ type: INT
3618
+ }
3619
+ }
3620
+ node {
3621
+ input: "488"
3622
+ input: "533"
3623
+ output: "490"
3624
+ name: "Pow_341"
3625
+ op_type: "Pow"
3626
+ }
3627
+ node {
3628
+ input: "490"
3629
+ output: "491"
3630
+ name: "ReduceMean_342"
3631
+ op_type: "ReduceMean"
3632
+ attribute {
3633
+ name: "axes"
3634
+ ints: -1
3635
+ type: INTS
3636
+ }
3637
+ }
3638
+ node {
3639
+ output: "492"
3640
+ name: "Constant_343"
3641
+ op_type: "Constant"
3642
+ attribute {
3643
+ name: "value"
3644
+ t {
3645
+ data_type: 1
3646
+ }
3647
+ type: TENSOR
3648
+ }
3649
+ }
3650
+ node {
3651
+ input: "491"
3652
+ input: "492"
3653
+ output: "493"
3654
+ name: "Add_344"
3655
+ op_type: "Add"
3656
+ }
3657
+ node {
3658
+ input: "493"
3659
+ output: "494"
3660
+ name: "Sqrt_345"
3661
+ op_type: "Sqrt"
3662
+ }
3663
+ node {
3664
+ input: "487"
3665
+ input: "494"
3666
+ output: "495"
3667
+ name: "Div_346"
3668
+ op_type: "Div"
3669
+ }
3670
+ node {
3671
+ input: "495"
3672
+ input: "transformer_encoder.layers.3.norm2.weight"
3673
+ output: "496"
3674
+ name: "Mul_347"
3675
+ op_type: "Mul"
3676
+ }
3677
+ node {
3678
+ input: "496"
3679
+ input: "transformer_encoder.layers.3.norm2.bias"
3680
+ output: "497"
3681
+ name: "Add_348"
3682
+ op_type: "Add"
3683
+ }
3684
+ node {
3685
+ input: "497"
3686
+ output: "498"
3687
+ name: "ReduceMean_349"
3688
+ op_type: "ReduceMean"
3689
+ attribute {
3690
+ name: "axes"
3691
+ ints: 0
3692
+ type: INTS
3693
+ }
3694
+ attribute {
3695
+ name: "keepdims"
3696
+ i: 0
3697
+ type: INT
3698
+ }
3699
+ }
3700
+ node {
3701
+ input: "105"
3702
+ input: "118"
3703
+ input: "498"
3704
+ output: "499"
3705
+ name: "Concat_350"
3706
+ op_type: "Concat"
3707
+ attribute {
3708
+ name: "axis"
3709
+ i: 1
3710
+ type: INT
3711
+ }
3712
+ }
3713
+ node {
3714
+ input: "499"
3715
+ input: "fc1_linear.weight"
3716
+ input: "fc1_linear.bias"
3717
+ output: "output_logits"
3718
+ name: "Gemm_351"
3719
+ op_type: "Gemm"
3720
+ attribute {
3721
+ name: "alpha"
3722
+ f: 1.0
3723
+ type: FLOAT
3724
+ }
3725
+ attribute {
3726
+ name: "beta"
3727
+ f: 1.0
3728
+ type: FLOAT
3729
+ }
3730
+ attribute {
3731
+ name: "transB"
3732
+ i: 1
3733
+ type: INT
3734
+ }
3735
+ }
3736
+ node {
3737
+ input: "output_logits"
3738
+ output: "output_softmax"
3739
+ name: "Softmax_352"
3740
+ op_type: "Softmax"
3741
+ attribute {
3742
+ name: "axis"
3743
+ i: 1
3744
+ type: INT
3745
+ }
3746
+ }
3747
+ initializer {
3748
+ dims: 40
3749
+ dims: 120
3750
+ data_type: 1
3751
+ name: "502"
3752
+ }
3753
+ initializer {
3754
+ dims: 1
3755
+ data_type: 7
3756
+ name: "503"
3757
+ }
3758
+ initializer {
3759
+ dims: 1
3760
+ data_type: 7
3761
+ name: "504"
3762
+ }
3763
+ initializer {
3764
+ dims: 40
3765
+ dims: 40
3766
+ data_type: 1
3767
+ name: "505"
3768
+ }
3769
+ initializer {
3770
+ data_type: 1
3771
+ name: "506"
3772
+ }
3773
+ initializer {
3774
+ dims: 40
3775
+ dims: 512
3776
+ data_type: 1
3777
+ name: "507"
3778
+ }
3779
+ initializer {
3780
+ dims: 512
3781
+ dims: 40
3782
+ data_type: 1
3783
+ name: "508"
3784
+ }
3785
+ initializer {
3786
+ data_type: 1
3787
+ name: "509"
3788
+ }
3789
+ initializer {
3790
+ dims: 40
3791
+ dims: 120
3792
+ data_type: 1
3793
+ name: "510"
3794
+ }
3795
+ initializer {
3796
+ dims: 1
3797
+ data_type: 7
3798
+ name: "511"
3799
+ }
3800
+ initializer {
3801
+ dims: 1
3802
+ data_type: 7
3803
+ name: "512"
3804
+ }
3805
+ initializer {
3806
+ dims: 40
3807
+ dims: 40
3808
+ data_type: 1
3809
+ name: "513"
3810
+ }
3811
+ initializer {
3812
+ data_type: 1
3813
+ name: "514"
3814
+ }
3815
+ initializer {
3816
+ dims: 40
3817
+ dims: 512
3818
+ data_type: 1
3819
+ name: "515"
3820
+ }
3821
+ initializer {
3822
+ dims: 512
3823
+ dims: 40
3824
+ data_type: 1
3825
+ name: "516"
3826
+ }
3827
+ initializer {
3828
+ data_type: 1
3829
+ name: "517"
3830
+ }
3831
+ initializer {
3832
+ dims: 40
3833
+ dims: 120
3834
+ data_type: 1
3835
+ name: "518"
3836
+ }
3837
+ initializer {
3838
+ dims: 1
3839
+ data_type: 7
3840
+ name: "519"
3841
+ }
3842
+ initializer {
3843
+ dims: 1
3844
+ data_type: 7
3845
+ name: "520"
3846
+ }
3847
+ initializer {
3848
+ dims: 40
3849
+ dims: 40
3850
+ data_type: 1
3851
+ name: "521"
3852
+ }
3853
+ initializer {
3854
+ data_type: 1
3855
+ name: "522"
3856
+ }
3857
+ initializer {
3858
+ dims: 40
3859
+ dims: 512
3860
+ data_type: 1
3861
+ name: "523"
3862
+ }
3863
+ initializer {
3864
+ dims: 512
3865
+ dims: 40
3866
+ data_type: 1
3867
+ name: "524"
3868
+ }
3869
+ initializer {
3870
+ data_type: 1
3871
+ name: "525"
3872
+ }
3873
+ initializer {
3874
+ dims: 40
3875
+ dims: 120
3876
+ data_type: 1
3877
+ name: "526"
3878
+ }
3879
+ initializer {
3880
+ dims: 1
3881
+ data_type: 7
3882
+ name: "527"
3883
+ }
3884
+ initializer {
3885
+ dims: 1
3886
+ data_type: 7
3887
+ name: "528"
3888
+ }
3889
+ initializer {
3890
+ dims: 40
3891
+ dims: 40
3892
+ data_type: 1
3893
+ name: "529"
3894
+ }
3895
+ initializer {
3896
+ data_type: 1
3897
+ name: "530"
3898
+ }
3899
+ initializer {
3900
+ dims: 40
3901
+ dims: 512
3902
+ data_type: 1
3903
+ name: "531"
3904
+ }
3905
+ initializer {
3906
+ dims: 512
3907
+ dims: 40
3908
+ data_type: 1
3909
+ name: "532"
3910
+ }
3911
+ initializer {
3912
+ data_type: 1
3913
+ name: "533"
3914
+ }
3915
+ initializer {
3916
+ dims: 16
3917
+ data_type: 1
3918
+ name: "conv2Dblock1.0.bias"
3919
+ }
3920
+ initializer {
3921
+ dims: 16
3922
+ dims: 1
3923
+ dims: 3
3924
+ dims: 3
3925
+ data_type: 1
3926
+ name: "conv2Dblock1.0.weight"
3927
+ }
3928
+ initializer {
3929
+ dims: 16
3930
+ data_type: 1
3931
+ name: "conv2Dblock1.1.bias"
3932
+ }
3933
+ initializer {
3934
+ dims: 16
3935
+ data_type: 1
3936
+ name: "conv2Dblock1.1.running_mean"
3937
+ }
3938
+ initializer {
3939
+ dims: 16
3940
+ data_type: 1
3941
+ name: "conv2Dblock1.1.running_var"
3942
+ }
3943
+ initializer {
3944
+ dims: 16
3945
+ data_type: 1
3946
+ name: "conv2Dblock1.1.weight"
3947
+ }
3948
+ initializer {
3949
+ dims: 64
3950
+ data_type: 1
3951
+ name: "conv2Dblock1.10.bias"
3952
+ }
3953
+ initializer {
3954
+ dims: 64
3955
+ dims: 32
3956
+ dims: 3
3957
+ dims: 3
3958
+ data_type: 1
3959
+ name: "conv2Dblock1.10.weight"
3960
+ }
3961
+ initializer {
3962
+ dims: 64
3963
+ data_type: 1
3964
+ name: "conv2Dblock1.11.bias"
3965
+ }
3966
+ initializer {
3967
+ dims: 64
3968
+ data_type: 1
3969
+ name: "conv2Dblock1.11.running_mean"
3970
+ }
3971
+ initializer {
3972
+ dims: 64
3973
+ data_type: 1
3974
+ name: "conv2Dblock1.11.running_var"
3975
+ }
3976
+ initializer {
3977
+ dims: 64
3978
+ data_type: 1
3979
+ name: "conv2Dblock1.11.weight"
3980
+ }
3981
+ initializer {
3982
+ dims: 32
3983
+ data_type: 1
3984
+ name: "conv2Dblock1.5.bias"
3985
+ }
3986
+ initializer {
3987
+ dims: 32
3988
+ dims: 16
3989
+ dims: 3
3990
+ dims: 3
3991
+ data_type: 1
3992
+ name: "conv2Dblock1.5.weight"
3993
+ }
3994
+ initializer {
3995
+ dims: 32
3996
+ data_type: 1
3997
+ name: "conv2Dblock1.6.bias"
3998
+ }
3999
+ initializer {
4000
+ dims: 32
4001
+ data_type: 1
4002
+ name: "conv2Dblock1.6.running_mean"
4003
+ }
4004
+ initializer {
4005
+ dims: 32
4006
+ data_type: 1
4007
+ name: "conv2Dblock1.6.running_var"
4008
+ }
4009
+ initializer {
4010
+ dims: 32
4011
+ data_type: 1
4012
+ name: "conv2Dblock1.6.weight"
4013
+ }
4014
+ initializer {
4015
+ dims: 16
4016
+ data_type: 1
4017
+ name: "conv2Dblock2.0.bias"
4018
+ }
4019
+ initializer {
4020
+ dims: 16
4021
+ dims: 1
4022
+ dims: 3
4023
+ dims: 3
4024
+ data_type: 1
4025
+ name: "conv2Dblock2.0.weight"
4026
+ }
4027
+ initializer {
4028
+ dims: 16
4029
+ data_type: 1
4030
+ name: "conv2Dblock2.1.bias"
4031
+ }
4032
+ initializer {
4033
+ dims: 16
4034
+ data_type: 1
4035
+ name: "conv2Dblock2.1.running_mean"
4036
+ }
4037
+ initializer {
4038
+ dims: 16
4039
+ data_type: 1
4040
+ name: "conv2Dblock2.1.running_var"
4041
+ }
4042
+ initializer {
4043
+ dims: 16
4044
+ data_type: 1
4045
+ name: "conv2Dblock2.1.weight"
4046
+ }
4047
+ initializer {
4048
+ dims: 64
4049
+ data_type: 1
4050
+ name: "conv2Dblock2.10.bias"
4051
+ }
4052
+ initializer {
4053
+ dims: 64
4054
+ dims: 32
4055
+ dims: 3
4056
+ dims: 3
4057
+ data_type: 1
4058
+ name: "conv2Dblock2.10.weight"
4059
+ }
4060
+ initializer {
4061
+ dims: 64
4062
+ data_type: 1
4063
+ name: "conv2Dblock2.11.bias"
4064
+ }
4065
+ initializer {
4066
+ dims: 64
4067
+ data_type: 1
4068
+ name: "conv2Dblock2.11.running_mean"
4069
+ }
4070
+ initializer {
4071
+ dims: 64
4072
+ data_type: 1
4073
+ name: "conv2Dblock2.11.running_var"
4074
+ }
4075
+ initializer {
4076
+ dims: 64
4077
+ data_type: 1
4078
+ name: "conv2Dblock2.11.weight"
4079
+ }
4080
+ initializer {
4081
+ dims: 32
4082
+ data_type: 1
4083
+ name: "conv2Dblock2.5.bias"
4084
+ }
4085
+ initializer {
4086
+ dims: 32
4087
+ dims: 16
4088
+ dims: 3
4089
+ dims: 3
4090
+ data_type: 1
4091
+ name: "conv2Dblock2.5.weight"
4092
+ }
4093
+ initializer {
4094
+ dims: 32
4095
+ data_type: 1
4096
+ name: "conv2Dblock2.6.bias"
4097
+ }
4098
+ initializer {
4099
+ dims: 32
4100
+ data_type: 1
4101
+ name: "conv2Dblock2.6.running_mean"
4102
+ }
4103
+ initializer {
4104
+ dims: 32
4105
+ data_type: 1
4106
+ name: "conv2Dblock2.6.running_var"
4107
+ }
4108
+ initializer {
4109
+ dims: 32
4110
+ data_type: 1
4111
+ name: "conv2Dblock2.6.weight"
4112
+ }
4113
+ initializer {
4114
+ dims: 8
4115
+ data_type: 1
4116
+ name: "fc1_linear.bias"
4117
+ }
4118
+ initializer {
4119
+ dims: 8
4120
+ dims: 1064
4121
+ data_type: 1
4122
+ name: "fc1_linear.weight"
4123
+ }
4124
+ initializer {
4125
+ dims: 512
4126
+ data_type: 1
4127
+ name: "transformer_encoder.layers.0.linear1.bias"
4128
+ }
4129
+ initializer {
4130
+ dims: 40
4131
+ data_type: 1
4132
+ name: "transformer_encoder.layers.0.linear2.bias"
4133
+ }
4134
+ initializer {
4135
+ dims: 40
4136
+ data_type: 1
4137
+ name: "transformer_encoder.layers.0.norm1.bias"
4138
+ }
4139
+ initializer {
4140
+ dims: 40
4141
+ data_type: 1
4142
+ name: "transformer_encoder.layers.0.norm1.weight"
4143
+ }
4144
+ initializer {
4145
+ dims: 40
4146
+ data_type: 1
4147
+ name: "transformer_encoder.layers.0.norm2.bias"
4148
+ }
4149
+ initializer {
4150
+ dims: 40
4151
+ data_type: 1
4152
+ name: "transformer_encoder.layers.0.norm2.weight"
4153
+ }
4154
+ initializer {
4155
+ dims: 120
4156
+ data_type: 1
4157
+ name: "transformer_encoder.layers.0.self_attn.in_proj_bias"
4158
+ }
4159
+ initializer {
4160
+ dims: 40
4161
+ data_type: 1
4162
+ name: "transformer_encoder.layers.0.self_attn.out_proj.bias"
4163
+ }
4164
+ initializer {
4165
+ dims: 512
4166
+ data_type: 1
4167
+ name: "transformer_encoder.layers.1.linear1.bias"
4168
+ }
4169
+ initializer {
4170
+ dims: 40
4171
+ data_type: 1
4172
+ name: "transformer_encoder.layers.1.linear2.bias"
4173
+ }
4174
+ initializer {
4175
+ dims: 40
4176
+ data_type: 1
4177
+ name: "transformer_encoder.layers.1.norm1.bias"
4178
+ }
4179
+ initializer {
4180
+ dims: 40
4181
+ data_type: 1
4182
+ name: "transformer_encoder.layers.1.norm1.weight"
4183
+ }
4184
+ initializer {
4185
+ dims: 40
4186
+ data_type: 1
4187
+ name: "transformer_encoder.layers.1.norm2.bias"
4188
+ }
4189
+ initializer {
4190
+ dims: 40
4191
+ data_type: 1
4192
+ name: "transformer_encoder.layers.1.norm2.weight"
4193
+ }
4194
+ initializer {
4195
+ dims: 120
4196
+ data_type: 1
4197
+ name: "transformer_encoder.layers.1.self_attn.in_proj_bias"
4198
+ }
4199
+ initializer {
4200
+ dims: 40
4201
+ data_type: 1
4202
+ name: "transformer_encoder.layers.1.self_attn.out_proj.bias"
4203
+ }
4204
+ initializer {
4205
+ dims: 512
4206
+ data_type: 1
4207
+ name: "transformer_encoder.layers.2.linear1.bias"
4208
+ }
4209
+ initializer {
4210
+ dims: 40
4211
+ data_type: 1
4212
+ name: "transformer_encoder.layers.2.linear2.bias"
4213
+ }
4214
+ initializer {
4215
+ dims: 40
4216
+ data_type: 1
4217
+ name: "transformer_encoder.layers.2.norm1.bias"
4218
+ }
4219
+ initializer {
4220
+ dims: 40
4221
+ data_type: 1
4222
+ name: "transformer_encoder.layers.2.norm1.weight"
4223
+ }
4224
+ initializer {
4225
+ dims: 40
4226
+ data_type: 1
4227
+ name: "transformer_encoder.layers.2.norm2.bias"
4228
+ }
4229
+ initializer {
4230
+ dims: 40
4231
+ data_type: 1
4232
+ name: "transformer_encoder.layers.2.norm2.weight"
4233
+ }
4234
+ initializer {
4235
+ dims: 120
4236
+ data_type: 1
4237
+ name: "transformer_encoder.layers.2.self_attn.in_proj_bias"
4238
+ }
4239
+ initializer {
4240
+ dims: 40
4241
+ data_type: 1
4242
+ name: "transformer_encoder.layers.2.self_attn.out_proj.bias"
4243
+ }
4244
+ initializer {
4245
+ dims: 512
4246
+ data_type: 1
4247
+ name: "transformer_encoder.layers.3.linear1.bias"
4248
+ }
4249
+ initializer {
4250
+ dims: 40
4251
+ data_type: 1
4252
+ name: "transformer_encoder.layers.3.linear2.bias"
4253
+ }
4254
+ initializer {
4255
+ dims: 40
4256
+ data_type: 1
4257
+ name: "transformer_encoder.layers.3.norm1.bias"
4258
+ }
4259
+ initializer {
4260
+ dims: 40
4261
+ data_type: 1
4262
+ name: "transformer_encoder.layers.3.norm1.weight"
4263
+ }
4264
+ initializer {
4265
+ dims: 40
4266
+ data_type: 1
4267
+ name: "transformer_encoder.layers.3.norm2.bias"
4268
+ }
4269
+ initializer {
4270
+ dims: 40
4271
+ data_type: 1
4272
+ name: "transformer_encoder.layers.3.norm2.weight"
4273
+ }
4274
+ initializer {
4275
+ dims: 120
4276
+ data_type: 1
4277
+ name: "transformer_encoder.layers.3.self_attn.in_proj_bias"
4278
+ }
4279
+ initializer {
4280
+ dims: 40
4281
+ data_type: 1
4282
+ name: "transformer_encoder.layers.3.self_attn.out_proj.bias"
4283
+ }
4284
+ input {
4285
+ name: "features"
4286
+ type {
4287
+ tensor_type {
4288
+ elem_type: 1
4289
+ shape {
4290
+ dim {
4291
+ dim_param: "N"
4292
+ }
4293
+ dim {
4294
+ dim_value: 1
4295
+ }
4296
+ dim {
4297
+ dim_value: 40
4298
+ }
4299
+ dim {
4300
+ dim_value: 282
4301
+ }
4302
+ }
4303
+ }
4304
+ }
4305
+ }
4306
+ output {
4307
+ name: "output_logits"
4308
+ type {
4309
+ tensor_type {
4310
+ elem_type: 1
4311
+ shape {
4312
+ dim {
4313
+ dim_param: "N"
4314
+ }
4315
+ dim {
4316
+ dim_value: 8
4317
+ }
4318
+ }
4319
+ }
4320
+ }
4321
+ }
4322
+ output {
4323
+ name: "output_softmax"
4324
+ type {
4325
+ tensor_type {
4326
+ elem_type: 1
4327
+ shape {
4328
+ dim {
4329
+ dim_param: "N"
4330
+ }
4331
+ dim {
4332
+ dim_value: 8
4333
+ }
4334
+ }
4335
+ }
4336
+ }
4337
+ }
4338
+ }
4339
+ opset_import {
4340
+ version: 11
4341
+ }
models/ailia-models/source.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ https://github.com/axinc-ai/ailia-models/tree/master/audio_processing/transformer-cnn-emotion-recognition
2
+
3
+ https://storage.googleapis.com/ailia-models/parallel_is_all_you_want/parallel_is_all_you_want_ep428.onnx
4
+ https://storage.googleapis.com/ailia-models/parallel_is_all_you_want/parallel_is_all_you_want_ep428.onnx.prototxt