niobures commited on
Commit
e6d49fe
·
verified ·
1 Parent(s): be0d417

DTLN (models: ailia-models)

Browse files
.gitattributes CHANGED
@@ -45,3 +45,4 @@ models/DTLN_pytorch[[:space:]](ONNX,[[:space:]]pth,[[:space:]]bin)/samples/audio
45
  models/DTLN_pytorch[[:space:]](ONNX,[[:space:]]pth,[[:space:]]bin)/samples/enhanced.wav filter=lfs diff=lfs merge=lfs -text
46
  models/DTLN_pytorch[[:space:]](ONNX,[[:space:]]pth,[[:space:]]bin)/samples/in.png filter=lfs diff=lfs merge=lfs -text
47
  models/DTLN_pytorch[[:space:]](ONNX,[[:space:]]pth,[[:space:]]bin)/samples/out.png filter=lfs diff=lfs merge=lfs -text
 
 
45
  models/DTLN_pytorch[[:space:]](ONNX,[[:space:]]pth,[[:space:]]bin)/samples/enhanced.wav filter=lfs diff=lfs merge=lfs -text
46
  models/DTLN_pytorch[[:space:]](ONNX,[[:space:]]pth,[[:space:]]bin)/samples/in.png filter=lfs diff=lfs merge=lfs -text
47
  models/DTLN_pytorch[[:space:]](ONNX,[[:space:]]pth,[[:space:]]bin)/samples/out.png filter=lfs diff=lfs merge=lfs -text
48
+ models/ailia-models/code/1221-135766-0000.wav filter=lfs diff=lfs merge=lfs -text
models/ailia-models/code/1221-135766-0000.wav ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:29c67807e2be8a661b38ac883f3197be8d0550b3940eb3c09e24b72c0d0dde48
3
+ size 397964
models/ailia-models/code/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2020 Nils L. Westhausen
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
models/ailia-models/code/README.md ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Dual-signal Transformation LSTM Network
2
+
3
+ ## Input
4
+
5
+ Audio file (16 kHz)
6
+
7
+ ```
8
+ LibriSpeech ASR corpus
9
+ http://www.openslr.org/12
10
+ 1221-135766-0000.wav
11
+ ```
12
+
13
+ ## Output
14
+
15
+ Audio file with noise removed
16
+
17
+ ## Usage
18
+ Automatically downloads the onnx and prototxt files on the first run.
19
+ It is necessary to be connected to the Internet while downloading.
20
+
21
+ For the sample wav,
22
+ ```bash
23
+ $ python3 dtln.py
24
+ ```
25
+
26
+ If you want to specify the audio, put the file path after the `--input` option.
27
+ ```bash
28
+ $ python3 dtln.py --input AUDIO_FILE
29
+ ```
30
+ To run with onnxruntime instead of the ailia SDK, use the --onnx option.
31
+
32
+
33
+
34
+ ## Reference
35
+
36
+ - [Dual-signal Transformation LSTM Network](https://github.com/breizhn/DTLN)
37
+
38
+ ## Framework
39
+
40
+ Pytorch
41
+
42
+ ## Model Format
43
+
44
+ ONNX opset=11
45
+
46
+ ## Netron
47
+
48
+ - [dtln1.onnx.prototxt](https://netron.app/?url=https://storage.googleapis.com/ailia-models/dtln/dtln1.onnx.prototxt)
49
+ - [dtln2.onnx.prototxt](https://netron.app/?url=https://storage.googleapis.com/ailia-models/dtln2/dtln2.onnx.prototxt)
models/ailia-models/code/dtln.py ADDED
@@ -0,0 +1,213 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ import time
3
+ from logging import getLogger
4
+ import onnxruntime
5
+
6
+ import numpy as np
7
+ import soundfile as sf
8
+
9
+ import ailia
10
+
11
+ # import original modules
12
+ sys.path.append('../../util')
13
+ from model_utils import check_and_download_models # noqa
14
+ from arg_utils import get_base_parser, get_savepath, update_parser # noqa
15
+
16
logger = getLogger(__name__)

# ======================
# Parameters
# ======================

WEIGHT1_PATH = "dtln1.onnx"
MODEL1_PATH = "dtln1.onnx.prototxt"
WEIGHT2_PATH = "dtln2.onnx"
MODEL2_PATH = "dtln2.onnx.prototxt"
REMOTE_PATH = 'https://storage.googleapis.com/ailia-models/dtln/'

SAMPLE_RATE = 16000

WAV_PATH = '1221-135766-0000.wav'
SAVE_WAV_PATH = 'output.wav'

# ======================
# Argument Parser Config
# ======================

parser = get_base_parser(
    'Dual-signal Transformation LSTM Network', WAV_PATH, SAVE_WAV_PATH, input_ftype='audio'
)
parser.add_argument(
    '--onnx',
    action='store_true',
    help='By default, the ailia SDK is used, but with this option, you can switch to using ONNX Runtime'
)
parser.add_argument(
    '--shift',
    default=128, type=int,
)
args = update_parser(parser)

# Hop size (in samples) between consecutive processing blocks.
block_shift = args.shift
52
+
53
+ # ======================
54
+ # Main functions
55
+ # ======================
56
+
57
def predict(audio, models):
    """Run two-stage DTLN noise suppression over ``audio``.

    The signal is processed block-wise (block length 512, hop
    ``block_shift``): stage 1 estimates a spectral magnitude mask,
    stage 2 refines the masked block in the time domain. Each model
    carries recurrent LSTM state between blocks through its second
    input/output tensor, and the enhanced blocks are overlap-added
    into the output.

    Args:
        audio: 1-D array of samples (16 kHz expected by the models).
        models: pair [stage1, stage2] of ailia.Net instances or
            onnxruntime.InferenceSession objects (selected by ``args.onnx``).

    Returns:
        1-D numpy array with the enhanced signal, same length as ``audio``.
    """
    block_len = 512
    out_file = np.zeros((len(audio)))
    # rolling input/output buffers for the overlapping blocks
    in_buffer = np.zeros((block_len)).astype('float32')
    out_buffer = np.zeros((block_len)).astype('float32')
    # number of full blocks that fit into the signal
    num_blocks = (audio.shape[0] - (block_len - block_shift)) // block_shift

    interpreter_1 = models[0]
    interpreter_2 = models[1]
    # Only the recurrent-state tensors need zero initialization; the data
    # tensors ('input_2' / 'input_4') are overwritten on every iteration.
    model_inputs_1 = {
        'input_2': np.zeros((1, 1, 257), dtype=np.float32),
        'input_3': np.zeros((1, 2, 128, 2), dtype=np.float32),
    }
    model_inputs_2 = {
        'input_4': np.zeros((1, 1, 512), dtype=np.float32),
        'input_5': np.zeros((1, 2, 128, 2), dtype=np.float32),
    }

    for idx in range(num_blocks):
        # shift values and write the next hop of samples into the buffer
        in_buffer[:-block_shift] = in_buffer[block_shift:]
        in_buffer[-block_shift:] = audio[idx * block_shift:(idx * block_shift) + block_shift]
        # magnitude / phase of the current block
        in_block_fft = np.fft.rfft(in_buffer)
        in_mag = np.abs(in_block_fft)
        in_phase = np.angle(in_block_fft)
        # reshape magnitude to the model's input dimensions
        in_mag = np.reshape(in_mag, (1, 1, -1)).astype('float32')
        model_inputs_1['input_2'] = in_mag
        # stage 1: predict the spectral mask
        if args.onnx:
            model_outputs_1 = interpreter_1.run(
                [], {'input_2': model_inputs_1['input_2'],
                     'input_3': model_inputs_1['input_3']})
        else:
            model_outputs_1 = interpreter_1.run(
                [model_inputs_1['input_2'], model_inputs_1['input_3']])
        out_mask = model_outputs_1[0]
        # feed the recurrent state back for the next block
        model_inputs_1['input_3'] = model_outputs_1[1]
        # back to time domain using the masked magnitude and original phase
        estimated_complex = in_mag * out_mask * np.exp(1j * in_phase)
        estimated_block = np.fft.irfft(estimated_complex)
        estimated_block = np.reshape(estimated_block, (1, 1, -1)).astype('float32')
        model_inputs_2['input_4'] = estimated_block
        # stage 2: refine the block in the time domain
        if args.onnx:
            model_outputs_2 = interpreter_2.run(
                [], {'input_4': model_inputs_2['input_4'],
                     'input_5': model_inputs_2['input_5']})
        else:
            model_outputs_2 = interpreter_2.run(
                [model_inputs_2['input_4'], model_inputs_2['input_5']])
        out_block = model_outputs_2[0]
        # feed the recurrent state back for the next block
        model_inputs_2['input_5'] = model_outputs_2[1]
        # overlap-add into the output buffer
        out_buffer[:-block_shift] = out_buffer[block_shift:]
        out_buffer[-block_shift:] = np.zeros((block_shift))
        out_buffer += np.squeeze(out_block)
        # emit the fully-accumulated leading samples of the buffer
        out_file[idx * block_shift:(idx * block_shift) + block_shift] = out_buffer[:block_shift]
    return out_file
139
+
140
def recognize_from_audio(models):
    """Denoise each input audio file and write the enhanced result.

    Iterates over ``args.input``, rejects files whose sampling rate is not
    ``SAMPLE_RATE``, runs :func:`predict`, and saves the output wav next to
    ``args.savepath``.

    Args:
        models: pair [stage1, stage2] of runtime objects, as built by main().
    """
    for audio_path in args.input:
        logger.info(audio_path)

        # load audio file
        audio, fs = sf.read(audio_path)
        # the models are trained for 16 kHz input only
        if fs != SAMPLE_RATE:
            logger.warning('This model only supports 16k sampling rate.')
            continue

        # inference
        logger.info('Start inference...')
        if args.benchmark:
            logger.info('BENCHMARK mode')
            start = int(round(time.time() * 1000))
            # bug fix: predict() returns a single array, so the previous
            # `output, sr = predict(...)` raised ValueError in benchmark mode
            output = predict(audio, models)
            end = int(round(time.time() * 1000))
            estimation_time = (end - start)
            logger.info(f'\ttotal processing time {estimation_time} ms')
        else:
            output = predict(audio, models)

        # save result
        savepath = get_savepath(args.savepath, audio_path, ext='.wav')
        logger.info(f'saved at : {savepath}')
        sf.write(savepath, output, fs)

    logger.info('Script finished successfully.')
193
+
194
+
195
def main():
    """Fetch model assets if missing, build both runtimes, and process input."""
    # automatic download of the weights / prototxt files on first run
    check_and_download_models(WEIGHT1_PATH, MODEL1_PATH, REMOTE_PATH)
    check_and_download_models(WEIGHT2_PATH, MODEL2_PATH, REMOTE_PATH)

    env_id = args.env_id

    if args.onnx:
        # ONNX Runtime backend
        models = [
            onnxruntime.InferenceSession(WEIGHT1_PATH),
            onnxruntime.InferenceSession(WEIGHT2_PATH),
        ]
    else:
        # ailia SDK backend
        models = [
            ailia.Net(MODEL1_PATH, WEIGHT1_PATH, env_id=env_id),
            ailia.Net(MODEL2_PATH, WEIGHT2_PATH, env_id=env_id),
        ]

    recognize_from_audio(models)


if __name__ == '__main__':
    main()
models/ailia-models/dtln1.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:22b91cae3855e5a0620e66a917ca6c82c58db0e842c770f58d86751c5e8d4ae3
3
+ size 1458237
models/ailia-models/dtln1.onnx.prototxt ADDED
@@ -0,0 +1,705 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ir_version: 6
2
+ producer_name: "OnnxMLTools"
3
+ producer_version: "1.7.0"
4
+ model_version: 0
5
+ graph {
6
+ name: "model_1"
7
+ node {
8
+ input: "input_2"
9
+ output: "lstm_4_X"
10
+ name: "Transpose2"
11
+ op_type: "Transpose"
12
+ domain: ""
13
+ attribute {
14
+ name: "perm"
15
+ type: INTS
16
+ ints: 1
17
+ ints: 0
18
+ ints: 2
19
+ }
20
+ doc_string: ""
21
+ }
22
+ node {
23
+ input: "input_3"
24
+ input: "input_3:01_cropping_start"
25
+ input: "input_3:01_cropping_end"
26
+ input: "input_3:01_cropping_axes"
27
+ input: "input_3:01_cropping_steps"
28
+ output: "input_3:01_cropping0"
29
+ name: "input_3:01_cropping"
30
+ op_type: "Slice"
31
+ domain: ""
32
+ doc_string: ""
33
+ }
34
+ node {
35
+ input: "input_3"
36
+ input: "input_3:01_cropping_start1"
37
+ input: "input_3:01_cropping_end1"
38
+ input: "input_3:01_cropping_axes1"
39
+ input: "input_3:01_cropping_steps1"
40
+ output: "input_3:01_cropping01"
41
+ name: "input_3:01_cropping_1"
42
+ op_type: "Slice"
43
+ domain: ""
44
+ doc_string: ""
45
+ }
46
+ node {
47
+ input: "input_3"
48
+ input: "input_3:01_cropping_start2"
49
+ input: "input_3:01_cropping_end2"
50
+ input: "input_3:01_cropping_axes2"
51
+ input: "input_3:01_cropping_steps2"
52
+ output: "input_3:01_cropping02"
53
+ name: "input_3:01_cropping_2"
54
+ op_type: "Slice"
55
+ domain: ""
56
+ doc_string: ""
57
+ }
58
+ node {
59
+ input: "input_3"
60
+ input: "input_3:01_cropping_start3"
61
+ input: "input_3:01_cropping_end3"
62
+ input: "input_3:01_cropping_axes3"
63
+ input: "input_3:01_cropping_steps3"
64
+ output: "input_3:01_cropping03"
65
+ name: "input_3:01_cropping_3"
66
+ op_type: "Slice"
67
+ domain: ""
68
+ doc_string: ""
69
+ }
70
+ node {
71
+ input: "input_3:01_cropping0"
72
+ output: "strided_slice_3:0"
73
+ name: "input_3:01_squeeze"
74
+ op_type: "Squeeze"
75
+ domain: ""
76
+ attribute {
77
+ name: "axes"
78
+ type: INTS
79
+ ints: 1
80
+ ints: 3
81
+ }
82
+ }
83
+ node {
84
+ input: "input_3:01_cropping01"
85
+ output: "strided_slice_2:0"
86
+ name: "input_3:01_squeeze_1"
87
+ op_type: "Squeeze"
88
+ domain: ""
89
+ attribute {
90
+ name: "axes"
91
+ type: INTS
92
+ ints: 1
93
+ ints: 3
94
+ }
95
+ }
96
+ node {
97
+ input: "input_3:01_cropping02"
98
+ output: "strided_slice_1:0"
99
+ name: "input_3:01_squeeze_2"
100
+ op_type: "Squeeze"
101
+ domain: ""
102
+ attribute {
103
+ name: "axes"
104
+ type: INTS
105
+ ints: 1
106
+ ints: 3
107
+ }
108
+ }
109
+ node {
110
+ input: "input_3:01_cropping03"
111
+ output: "strided_slice:0"
112
+ name: "input_3:01_squeeze_3"
113
+ op_type: "Squeeze"
114
+ domain: ""
115
+ attribute {
116
+ name: "axes"
117
+ type: INTS
118
+ ints: 1
119
+ ints: 3
120
+ }
121
+ }
122
+ node {
123
+ input: "strided_slice_2:0"
124
+ output: "lstm_5_initial_h"
125
+ name: "Unsqueeze"
126
+ op_type: "Unsqueeze"
127
+ domain: ""
128
+ attribute {
129
+ name: "axes"
130
+ type: INTS
131
+ ints: 0
132
+ }
133
+ }
134
+ node {
135
+ input: "strided_slice_3:0"
136
+ output: "lstm_5_initial_c"
137
+ name: "Unsqueeze1"
138
+ op_type: "Unsqueeze"
139
+ domain: ""
140
+ attribute {
141
+ name: "axes"
142
+ type: INTS
143
+ ints: 0
144
+ }
145
+ }
146
+ node {
147
+ input: "strided_slice:0"
148
+ output: "lstm_4_initial_h"
149
+ name: "Unsqueeze2"
150
+ op_type: "Unsqueeze"
151
+ domain: ""
152
+ attribute {
153
+ name: "axes"
154
+ type: INTS
155
+ ints: 0
156
+ }
157
+ }
158
+ node {
159
+ input: "strided_slice_1:0"
160
+ output: "lstm_4_initial_c"
161
+ name: "Unsqueeze3"
162
+ op_type: "Unsqueeze"
163
+ domain: ""
164
+ attribute {
165
+ name: "axes"
166
+ type: INTS
167
+ ints: 0
168
+ }
169
+ }
170
+ node {
171
+ input: "lstm_4_X"
172
+ input: "lstm_4_W"
173
+ input: "lstm_4_R"
174
+ input: "lstm_4_B"
175
+ input: ""
176
+ input: "lstm_4_initial_h"
177
+ input: "lstm_4_initial_c"
178
+ input: ""
179
+ output: "lstm_4_Y"
180
+ output: "lstm_4_Y_h"
181
+ output: "lstm_4_Y_c"
182
+ name: "lstm_4"
183
+ op_type: "LSTM"
184
+ domain: ""
185
+ attribute {
186
+ name: "activations"
187
+ type: STRINGS
188
+ strings: "Sigmoid"
189
+ strings: "Tanh"
190
+ strings: "Tanh"
191
+ }
192
+ attribute {
193
+ name: "direction"
194
+ type: STRING
195
+ s: "forward"
196
+ }
197
+ attribute {
198
+ name: "hidden_size"
199
+ type: INT
200
+ i: 128
201
+ }
202
+ }
203
+ node {
204
+ input: "lstm_4_Y"
205
+ output: "lstm_4_y_transposed"
206
+ name: "Transpose3"
207
+ op_type: "Transpose"
208
+ domain: ""
209
+ attribute {
210
+ name: "perm"
211
+ type: INTS
212
+ ints: 2
213
+ ints: 1
214
+ ints: 0
215
+ ints: 3
216
+ }
217
+ }
218
+ node {
219
+ input: "lstm_4_Y_h"
220
+ output: "lstm_4/Identity_1:0"
221
+ name: "Squeeze4"
222
+ op_type: "Squeeze"
223
+ domain: ""
224
+ attribute {
225
+ name: "axes"
226
+ type: INTS
227
+ ints: 0
228
+ }
229
+ }
230
+ node {
231
+ input: "lstm_4_Y_c"
232
+ output: "lstm_4/Identity_2:0"
233
+ name: "Squeeze5"
234
+ op_type: "Squeeze"
235
+ domain: ""
236
+ attribute {
237
+ name: "axes"
238
+ type: INTS
239
+ ints: 0
240
+ }
241
+ }
242
+ node {
243
+ input: "lstm_4/Identity_1:0"
244
+ output: "stack_unsqueeze00"
245
+ name: "stack_unsqueeze0"
246
+ op_type: "Unsqueeze"
247
+ domain: ""
248
+ attribute {
249
+ name: "axes"
250
+ type: INTS
251
+ ints: 1
252
+ }
253
+ }
254
+ node {
255
+ input: "lstm_4/Identity_2:0"
256
+ output: "stack_1_unsqueeze00"
257
+ name: "stack_1_unsqueeze0"
258
+ op_type: "Unsqueeze"
259
+ domain: ""
260
+ attribute {
261
+ name: "axes"
262
+ type: INTS
263
+ ints: 1
264
+ }
265
+ }
266
+ node {
267
+ input: "lstm_4_y_transposed"
268
+ output: "lstm_4/Identity:0"
269
+ name: "Squeeze3"
270
+ op_type: "Squeeze"
271
+ domain: ""
272
+ attribute {
273
+ name: "axes"
274
+ type: INTS
275
+ ints: 1
276
+ }
277
+ }
278
+ node {
279
+ input: "lstm_4/Identity:0"
280
+ output: "lstm_5_X"
281
+ name: "Transpose"
282
+ op_type: "Transpose"
283
+ domain: ""
284
+ attribute {
285
+ name: "perm"
286
+ type: INTS
287
+ ints: 1
288
+ ints: 0
289
+ ints: 2
290
+ }
291
+ doc_string: ""
292
+ }
293
+ node {
294
+ input: "lstm_5_X"
295
+ input: "lstm_5_W"
296
+ input: "lstm_5_R"
297
+ input: "lstm_5_B"
298
+ input: ""
299
+ input: "lstm_5_initial_h"
300
+ input: "lstm_5_initial_c"
301
+ input: ""
302
+ output: "lstm_5_Y"
303
+ output: "lstm_5_Y_h"
304
+ output: "lstm_5_Y_c"
305
+ name: "lstm_5"
306
+ op_type: "LSTM"
307
+ domain: ""
308
+ attribute {
309
+ name: "activations"
310
+ type: STRINGS
311
+ strings: "Sigmoid"
312
+ strings: "Tanh"
313
+ strings: "Tanh"
314
+ }
315
+ attribute {
316
+ name: "direction"
317
+ type: STRING
318
+ s: "forward"
319
+ }
320
+ attribute {
321
+ name: "hidden_size"
322
+ type: INT
323
+ i: 128
324
+ }
325
+ }
326
+ node {
327
+ input: "lstm_5_Y"
328
+ output: "lstm_5_y_transposed"
329
+ name: "Transpose1"
330
+ op_type: "Transpose"
331
+ domain: ""
332
+ attribute {
333
+ name: "perm"
334
+ type: INTS
335
+ ints: 2
336
+ ints: 1
337
+ ints: 0
338
+ ints: 3
339
+ }
340
+ }
341
+ node {
342
+ input: "lstm_5_Y_h"
343
+ output: "lstm_5/Identity_1:0"
344
+ name: "Squeeze1"
345
+ op_type: "Squeeze"
346
+ domain: ""
347
+ attribute {
348
+ name: "axes"
349
+ type: INTS
350
+ ints: 0
351
+ }
352
+ }
353
+ node {
354
+ input: "lstm_5_Y_c"
355
+ output: "lstm_5/Identity_2:0"
356
+ name: "Squeeze2"
357
+ op_type: "Squeeze"
358
+ domain: ""
359
+ attribute {
360
+ name: "axes"
361
+ type: INTS
362
+ ints: 0
363
+ }
364
+ }
365
+ node {
366
+ input: "lstm_5/Identity_1:0"
367
+ output: "stack_unsqueeze10"
368
+ name: "stack_unsqueeze1"
369
+ op_type: "Unsqueeze"
370
+ domain: ""
371
+ attribute {
372
+ name: "axes"
373
+ type: INTS
374
+ ints: 1
375
+ }
376
+ }
377
+ node {
378
+ input: "lstm_5/Identity_2:0"
379
+ output: "stack_1_unsqueeze10"
380
+ name: "stack_1_unsqueeze1"
381
+ op_type: "Unsqueeze"
382
+ domain: ""
383
+ attribute {
384
+ name: "axes"
385
+ type: INTS
386
+ ints: 1
387
+ }
388
+ }
389
+ node {
390
+ input: "lstm_5_y_transposed"
391
+ output: "lstm_5/Identity:0"
392
+ name: "Squeeze"
393
+ op_type: "Squeeze"
394
+ domain: ""
395
+ attribute {
396
+ name: "axes"
397
+ type: INTS
398
+ ints: 1
399
+ }
400
+ }
401
+ node {
402
+ input: "lstm_5/Identity:0"
403
+ input: "dense_2/kernel:0"
404
+ output: "dense_20"
405
+ name: "dense_2"
406
+ op_type: "MatMul"
407
+ domain: ""
408
+ }
409
+ node {
410
+ input: "stack_unsqueeze00"
411
+ input: "stack_unsqueeze10"
412
+ output: "stack:0"
413
+ name: "stack_concat"
414
+ op_type: "Concat"
415
+ domain: ""
416
+ attribute {
417
+ name: "axis"
418
+ type: INT
419
+ i: 1
420
+ }
421
+ }
422
+ node {
423
+ input: "stack_1_unsqueeze00"
424
+ input: "stack_1_unsqueeze10"
425
+ output: "stack_1:0"
426
+ name: "stack_1_concat"
427
+ op_type: "Concat"
428
+ domain: ""
429
+ attribute {
430
+ name: "axis"
431
+ type: INT
432
+ i: 1
433
+ }
434
+ }
435
+ node {
436
+ input: "stack:0"
437
+ output: "stack_2_unsqueeze00"
438
+ name: "stack_2_unsqueeze0"
439
+ op_type: "Unsqueeze"
440
+ domain: ""
441
+ attribute {
442
+ name: "axes"
443
+ type: INTS
444
+ ints: -1
445
+ }
446
+ }
447
+ node {
448
+ input: "stack_1:0"
449
+ output: "stack_2_unsqueeze10"
450
+ name: "stack_2_unsqueeze1"
451
+ op_type: "Unsqueeze"
452
+ domain: ""
453
+ attribute {
454
+ name: "axes"
455
+ type: INTS
456
+ ints: -1
457
+ }
458
+ }
459
+ node {
460
+ input: "dense_20"
461
+ input: "dense_2/bias:0"
462
+ output: "biased_tensor_name"
463
+ name: "Add"
464
+ op_type: "Add"
465
+ domain: ""
466
+ }
467
+ node {
468
+ input: "biased_tensor_name"
469
+ output: "activation_2"
470
+ name: "Sigmoid"
471
+ op_type: "Sigmoid"
472
+ domain: ""
473
+ doc_string: ""
474
+ }
475
+ node {
476
+ input: "stack_2_unsqueeze00"
477
+ input: "stack_2_unsqueeze10"
478
+ output: "tf_op_layer_stack_2"
479
+ name: "stack_2_concat"
480
+ op_type: "Concat"
481
+ domain: ""
482
+ attribute {
483
+ name: "axis"
484
+ type: INT
485
+ i: -1
486
+ }
487
+ doc_string: ""
488
+ }
489
+ initializer {
490
+ dims: 128
491
+ dims: 257
492
+ data_type: 1
493
+ name: "dense_2/kernel:0"
494
+ }
495
+ initializer {
496
+ dims: 257
497
+ data_type: 1
498
+ name: "dense_2/bias:0"
499
+ }
500
+ initializer {
501
+ dims: 1
502
+ dims: 512
503
+ dims: 128
504
+ data_type: 1
505
+ name: "lstm_5_W"
506
+ }
507
+ initializer {
508
+ dims: 1
509
+ dims: 512
510
+ dims: 128
511
+ data_type: 1
512
+ name: "lstm_5_R"
513
+ }
514
+ initializer {
515
+ dims: 1
516
+ dims: 1024
517
+ data_type: 1
518
+ name: "lstm_5_B"
519
+ }
520
+ initializer {
521
+ dims: 1
522
+ dims: 512
523
+ dims: 257
524
+ data_type: 1
525
+ name: "lstm_4_W"
526
+ }
527
+ initializer {
528
+ dims: 1
529
+ dims: 512
530
+ dims: 128
531
+ data_type: 1
532
+ name: "lstm_4_R"
533
+ }
534
+ initializer {
535
+ dims: 1
536
+ dims: 1024
537
+ data_type: 1
538
+ name: "lstm_4_B"
539
+ }
540
+ initializer {
541
+ dims: 4
542
+ data_type: 7
543
+ name: "input_3:01_cropping_start"
544
+ }
545
+ initializer {
546
+ dims: 4
547
+ data_type: 7
548
+ name: "input_3:01_cropping_end"
549
+ }
550
+ initializer {
551
+ dims: 4
552
+ data_type: 7
553
+ name: "input_3:01_cropping_axes"
554
+ }
555
+ initializer {
556
+ dims: 4
557
+ data_type: 7
558
+ name: "input_3:01_cropping_steps"
559
+ }
560
+ initializer {
561
+ dims: 4
562
+ data_type: 7
563
+ name: "input_3:01_cropping_start1"
564
+ }
565
+ initializer {
566
+ dims: 4
567
+ data_type: 7
568
+ name: "input_3:01_cropping_end1"
569
+ }
570
+ initializer {
571
+ dims: 4
572
+ data_type: 7
573
+ name: "input_3:01_cropping_axes1"
574
+ }
575
+ initializer {
576
+ dims: 4
577
+ data_type: 7
578
+ name: "input_3:01_cropping_steps1"
579
+ }
580
+ initializer {
581
+ dims: 4
582
+ data_type: 7
583
+ name: "input_3:01_cropping_start2"
584
+ }
585
+ initializer {
586
+ dims: 4
587
+ data_type: 7
588
+ name: "input_3:01_cropping_end2"
589
+ }
590
+ initializer {
591
+ dims: 4
592
+ data_type: 7
593
+ name: "input_3:01_cropping_axes2"
594
+ }
595
+ initializer {
596
+ dims: 4
597
+ data_type: 7
598
+ name: "input_3:01_cropping_steps2"
599
+ }
600
+ initializer {
601
+ dims: 4
602
+ data_type: 7
603
+ name: "input_3:01_cropping_start3"
604
+ }
605
+ initializer {
606
+ dims: 4
607
+ data_type: 7
608
+ name: "input_3:01_cropping_end3"
609
+ }
610
+ initializer {
611
+ dims: 4
612
+ data_type: 7
613
+ name: "input_3:01_cropping_axes3"
614
+ }
615
+ initializer {
616
+ dims: 4
617
+ data_type: 7
618
+ name: "input_3:01_cropping_steps3"
619
+ }
620
+ input {
621
+ name: "input_2"
622
+ type {
623
+ tensor_type {
624
+ elem_type: 1
625
+ shape {
626
+ dim {
627
+ dim_value: 1
628
+ }
629
+ dim {
630
+ dim_value: 1
631
+ }
632
+ dim {
633
+ dim_value: 257
634
+ }
635
+ }
636
+ }
637
+ }
638
+ }
639
+ input {
640
+ name: "input_3"
641
+ type {
642
+ tensor_type {
643
+ elem_type: 1
644
+ shape {
645
+ dim {
646
+ dim_value: 1
647
+ }
648
+ dim {
649
+ dim_value: 2
650
+ }
651
+ dim {
652
+ dim_value: 128
653
+ }
654
+ dim {
655
+ dim_value: 2
656
+ }
657
+ }
658
+ }
659
+ }
660
+ }
661
+ output {
662
+ name: "activation_2"
663
+ type {
664
+ tensor_type {
665
+ elem_type: 1
666
+ shape {
667
+ dim {
668
+ dim_value: 1
669
+ }
670
+ dim {
671
+ dim_value: 1
672
+ }
673
+ dim {
674
+ dim_value: 257
675
+ }
676
+ }
677
+ }
678
+ }
679
+ }
680
+ output {
681
+ name: "tf_op_layer_stack_2"
682
+ type {
683
+ tensor_type {
684
+ elem_type: 1
685
+ shape {
686
+ dim {
687
+ dim_value: 1
688
+ }
689
+ dim {
690
+ dim_value: 2
691
+ }
692
+ dim {
693
+ dim_value: 128
694
+ }
695
+ dim {
696
+ dim_value: 2
697
+ }
698
+ }
699
+ }
700
+ }
701
+ }
702
+ }
703
+ opset_import {
704
+ version: 11
705
+ }
models/ailia-models/dtln2.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e20c92f9233fccf29cddf86970d0d0161a03aebccc26d6f4d5639c4d5ec2e639
3
+ size 2510010
models/ailia-models/dtln2.onnx.prototxt ADDED
@@ -0,0 +1,1123 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ir_version: 6
2
+ producer_name: "OnnxMLTools"
3
+ producer_version: "1.7.0"
4
+ model_version: 0
5
+ graph {
6
+ name: "model_2"
7
+ node {
8
+ input: "input_4"
9
+ output: "adjusted_input1"
10
+ name: "Transpose2"
11
+ op_type: "Transpose"
12
+ domain: ""
13
+ attribute {
14
+ name: "perm"
15
+ type: INTS
16
+ ints: 0
17
+ ints: 2
18
+ ints: 1
19
+ }
20
+ doc_string: ""
21
+ }
22
+ node {
23
+ input: "input_5"
24
+ input: "input_5:01_cropping_start"
25
+ input: "input_5:01_cropping_end"
26
+ input: "input_5:01_cropping_axes"
27
+ input: "input_5:01_cropping_steps"
28
+ output: "input_5:01_cropping0"
29
+ name: "input_5:01_cropping"
30
+ op_type: "Slice"
31
+ domain: ""
32
+ doc_string: ""
33
+ }
34
+ node {
35
+ input: "input_5"
36
+ input: "input_5:01_cropping_start1"
37
+ input: "input_5:01_cropping_end1"
38
+ input: "input_5:01_cropping_axes1"
39
+ input: "input_5:01_cropping_steps1"
40
+ output: "input_5:01_cropping01"
41
+ name: "input_5:01_cropping_1"
42
+ op_type: "Slice"
43
+ domain: ""
44
+ doc_string: ""
45
+ }
46
+ node {
47
+ input: "input_5"
48
+ input: "input_5:01_cropping_start2"
49
+ input: "input_5:01_cropping_end2"
50
+ input: "input_5:01_cropping_axes2"
51
+ input: "input_5:01_cropping_steps2"
52
+ output: "input_5:01_cropping02"
53
+ name: "input_5:01_cropping_2"
54
+ op_type: "Slice"
55
+ domain: ""
56
+ doc_string: ""
57
+ }
58
+ node {
59
+ input: "input_5"
60
+ input: "input_5:01_cropping_start3"
61
+ input: "input_5:01_cropping_end3"
62
+ input: "input_5:01_cropping_axes3"
63
+ input: "input_5:01_cropping_steps3"
64
+ output: "input_5:01_cropping03"
65
+ name: "input_5:01_cropping_3"
66
+ op_type: "Slice"
67
+ domain: ""
68
+ doc_string: ""
69
+ }
70
+ node {
71
+ input: "adjusted_input1"
72
+ input: "conv1d_2/kernel:0"
73
+ output: "convolution_output1"
74
+ name: "conv1d_2"
75
+ op_type: "Conv"
76
+ domain: ""
77
+ attribute {
78
+ name: "auto_pad"
79
+ type: STRING
80
+ s: "VALID"
81
+ }
82
+ attribute {
83
+ name: "dilations"
84
+ type: INTS
85
+ ints: 1
86
+ }
87
+ attribute {
88
+ name: "group"
89
+ type: INT
90
+ i: 1
91
+ }
92
+ attribute {
93
+ name: "kernel_shape"
94
+ type: INTS
95
+ ints: 1
96
+ }
97
+ attribute {
98
+ name: "strides"
99
+ type: INTS
100
+ ints: 1
101
+ }
102
+ }
103
+ node {
104
+ input: "input_5:01_cropping0"
105
+ output: "model_2/tf_op_layer_strided_slice_5/strided_slice_5:0"
106
+ name: "input_5:01_squeeze"
107
+ op_type: "Squeeze"
108
+ domain: ""
109
+ attribute {
110
+ name: "axes"
111
+ type: INTS
112
+ ints: 1
113
+ ints: 3
114
+ }
115
+ }
116
+ node {
117
+ input: "input_5:01_cropping01"
118
+ output: "model_2/tf_op_layer_strided_slice_7/strided_slice_7:0"
119
+ name: "input_5:01_squeeze_1"
120
+ op_type: "Squeeze"
121
+ domain: ""
122
+ attribute {
123
+ name: "axes"
124
+ type: INTS
125
+ ints: 1
126
+ ints: 3
127
+ }
128
+ }
129
+ node {
130
+ input: "input_5:01_cropping02"
131
+ output: "model_2/tf_op_layer_strided_slice_4/strided_slice_4:0"
132
+ name: "input_5:01_squeeze_2"
133
+ op_type: "Squeeze"
134
+ domain: ""
135
+ attribute {
136
+ name: "axes"
137
+ type: INTS
138
+ ints: 1
139
+ ints: 3
140
+ }
141
+ }
142
+ node {
143
+ input: "input_5:01_cropping03"
144
+ output: "model_2/tf_op_layer_strided_slice_6/strided_slice_6:0"
145
+ name: "input_5:01_squeeze_3"
146
+ op_type: "Squeeze"
147
+ domain: ""
148
+ attribute {
149
+ name: "axes"
150
+ type: INTS
151
+ ints: 1
152
+ ints: 3
153
+ }
154
+ }
155
+ node {
156
+ input: "convolution_output1"
157
+ output: "transpose_output1"
158
+ name: "Transpose3"
159
+ op_type: "Transpose"
160
+ domain: ""
161
+ attribute {
162
+ name: "perm"
163
+ type: INTS
164
+ ints: 0
165
+ ints: 2
166
+ ints: 1
167
+ }
168
+ }
169
+ node {
170
+ input: "model_2/tf_op_layer_strided_slice_4/strided_slice_4:0"
171
+ input: "model_2/lstm_6/MatMul_1/ReadVariableOp/resource:0"
172
+ output: "model_2/lstm_6/MatMul_1:0"
173
+ name: "model_2/lstm_6/MatMul_1_add"
174
+ op_type: "MatMul"
175
+ domain: ""
176
+ doc_string: ""
177
+ }
178
+ node {
179
+ input: "model_2/tf_op_layer_strided_slice_6/strided_slice_6:0"
180
+ input: "model_2/lstm_7/MatMul_1/ReadVariableOp/resource:0"
181
+ output: "model_2/lstm_7/MatMul_1:0"
182
+ name: "model_2/lstm_7/MatMul_1_add"
183
+ op_type: "MatMul"
184
+ domain: ""
185
+ doc_string: ""
186
+ }
187
+ node {
188
+ input: "transpose_output1"
189
+ output: "model_2/instant_layer_normalization_1/Mean:0"
190
+ name: "model_2/instant_layer_normalization_1/Mean_reduce_min"
191
+ op_type: "ReduceMean"
192
+ domain: ""
193
+ attribute {
194
+ name: "axes"
195
+ type: INTS
196
+ ints: -1
197
+ }
198
+ attribute {
199
+ name: "keepdims"
200
+ type: INT
201
+ i: 1
202
+ }
203
+ doc_string: ""
204
+ }
205
+ node {
206
+ input: "transpose_output1"
207
+ input: "model_2/instant_layer_normalization_1/Mean:0"
208
+ output: "model_2/instant_layer_normalization_1/sub_1:0"
209
+ name: "model_2/instant_layer_normalization_1/sub_1"
210
+ op_type: "Sub"
211
+ domain: ""
212
+ doc_string: ""
213
+ }
214
+ node {
215
+ input: "transpose_output1"
216
+ input: "model_2/instant_layer_normalization_1/Mean:0"
217
+ output: "model_2/instant_layer_normalization_1/sub:0"
218
+ name: "model_2/instant_layer_normalization_1/sub"
219
+ op_type: "Sub"
220
+ domain: ""
221
+ doc_string: ""
222
+ }
223
+ node {
224
+ input: "model_2/instant_layer_normalization_1/sub:0"
225
+ input: "model_2/instant_layer_normalization_1/sub:0"
226
+ output: "model_2/instant_layer_normalization_1/Square:0"
227
+ name: "model_2/instant_layer_normalization_1/Square"
228
+ op_type: "Mul"
229
+ domain: ""
230
+ }
231
+ node {
232
+ input: "model_2/instant_layer_normalization_1/Square:0"
233
+ output: "model_2/instant_layer_normalization_1/Mean_1:0"
234
+ name: "model_2/instant_layer_normalization_1/Mean_1_reduce_min"
235
+ op_type: "ReduceMean"
236
+ domain: ""
237
+ attribute {
238
+ name: "axes"
239
+ type: INTS
240
+ ints: -1
241
+ }
242
+ attribute {
243
+ name: "keepdims"
244
+ type: INT
245
+ i: 1
246
+ }
247
+ }
248
+ node {
249
+ input: "model_2/instant_layer_normalization_1/Mean_1:0"
250
+ input: "model_2/instant_layer_normalization_1/add/y:0"
251
+ output: "model_2/instant_layer_normalization_1/add:0"
252
+ name: "model_2/instant_layer_normalization_1/add"
253
+ op_type: "Add"
254
+ domain: ""
255
+ }
256
+ node {
257
+ input: "model_2/instant_layer_normalization_1/add:0"
258
+ output: "model_2/instant_layer_normalization_1/Sqrt:0"
259
+ name: "model_2/instant_layer_normalization_1/Sqrt"
260
+ op_type: "Sqrt"
261
+ domain: ""
262
+ }
263
+ node {
264
+ input: "model_2/instant_layer_normalization_1/sub_1:0"
265
+ input: "model_2/instant_layer_normalization_1/Sqrt:0"
266
+ output: "model_2/instant_layer_normalization_1/truediv:0"
267
+ name: "model_2/instant_layer_normalization_1/truediv"
268
+ op_type: "Div"
269
+ domain: ""
270
+ }
271
+ node {
272
+ input: "model_2/instant_layer_normalization_1/truediv:0"
273
+ input: "model_2/instant_layer_normalization_1/mul/ReadVariableOp/resource:0"
274
+ output: "model_2/instant_layer_normalization_1/mul:0"
275
+ name: "model_2/instant_layer_normalization_1/mul"
276
+ op_type: "Mul"
277
+ domain: ""
278
+ doc_string: ""
279
+ }
280
+ node {
281
+ input: "model_2/instant_layer_normalization_1/mul:0"
282
+ input: "model_2/instant_layer_normalization_1/add_1/ReadVariableOp/resource:0"
283
+ output: "model_2/instant_layer_normalization_1/add_1:0"
284
+ name: "model_2/instant_layer_normalization_1/add_1"
285
+ op_type: "Add"
286
+ domain: ""
287
+ doc_string: ""
288
+ }
289
+ node {
290
+ input: "model_2/instant_layer_normalization_1/add_1:0"
291
+ output: "model_2/lstm_6/transpose:0"
292
+ name: "model_2/lstm_6/transpose"
293
+ op_type: "Transpose"
294
+ domain: ""
295
+ attribute {
296
+ name: "perm"
297
+ type: INTS
298
+ ints: 1
299
+ ints: 0
300
+ ints: 2
301
+ }
302
+ }
303
+ node {
304
+ input: "model_2/lstm_6/transpose:0"
305
+ output: "model_2/lstm_6/unstack_split0"
306
+ name: "model_2/lstm_6/unstack_split"
307
+ op_type: "Split"
308
+ domain: ""
309
+ attribute {
310
+ name: "axis"
311
+ type: INT
312
+ i: 0
313
+ }
314
+ }
315
+ node {
316
+ input: "model_2/lstm_6/unstack_split0"
317
+ output: "model_2/lstm_6/unstack:0"
318
+ name: "model_2/lstm_6/unstack_squeeze_0"
319
+ op_type: "Squeeze"
320
+ domain: ""
321
+ attribute {
322
+ name: "axes"
323
+ type: INTS
324
+ ints: 0
325
+ }
326
+ }
327
+ node {
328
+ input: "model_2/lstm_6/unstack:0"
329
+ input: "model_2/lstm_6/MatMul/ReadVariableOp/resource:0"
330
+ output: "model_2/lstm_6/MatMul:0"
331
+ name: "model_2/lstm_6/MatMul_add"
332
+ op_type: "MatMul"
333
+ domain: ""
334
+ doc_string: ""
335
+ }
336
+ node {
337
+ input: "model_2/lstm_6/MatMul:0"
338
+ input: "model_2/lstm_6/MatMul_1:0"
339
+ output: "model_2/lstm_6/add:0"
340
+ name: "model_2/lstm_6/add"
341
+ op_type: "Add"
342
+ domain: ""
343
+ }
344
+ node {
345
+ input: "model_2/lstm_6/add:0"
346
+ input: "model_2/lstm_6/BiasAdd/ReadVariableOp/resource:0"
347
+ output: "model_2/lstm_6/BiasAdd:0"
348
+ name: "model_2/lstm_6/BiasAdd_add"
349
+ op_type: "Add"
350
+ domain: ""
351
+ doc_string: ""
352
+ }
353
+ node {
354
+ input: "model_2/lstm_6/BiasAdd:0"
355
+ output: "model_2/lstm_6/split:0"
356
+ output: "model_2/lstm_6/split:1"
357
+ output: "model_2/lstm_6/split:2"
358
+ output: "model_2/lstm_6/split:3"
359
+ name: "model_2/lstm_6/split/split_dim:0_split"
360
+ op_type: "Split"
361
+ domain: ""
362
+ attribute {
363
+ name: "axis"
364
+ type: INT
365
+ i: 1
366
+ }
367
+ }
368
+ node {
369
+ input: "model_2/lstm_6/split:3"
370
+ output: "model_2/lstm_6/Sigmoid_2:0"
371
+ name: "model_2/lstm_6/Sigmoid_2"
372
+ op_type: "Sigmoid"
373
+ domain: ""
374
+ }
375
+ node {
376
+ input: "model_2/lstm_6/split:1"
377
+ output: "model_2/lstm_6/Sigmoid_1:0"
378
+ name: "model_2/lstm_6/Sigmoid_1"
379
+ op_type: "Sigmoid"
380
+ domain: ""
381
+ }
382
+ node {
383
+ input: "model_2/lstm_6/split:0"
384
+ output: "model_2/lstm_6/Sigmoid:0"
385
+ name: "model_2/lstm_6/Sigmoid"
386
+ op_type: "Sigmoid"
387
+ domain: ""
388
+ }
389
+ node {
390
+ input: "model_2/lstm_6/split:2"
391
+ output: "model_2/lstm_6/Tanh:0"
392
+ name: "model_2/lstm_6/Tanh"
393
+ op_type: "Tanh"
394
+ domain: ""
395
+ }
396
+ node {
397
+ input: "model_2/lstm_6/Sigmoid_1:0"
398
+ input: "model_2/tf_op_layer_strided_slice_5/strided_slice_5:0"
399
+ output: "model_2/lstm_6/mul:0"
400
+ name: "model_2/lstm_6/mul"
401
+ op_type: "Mul"
402
+ domain: ""
403
+ }
404
+ node {
405
+ input: "model_2/lstm_6/Sigmoid:0"
406
+ input: "model_2/lstm_6/Tanh:0"
407
+ output: "model_2/lstm_6/mul_1:0"
408
+ name: "model_2/lstm_6/mul_1"
409
+ op_type: "Mul"
410
+ domain: ""
411
+ }
412
+ node {
413
+ input: "model_2/lstm_6/mul:0"
414
+ input: "model_2/lstm_6/mul_1:0"
415
+ output: "model_2/lstm_6/add_1:0"
416
+ name: "model_2/lstm_6/add_1"
417
+ op_type: "Add"
418
+ domain: ""
419
+ }
420
+ node {
421
+ input: "model_2/lstm_6/add_1:0"
422
+ output: "model_2/tf_op_layer_stack_4/stack_4_unsqueeze00"
423
+ name: "model_2/tf_op_layer_stack_4/stack_4_unsqueeze0"
424
+ op_type: "Unsqueeze"
425
+ domain: ""
426
+ attribute {
427
+ name: "axes"
428
+ type: INTS
429
+ ints: 1
430
+ }
431
+ }
432
+ node {
433
+ input: "model_2/lstm_6/add_1:0"
434
+ output: "model_2/lstm_6/Tanh_1:0"
435
+ name: "model_2/lstm_6/Tanh_1"
436
+ op_type: "Tanh"
437
+ domain: ""
438
+ }
439
+ node {
440
+ input: "model_2/lstm_6/Sigmoid_2:0"
441
+ input: "model_2/lstm_6/Tanh_1:0"
442
+ output: "model_2/lstm_6/mul_2:0"
443
+ name: "model_2/lstm_6/mul_2"
444
+ op_type: "Mul"
445
+ domain: ""
446
+ }
447
+ node {
448
+ input: "model_2/lstm_6/mul_2:0"
449
+ output: "model_2/tf_op_layer_stack_3/stack_3_unsqueeze00"
450
+ name: "model_2/tf_op_layer_stack_3/stack_3_unsqueeze0"
451
+ op_type: "Unsqueeze"
452
+ domain: ""
453
+ attribute {
454
+ name: "axes"
455
+ type: INTS
456
+ ints: 1
457
+ }
458
+ }
459
+ node {
460
+ input: "model_2/lstm_6/mul_2:0"
461
+ output: "model_2/lstm_6/stack_unsqueeze00"
462
+ name: "model_2/lstm_6/stack_unsqueeze0"
463
+ op_type: "Unsqueeze"
464
+ domain: ""
465
+ attribute {
466
+ name: "axes"
467
+ type: INTS
468
+ ints: 0
469
+ }
470
+ }
471
+ node {
472
+ input: "model_2/lstm_6/stack_unsqueeze00"
473
+ output: "model_2/lstm_6/stack:0"
474
+ name: "model_2/lstm_6/stack_concat"
475
+ op_type: "Concat"
476
+ domain: ""
477
+ attribute {
478
+ name: "axis"
479
+ type: INT
480
+ i: 0
481
+ }
482
+ }
483
+ node {
484
+ input: "model_2/lstm_6/stack:0"
485
+ output: "model_2/lstm_7/unstack_split0"
486
+ name: "model_2/lstm_7/unstack_split"
487
+ op_type: "Split"
488
+ domain: ""
489
+ attribute {
490
+ name: "axis"
491
+ type: INT
492
+ i: 0
493
+ }
494
+ doc_string: ""
495
+ }
496
+ node {
497
+ input: "model_2/lstm_7/unstack_split0"
498
+ output: "model_2/lstm_7/unstack:0"
499
+ name: "model_2/lstm_7/unstack_squeeze_0"
500
+ op_type: "Squeeze"
501
+ domain: ""
502
+ attribute {
503
+ name: "axes"
504
+ type: INTS
505
+ ints: 0
506
+ }
507
+ }
508
+ node {
509
+ input: "model_2/lstm_7/unstack:0"
510
+ input: "model_2/lstm_7/MatMul/ReadVariableOp/resource:0"
511
+ output: "model_2/lstm_7/MatMul:0"
512
+ name: "model_2/lstm_7/MatMul_add"
513
+ op_type: "MatMul"
514
+ domain: ""
515
+ doc_string: ""
516
+ }
517
+ node {
518
+ input: "model_2/lstm_7/MatMul:0"
519
+ input: "model_2/lstm_7/MatMul_1:0"
520
+ output: "model_2/lstm_7/add:0"
521
+ name: "model_2/lstm_7/add"
522
+ op_type: "Add"
523
+ domain: ""
524
+ }
525
+ node {
526
+ input: "model_2/lstm_7/add:0"
527
+ input: "model_2/lstm_7/BiasAdd/ReadVariableOp/resource:0"
528
+ output: "model_2/lstm_7/BiasAdd:0"
529
+ name: "model_2/lstm_7/BiasAdd_add"
530
+ op_type: "Add"
531
+ domain: ""
532
+ doc_string: ""
533
+ }
534
+ node {
535
+ input: "model_2/lstm_7/BiasAdd:0"
536
+ output: "model_2/lstm_7/split:0"
537
+ output: "model_2/lstm_7/split:1"
538
+ output: "model_2/lstm_7/split:2"
539
+ output: "model_2/lstm_7/split:3"
540
+ name: "model_2/lstm_7/split/split_dim:0_split"
541
+ op_type: "Split"
542
+ domain: ""
543
+ attribute {
544
+ name: "axis"
545
+ type: INT
546
+ i: 1
547
+ }
548
+ }
549
+ node {
550
+ input: "model_2/lstm_7/split:3"
551
+ output: "model_2/lstm_7/Sigmoid_2:0"
552
+ name: "model_2/lstm_7/Sigmoid_2"
553
+ op_type: "Sigmoid"
554
+ domain: ""
555
+ }
556
+ node {
557
+ input: "model_2/lstm_7/split:1"
558
+ output: "model_2/lstm_7/Sigmoid_1:0"
559
+ name: "model_2/lstm_7/Sigmoid_1"
560
+ op_type: "Sigmoid"
561
+ domain: ""
562
+ }
563
+ node {
564
+ input: "model_2/lstm_7/split:0"
565
+ output: "model_2/lstm_7/Sigmoid:0"
566
+ name: "model_2/lstm_7/Sigmoid"
567
+ op_type: "Sigmoid"
568
+ domain: ""
569
+ }
570
+ node {
571
+ input: "model_2/lstm_7/split:2"
572
+ output: "model_2/lstm_7/Tanh:0"
573
+ name: "model_2/lstm_7/Tanh"
574
+ op_type: "Tanh"
575
+ domain: ""
576
+ }
577
+ node {
578
+ input: "model_2/lstm_7/Sigmoid_1:0"
579
+ input: "model_2/tf_op_layer_strided_slice_7/strided_slice_7:0"
580
+ output: "model_2/lstm_7/mul:0"
581
+ name: "model_2/lstm_7/mul"
582
+ op_type: "Mul"
583
+ domain: ""
584
+ }
585
+ node {
586
+ input: "model_2/lstm_7/Sigmoid:0"
587
+ input: "model_2/lstm_7/Tanh:0"
588
+ output: "model_2/lstm_7/mul_1:0"
589
+ name: "model_2/lstm_7/mul_1"
590
+ op_type: "Mul"
591
+ domain: ""
592
+ }
593
+ node {
594
+ input: "model_2/lstm_7/mul:0"
595
+ input: "model_2/lstm_7/mul_1:0"
596
+ output: "model_2/lstm_7/add_1:0"
597
+ name: "model_2/lstm_7/add_1"
598
+ op_type: "Add"
599
+ domain: ""
600
+ }
601
+ node {
602
+ input: "model_2/lstm_7/add_1:0"
603
+ output: "model_2/tf_op_layer_stack_4/stack_4_unsqueeze10"
604
+ name: "model_2/tf_op_layer_stack_4/stack_4_unsqueeze1"
605
+ op_type: "Unsqueeze"
606
+ domain: ""
607
+ attribute {
608
+ name: "axes"
609
+ type: INTS
610
+ ints: 1
611
+ }
612
+ }
613
+ node {
614
+ input: "model_2/lstm_7/add_1:0"
615
+ output: "model_2/lstm_7/Tanh_1:0"
616
+ name: "model_2/lstm_7/Tanh_1"
617
+ op_type: "Tanh"
618
+ domain: ""
619
+ }
620
+ node {
621
+ input: "model_2/tf_op_layer_stack_4/stack_4_unsqueeze00"
622
+ input: "model_2/tf_op_layer_stack_4/stack_4_unsqueeze10"
623
+ output: "model_2/tf_op_layer_stack_4/stack_4:0"
624
+ name: "model_2/tf_op_layer_stack_4/stack_4_concat"
625
+ op_type: "Concat"
626
+ domain: ""
627
+ attribute {
628
+ name: "axis"
629
+ type: INT
630
+ i: 1
631
+ }
632
+ }
633
+ node {
634
+ input: "model_2/lstm_7/Sigmoid_2:0"
635
+ input: "model_2/lstm_7/Tanh_1:0"
636
+ output: "model_2/lstm_7/mul_2:0"
637
+ name: "model_2/lstm_7/mul_2"
638
+ op_type: "Mul"
639
+ domain: ""
640
+ }
641
+ node {
642
+ input: "model_2/tf_op_layer_stack_4/stack_4:0"
643
+ output: "model_2/tf_op_layer_stack_5/stack_5_unsqueeze10"
644
+ name: "model_2/tf_op_layer_stack_5/stack_5_unsqueeze1"
645
+ op_type: "Unsqueeze"
646
+ domain: ""
647
+ attribute {
648
+ name: "axes"
649
+ type: INTS
650
+ ints: -1
651
+ }
652
+ }
653
+ node {
654
+ input: "model_2/lstm_7/mul_2:0"
655
+ output: "model_2/tf_op_layer_stack_3/stack_3_unsqueeze10"
656
+ name: "model_2/tf_op_layer_stack_3/stack_3_unsqueeze1"
657
+ op_type: "Unsqueeze"
658
+ domain: ""
659
+ attribute {
660
+ name: "axes"
661
+ type: INTS
662
+ ints: 1
663
+ }
664
+ }
665
+ node {
666
+ input: "model_2/lstm_7/mul_2:0"
667
+ output: "model_2/lstm_7/stack_unsqueeze00"
668
+ name: "model_2/lstm_7/stack_unsqueeze0"
669
+ op_type: "Unsqueeze"
670
+ domain: ""
671
+ attribute {
672
+ name: "axes"
673
+ type: INTS
674
+ ints: 0
675
+ }
676
+ }
677
+ node {
678
+ input: "model_2/tf_op_layer_stack_3/stack_3_unsqueeze00"
679
+ input: "model_2/tf_op_layer_stack_3/stack_3_unsqueeze10"
680
+ output: "model_2/tf_op_layer_stack_3/stack_3:0"
681
+ name: "model_2/tf_op_layer_stack_3/stack_3_concat"
682
+ op_type: "Concat"
683
+ domain: ""
684
+ attribute {
685
+ name: "axis"
686
+ type: INT
687
+ i: 1
688
+ }
689
+ }
690
+ node {
691
+ input: "model_2/lstm_7/stack_unsqueeze00"
692
+ output: "model_2/lstm_7/stack:0"
693
+ name: "model_2/lstm_7/stack_concat"
694
+ op_type: "Concat"
695
+ domain: ""
696
+ attribute {
697
+ name: "axis"
698
+ type: INT
699
+ i: 0
700
+ }
701
+ }
702
+ node {
703
+ input: "model_2/tf_op_layer_stack_3/stack_3:0"
704
+ output: "model_2/tf_op_layer_stack_5/stack_5_unsqueeze00"
705
+ name: "model_2/tf_op_layer_stack_5/stack_5_unsqueeze0"
706
+ op_type: "Unsqueeze"
707
+ domain: ""
708
+ attribute {
709
+ name: "axes"
710
+ type: INTS
711
+ ints: -1
712
+ }
713
+ }
714
+ node {
715
+ input: "model_2/lstm_7/stack:0"
716
+ output: "model_2/lstm_7/transpose_1:0"
717
+ name: "model_2/lstm_7/transpose_1"
718
+ op_type: "Transpose"
719
+ domain: ""
720
+ attribute {
721
+ name: "perm"
722
+ type: INTS
723
+ ints: 1
724
+ ints: 0
725
+ ints: 2
726
+ }
727
+ }
728
+ node {
729
+ input: "model_2/tf_op_layer_stack_5/stack_5_unsqueeze00"
730
+ input: "model_2/tf_op_layer_stack_5/stack_5_unsqueeze10"
731
+ output: "tf_op_layer_stack_5"
732
+ name: "model_2/tf_op_layer_stack_5/stack_5_concat"
733
+ op_type: "Concat"
734
+ domain: ""
735
+ attribute {
736
+ name: "axis"
737
+ type: INT
738
+ i: -1
739
+ }
740
+ doc_string: ""
741
+ }
742
+ node {
743
+ input: "model_2/lstm_7/transpose_1:0"
744
+ input: "shape_tensor1"
745
+ output: "model_2/dense_3/Tensordot/Reshape:0"
746
+ name: "model_2/dense_3/Tensordot/Reshape_reshape"
747
+ op_type: "Reshape"
748
+ domain: ""
749
+ doc_string: ""
750
+ }
751
+ node {
752
+ input: "model_2/dense_3/Tensordot/Reshape:0"
753
+ input: "model_2/dense_3/Tensordot/Reshape_1:0"
754
+ output: "model_2/dense_3/Tensordot/MatMul:0"
755
+ name: "model_2/dense_3/Tensordot/MatMul_add"
756
+ op_type: "MatMul"
757
+ domain: ""
758
+ }
759
+ node {
760
+ input: "model_2/dense_3/Tensordot/MatMul:0"
761
+ input: "shape_tensor"
762
+ output: "model_2/dense_3/Tensordot:0"
763
+ name: "model_2/dense_3/Tensordot_reshape"
764
+ op_type: "Reshape"
765
+ domain: ""
766
+ }
767
+ node {
768
+ input: "model_2/dense_3/Tensordot:0"
769
+ input: "model_2/dense_3/BiasAdd/ReadVariableOp/resource:0"
770
+ output: "model_2/dense_3/BiasAdd:0"
771
+ name: "model_2/dense_3/BiasAdd_add"
772
+ op_type: "Add"
773
+ domain: ""
774
+ doc_string: ""
775
+ }
776
+ node {
777
+ input: "model_2/dense_3/BiasAdd:0"
778
+ output: "model_2/activation_3/Sigmoid:0"
779
+ name: "model_2/activation_3/Sigmoid"
780
+ op_type: "Sigmoid"
781
+ domain: ""
782
+ }
783
+ node {
784
+ input: "transpose_output1"
785
+ input: "model_2/activation_3/Sigmoid:0"
786
+ output: "model_2/multiply_2/mul:0"
787
+ name: "model_2/multiply_2/mul"
788
+ op_type: "Mul"
789
+ domain: ""
790
+ doc_string: ""
791
+ }
792
+ node {
793
+ input: "model_2/multiply_2/mul:0"
794
+ input: "model_2/conv1d_3/Pad_pad_pads"
795
+ output: "model_2/conv1d_3/Pad_pad0"
796
+ name: "model_2/conv1d_3/Pad_pad"
797
+ op_type: "Pad"
798
+ domain: ""
799
+ }
800
+ node {
801
+ input: "model_2/conv1d_3/Pad_pad0"
802
+ output: "adjusted_input"
803
+ name: "Transpose"
804
+ op_type: "Transpose"
805
+ domain: ""
806
+ attribute {
807
+ name: "perm"
808
+ type: INTS
809
+ ints: 0
810
+ ints: 2
811
+ ints: 1
812
+ }
813
+ doc_string: ""
814
+ }
815
+ node {
816
+ input: "adjusted_input"
817
+ input: "conv1d_3/kernel:0"
818
+ output: "convolution_output"
819
+ name: "conv1d_3"
820
+ op_type: "Conv"
821
+ domain: ""
822
+ attribute {
823
+ name: "dilations"
824
+ type: INTS
825
+ ints: 1
826
+ }
827
+ attribute {
828
+ name: "group"
829
+ type: INT
830
+ i: 1
831
+ }
832
+ attribute {
833
+ name: "kernel_shape"
834
+ type: INTS
835
+ ints: 1
836
+ }
837
+ attribute {
838
+ name: "strides"
839
+ type: INTS
840
+ ints: 1
841
+ }
842
+ attribute {
843
+ name: "auto_pad"
844
+ type: STRING
845
+ s: "NOTSET"
846
+ }
847
+ attribute {
848
+ name: "pads"
849
+ type: INTS
850
+ ints: 0
851
+ ints: 0
852
+ }
853
+ doc_string: ""
854
+ }
855
+ node {
856
+ input: "convolution_output"
857
+ output: "conv1d_3"
858
+ name: "PushTranspose_1"
859
+ op_type: "Transpose"
860
+ domain: ""
861
+ attribute {
862
+ name: "perm"
863
+ type: INTS
864
+ ints: 0
865
+ ints: 2
866
+ ints: 1
867
+ }
868
+ doc_string: ""
869
+ }
870
+ initializer {
871
+ dims: 512
872
+ dims: 256
873
+ dims: 1
874
+ data_type: 1
875
+ name: "conv1d_3/kernel:0"
876
+ }
877
+ initializer {
878
+ dims: 6
879
+ data_type: 7
880
+ name: "model_2/conv1d_3/Pad_pad_pads"
881
+ }
882
+ initializer {
883
+ dims: 256
884
+ dims: 512
885
+ dims: 1
886
+ data_type: 1
887
+ name: "conv1d_2/kernel:0"
888
+ }
889
+ initializer {
890
+ dims: 4
891
+ data_type: 7
892
+ name: "input_5:01_cropping_start"
893
+ }
894
+ initializer {
895
+ dims: 4
896
+ data_type: 7
897
+ name: "input_5:01_cropping_end"
898
+ }
899
+ initializer {
900
+ dims: 4
901
+ data_type: 7
902
+ name: "input_5:01_cropping_axes"
903
+ }
904
+ initializer {
905
+ dims: 4
906
+ data_type: 7
907
+ name: "input_5:01_cropping_steps"
908
+ }
909
+ initializer {
910
+ dims: 4
911
+ data_type: 7
912
+ name: "input_5:01_cropping_start1"
913
+ }
914
+ initializer {
915
+ dims: 4
916
+ data_type: 7
917
+ name: "input_5:01_cropping_end1"
918
+ }
919
+ initializer {
920
+ dims: 4
921
+ data_type: 7
922
+ name: "input_5:01_cropping_axes1"
923
+ }
924
+ initializer {
925
+ dims: 4
926
+ data_type: 7
927
+ name: "input_5:01_cropping_steps1"
928
+ }
929
+ initializer {
930
+ dims: 3
931
+ data_type: 7
932
+ name: "shape_tensor"
933
+ }
934
+ initializer {
935
+ dims: 256
936
+ data_type: 1
937
+ name: "model_2/dense_3/BiasAdd/ReadVariableOp/resource:0"
938
+ }
939
+ initializer {
940
+ dims: 2
941
+ data_type: 7
942
+ name: "shape_tensor1"
943
+ }
944
+ initializer {
945
+ dims: 512
946
+ data_type: 1
947
+ name: "model_2/lstm_6/BiasAdd/ReadVariableOp/resource:0"
948
+ }
949
+ initializer {
950
+ dims: 512
951
+ data_type: 1
952
+ name: "model_2/lstm_7/BiasAdd/ReadVariableOp/resource:0"
953
+ }
954
+ initializer {
955
+ dims: 4
956
+ data_type: 7
957
+ name: "input_5:01_cropping_start2"
958
+ }
959
+ initializer {
960
+ dims: 4
961
+ data_type: 7
962
+ name: "input_5:01_cropping_end2"
963
+ }
964
+ initializer {
965
+ dims: 4
966
+ data_type: 7
967
+ name: "input_5:01_cropping_axes2"
968
+ }
969
+ initializer {
970
+ dims: 4
971
+ data_type: 7
972
+ name: "input_5:01_cropping_steps2"
973
+ }
974
+ initializer {
975
+ dims: 4
976
+ data_type: 7
977
+ name: "input_5:01_cropping_start3"
978
+ }
979
+ initializer {
980
+ dims: 4
981
+ data_type: 7
982
+ name: "input_5:01_cropping_end3"
983
+ }
984
+ initializer {
985
+ dims: 4
986
+ data_type: 7
987
+ name: "input_5:01_cropping_axes3"
988
+ }
989
+ initializer {
990
+ dims: 4
991
+ data_type: 7
992
+ name: "input_5:01_cropping_steps3"
993
+ }
994
+ initializer {
995
+ dims: 256
996
+ dims: 512
997
+ data_type: 1
998
+ name: "model_2/lstm_6/MatMul/ReadVariableOp/resource:0"
999
+ }
1000
+ initializer {
1001
+ dims: 128
1002
+ dims: 512
1003
+ data_type: 1
1004
+ name: "model_2/lstm_6/MatMul_1/ReadVariableOp/resource:0"
1005
+ }
1006
+ initializer {
1007
+ dims: 128
1008
+ dims: 512
1009
+ data_type: 1
1010
+ name: "model_2/lstm_7/MatMul/ReadVariableOp/resource:0"
1011
+ }
1012
+ initializer {
1013
+ dims: 128
1014
+ dims: 512
1015
+ data_type: 1
1016
+ name: "model_2/lstm_7/MatMul_1/ReadVariableOp/resource:0"
1017
+ }
1018
+ initializer {
1019
+ dims: 256
1020
+ data_type: 1
1021
+ name: "model_2/instant_layer_normalization_1/add_1/ReadVariableOp/resource:0"
1022
+ }
1023
+ initializer {
1024
+ dims: 256
1025
+ data_type: 1
1026
+ name: "model_2/instant_layer_normalization_1/mul/ReadVariableOp/resource:0"
1027
+ }
1028
+ initializer {
1029
+ data_type: 1
1030
+ name: "model_2/instant_layer_normalization_1/add/y:0"
1031
+ }
1032
+ initializer {
1033
+ dims: 128
1034
+ dims: 256
1035
+ data_type: 1
1036
+ name: "model_2/dense_3/Tensordot/Reshape_1:0"
1037
+ }
1038
+ input {
1039
+ name: "input_4"
1040
+ type {
1041
+ tensor_type {
1042
+ elem_type: 1
1043
+ shape {
1044
+ dim {
1045
+ dim_value: 1
1046
+ }
1047
+ dim {
1048
+ dim_value: 1
1049
+ }
1050
+ dim {
1051
+ dim_value: 512
1052
+ }
1053
+ }
1054
+ }
1055
+ }
1056
+ }
1057
+ input {
1058
+ name: "input_5"
1059
+ type {
1060
+ tensor_type {
1061
+ elem_type: 1
1062
+ shape {
1063
+ dim {
1064
+ dim_value: 1
1065
+ }
1066
+ dim {
1067
+ dim_value: 2
1068
+ }
1069
+ dim {
1070
+ dim_value: 128
1071
+ }
1072
+ dim {
1073
+ dim_value: 2
1074
+ }
1075
+ }
1076
+ }
1077
+ }
1078
+ }
1079
+ output {
1080
+ name: "conv1d_3"
1081
+ type {
1082
+ tensor_type {
1083
+ elem_type: 1
1084
+ shape {
1085
+ dim {
1086
+ dim_param: "N"
1087
+ }
1088
+ dim {
1089
+ dim_value: 1
1090
+ }
1091
+ dim {
1092
+ dim_value: 512
1093
+ }
1094
+ }
1095
+ }
1096
+ }
1097
+ }
1098
+ output {
1099
+ name: "tf_op_layer_stack_5"
1100
+ type {
1101
+ tensor_type {
1102
+ elem_type: 1
1103
+ shape {
1104
+ dim {
1105
+ dim_param: "N"
1106
+ }
1107
+ dim {
1108
+ dim_value: 2
1109
+ }
1110
+ dim {
1111
+ dim_value: 128
1112
+ }
1113
+ dim {
1114
+ dim_value: 2
1115
+ }
1116
+ }
1117
+ }
1118
+ }
1119
+ }
1120
+ }
1121
+ opset_import {
1122
+ version: 11
1123
+ }
models/ailia-models/source.txt ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ https://github.com/axinc-ai/ailia-models/tree/master/audio_processing/dtln
2
+
3
+ https://storage.googleapis.com/ailia-models/dtln/dtln1.onnx
4
+ https://storage.googleapis.com/ailia-models/dtln/dtln1.onnx.prototxt
5
+
6
+ https://storage.googleapis.com/ailia-models/dtln/dtln2.onnx
7
+ https://storage.googleapis.com/ailia-models/dtln/dtln2.onnx.prototxt