{
  "inputs": [
    "images"
  ],
  "modules": {
    "avg_pool": {
      "config": {
        "args": {
          "output_size": [
            null,
            1
          ]
        }
      },
      "type": "DeepTextRecognition.AdaptiveAvgPoolModule"
    },
    "feature_extraction": {
      "config": {
        "args": {
          "input_channel": 1,
          "output_channel": 512,
          "variant": "DTRB"
        }
      },
      "type": "DeepTextRecognition.ResNetModel"
    },
    "permute": {
      "config": {
        "args": {
          "dims": [
            0,
            3,
            1,
            2
          ]
        }
      },
      "type": "DeepTextRecognition.PermuteModule"
    },
    "prediction": {
      "config": {
        "args": {
          "N_max_character": 26,
          "n_class": 38,
          "n_position": 26
        }
      },
      "type": "DeepTextRecognition.SRNDecoder"
    },
    "processing": {
      "config": {
        "args": {
          "channels_size": 1,
          "image_size": [
            32,
            100
          ],
          "padding": "left"
        }
      },
      "type": "DeepTextRecognition.ImageProcessor"
    },
    "sequence_modeling": {
      "config": {
        "args": {
          "d_inner": 1024,
          "d_k": 64,
          "d_model": 512,
          "d_v": 64,
          "dropout": 0.1,
          "n_head": 8,
          "n_layers": 2,
          "n_position": 26
        }
      },
      "type": "DeepTextRecognition.TransformerEncoderv1"
    },
    "squeeze": {
      "config": {
        "args": {
          "dim": 3
        }
      },
      "type": "DeepTextRecognition.SqueezeModule"
    },
    "tokenizer": {
      "config": {
        "args": {
          "characters": [
            "0",
            "1",
            "2",
            "3",
            "4",
            "5",
            "6",
            "7",
            "8",
            "9",
            "a",
            "b",
            "c",
            "d",
            "e",
            "f",
            "g",
            "h",
            "i",
            "j",
            "k",
            "l",
            "m",
            "n",
            "o",
            "p",
            "q",
            "r",
            "s",
            "t",
            "u",
            "v",
            "w",
            "x",
            "y",
            "z"
          ],
          "max_length": 25,
          "pad_token": 36
        }
      },
      "type": "DeepTextRecognition.SRNTokenizer"
    }
  },
  "order": [
    "processing",
    "feature_extraction",
    "permute",
    "avg_pool",
    "squeeze",
    "sequence_modeling",
    "prediction",
    "tokenizer"
  ],
  "outputs": [
    "tokenizer:labels"
  ],
  "routing": {
    "avg_pool": {
      "inputs": [
        "permute:permuted_features"
      ],
      "outputs": [
        "avg_pool:pooled_features"
      ]
    },
    "feature_extraction": {
      "inputs": [
        "processing:processed_images"
      ],
      "outputs": [
        "feature_extraction:extracted_features"
      ]
    },
    "permute": {
      "inputs": [
        "feature_extraction:extracted_features"
      ],
      "outputs": [
        "permute:permuted_features"
      ]
    },
    "prediction": {
      "inputs": [
        "sequence_modeling:modeled_features"
      ],
      "outputs": [
        "prediction:predictions_1",
        "prediction:predictions_2",
        "prediction:predictions_3"
      ]
    },
    "processing": {
      "inputs": [
        "images"
      ],
      "outputs": [
        "processing:processed_images"
      ]
    },
    "sequence_modeling": {
      "inputs": [
        "squeeze:squeezed_features"
      ],
      "outputs": [
        "sequence_modeling:modeled_features"
      ]
    },
    "squeeze": {
      "inputs": [
        "avg_pool:pooled_features"
      ],
      "outputs": [
        "squeeze:squeezed_features"
      ]
    },
    "tokenizer": {
      "inputs": [
        "prediction:predictions_3"
      ],
      "outputs": [
        "tokenizer:labels"
      ]
    }
  }
}