Datasets:

ArXiv:
File size: 13,468 Bytes
82de705
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
import random
import numpy as np

from openrec.preprocess.ctc_label_encode import BaseRecLabelEncode


class MDiffLabelEncode(BaseRecLabelEncode):
    """Convert between text-label and text-index for masked-diffusion
    text-recognition training.

    For each sample this encoder emits (all sequences padded to
    ``max_text_len + 1`` positions — one extra slot for the EOS token):

    * ``noisy_batch``    - label sequence with some tokens replaced by
      ``<mask>``,
    * ``masked_indices`` - booleans marking which positions are prediction
      targets,
    * ``reflect_ids``    - label with tokens randomly re-drawn from the
      vocabulary,
    * ``p_mask``         - fraction of label tokens masked (only when
      ``semi_ar`` is False).
    """

    MASK = '<mask>'
    EOS = '</s>'
    PAD = '<pad>'

    def __init__(self,
                 max_text_length,
                 character_dict_path=None,
                 use_space_char=False,
                 semi_ar=False,
                 mask_tpye=(0, 1, 2, 3, 4, 5),
                 train_all_layer=False,
                 sample_num=1,
                 **kwargs):
        """
        Args:
            max_text_length: maximum label length.
            character_dict_path: path to the character dictionary file.
            use_space_char: whether the space character is in the vocabulary.
            semi_ar: if True, build the multi-step semi-autoregressive mask
                schedule instead of a single randomly chosen strategy.
            mask_tpye: mask strategies to sample from (parameter name kept,
                with its original typo, for config compatibility).
                0: full mask (pure parallel); 1: left-to-right AR;
                2: right-to-left AR; 3: block mask; 4: cloze (one token);
                5: random mask.
            train_all_layer: if True, draw ``sample_num`` independent mask
                samples per image; otherwise a single sample is used.
            sample_num: number of mask samples when ``train_all_layer``.
        """
        super(MDiffLabelEncode,
              self).__init__(max_text_length, character_dict_path,
                             use_space_char)

        self.semi_ar = semi_ar
        self.mask_tpye = mask_tpye
        self.train_all_layer = train_all_layer
        # Multiple mask samples only make sense when training all layers.
        self.sample_num = sample_num if self.train_all_layer else 1

    def random_mask(self, text):
        """Mask each non-PAD token independently with one shared random rate.

        PAD positions in the noisy output are always rewritten to MASK.
        If no token got masked, one random non-PAD position is forced so
        there is always at least one prediction target.

        Returns:
            (noisy_batch, masked_indices), both of length ``len(text)``.
        """
        l = len(text)
        p_mask = random.random()  # masking rate shared by the whole sequence

        noisy_batch = text[:]
        masked_indices = [False] * l
        none_pad_indices = []
        for i in range(l):
            if random.random() < p_mask and text[i] != self.dict[self.PAD]:
                noisy_batch[i] = self.dict[self.MASK]
                masked_indices[i] = True
            if text[i] != self.dict[self.PAD]:
                none_pad_indices.append(i)
            # PAD is never fed to the model as-is; it becomes MASK (but is
            # not a prediction target).
            if noisy_batch[i] == self.dict[self.PAD]:
                noisy_batch[i] = self.dict[self.MASK]

        if not any(masked_indices) and len(none_pad_indices) > 0:
            idx = random.choice(none_pad_indices)
            noisy_batch[idx] = self.dict[self.MASK]
            masked_indices[idx] = True
        return noisy_batch, masked_indices

    def full_mask(self, text):
        """Mask every position: the pure-parallel (one-step) setting."""
        noisy_batch = [self.dict[self.MASK]] * (self.max_text_len + 1)
        masked_indices = [True] * len(text) + [False] * (self.max_text_len +
                                                         1 - len(text))
        return noisy_batch, masked_indices

    def left_to_right_mask(self, text):
        """Keep a random prefix visible, mask the rest (forward AR style).

        Requires ``len(text) >= 2`` (callers guard with ``len(text) > 2``).
        """
        rand_split = random.randint(1, len(text) - 1)
        noisy_batch = text[:rand_split] + [self.dict[self.MASK]] * (
            self.max_text_len + 1 - rand_split)
        masked_indices = [False] * rand_split + [True] * (len(
            text) - rand_split) + [False] * (self.max_text_len + 1 - len(text))
        return noisy_batch, masked_indices

    def semi_left_to_right_mask(self, text, block_size=5, step=3, i=0):
        """Build the mask for step ``i`` of a ``step``-step block schedule.

        Step 0 fully masks the first block; later steps keep earlier blocks
        visible, randomly mask the previous block's region and fully mask
        the current block (and, at the last step, everything remaining).
        Both outputs are padded to ``max_text_len + 1``.
        """
        text = text + [self.dict[self.PAD]
                       ] * (self.max_text_len + 1 - len(text))
        noisy_batch_step_i = []
        masked_indices_step_i = []

        if i == 0:
            for t in text[:block_size]:
                # Only real tokens are prediction targets.
                masked_indices_step_i.append(t != self.dict[self.PAD])

            # BUGFIX: pad to a total of max_text_len + 1 positions (was
            # block_size + max_text_len + 1, producing ragged rows).
            noisy_batch_step_i = [self.dict[self.MASK]] * block_size + [
                self.dict[self.PAD]
            ] * (self.max_text_len + 1 - block_size)
            masked_indices_step_i = masked_indices_step_i + [False] * (
                self.max_text_len + 1 - block_size)
        elif i == 1:
            noisy_batch_r, masked_indices_r = self.random_mask(
                text[:block_size])
            noisy_batch_step_i = noisy_batch_r
            masked_indices_step_i = masked_indices_r

            # Fully mask the current block; at the final step, mask
            # everything remaining.
            if i == step - 1:
                tail = text[i * block_size:]
            else:
                tail = text[i * block_size:(i + 1) * block_size]
            for t in tail:
                masked_indices_step_i.append(t != self.dict[self.PAD])
                noisy_batch_step_i.append(self.dict[self.MASK])

            noisy_batch_step_i = noisy_batch_step_i + [self.dict[self.PAD]] * (
                self.max_text_len + 1 - len(noisy_batch_step_i))
            # BUGFIX: pad the mask list by its OWN length (it previously
            # measured the already-padded noisy batch and stayed short).
            masked_indices_step_i = masked_indices_step_i + [False] * (
                self.max_text_len + 1 - len(masked_indices_step_i))
        elif i >= 2:
            # Blocks before the previous one stay visible (PAD slots still
            # become MASK, but are not targets).
            for t in text[:(i - 1) * block_size]:
                if t == self.dict[self.PAD]:
                    noisy_batch_step_i.append(self.dict[self.MASK])
                else:
                    # BUGFIX: append the token itself (was text[t], which
                    # indexed the sequence by token id).
                    noisy_batch_step_i.append(t)
            masked_indices_step_i = [False] * ((i - 1) * block_size)

            noisy_batch_r, masked_indices_r = self.random_mask(
                text[(i - 1) * block_size:i * block_size])
            noisy_batch_step_i = noisy_batch_step_i + noisy_batch_r
            masked_indices_step_i = masked_indices_step_i + masked_indices_r

            if i == step - 1:
                tail = text[i * block_size:]
            else:
                tail = text[i * block_size:(i + 1) * block_size]
            for t in tail:
                masked_indices_step_i.append(t != self.dict[self.PAD])
                noisy_batch_step_i.append(self.dict[self.MASK])

            noisy_batch_step_i = noisy_batch_step_i + [self.dict[self.PAD]] * (
                self.max_text_len + 1 - len(noisy_batch_step_i))
            masked_indices_step_i = masked_indices_step_i + [False] * (
                self.max_text_len + 1 - len(masked_indices_step_i))
        return noisy_batch_step_i, masked_indices_step_i

    def forward_process_semi_ar(self, text):
        """Build the full set of semi-autoregressive mask samples.

        Emits five samples from the fixed 5-step schedule, one sample from
        a randomly sized schedule (2-8 steps, random step index), and one
        fully random mask — seven samples total.
        """
        step = 5
        block_size = (self.max_text_len + 1) // step
        noisy_batch_semi_ar = []
        masked_indices_semi_ar = []
        for i in range(step):
            noisy_batch_step_i, masked_indices_step_i = self.semi_left_to_right_mask(
                text, block_size=block_size, step=step, i=i)
            noisy_batch_semi_ar.append(noisy_batch_step_i)
            masked_indices_semi_ar.append(masked_indices_step_i)

        rd_step = random.choice([2, 3, 4, 6, 7, 8])
        block_size = (self.max_text_len + 1) // rd_step
        rd_step_i = random.randint(0, rd_step - 1)
        noisy_batch, masked_indices = self.semi_left_to_right_mask(
            text, block_size=block_size, step=rd_step, i=rd_step_i)
        noisy_batch_semi_ar.append(noisy_batch)
        masked_indices_semi_ar.append(masked_indices)

        # Randomly mask part of the raw text as the final sample.
        noisy_batch, masked_indices = self.random_mask(text)
        noisy_batch = noisy_batch + [self.dict[self.MASK]
                                     ] * (self.max_text_len + 1 - len(text))
        masked_indices = masked_indices + [False] * (self.max_text_len + 1 -
                                                     len(text))

        noisy_batch_semi_ar.append(noisy_batch)
        masked_indices_semi_ar.append(masked_indices)
        # BUGFIX: return the mask indices (the noisy batches were
        # previously returned twice).
        return noisy_batch_semi_ar, masked_indices_semi_ar

    def right_to_left_mask(self, text):
        """Keep a random suffix visible, mask the prefix (backward AR style).

        Requires ``len(text) >= 2`` (callers guard with ``len(text) > 2``).
        """
        rand_split = random.randint(1, len(text) - 1)
        noisy_batch = [self.dict[self.MASK]
                       ] * rand_split + text[rand_split:] + [
                           self.dict[self.MASK]
                       ] * (self.max_text_len + 1 - len(text))
        masked_indices = [True] * rand_split + [False] * (len(
            text) - rand_split) + [False] * (self.max_text_len + 1 - len(text))
        return noisy_batch, masked_indices

    def forward_process(self, text):
        """Apply one mask strategy sampled from ``self.mask_tpye``.

        Strategies 1-4 need ``len(text) > 2``; shorter labels fall through
        to the random-mask default.
        """
        rand_choice = random.choice(self.mask_tpye)
        if rand_choice == 0:  # parallel / full mask
            return self.full_mask(text)
        elif rand_choice == 1 and len(text) > 2:  # forward AR mask
            return self.left_to_right_mask(text)
        elif rand_choice == 2 and len(text) > 2:  # backward AR mask
            return self.right_to_left_mask(text)
        elif rand_choice == 3 and len(text) > 2:  # block mask
            rand_step = min(random.randint(2, 6), len(text))
            if rand_step <= 1:  # defensive: unreachable while len(text) > 2
                return self.full_mask(text)
            block_size = len(text) // rand_step
            if block_size == 1:
                return self.left_to_right_mask(text) if random.random(
                ) < 0.5 else self.right_to_left_mask(text)
            # Account for the remainder block.
            if len(text) % rand_step != 0:
                rand_step += 1
            # Pick which step of the schedule to emulate (the first
            # rand_step_from_mask - 1 blocks stay visible).
            if rand_step == 2:
                rand_step_from_mask = 1
            else:
                rand_step_from_mask = random.randint(2, rand_step)

            noisy_batch = text[:block_size * (rand_step_from_mask - 1)]
            masked_indices = [False] * (block_size * (rand_step_from_mask - 1))

            noisy_batch = noisy_batch + [self.dict[self.MASK]] * (
                self.max_text_len + 1 - len(noisy_batch))
            masked_indices = masked_indices + [True] * (
                len(text) - block_size *
                (rand_step_from_mask - 1)) + [False] * (self.max_text_len + 1 -
                                                        len(text))
            return noisy_batch, masked_indices
        elif rand_choice == 4 and len(text) > 2:  # cloze mask (one token)
            noisy_batch = text[:]
            masked_indices = [False] * len(text)
            rand_index = random.randint(0, len(text) - 1)
            noisy_batch[rand_index] = self.dict[self.MASK]
            masked_indices[rand_index] = True
            noisy_batch = noisy_batch + [self.dict[self.MASK]] * (
                self.max_text_len + 1 - len(text))
            masked_indices = masked_indices + [False] * (self.max_text_len +
                                                         1 - len(text))
            return noisy_batch, masked_indices
        else:  # random mask (also the fallback for short labels)
            noisy_batch, masked_indices = self.random_mask(text)
            noisy_batch = noisy_batch + [self.dict[self.MASK]] * (
                self.max_text_len + 1 - len(text))
            masked_indices = masked_indices + [False] * (self.max_text_len +
                                                         1 - len(text))
            return noisy_batch, masked_indices

    def reflect_random_idices(self, text, eps=1e-3):
        """Re-draw each token from the vocabulary with probability
        ``(1 - eps) * U(0, 1) + eps`` (method name kept, with its original
        typo, for caller compatibility)."""
        l = len(text)
        t = random.random()
        p_mask = (1 - eps) * t + eps
        reflect_ids = text[:]
        for i in range(l):
            if random.random() < p_mask:
                reflect_ids[i] = random.randint(0, len(self.dict) - 1)
        return reflect_ids

    def __call__(self, data):
        """Encode ``data['label']`` and attach the diffusion training
        tensors; returns None when the label cannot be encoded."""
        text = data['label']
        text = self.encode(text)
        if text is None:
            return None
        data['length'] = np.array(len(text) + 1)  # +1 for the EOS token
        text = text + [self.dict[self.EOS]]
        p_mask_list = []
        noisy_batch_list = []
        masked_indices_list = []
        reflect_ids_list = []
        for i in range(self.sample_num):
            reflect_ids = self.reflect_random_idices(text)
            reflect_ids = reflect_ids + [self.dict[self.MASK]] * (
                self.max_text_len + 1 - len(reflect_ids))
            if self.semi_ar:
                noisy_batch, masked_indices = self.forward_process_semi_ar(
                    text)
            else:
                noisy_batch, masked_indices = self.forward_process(text)
                p_mask = float(sum(masked_indices)) / float(len(text))
                p_mask_list.append(np.array(p_mask))
            noisy_batch_list.append(np.array(noisy_batch))
            masked_indices_list.append(np.array(masked_indices))
            reflect_ids_list.append(np.array(reflect_ids))

        if not self.semi_ar:
            data['p_mask'] = np.array(
                p_mask_list) if self.train_all_layer else np.array(
                    p_mask_list[0])
        data['noisy_batch'] = np.array(
            noisy_batch_list) if self.train_all_layer else np.array(
                noisy_batch_list[0])
        data['masked_indices'] = np.array(
            masked_indices_list) if self.train_all_layer else np.array(
                masked_indices_list[0])
        data['reflect_ids'] = np.array(
            reflect_ids_list) if self.train_all_layer else np.array(
                reflect_ids_list[0])

        text = text + [self.dict[self.PAD]
                       ] * (self.max_text_len + 1 - len(text))
        data['label'] = np.array(text)

        return data

    def add_special_char(self, dict_character):
        """Place EOS at index 0 and append MASK and PAD to the vocabulary."""
        dict_character = [self.EOS] + dict_character + [self.MASK, self.PAD]
        return dict_character