niobures committed on
Commit
0240c6e
·
verified ·
1 Parent(s): c7c0f99

DeepMorphy

Browse files
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ best_checkpoint/-44.data-00000-of-00001 filter=lfs diff=lfs merge=lfs -text
37
+ best_checkpoint/-44.meta filter=lfs diff=lfs merge=lfs -text
10_release_dict.py ADDED
@@ -0,0 +1,244 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import re
import os
import gzip
import pickle
from utils import CONFIG, get_dict_path, load_datasets


# Matches numerals written as "<digits>-<suffix>" (e.g. "2-й").
NAR_REG = re.compile("\d+-.*")
RANDOM_SEED = 1917
# Paths and settings taken from the shared project configuration.
VECT_PATH = CONFIG['vect_words_path']
DATASET_PATH = CONFIG['dataset_path']
REZ_PATHS = CONFIG['publish_dictionary_paths']
DICT_WORDS_PATH = CONFIG['dict_words_path']
NOT_DICT_WORDS_PATH = CONFIG['dataset_words_path']
MAX_WORD_SIZE = CONFIG['max_word_size']
DICT_POST_TYPES = CONFIG['dict_post_types']
GRAMMEMES_TYPES = CONFIG['grammemes_types']
IGNORE_AD_TAGS = CONFIG['dict_ignore_tags']
REPLACE_WORD_DICT_ID = 1

# Flat map: grammeme class name -> its index, iterating categories in
# ascending category-index order.
CLASSES_INDEX_DICT = {
    cls: GRAMMEMES_TYPES[gram]['classes'][cls]['index']
    for gram in sorted(GRAMMEMES_TYPES, key=lambda x: GRAMMEMES_TYPES[x]['index'])
    for cls in GRAMMEMES_TYPES[gram]['classes']
}

# Part-of-speech "power" weights; defaults to 1 when no explicit 'power'
# value is configured.
POST_POWER_DICT = {}
for key in DICT_POST_TYPES:
    POST_POWER_DICT[key] = DICT_POST_TYPES[key]['power'] if 'power' in DICT_POST_TYPES[key] else 1

p_dic = GRAMMEMES_TYPES['post']['classes']
for key in p_dic:
    POST_POWER_DICT[key] = p_dic[key]['power'] if 'power' in p_dic[key] else 1

# Inflection templates: mapping lemma tag-template -> iterable of form
# tag-templates (pickled by an earlier pipeline step).
with open(CONFIG['inflect_templates_path'], 'rb') as f:
    inflect_templates = pickle.load(f)

# Tag-template metadata; each entry exposes at least an index 'i'.
with open(CONFIG['tags_path'], 'rb') as f:
    tpl_cls_dict = pickle.load(f)

# Map every form class index to the class index of its lemma template.
lemma_cls_dict = {}
for lemma_tpl in inflect_templates:
    lemma_id = tpl_cls_dict[lemma_tpl]['i']
    for tpl in inflect_templates[lemma_tpl]:
        lemma_cls_dict[tpl_cls_dict[tpl]['i']] = lemma_id

# First occurrence of each lexeme id in the inflect datasets gives its
# (lemma text, lemma class) pair.
lemma_dict = {}
for item in load_datasets('inflect', 'test', 'train', 'valid'):
    if item['id'] not in lemma_dict:
        lemma_dict[item['id']] = (item['x_src'], item['x_cls'])

# Collect the set of additional tags per lexeme id from the vectorized
# word table ('inflect_id' takes precedence over 'id' when present).
ad_tags_dict = {}
with open(VECT_PATH, 'rb') as f:
    vec_words = pickle.load(f)
for word in vec_words:
    item = vec_words[word]
    for form in item['forms']:
        lexeme_id_key = 'inflect_id' if 'inflect_id' in form else 'id'
        lexeme_id = form[lexeme_id_key]

        if 'ad_tags' not in form:
            continue

        if lexeme_id not in ad_tags_dict:
            ad_tags_dict[lexeme_id] = set()

        ad_tags_dict[lexeme_id].add(form['ad_tags'])
68
+
69
+
70
def build_index(words_dics):
    """Build the text index for the published dictionary.

    :param words_dics: mapping lexeme id -> list of form dicts, each with
        at least 'text' (surface form) and 'id' (lexeme id).
    :return: newline-joined entries of the form ``text:id1,id2,...``;
        duplicates are removed via a set, so line order is unspecified.
    """
    text_forms_dict = {}
    for id in words_dics:
        for item in words_dics[id]:
            # Group all forms sharing the same surface text.
            text_forms_dict.setdefault(item['text'], []).append(item)

    index = []
    for text in text_forms_dict:
        lexemes = ','.join(str(item['id']) for item in text_forms_dict[text])
        index.append(f"{text}:{lexemes}")

    return '\n'.join(set(index))


def create_dictionary(words_dics):
    """Serialize lexemes into the release dictionary text format.

    Each lexeme becomes one line: ``<id>\\t<text>:<cls>[!][,<cls>...];...``
    where '!' marks a class whose analysis replaces others.

    :param words_dics: mapping lexeme id -> list of form dicts with
        'text', 'main' (class id), 'id' and optional 'replace_other'.
    :return: (index, lexeme) pair of serialized strings.
    """
    index = build_index(words_dics)

    lexeme = []
    for id in words_dics:
        cur_lexeme = [id, '\t']

        order = []  # surface texts in first-seen order
        cur_forms_dict = {}  # text -> {class id -> replace_other flag}
        for item in words_dics[id]:
            if item['text'] not in cur_forms_dict:
                cur_forms_dict[item['text']] = {}

            replace_other = item.get('replace_other', False)
            cur_form_dic = cur_forms_dict[item['text']]
            # BUG FIX: the membership test previously looked up
            # item['main'] (a class id) in the *text-keyed* outer dict
            # cur_forms_dict, which is practically never a hit — so the
            # flag was unconditionally overwritten and a True
            # 'replace_other' could be clobbered by a later False.
            # The check belongs on the per-text class dict.
            if item['main'] not in cur_form_dic or not cur_form_dic[item['main']]:
                cur_form_dic[item['main']] = replace_other

            if item['text'] not in order:
                order.append(item['text'])

        for text in order:
            cur_lexeme.append(text)
            cur_lexeme.append(':')
            for cls in cur_forms_dict[text]:
                cur_lexeme.append(str(cls))
                if cur_forms_dict[text][cls]:
                    cur_lexeme.append('!')
                cur_lexeme.append(",")

            del cur_lexeme[-1]  # drop trailing ','
            cur_lexeme.append(';')

        del cur_lexeme[-1]  # drop trailing ';'
        lexeme.append(''.join(cur_lexeme))

    lexeme = '\n'.join(lexeme)
    return index, lexeme
130
+
131
+
132
def save_dictionary(index, lexeme, paths, file_name):
    """Write the gzip-compressed index and lexeme files.

    For every directory in ``paths`` two files are produced:
    ``<file_name>_index.txt.gz`` (the text index) and
    ``<file_name>.txt.gz`` (the serialized lexemes), both UTF-8 encoded.
    """
    def _write_gzip(directory, name, payload):
        # One gzip member per file, payload encoded as UTF-8.
        target = os.path.join(directory, name)
        with gzip.open(target, 'wb+') as out:
            out.write(payload.encode('utf-8'))

    for directory in paths:
        _write_gzip(directory, f"{file_name}_index.txt.gz", index)

    for directory in paths:
        _write_gzip(directory, f"{file_name}.txt.gz", lexeme)
142
+
143
+
144
def release_dict_items():
    """Build and publish the main release dictionary.

    Loads the pickled dictionary words, drops numerals, groups the rest by
    lexeme id, orders each lexeme by the original word index and flags the
    first word of a duplicated class with ``replace_other``; the result is
    serialized and written to every configured publish path.
    """
    with open(DICT_WORDS_PATH, 'rb') as f:
        all_words = pickle.load(f)

    # Group non-numeral words by lexeme id.
    grouped = {}
    for word in all_words:
        if word['post'] == 'numb':
            continue
        grouped.setdefault(word['id'], []).append(word)

    for lexeme_id in grouped:
        first_by_main = {}
        ordered = []
        for word in sorted(grouped[lexeme_id], key=lambda w: w['index']):
            if word['main'] in first_by_main:
                # A later word shares this class: mark the earliest one
                # so its analysis replaces the others.
                first_by_main[word['main']]['replace_other'] = True
            else:
                first_by_main[word['main']] = word
            ordered.append(word)
        grouped[lexeme_id] = ordered

    index, lexeme = create_dictionary(grouped)
    save_dictionary(index, lexeme, REZ_PATHS, 'dict')
168
+
169
+
170
def release_correction_items():
    """Build and publish the correction dictionary.

    Gathers lexemes that the network handled badly (lemma, inflect and
    main-classification failure pickles) plus the curated lemma dictionary,
    pairs each problematic form with its known lemma, and publishes the
    result as ``dict_correction``.
    """

    def _skip(lexeme_id):
        # A lexeme is excluded when its lemma is unknown, or when any of
        # its additional tags is in the configured ignore list.
        # (This condition was previously copy-pasted at four call sites.)
        if lexeme_id not in lemma_dict:
            return True
        return lexeme_id in ad_tags_dict and any(
            key in ad_tags_dict[lexeme_id] for key in IGNORE_AD_TAGS
        )

    dict_words = {}

    # 1. Curated lemma dictionary: keep the word plus its lemma form.
    with open(get_dict_path('lemma'), 'rb') as f:
        items = pickle.load(f)
    for word in items:
        lexeme_id = word['id']
        if _skip(lexeme_id):
            continue

        forms = dict_words.setdefault(lexeme_id, [])
        forms.append(word)
        forms.append({
            'id': lexeme_id,
            'text': lemma_dict[lexeme_id][0],
            'main': lemma_dict[lexeme_id][1],
        })

    # 2. Words the lemmatizer got wrong.
    with open(os.path.join(CONFIG['bad_path'], "bad_lemma.pkl"), 'rb') as f:
        items = pickle.load(f)
    for word in items:
        word = word[0]
        lexeme_id = word['id']
        if _skip(lexeme_id):
            continue

        lemma, lemma_cls = lemma_dict[lexeme_id]
        forms = dict_words.setdefault(lexeme_id, [])
        forms.append(dict(id=lexeme_id, main=word['main_cls'], text=word['x_src'], replace_other=True))
        forms.append(dict(id=lexeme_id, main=lemma_cls, text=lemma, replace_other=True))

    # 3. Word pairs the inflection model got wrong.
    with open(os.path.join(CONFIG['bad_path'], "bad_inflect.pkl"), 'rb') as f:
        items = pickle.load(f)
    for word in items:
        word = word[0]
        lexeme_id = word['id']
        if _skip(lexeme_id):
            continue

        forms = dict_words.setdefault(lexeme_id, [])
        forms.append(dict(id=lexeme_id, main=word['x_cls'], text=word['x_src'], replace_other=True))
        forms.append(dict(id=lexeme_id, main=word['y_cls'], text=word['y_src'], replace_other=True))

    # 4. Words the main classifier got wrong: add every form of the word
    # found in the vectorized table, together with its lemma.
    with open(os.path.join(CONFIG['bad_path'], "bad_main.pkl"), 'rb') as f:
        items = pickle.load(f)
    for bad_item in items:
        text = bad_item[0]['src']
        for word in vec_words[text]['forms']:
            lexeme_id = word['id']
            if _skip(lexeme_id):
                continue

            cls_id = tpl_cls_dict[word['main']]['i']
            forms = dict_words.setdefault(lexeme_id, [])
            forms.append(dict(id=lexeme_id, main=cls_id, text=text, replace_other=True))
            forms.append(dict(id=lexeme_id, main=lemma_dict[lexeme_id][1],
                              text=lemma_dict[lexeme_id][0], replace_other=True))

    index, lexeme = create_dictionary(dict_words)
    save_dictionary(index, lexeme, REZ_PATHS, 'dict_correction')
240
+
241
+
242
+ release_correction_items()
243
+ release_dict_items()
244
+
11_release_tests.py ADDED
@@ -0,0 +1,331 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import pickle
import numpy as np
from utils import CONFIG, RANDOM, load_datasets

from lxml import etree
from xml.etree.ElementTree import ElementTree


# Grammeme category definitions and the root directory all published
# test files are written to.
GRAM_TYPES = CONFIG['grammemes_types']
ROOT = CONFIG['publish_tests_path']
DICT_WORDS_PATH = CONFIG['dict_words_path']
NMB_DATA_PATH = CONFIG['numb_data_path']
# Pre-generated number-word data produced by an earlier pipeline step.
with open(NMB_DATA_PATH, 'rb') as f:
    numb_data = pickle.load(f)

# Tag templates; each entry carries an index 'i', an ordering value 'o'
# and a lemma flag 'l'.
with open(CONFIG['tags_path'], 'rb') as f:
    tags = pickle.load(f)
# class index -> "is this class a lemma form" flag
is_lemma_dict = {tags[key]['i']: tags[key]['l'] for key in tags}
# class index -> ordering value
tag_index_order = {tags[tag]['i']: tags[tag]['o'] for tag in tags}
21
+
22
+
23
def release_gram_tests(items, key, cls_dic, result_path):
    """Write one ``<Tests>`` XML file for a single grammeme category.

    Each sample contributes a ``<T x="..." y="..."/>`` element where ``y``
    lists the class names whose one-hot positions equal 1, ';'-joined.
    """
    root = etree.Element('Tests')
    for sample in items:
        active = np.argwhere(sample['y'] == 1).ravel()
        labels = ';'.join(cls_dic[idx] for idx in active)
        node = etree.Element("T")
        node.set('x', sample['src'])
        node.set('y', labels)
        root.append(node)

    target = os.path.join(result_path, f'{key}.xml')
    with open(target, 'wb+') as out:
        ElementTree(root).write(out, xml_declaration=True, encoding='utf-8')
37
+
38
+
39
def release_main_tests(items, result_path, y_is_index=True):
    """Write the combined classification test set to ``main.xml``.

    With ``y_is_index`` true, ``item['y']`` already holds the class
    indices; otherwise it is a one-hot numpy vector and the indices of the
    1-entries are taken. Either way ``y`` is emitted ';'-joined.
    """
    root = etree.Element('Tests')
    for sample in items:
        if y_is_index:
            indices = sample['y']
        else:
            indices = np.argwhere(sample['y'] == 1).ravel()
        node = etree.Element("T")
        node.set('x', sample['src'])
        node.set('y', ';'.join(str(idx) for idx in indices))
        root.append(node)

    target = os.path.join(result_path, f'main.xml')
    with open(target, 'wb+') as out:
        ElementTree(root).write(out, xml_declaration=True, encoding='utf-8')
56
+
57
+
58
def release_lemma_tests(items, result_path):
    """Write lemmatization test cases to ``lemma.xml``.

    Each item yields ``<T x="form" x_c="form class" y="lemma"/>``.
    """
    root = etree.Element('Tests')
    for sample in items:
        node = etree.Element("T")
        node.set('x', sample['x_src'])
        node.set('x_c', str(sample['main_cls']))
        node.set('y', sample['y_src'])
        root.append(node)

    target = os.path.join(result_path, 'lemma.xml')
    with open(target, 'wb+') as out:
        ElementTree(root).write(out, xml_declaration=True, encoding='utf-8')
71
+
72
+
73
def release_inflect_tests(items, result_path):
    """Write inflection test cases to ``inflect.xml``.

    Each item yields ``<T x="source form" x_c="source class"
    y="target form" y_c="target class"/>``.
    """
    root = etree.Element('Tests')
    for sample in items:
        node = etree.Element("T")
        node.set('x', sample['x_src'])
        node.set('x_c', str(sample['x_cls']))
        node.set('y', sample['y_src'])
        node.set('y_c', str(sample['y_cls']))
        root.append(node)

    target = os.path.join(result_path, 'inflect.xml')
    with open(target, 'wb+') as out:
        ElementTree(root).write(out, xml_declaration=True, encoding='utf-8')
87
+
88
+
89
def release_nn_tests():
    """Export the neural-network test datasets into ``<ROOT>/Network``.

    One grammeme XML per category plus the main, lemma and inflect sets.
    """
    res_path = os.path.join(ROOT, 'Network')
    for gram in GRAM_TYPES:
        classes = GRAM_TYPES[gram]['classes']
        # invert class name -> index into index -> class name
        index_to_key = {classes[name]['index']: name for name in classes}
        release_gram_tests(load_datasets(gram, 'test'), gram, index_to_key, res_path)

    release_main_tests(load_datasets('main', 'test'), res_path, False)
    release_lemma_tests(load_datasets('lemma', 'test'), res_path)
    release_inflect_tests(load_datasets('inflect', 'test'), res_path)
100
+
101
+
102
def merge_same_main(items):
    """Collapse classification samples that share a surface form.

    :param items: dicts with 'src' (text) and 'y' (a single label).
    :return: one dict per distinct 'src', in first-seen order, whose 'y'
        is the list of all labels recorded for that text.
    """
    by_text = {}
    for entry in items:
        by_text.setdefault(entry['src'], []).append(entry)

    return [
        dict(src=text, y=[entry['y'] for entry in grouped])
        for text, grouped in by_text.items()
    ]
115
+
116
+
117
def release_dictionary_tests():
    """Export test sets derived from the dictionary words into ``<ROOT>/Dict``.

    Builds main-classification, lemma and inflection cases from the pickled
    dictionary, skipping numerals.
    """
    res_path = os.path.join(ROOT, 'Dict')
    with open(DICT_WORDS_PATH, 'rb') as f:
        words = pickle.load(f)

    # Group non-numeral words by lexeme id.
    lexeme_dict = {}
    for word in words:
        if word['post'] == 'numb':
            continue

        if word['id'] not in lexeme_dict:
            lexeme_dict[word['id']] = []

        lexeme_dict[word['id']].append(word)

    main = []
    inflect = []
    lemmas = []
    for word_id in lexeme_dict:
        lexeme_words = lexeme_dict[word_id]
        for item in lexeme_words:
            main.append(dict(src=item['text'], y=item['main']))

        for word in lexeme_words:
            # Lemma defaults to the word's own 'lemma' field (or its text);
            # a word whose class is itself a lemma form is its own lemma.
            lemma = word['lemma'] if 'lemma' in word else word['text']
            if is_lemma_dict[word['main']]:
                lemma = word['text']

            lemmas.append(dict(
                x_src=word['text'],
                main_cls=word['main'],
                y_src=lemma
            ))

        # Keep only the first word (by original index) of each class.
        un_cls_ids = []
        rez_items = []
        for item in sorted(lexeme_words, key=lambda x: x['index']):
            if item['main'] in un_cls_ids:
                continue

            un_cls_ids.append(item['main'])
            rez_items.append(item)
        lexeme_words = rez_items

        # Pair distinct-class forms as inflection cases.
        # NOTE(review): the `len(...) - 2` / `len(...) - 1` bounds skip the
        # last one or two forms entirely, and the inner loop starts at `i`
        # rather than `i + 1` — looks like off-by-one; confirm whether this
        # truncation of pairs is intentional before changing.
        for i in range(0, len(lexeme_words) - 2):
            main_word = lexeme_words[i]
            for j in range(i, len(lexeme_words) - 1):
                to_word = lexeme_words[j]
                if to_word['main'] == main_word['main']:
                    continue

                inflect.append(dict(
                    x_src=main_word['text'],
                    x_cls=main_word['main'],
                    y_src=to_word['text'],
                    y_cls=to_word['main'],
                    id=word_id
                ))

    main = merge_same_main(main)
    release_main_tests(main, res_path)
    release_lemma_tests(lemmas, res_path)
    release_inflect_tests(inflect, res_path)
180
+
181
+
182
def release_numb_tests():
    """Export number-word test sets into ``<ROOT>/Numb``.

    Builds main, lemma and inflection cases from the pre-generated
    ``numb_data`` table, skipping the 'nar_end' and 'lemma' entries.
    """
    res_path = os.path.join(ROOT, 'Numb')
    main = []
    inflect = []
    lemmas = []
    for val in numb_data['numbers']:
        # NOTE(review): n_el is created but never appended or written —
        # apparently dead code left over from the XML-emitting variant
        # (cf. __release_numbers_xml__); confirm before removing.
        n_el = etree.Element("N")
        n_el.set('v', str(val))
        for tp in numb_data['numbers'][val]:
            if tp == 'nar_end' or tp == 'lemma':
                continue

            # items: list of (text, class index) pairs; the first one is
            # taken as the lemma form.
            items = numb_data['numbers'][val][tp]
            lemma, _ = items[0]
            for text, index in items:
                main.append(dict(src=text, y=index))
                lemmas.append(dict(
                    x_src=text,
                    main_cls=index,
                    y_src=lemma
                ))

            # Keep only the first occurrence of each class index.
            un_cls_ids = []
            rez_items = []
            for item in items:
                if item[1] in un_cls_ids:
                    continue

                un_cls_ids.append(item[1])
                rez_items.append(item)
            items = rez_items

            # Pair forms as inflection cases.
            # NOTE(review): same suspicious `- 2` / `- 1` bounds and
            # `range(i, ...)` start as in release_dictionary_tests —
            # the last forms never appear and self-pairs (j == i) are
            # emitted; confirm intent.
            for i in range(0, len(items) - 2):
                main_text, main_index = items[i]
                for j in range(i, len(items) - 1):
                    to_text, to_index = items[j]
                    inflect.append(dict(
                        x_src=main_text,
                        x_cls=main_index,
                        y_src=to_text,
                        y_cls=to_index,
                        id=f"{val}{tp}"
                    ))

    main = merge_same_main(main)
    release_main_tests(main, res_path)
    release_lemma_tests(lemmas, res_path)
    release_inflect_tests(inflect, res_path)
230
+
231
+
232
def release_nar_numb_tests():
    """Export tests for numerals written with endings (e.g. "2-й")
    into ``<ROOT>/NarNumb``."""
    res_path = os.path.join(ROOT, 'NarNumb')
    main = []
    inflect = []
    lemmas = []
    for val in numb_data['numbers']:
        # NOTE(review): n_el is never appended or written — apparently
        # dead code; confirm before removing.
        n_el = etree.Element("N")
        n_el.set('v', str(val))
        # items: mapping class index -> ending text for this value.
        items = numb_data['numbers'][val]['nar_end']
        # The lemma's class index comes from the first 'p' entry.
        lemma_id = numb_data['numbers'][val]['p'][0][1]
        lemma = f"{val}-{items[lemma_id]}"
        for index in items:
            text = f"{val}-{items[index]}"
            main.append(dict(src=text, y=index))
            lemmas.append(dict(
                x_src=text,
                main_cls=index,
                y_src=lemma
            ))

        # Pair forms as inflection cases.
        # NOTE(review): same suspicious `- 2` / `- 1` bounds and
        # `range(i, ...)` start as in the other release_* builders;
        # confirm intent.
        ids = list(items.keys())
        for i in range(0, len(ids) - 2):
            main_index = ids[i]
            main_text = items[main_index]
            main_text = f"{val}-{main_text}"
            for j in range(i, len(items) - 1):
                to_index = ids[j]
                to_text = items[to_index]
                to_text = f"{val}-{to_text}"
                inflect.append(dict(
                    x_src=main_text,
                    x_cls=main_index,
                    y_src=to_text,
                    y_cls=to_index,
                    id=f"{val}nar"
                ))

    main = merge_same_main(main)
    release_main_tests(main, res_path)
    release_lemma_tests(lemmas, res_path)
    release_inflect_tests(inflect, res_path)
273
+
274
+
275
def release_reg_tests():
    """Export tests for regex-recognized tokens (punctuation, integers,
    roman numerals, unknown words) into ``<ROOT>/Reg``."""
    res_path = os.path.join(ROOT, 'Reg')
    int_tag = None
    romn_tag = None
    unkn_tag = None
    punct_tag = None

    # Find the class index for each special tag by substring match on the
    # tag template.
    for tag in tags:
        if 'int' in tag:
            int_tag = tags[tag]['i']
        elif 'romn' in tag:
            romn_tag = tags[tag]['i']
        elif 'unkn' in tag:
            unkn_tag = tags[tag]['i']
        elif 'punct' in tag:
            punct_tag = tags[tag]['i']

    main = []
    puncts = ['.', ',', '?', '!', '_', '"', '(', ')', ':', ';', '-']
    for p in puncts:
        main.append(dict(src=p, y=punct_tag))

        # Also test a random run of 2-5 punctuation characters.
        text = [p]
        for _ in range(1, RANDOM.randint(2, 5)):
            text.append(puncts[RANDOM.randint(0, len(puncts)-1)])

        text = ''.join(text)
        main.append(dict(src=text, y=punct_tag))

    # 100 random integers.
    for i in range(100):
        val = RANDOM.randint(0, 1000000)
        main.append(dict(src=str(val), y=int_tag))

    # Roman numerals, lower- and upper-case.
    roms = ['i', 'iii', 'iv', 'iv', 'c', 'd', 'm', 'md', 'mi']
    for rom in roms:
        main.append(dict(src=rom, y=romn_tag))
        main.append(dict(src=rom.upper(), y=romn_tag))

    # Words the system should classify as unknown.
    unkn = ['test', 'sdasdas', 'home']
    for v in unkn:
        main.append(dict(src=v, y=unkn_tag))
        main.append(dict(src=v.upper(), y=unkn_tag))

    # Regex tokens are their own lemma and only "inflect" to themselves.
    lemmas = [dict(x_src=item['src'], main_cls=item['y'], y_src=item['src']) for item in main]
    inflect = [dict(x_src=item['src'], x_cls=item['y'], y_src=item['src'], y_cls=item['y'], id=item['src']) for item in main]

    main = merge_same_main(main)
    release_main_tests(main, res_path)
    release_lemma_tests(lemmas, res_path)
    release_inflect_tests(inflect, res_path)
325
+
326
+
327
+ release_reg_tests()
328
+ release_nar_numb_tests()
329
+ release_numb_tests()
330
+ release_dictionary_tests()
331
+ release_nn_tests()
12_release_model.py ADDED
@@ -0,0 +1,316 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ import pickle
4
+ from shutil import copyfile
5
+
6
+ from xml.etree.ElementTree import ElementTree
7
+ from lxml import etree
8
+
9
+ from model import RNN
10
+ from utils import CONFIG
11
+ from tester import Tester
12
+
13
+
14
class Releaser:
    """Publishes every artifact of a trained DeepMorphy model.

    Copies the frozen TensorFlow graph, writes the accompanying XML
    metadata (model description, grammemes, numbers, tags), regenerates
    the markdown documentation and records test metrics and dataset info.
    All target locations come from the project ``CONFIG``; each artifact
    may be published to several paths.
    """

    def __init__(self):
        self.config = CONFIG
        self.dataset_path = self.config['dict_path']
        self.model_key = self.config['model_key']
        self.chars = self.config['chars']
        self.gram_types = self.config['grammemes_types']
        self.rnn = RNN(True)
        self.tester = Tester()
        self.pd_publish_paths = [
            os.path.join(path, f"frozen_model_{self.model_key}.pb")
            for path in self.config['publish_net_paths']
        ]
        self.xml_publish_paths = [
            os.path.join(path, f"release_{self.model_key}.xml")
            for path in self.config['publish_net_paths']
        ]
        self.xml_gram_paths = [
            os.path.join(path, "grams.xml")
            for path in self.config['publish_gramm_paths']
        ]
        self.xml_numbers_paths = [
            os.path.join(path, "numbers.xml")
            for path in self.config['publish_numbers_paths']
        ]
        self.xml_tags_paths = [
            os.path.join(path, "tags.xml")
            for path in self.config['publish_tags_paths']
        ]
        self.test_result_paths = [
            os.path.join(path, "test_info.txt")
            for path in self.config['test_results_paths']
        ]
        self.publish_dataset_info_paths = [
            os.path.join(path, "dataset_info.txt")
            for path in self.config['publish_dataset_info_paths']
        ]
        self.public_inflect_templates_paths = [
            os.path.join(path, "inflect_templates.xml")
            for path in self.config['public_inflect_templates_paths']
        ]
        # main class tuple -> index, and its reverse as a readable string.
        self.classes_dic = self.config['main_classes']
        self.rev_classes_dic = {
            # renamed inner variable: the original comprehension shadowed
            # the outer `key`, which worked but obscured intent
            self.classes_dic[key]: ",".join(part for part in key if part is not None)
            for key in self.classes_dic
        }
        with open(CONFIG['tags_path'], 'rb') as f:
            self.tags = pickle.load(f)

        with open(CONFIG['numb_data_path'], 'rb') as f:
            self.numb_data = pickle.load(f)

        with open(self.config['inflect_templates_path'], 'rb') as f:
            self.inflect_templates = pickle.load(f)

    def release_model(self):
        """Freeze the network, copy it to every publish path and emit all
        accompanying metadata, docs and metrics."""
        pd_release_path, gram_ops, out_ops = self.rnn.release()
        for path in self.pd_publish_paths:
            copyfile(pd_release_path, path)

        self.__release_test_metrics__()
        self.__release_numbers_xml__()
        self.__release_gramm_docs__()
        self.__release_inflect_docs__()
        self.__release_grams_xml__()
        self.__release_tags_xml__()
        self.__release_dataset_info__()
        self.__release_model_xml__(out_ops, gram_ops)

    def __release_test_metrics__(self):
        """Run the tester and save its textual report."""
        results = self.tester.test()
        for path in self.test_result_paths:
            with open(path, 'w+') as f:
                f.write(results)

    def __release_model_xml__(self, out_ops, gram_ops):
        """Write the model description XML: graph op names, the character
        vocabulary, per-grammeme op names and the inflection templates."""
        root = etree.Element('Root')
        for key in out_ops:
            root.set(key, out_ops[key])

        chars_el = etree.Element('Chars')
        chars_el.set("start_char", str(self.config['start_token']))
        chars_el.set("end_char", str(self.config['end_token']))
        for index, value in enumerate(self.chars):
            char_el = etree.Element("Char")
            char_el.set('index', str(index))
            char_el.set('value', value)
            chars_el.append(char_el)
        root.append(chars_el)

        grams_el = etree.Element('Grams')
        for gram in self.gram_types:
            gram_el = etree.Element("G")
            gram_el.set('key', gram)
            gram_el.set('op', gram_ops[gram]['prob'])
            grams_el.append(gram_el)
        root.append(grams_el)

        # <Im> is a lemma (main) template, its <I> children the class
        # indices it can be inflected into.
        inflect_el = etree.Element("Inflect")
        for main_key in self.inflect_templates:
            temp_el = etree.Element("Im")
            temp_el.set('i', str(self.classes_dic[main_key]))
            inflect_el.append(temp_el)
            for form in self.inflect_templates[main_key]:
                form_el = etree.Element("I")
                form_el.set('i', str(self.classes_dic[form]))
                temp_el.append(form_el)
        root.append(inflect_el)

        tree = ElementTree(root)
        for path in self.xml_publish_paths:
            with open(path, 'wb+') as f:
                tree.write(f, xml_declaration=True, encoding='utf-8')

        logging.info("Model released")

    def __release_grams_xml__(self):
        """Write grams.xml: every grammeme category with its classes;
        the 'post' category additionally includes the dictionary-only and
        other (non-network) part-of-speech types."""
        nn_types = self.config['grammemes_types']
        dict_post_types = self.config['dict_post_types']
        other_types = self.config['other_post_types']
        root = etree.Element('Grams')
        for gram in nn_types:
            gram_el = etree.Element("G")
            gram_el.set('index', str(nn_types[gram]['index']))
            gram_el.set('key_en', gram)
            gram_el.set('key_ru', nn_types[gram]['key_ru'])
            root.append(gram_el)
            gr_dic = nn_types[gram]['classes']
            for key_en in gr_dic:
                item = gr_dic[key_en]
                cls_el = etree.Element("C")
                cls_el.set('key_en', key_en)
                cls_el.set('key_ru', str(item['key_ru']))
                cls_el.set('nn_index', str(item['index']))
                gram_el.append(cls_el)

            if gram == "post":
                # Dictionary/other POS classes have no network index.
                for key_en in dict_post_types:
                    item = dict_post_types[key_en]
                    cls_el = etree.Element("C")
                    cls_el.set('key_en', key_en)
                    cls_el.set('key_ru', str(item['key_ru']))
                    gram_el.append(cls_el)

                for key_en in other_types:
                    item = other_types[key_en]
                    cls_el = etree.Element("C")
                    cls_el.set('key_en', key_en)
                    cls_el.set('key_ru', str(item['key_ru']))
                    gram_el.append(cls_el)

        tree = ElementTree(root)
        for path in self.xml_gram_paths:
            with open(path, 'wb+') as f:
                tree.write(f, xml_declaration=True, encoding='utf-8')

    def __release_numbers_xml__(self):
        """Write numbers.xml from the pre-generated number-word data:
        the recognition regex, lemma class ids, the word forms per numeric
        value and the 'nar_end' endings."""
        root = etree.Element('NumbData')
        root.set("reg", self.numb_data['regex'])
        root.set("l", ','.join(str(i) for i in self.numb_data['lemma_cls_ids']))
        for val in self.numb_data['numbers']:
            n_el = etree.Element("N")
            n_el.set('v', str(val))
            for tp in self.numb_data['numbers'][val]:
                if tp == 'nar_end' or tp == 'lemma':
                    continue

                for tpl in self.numb_data['numbers'][val][tp]:
                    w_el = etree.Element("W")
                    w_el.set('t', tpl[0])
                    w_el.set('i', str(tpl[1]))
                    w_el.set('k', tp)
                    n_el.append(w_el)

            nar_ends = self.numb_data['numbers'][val]['nar_end']
            for cls in nar_ends:
                w_el = etree.Element("E")
                w_el.set('t', nar_ends[cls])
                w_el.set('i', str(cls))
                n_el.append(w_el)

            root.append(n_el)

        tree = ElementTree(root)
        for path in self.xml_numbers_paths:
            with open(path, 'wb+') as f:
                tree.write(f, xml_declaration=True, encoding='utf-8')

    def __release_tags_xml__(self):
        """Write tags.xml: index 'i', grammeme values 'v', op key 'p',
        order 'o' and the lemma flag 'l' for every tag template."""
        root = etree.Element('Tags')
        for tag in self.tags:
            val = self.tags[tag]
            cls_el = etree.Element("T")
            cls_el.set('i', str(val['i']))
            cls_el.set('v', ",".join(key if key is not None else '' for key in tag))
            cls_el.set('p', val['p'])
            cls_el.set('o', str(val['o']))

            if val['l']:
                cls_el.set('l', '1')
            root.append(cls_el)

        tree = ElementTree(root)
        for path in self.xml_tags_paths:
            with open(path, 'wb+') as f:
                tree.write(f, xml_declaration=True, encoding='utf-8')

    def __release_dataset_info__(self):
        """Record the source dictionary's version/revision attributes."""
        doc = etree.iterparse(self.dataset_path, events=('start', 'end'))
        itr = iter(doc)
        event, element = next(itr)
        # BUG FIX: the loop body used to be `pass`, never advancing the
        # iterator — an infinite loop whenever the first parse event is
        # not the <dictionary> start tag.
        while not (event == 'start' and element.tag == 'dictionary'):
            event, element = next(itr)

        version = element.attrib['version']
        revision = element.attrib['revision']
        for path in self.publish_dataset_info_paths:
            with open(path, 'w+') as f:
                f.write(f"dictionary\nversion={version}\nrevision={revision}")

    def __release_gramm_docs__(self):
        """Regenerate the markdown documentation of supported grammatical
        categories and grammemes."""
        # Russian spelling fixed in the generated headers
        # ("Поддерживамые" -> "Поддерживаемые",
        #  "грамматичеких" -> "грамматических").
        mds = [
            "# Поддерживаемые грамматические категории и граммемы",
            "В DeepMorphy используется слегка измененное подмножество граммем и грамматических категорий из словарей [OpenCorpora](http://opencorpora.org/dict.php?act=gram)."
        ]

        for gram_cat_key in self.gram_types:
            gram_cat = self.gram_types[gram_cat_key]
            mds.append(f"- **{gram_cat['name'].capitalize()}** (ru='{gram_cat['key_ru']}', en='{gram_cat_key}') :")

            # The POS category also lists the non-network classes.
            classes = dict(gram_cat['classes'])
            if gram_cat_key == 'post':
                classes.update(self.config['dict_post_types'])
                classes.update(self.config['other_post_types'])

            for gram in classes:
                gram_obj = classes[gram]
                mds.append(f"  - {gram_obj['name_ru']} (ru='{gram_obj['key_ru']}',en='{gram}')")

        mds = "\n".join(mds)
        with open(self.config['publish_gram_doc_path'], 'w+') as f:
            f.write(mds)

    def __release_inflect_docs__(self):
        """Regenerate the markdown list of supported inflections, grouped
        by part of speech (and gender for nouns)."""
        # BUG FIX: the first header started with mojibake ("��писок");
        # restored to "Список".
        mds = [
            "# Список поддерживаемых словоизменений",
            "Словоизменение возможно только в рамках выделенных жирным категорий:"
        ]

        post_index = self.gram_types['post']['index']
        gndr_index = self.gram_types['gndr']['index']

        # English grammeme key -> Russian key, for readable output.
        en_ru_dict = {}
        for gram_cat in self.gram_types:
            for cls in self.gram_types[gram_cat]['classes']:
                cls_data = self.gram_types[gram_cat]['classes'][cls]
                en_ru_dict[cls] = cls_data['key_ru']

        def create_tag_text(tag):
            # One bullet line listing the tag's Russian grammeme keys.
            tag_text = [en_ru_dict[key] for key in list(tag) if key is not None]
            tag_text = ",".join(tag_text)
            return f"  - {tag_text}"

        for main_tpl in sorted(self.inflect_templates):
            post = main_tpl[post_index]
            gndr = main_tpl[gndr_index]
            if post == "infn":
                header_text = "Глаголы и глагольные формы"
            elif post == "adjf":
                header_text = "Прилагательные"
            elif post == "noun" and gndr == 'masc':
                header_text = "Существительные мужского рода"
            elif post == "noun" and gndr == 'femn':
                header_text = "Существительные женского рода"
            elif post == "noun" and gndr == 'neut':
                header_text = "Существительные среднего рода"
            elif post == "noun" and gndr == 'msf':
                header_text = "Существительные общего рода"
            else:
                # BUG FIX: was `raise NotImplemented()` — NotImplemented
                # is not callable, so this raised TypeError instead of
                # the intended exception.
                raise NotImplementedError()

            mds.append(f"- **{header_text}**:")
            items = [(item, self.tags[item]['o']) for item in self.inflect_templates[main_tpl]]
            items.append((main_tpl, self.tags[main_tpl]['o']))
            tags = sorted(items, key=lambda x: x[1], reverse=True)
            for tag in tags:
                mds.append(create_tag_text(tag[0]))

        mds = "\n".join(mds)
        with open(self.config['publish_inflect_doc_path'], 'w+') as f:
            f.write(mds)

    @staticmethod
    def __build_bad_words__(tester):
        """Dump the words the tester got wrong to wrong_words.pkl."""
        words = tester.get_bad_words()
        logging.info(f"Wrong words count {len(words)}")
        with open(os.path.join("wrong_words.pkl"), 'wb+') as f:
            pickle.dump(words, f)
312
+
313
+
314
if __name__ == "__main__":
    # Script entry point: build and publish every release artifact.
    releaser = Releaser()
    releaser.release_model()
1_load_dictionary.py ADDED
@@ -0,0 +1,217 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import pickle
3
+ from tqdm import tqdm
4
+ from lxml import etree
5
+ from utils import get_grams_info, CONFIG
6
+
7
+
8
+ DIC_PATH = CONFIG['dict_path']
9
+ MAX_WORD_SIZE = CONFIG['max_word_size']
10
+ DATASET_WORDS_PATH = CONFIG['dataset_words_path']
11
+ DICTS_WORDS_PATH = CONFIG['dict_words_path']
12
+ DICT_POST_TYPES = CONFIG['dict_post_types']
13
+ LEMMAS_PROPS = CONFIG['lemma_same_word']
14
+ AD_TAGS = CONFIG['ad_tags']
15
+ SRC_CONVERT, _ = get_grams_info(CONFIG)
16
+ i = 0
17
+
18
+
19
def parse_words(itr):
    """Yield one word record per <lemma> element of an OpenCorpora stream.

    *itr* is an lxml iterparse iterator positioned just after the opening
    <lemmata> tag; iteration stops at the closing </lemmata>.  Each yielded
    record is ``{'id', 'lemma', 'forms'}`` where ``lemma`` is the <l> entry
    and ``forms`` the list of <f> entries.  Every entry gets its surface
    'text', a globally unique 'index', grammeme keys mapped through
    SRC_CONVERT, and an optional 'ad_tags' list of auxiliary tags (AD_TAGS).
    """
    # Module-level counter: keeps form indices unique across calls.
    global i

    cur_word = None
    cur_item = None
    event, element = next(itr)

    while not (event == 'end' and element.tag == 'lemmata'):

        if event == 'start' and element.tag == 'lemma':
            cur_word = {
                'id': element.attrib['id'],
                'lemma': None,
                'forms': []
            }

        # <l> (lemma form) and <f> (inflected form) both open a new entry.
        if event == 'start' and (element.tag == 'l' or element.tag == 'f'):
            cur_item = {'text': None, 'index': i}
            i += 1

        if event == 'end' and element.tag == 'l':
            cur_item['text'] = element.attrib['t']
            cur_word['lemma'] = cur_item
            cur_item = None

        # <g> carries one grammeme: either a known class (SRC_CONVERT) or an
        # auxiliary tag collected into the entry's 'ad_tags' list.
        if event == 'end' and element.tag == 'g' and element.attrib['v'].lower() in SRC_CONVERT:
            src_key = element.attrib['v'].lower()
            gram_type, gram = SRC_CONVERT[src_key]
            cur_item[gram_type] = gram
        elif event == 'end' and element.tag == 'g' \
                and element.attrib['v'].lower() in AD_TAGS \
                and 'ad_tags' not in cur_item:
            cur_item['ad_tags'] = [element.attrib['v'].lower()]
        elif event == 'end' and element.tag == 'g' and element.attrib['v'].lower() in AD_TAGS:
            cur_item['ad_tags'].append(element.attrib['v'].lower())

        if event == 'end' and element.tag == 'f':
            cur_item['text'] = element.attrib['t']
            cur_word['forms'].append(cur_item)
            cur_item = None

        if event == 'end' and element.tag == 'lemma':
            yield cur_word
            cur_word = None

        event, element = next(itr)
65
+
66
+
67
def get_flat_words(words):
    """Flatten parsed word records into one dict per inflected form.

    Each yielded dict starts from the record's lemma entry (annotated with
    'lemma' = lemma text and 'id' = record id) and is overridden by the
    form's own keys; a list-valued 'ad_tags' is joined into a
    comma-separated string.  The lemma entry itself is mutated in place.
    """
    for record in words:
        base = record['lemma']
        base['lemma'] = base['text']
        base['id'] = record['id']

        for form in record['forms']:
            flat = {**base, **form}
            if 'ad_tags' in flat:
                flat['ad_tags'] = ','.join(flat['ad_tags'])
            yield flat
81
+
82
+
83
def parse_link_types(itr):
    """Read the <link_types> section and return {type name: type id}."""
    mapping = {}

    # Advance until the section opens.
    for event, element in itr:
        if event == 'start' and element.tag == 'link_types':
            break

    # Collect every closed <type> element until the section closes.
    for event, element in itr:
        if event == 'end' and element.tag == 'link_types':
            break
        if event == 'end' and element.tag == 'type':
            mapping[element.text] = element.attrib['id']

    return mapping
97
+
98
+
99
def parse_links(itr):
    """Yield {'from', 'to', 'type'} for each closed <link> until </links>."""
    for event, element in itr:
        if event == 'end' and element.tag == 'links':
            break
        if event == 'end' and element.tag == 'link':
            attrs = element.attrib
            yield {
                'from': attrs['from'],
                'to': attrs['to'],
                'type': attrs['type']
            }
109
+
110
+
111
def set_lemma_and_inflect_id(words, link_types, links):
    """Resolve each word's lemma text and paradigm id ('inflect_id') in place.

    words:      flat word dicts from get_flat_words (mutated in place).
    link_types: {link type name: link type id} from parse_link_types.
    links:      link dicts from parse_links.
    Words whose lemma cannot be resolved lose their 'lemma' key.
    """
    same_inflect_id_post = ['noun', 'adjf', 'infn']
    # word id -> lemma text, for every form that itself is a lemma form.
    lemmas_dict = {}
    for word in words:
        # A form counts as its own lemma when it matches one of the
        # normal-form grammeme patterns from config (LEMMAS_PROPS).
        for norm_f in LEMMAS_PROPS:
            is_lemma = True
            for key in norm_f:
                if key in word and word[key] != norm_f[key]:
                    is_lemma = False
                    break

            if is_lemma:
                lemmas_dict[word['id']] = word['text']
                del word['lemma']
                break

        # Nouns/adjectives/infinitives head their own inflection paradigm.
        if 'lemma' in word and word['post'] in same_inflect_id_post:
            word['inflect_id'] = word['id']

    inv_link_type_dict = {
        link_types[key]: key
        for key in link_types
    }

    # (target word id, link type name) -> source word id, for O(1) lookup.
    links = {
        (link['to'], inv_link_type_dict[link['type']]): link['from']
        for link in links
    }

    # Derived parts of speech inherit lemma/paradigm through dictionary links.
    prtf_dict = {}
    for word in words:
        link_type = None
        if word['post'] == 'verb':
            link_type = 'INFN-VERB'
        elif word['post'] == 'prtf':
            link_type = 'INFN-PRTF'
        elif word['post'] == 'grnd':
            link_type = 'INFN-GRND'
        elif word['post'] == 'adjs':
            link_type = 'ADJF-ADJS'
        elif word['post'] == 'comp':
            link_type = 'ADJF-COMP'

        if not link_type:
            continue

        key = (word['id'], link_type)
        if key in links and links[key] in lemmas_dict:
            lemma = lemmas_dict[links[key]]
            word['lemma'] = lemma
            lemmas_dict[word['id']] = lemma
        else:
            del word['lemma']

        if key in links:
            word['inflect_id'] = links[key]

        # Remember full-participle paradigms; short participles reuse them below.
        if key in links and word['post'] == 'prtf':
            prtf_dict[word['id']] = word['inflect_id']

    # Short participles (prts) link to full participles (prtf), which were
    # resolved to their infinitive in the previous pass.
    for word in words:
        if word['post'] != 'prts':
            continue

        key = (word['id'], 'PRTF-PRTS')
        if key in links:
            lemma = lemmas_dict[links[key]]
            word['lemma'] = lemma
            word['inflect_id'] = prtf_dict[links[key]]
            lemmas_dict[word['id']] = lemma
        else:
            del word['lemma']
183
+
184
+
185
# --- Script body: stream the OpenCorpora XML and split its words into the
# --- embedded dictionary vs. the neural-network training dataset.
doc = etree.iterparse(DIC_PATH, events=('start', 'end'))
itr = iter(doc)
event, element = next(itr)
logging.info("Parsing dictionary xml")
# Skip forward to the <lemmata> section.
while not (event == 'start' and element.tag == 'lemmata'):
    event, element = next(itr)

words = list(parse_words(itr))
link_types = parse_link_types(itr)
links = list(parse_links(itr))
words = list(get_flat_words(words))
set_lemma_and_inflect_id(words, link_types, links)
# Deduplicate identical word dicts via hashable sorted-item tuples.
words = [dict(t) for t in {tuple(sorted(d.items())) for d in words}]
dict_words = [word for word in words if word['post'] in DICT_POST_TYPES]
dataset_words = [word for word in words if word['post'] not in DICT_POST_TYPES]
# Group trainable words by surface text, dropping those that don't fit the
# fixed input size.
dataset_words_dic = {}
for word in tqdm(dataset_words):
    w_len = len(word['text'])
    if w_len > MAX_WORD_SIZE:
        continue

    if word['text'] not in dataset_words_dic:
        dataset_words_dic[word['text']] = []
    dataset_words_dic[word['text']].append(word)

logging.info(f"Dict words: {len(dict_words)}")
logging.info(f"Dataset words: {len(dataset_words_dic)}")

with open(DATASET_WORDS_PATH, 'wb+') as f:
    pickle.dump(dataset_words_dic, f)

with open(DICTS_WORDS_PATH, 'wb+') as f:
    pickle.dump(dict_words, f)
2_vectorize.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pickle
2
+ import numpy as np
3
+ from tqdm import tqdm
4
+ from utils import CONFIG
5
+
6
+ CHARS = CONFIG['chars']
7
+ END_TOKEN = CONFIG['end_token']
8
+ MAX_WORD_SIZE = CONFIG['max_word_size']
9
+ WORDS_PATH = CONFIG['dataset_words_path']
10
+ VECT_PATH = CONFIG['vect_words_path']
11
+ CHARS_INDEXES = {c: index for index, c in enumerate(CHARS)}
12
+
13
+
14
def vectorize_text(text):
    """Encode *text* as a fixed-size int32 vector of character indices.

    Positions past the word are filled with END_TOKEN; characters outside
    the alphabet map to the "UNDEFINED" index.  Returns (vector, len(text)).
    """
    unknown = CHARS_INDEXES["UNDEFINED"]
    vect = np.full((MAX_WORD_SIZE,), END_TOKEN, dtype=np.int32)
    for pos, ch in enumerate(text):
        vect[pos] = CHARS_INDEXES[ch] if ch in CHARS else unknown
    return vect, len(text)
24
+
25
+
26
def vectorize_words(words_dic):
    """Map every word to {'vect': vectorize_text(word), 'forms': its forms}."""
    return {
        word: {'vect': vectorize_text(word), 'forms': forms}
        for word, forms in tqdm(words_dic.items(), desc="Vectorizing words")
    }
35
+
36
+
37
# --- Script body: load the raw training words, vectorize them and persist
# --- the result for the downstream dataset builders.
with open(WORDS_PATH, 'rb') as f:
    words_dic = pickle.load(f)

vec_words = vectorize_words(words_dic)

with open(VECT_PATH, 'wb+') as f:
    pickle.dump(vec_words, f)
3_cls_dataset.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import pickle
3
+ import numpy as np
4
+ from tqdm import tqdm
5
+ from sklearn.preprocessing import normalize
6
+ from collections import defaultdict
7
+ from utils import get_grams_info, CONFIG, save_dataset
8
+
9
+
10
+ VECT_PATH = CONFIG['vect_words_path']
11
+ CLS_CLASSES_PATH = CONFIG['cls_classes_path']
12
+ GRAMMEMES_TYPES = CONFIG['grammemes_types']
13
+ SRC_CONVERT, CLASSES_INDEXES = get_grams_info(CONFIG)
14
+
15
+
16
def generate_dataset(vec_words, cls_type, cls_dic):
    """Build and save the classification dataset for one grammeme category.

    vec_words: {word: {'vect': (vector, length), 'forms': [form dicts]}}.
    cls_type:  the grammeme category key (e.g. 'post', 'case' or 'main').
    cls_dic:   {class value: class index} for this category.
    Items are grouped under the last class seen for each word and saved via
    save_dataset under the *cls_type* name.
    """
    ordered_keys = [cls for cls in sorted(cls_dic, key=lambda cls: cls_dic[cls])]

    # Count how often every class occurs across all word forms.
    weights = [0 for key in ordered_keys]
    for word in tqdm(vec_words, desc=f"Calculating {cls_type} weights"):
        for form in vec_words[word]['forms']:
            if cls_type in form:
                i = cls_dic[form[cls_type]]
                weights[i] = weights[i] + 1

    # Rare classes end up with weights close to 1, frequent ones close to 0.
    weights = normalize(np.asarray(weights).reshape(1, -1))
    weights = np.ones((len(ordered_keys),)) - weights

    rez_items = defaultdict(list)
    cur_cls = None
    for word in tqdm(vec_words, desc=f"Generating classification {cls_type} dataset"):
        # Multi-hot target: a word may belong to several classes at once.
        # FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24; it
        # was an alias for the builtin int, which keeps the same dtype.
        y = np.zeros((len(ordered_keys),), dtype=int)
        has_classes = False
        for form in vec_words[word]['forms']:
            if cls_type in form:
                cur_cls = form[cls_type]
                index = cls_dic[cur_cls]
                y[index] = 1
                has_classes = True

        if has_classes:
            rez_items[cur_cls].append({
                'src': word,
                'x': vec_words[word]['vect'],
                'y': y,
                # Weight of the rarest class the word belongs to.
                'weight': weights.reshape(-1, 1)[y == 1].max()
            })

    save_dataset(rez_items, cls_type)
51
+
52
+
53
def generate_all(vec_words):
    """Generate every classification dataset and assign main classes.

    Runs generate_dataset for each configured grammeme category, then tags
    every form with its full grammeme tuple ('main'), persists the updated
    vec_words and builds the 'main' dataset plus its class mapping.
    """
    for cls_type in CLASSES_INDEXES:
        cls_dic = CLASSES_INDEXES[cls_type]
        generate_dataset(vec_words, cls_type, cls_dic)

    # Distinct tuples in first-seen order.  FIX: the original kept them in a
    # list and ran `tpl not in un_classes` per form — an accidental O(n)
    # scan per form; an insertion-ordered dict gives the same numbering with
    # O(1) membership.
    un_classes = {}
    for word in tqdm(vec_words, desc="Setting main class"):
        for form in vec_words[word]['forms']:
            tpl = tuple(
                form[key] if key in form else None
                for key in GRAMMEMES_TYPES
            )
            if tpl not in un_classes:
                un_classes[tpl] = len(un_classes)
            form['main'] = tpl

    with open(VECT_PATH, 'wb+') as f:
        pickle.dump(vec_words, f)

    cls_dic = dict(un_classes)
    generate_dataset(vec_words, 'main', cls_dic)
    print(f"Main classes count: {len(cls_dic)}")
    with open(CLS_CLASSES_PATH, 'wb+') as f:
        pickle.dump(cls_dic, f)
80
+
81
+
82
# --- Script body: load the vectorized words and generate every
# --- classification dataset (also writes the main-class mapping to disk).
with open(VECT_PATH, 'rb') as f:
    vwords = pickle.load(f)

generate_all(vwords)
4_lemma_dataset.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pickle
2
+ from tqdm import tqdm
3
+ from collections import defaultdict
4
+ from utils import CONFIG, save_dataset, save_dictionary_items
5
+
6
+
7
+ MIN_WORD_SIZE = CONFIG['min_word_size']
8
+ PREFIX_FILTER_LENGTH = CONFIG['prefix_filter_length']
9
+ VECT_PATH = CONFIG['vect_words_path']
10
+ CLS_CLASSES_PATH = CONFIG['cls_classes_path']
11
+ DICT_WORDS_PATH = CONFIG['dics_path']
12
+
13
+
14
def generate(vec_words, main_cls_dic):
    """Build the lemmatization dataset and the word->lemma exception list.

    vec_words:    {word: {'vect': (vector, length), 'forms': [...]}}.
    main_cls_dic: {main grammeme tuple: class index}.
    Pairs whose lemma shares the word's prefix (modulo ё/е) become seq2seq
    training items grouped by main class; pairs with a different prefix are
    routed to the released word dictionary instead (comparatives excepted).
    """
    dict_words = []
    rez_dict = defaultdict(list)
    for word in tqdm(vec_words, desc="Generating lemma dataset"):
        dic = vec_words[word]
        x_vec = dic['vect']

        for form in dic['forms']:
            main_cls = main_cls_dic[form['main']]

            if 'lemma' in form:
                word_y = form['lemma']
            else:
                continue

            # The lemma must itself be a known word and both sides must be
            # long enough to train on.
            if word_y not in vec_words \
                    or MIN_WORD_SIZE > len(word_y) \
                    or MIN_WORD_SIZE > len(word):
                continue

            # Differing prefixes mean an irregular pair: ship it with the
            # dictionary rather than the training data.  'comp' forms are
            # kept in training even with differing prefixes.
            if word_y[:PREFIX_FILTER_LENGTH] != word[:PREFIX_FILTER_LENGTH] \
                    and word_y[:PREFIX_FILTER_LENGTH].replace('ё', 'е') != word[:PREFIX_FILTER_LENGTH].replace('ё', 'е')\
                    and form['post'] != 'comp':
                dict_words.append(dict(
                    text=word,
                    text_y=word_y,
                    main=main_cls,
                    id=form['inflect_id']
                ))
                continue

            y_vec = vec_words[word_y]['vect']
            items = rez_dict[main_cls]
            items.append({
                'id': form['inflect_id'],
                'x_src': word,
                'x': x_vec[0],
                'x_len': x_vec[1],
                'y_src': word_y,
                'y': y_vec[0],
                'y_len': y_vec[1],
                'main_cls': main_cls
            })
            rez_dict[main_cls] = items

    save_dataset(rez_dict, 'lemma')
    save_dictionary_items(dict_words, 'lemma')
62
+
63
+
64
# --- Script body: load the vectorized words and main-class mapping, then
# --- emit the lemma dataset and the exception dictionary.
with open(VECT_PATH, 'rb') as f:
    vwords = pickle.load(f)

with open(CLS_CLASSES_PATH, 'rb') as f:
    cls_dic = pickle.load(f)

generate(vwords, cls_dic)
5_inflect_dataset.py ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pickle
2
+ from tqdm import tqdm
3
+
4
+ from utils import CONFIG, save_dataset
5
+
6
+
7
+ MIN_WORD_SIZE = CONFIG['min_word_size']
8
+ PREFIX_FILTER_LENGTH = CONFIG['prefix_filter_length']
9
+ VECT_PATH = CONFIG['vect_words_path']
10
+ CLS_CLASSES_PATH = CONFIG['cls_classes_path']
11
+ GRAMMEMES_TYPES = CONFIG['grammemes_types']
12
+ TEMPLATES_PATH = CONFIG['inflect_templates_path']
13
+ IGNORE_TAGS = CONFIG['inflect_ignore_tags']
14
+
15
+
16
def create_forms_dict(vect_words):
    """Group word forms by inflection paradigm.

    Forms without 'inflect_id' are paradigm roots (keyed by their own 'id');
    all other forms join the 'items' list of their paradigm.  Only paradigms
    whose root is a noun, infinitive or full adjective are kept.
    """
    groups = {}
    for word in vect_words:
        for form in vect_words[word]['forms']:
            is_root = 'inflect_id' not in form
            key = form['id'] if is_root else form['inflect_id']
            entry = groups.setdefault(key, {'root': None, 'items': []})
            if is_root:
                entry['root'] = form
            else:
                entry['items'].append(form)

    return {
        key: entry
        for key, entry in groups.items()
        if entry['root'] is not None and entry['root']['post'] in ('noun', 'infn', 'adjf')
    }
37
+
38
+
39
def create_templates(forms_dict):
    """Map each root's 'main' tag tuple to the set of its forms' tuples."""
    templates = {}
    for entry in forms_dict.values():
        bucket = templates.setdefault(entry['root']['main'], set())
        for form in entry['items']:
            bucket.add(form['main'])
    return templates
51
+
52
+
53
def generate_dataset(forms_dict, vect_words, cls_dic):
    """Build the inflection (root form -> inflected form) seq2seq dataset.

    forms_dict: paradigm groups from create_forms_dict.
    vect_words: {word text: {'vect': (vector, length), ...}}.
    cls_dic:    {main grammeme tuple: class index}.
    Each acceptable inflected form of every paradigm root becomes one
    training item grouped by target class; the result is persisted via
    save_dataset under the 'inflect' name.
    """
    rez_dict = {}
    for key in tqdm(forms_dict, desc="Generating dataset"):
        item = forms_dict[key]
        root = item['root']
        x_cls = cls_dic[root['main']]
        x, x_len = vect_words[root['text']]['vect']
        prefix_filter = root['text'][:PREFIX_FILTER_LENGTH]
        prefix_filter_e = prefix_filter.replace('ё', 'е')
        if MIN_WORD_SIZE > len(root['text']):
            continue

        form_dict = {}
        for form in item['items']:
            if MIN_WORD_SIZE > len(form['text']):
                continue

            # Forms carrying any of the configured ignore tags are dropped.
            if 'ad_tags' in form and any([tag for tag in IGNORE_TAGS if tag in form['ad_tags']]):
                continue

            # A form must share the root's prefix (modulo ё/е).
            if not (form['text'].startswith(prefix_filter) or
                    form['text'].replace('ё', 'е').startswith(prefix_filter_e)):
                continue

            y_cls = cls_dic[form['main']]
            # Several forms can share one target class; keep the one with
            # the smallest corpus index.
            if y_cls in form_dict and form_dict[y_cls]['index'] < form['index']:
                continue

            form_dict[y_cls] = form

        for y_cls in form_dict:
            form = form_dict[y_cls]
            y, y_len = vect_words[form['text']]['vect']
            if y_cls not in rez_dict:
                rez_dict[y_cls] = []

            rez_dict[y_cls].append(dict(
                id=form['inflect_id'],
                x_src=root['text'],
                x=x,
                x_cls=x_cls,
                x_len=x_len,
                y_src=form['text'],
                y=y,
                y_cls=y_cls,
                y_len=y_len
            ))

    save_dataset(rez_dict, 'inflect')
105
+
106
+
107
# --- Script body: load vectorized words and class ids, derive the paradigm
# --- groups and inflection templates, then emit the inflect dataset.
with open(VECT_PATH, 'rb') as f:
    vwords = pickle.load(f)

with open(CLS_CLASSES_PATH, 'rb') as f:
    cls_dic = pickle.load(f)

forms_dict = create_forms_dict(vwords)
templates = create_templates(forms_dict)
with open(TEMPLATES_PATH, 'wb+') as f:
    pickle.dump(templates, f)

generate_dataset(forms_dict, vwords, cls_dic)
6_train.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
# Entry point: build the model graph and run the configured training steps.
from model import RNN
# NOTE(review): the meaning of the boolean flag is defined in model.py — confirm.
rnn = RNN(False)
rnn.train()
7_build_numbers.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import yaml
2
+ import pickle
3
+ from utils import CONFIG, create_cls_tuple
4
+
5
+
6
+ VECT_PATH = CONFIG['vect_words_path']
7
+ CLS_CLASSES_PATH = CONFIG['cls_classes_path']
8
+ NMB_CLASSES_PATH = CONFIG['numb_classes_path']
9
+ NMB_DATA_PATH = CONFIG['numb_data_path']
10
+ SOGL_CHARS = ['б', 'в', 'г', 'д', 'ж', 'з', 'й', 'к', 'л', 'м', 'н', 'п', 'р', 'с', 'т', 'ф', 'х', 'ц', 'ч', 'ш', 'щ']
11
+ GLASN_CHARS = ['а', 'о', 'и', 'е', 'ё', 'э', 'ы', 'у', 'ю', 'я']
12
+
13
+
14
# Next free class id: continue numbering after the existing main classes.
with open(CLS_CLASSES_PATH, 'rb') as f:
    cur_classes_count = len(pickle.load(f)) + 1

# FIX: yaml.load without an explicit Loader is unsafe and raises TypeError
# under PyYAML >= 6; safe_load parses the same plain mapping structure.
with open('numb.yml') as f:
    numbr_src_dic = yaml.safe_load(f)
19
+
20
+
21
def get_nar_end(text):
    """Return the grapheme ending of *text* used for numeral contractions.

    The ending is the final character, or the final two characters when a
    consonant (SOGL_CHARS) immediately precedes a final vowel (GLASN_CHARS).

    FIX: a length guard prevents the IndexError the original raised on
    single-character input; still raises IndexError on empty input.
    """
    end = text[-1]
    if len(text) >= 2 and text[-2] in SOGL_CHARS and text[-1] in GLASN_CHARS:
        end = text[-2:]

    return end
27
+
28
+
29
# --- Build per-number word data, class ids for numeral tags and the regex
# --- used by the consumer to tokenize compound numerals.
lemma_cls_ids = set()   # class ids whose form acts as the numeral lemma
res_dict = {}           # number key -> {'nar_end': {...}, <form type>: [(text, cls), ...]}
numb_cls_dict = {}      # grammeme tuple -> class id (continues the main numbering)
for n_key in numbr_src_dic:
    n_key_data = {
        'nar_end': {}
    }
    res_dict[n_key] = n_key_data
    # FIX: removed unused locals lemma_text / lemma_number_text.
    for t in numbr_src_dic[n_key]:
        for index, item in enumerate(numbr_src_dic[n_key][t]):
            item['post'] = 'numb'
            cls_tpl = create_cls_tuple(item)
            if cls_tpl not in numb_cls_dict:
                numb_cls_dict[cls_tpl] = cur_classes_count
                cur_classes_count += 1

            cur_class = numb_cls_dict[cls_tpl]
            # The first listed form of a group supplies the lemma class.
            # FIX: the original tested `cls_tpl not in lemma_cls_ids` (a
            # tuple against a set of ints — always true); the intended and
            # equivalent check is on the class id itself.
            if index == 0 and cur_class not in lemma_cls_ids:
                lemma_cls_ids.add(cur_class)

            if t not in n_key_data:
                n_key_data[t] = []

            n_key_data[t].append((item['text'], cur_class))
            # Remember the ending used for contracted ordinals (e.g. "3-го").
            if t == 'p' and cur_class not in n_key_data['nar_end']:
                n_key_data['nar_end'][cur_class] = get_nar_end(item['text'])

# One named alternation group per number, assembled in reverse insertion
# order.  NOTE(review): '(?<name>...)' is .NET named-group syntax (Python
# needs '?P<name>'), so this regex is presumably consumed by the C# side —
# confirm before using it with the 're' module.
regex = []
for val in res_dict:
    cur_group = []
    for key in res_dict[val]:
        if key == 'nar_end' or key == 'lemma':
            continue

        for tpl in res_dict[val][key]:
            cur_group.append(tpl[0])

    # FIX: list(set(...)) depended on hash randomization and produced a
    # different alternation order on every run; dict.fromkeys deduplicates
    # deterministically in first-seen order.
    cur_group = list(dict.fromkeys(cur_group))
    cur_group = '|'.join(cur_group)
    cur_group = f'(?<_{val}>{cur_group})'
    regex.insert(0, cur_group)

regex = '|'.join(regex)
regex = f"^({regex})+$"


with open(NMB_CLASSES_PATH, 'wb+') as f:
    pickle.dump(numb_cls_dict, f)

with open(NMB_DATA_PATH, 'wb+') as f:
    pickle.dump({
        'regex': regex,
        'lemma_cls_ids': list(lemma_cls_ids),
        'numbers': res_dict
    }, f)
8_generate_tags.py ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pickle
2
+ from utils import CONFIG, create_cls_tuple
3
+
4
+
5
def set_order(items):
    """Assign each tag tuple in *items* a sequential display index 'o'.

    Grammeme keys are ranked by their position in the config (dictionary
    parts of speech first, then the other parts of speech, then every
    grammeme class); None slots sort last via the 1024 sentinel.  Tuples
    are ordered by their per-slot ranks and numbered from zero.
    """
    ranked = []
    ranked.extend(CONFIG['dict_post_types'])
    ranked.extend(CONFIG['other_post_types'])
    for gram_cat in GRAM_TYPES:
        ranked.extend(GRAM_TYPES[gram_cat]['classes'])
    rank_of = {gram: pos for pos, gram in enumerate(ranked)}

    def sort_key(tpl):
        return tuple(1024 if gram is None else rank_of[gram] for gram in tpl)

    for pos, tpl in enumerate(sorted(items, key=sort_key)):
        items[tpl]['o'] = pos
28
+
29
+
30
# --- Script body: merge class numberings from the classifier, the numeral
# --- builder and the dictionary into a single tag table.
GRAM_TYPES = CONFIG['grammemes_types']
with open(CONFIG['numb_classes_path'], 'rb') as f:
    numb_classes_dic = pickle.load(f)
with open(CONFIG['numb_data_path'], 'rb') as f:
    numb_data = pickle.load(f)

# Tag tuples whose lemma is the word itself.
lemma_same_words = []
for cls in CONFIG['lemma_same_word']:
    lemma_same_words.append(create_cls_tuple(cls))

# items: tag tuple -> {'i': class id, 'p': processing kind, 'l': lemma flag}.
items = {}
classes_dic = CONFIG['main_classes']
for tpl in classes_dic:
    items[tpl] = {
        'i': classes_dic[tpl],
        'p': 'nn',  # 'nn' presumably = resolved by the neural network
        'l': tpl in lemma_same_words
    }

for tpl in numb_classes_dic:
    cls_index = numb_classes_dic[tpl]
    items[tpl] = {
        'i': cls_index,
        'p': 'numb',  # resolved through the numeral data
        'l': cls_index in numb_data['lemma_cls_ids']
    }

# Continue the numbering after the last numeral class.
max_cls_id = max([numb_classes_dic[key] for key in numb_classes_dic])
max_cls_id += 1
post_index = GRAM_TYPES['post']['index']
for key in CONFIG['other_post_types']:
    tpl = [None for item in CONFIG['grammemes_types']]
    tpl[post_index] = key
    tpl = tuple(tpl)
    items[tpl] = {
        'i': max_cls_id,
        'p': 'reg',
        # NOTE(review): 'l' is a boolean everywhere else but holds the class
        # id (always truthy) here — confirm consumers expect that.
        'l': max_cls_id
    }
    max_cls_id += 1

# Dedicated class for tokens nothing else recognizes.
tpl = [None for item in CONFIG['grammemes_types']]
tpl[post_index] = 'unkn'
tpl = tuple(tpl)
items[tpl] = {
    'i': max_cls_id,
    'p': 'reg',
    'l': max_cls_id
}
max_cls_id += 1

with open(CONFIG['dict_words_path'], 'rb') as f:
    dic_words = pickle.load(f)

# Dictionary-only tags get fresh ids; every dictionary word records the id
# of its tag tuple under 'main'.
for dic_item in dic_words:
    cls_tpl = create_cls_tuple(dic_item)
    if cls_tpl not in items:
        items[cls_tpl] = {
            'i': max_cls_id,
            'p': 'dict',
            'l': ('npro' in cls_tpl and 'nomn' in cls_tpl) or cls_tpl in lemma_same_words
        }
        max_cls_id += 1

    dic_item['main'] = items[cls_tpl]['i']

with open(CONFIG['dict_words_path'], 'wb+') as f:
    pickle.dump(dic_words, f)

set_order(items)

with open(CONFIG['tags_path'], 'wb+') as f:
    pickle.dump(items, f)
9_build_bad_words.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
# Entry point: collect the words the trained network gets wrong
# (see Tester.build_bad for what is produced).
from tester import Tester

tester = Tester()
tester.build_bad()
README.md ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Порядок запуска
2
+ Требуется python 3.6.8 (на более ранних версиях 3.6 падает pickle при сериализации больших файлов)
3
+ - load_dictionary - загрузка словаря opencorpora во внутренний формат. Извлекает словарные слова и слова для обучения
4
+ - vectorize - векторизует слова для обучения
5
+ - cls_dataset - генерирует датасеты для задач классификации, расставляет во всех векторизованных словах главный класс
6
+ - lemma_dataset - генерирует датасет для лемматизации
7
+ - inflect_dataset - генерирует датасет для постановки слов в форму
8
+ - train - тренирует модель
9
+ - build_numbers - формирует данные по числительным
10
+ - generate_tags - генерирует объединенную нумерацию для тегов
11
+ - build_bad_words - собирает слова, в которых сеть делает ошибки
12
+ - release_dict - публикует новый словарь
13
+ - release_tests - публикует интеграционные тесты
14
+ - release_dict - публикует релизный вариант словаря
15
+ - release_model - публикует модель
best_checkpoint/-44.data-00000-of-00001 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e0b29dd603d51ab98ce3bcb1132a7a124189eb8c21b7cdc4854b884edae081c5
3
+ size 9443076
best_checkpoint/-44.index ADDED
Binary file (31 kB). View file
 
best_checkpoint/-44.meta ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9d84c8ba242dc09eb81ad1c26f9470895f26da1dbc0b80117d7d35efed5f0f55
3
+ size 36330278
best_checkpoint/checkpoint ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ model_checkpoint_path: "-44"
2
+ all_model_checkpoint_paths: "-0"
3
+ all_model_checkpoint_paths: "-1"
4
+ all_model_checkpoint_paths: "-2"
5
+ all_model_checkpoint_paths: "-3"
6
+ all_model_checkpoint_paths: "-4"
7
+ all_model_checkpoint_paths: "-5"
8
+ all_model_checkpoint_paths: "-6"
9
+ all_model_checkpoint_paths: "-7"
10
+ all_model_checkpoint_paths: "-8"
11
+ all_model_checkpoint_paths: "-9"
12
+ all_model_checkpoint_paths: "-10"
13
+ all_model_checkpoint_paths: "-11"
14
+ all_model_checkpoint_paths: "-12"
15
+ all_model_checkpoint_paths: "-13"
16
+ all_model_checkpoint_paths: "-14"
17
+ all_model_checkpoint_paths: "-15"
18
+ all_model_checkpoint_paths: "-16"
19
+ all_model_checkpoint_paths: "-17"
20
+ all_model_checkpoint_paths: "-18"
21
+ all_model_checkpoint_paths: "-19"
22
+ all_model_checkpoint_paths: "-20"
23
+ all_model_checkpoint_paths: "-21"
24
+ all_model_checkpoint_paths: "-22"
25
+ all_model_checkpoint_paths: "-23"
26
+ all_model_checkpoint_paths: "-24"
27
+ all_model_checkpoint_paths: "-25"
28
+ all_model_checkpoint_paths: "-26"
29
+ all_model_checkpoint_paths: "-27"
30
+ all_model_checkpoint_paths: "-28"
31
+ all_model_checkpoint_paths: "-29"
32
+ all_model_checkpoint_paths: "-30"
33
+ all_model_checkpoint_paths: "-31"
34
+ all_model_checkpoint_paths: "-32"
35
+ all_model_checkpoint_paths: "-33"
36
+ all_model_checkpoint_paths: "-34"
37
+ all_model_checkpoint_paths: "-35"
38
+ all_model_checkpoint_paths: "-36"
39
+ all_model_checkpoint_paths: "-37"
40
+ all_model_checkpoint_paths: "-38"
41
+ all_model_checkpoint_paths: "-39"
42
+ all_model_checkpoint_paths: "-40"
43
+ all_model_checkpoint_paths: "-41"
44
+ all_model_checkpoint_paths: "-42"
45
+ all_model_checkpoint_paths: "-43"
46
+ all_model_checkpoint_paths: "-44"
config.yml ADDED
@@ -0,0 +1,681 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ random_seed: 1917
2
+ validation_persent: 2
3
+ test_persent: 1
4
+ min_word_size: 2
5
+ max_word_size: 32
6
+ model_key: small #big
7
+ prefix_filter_length: 1
8
+
9
+
10
+ dict_path: /media/alex/hybrid/Projects/Resources/dict.opcorpora.xml
11
+ dataset_words_path: data/dataset_words.pkl
12
+ dict_words_path: data/dict_words.pkl
13
+ vect_words_path: data/vec_words.pkl
14
+ cls_classes_path: data/classification_classes.pkl
15
+ numb_classes_path: data/numb_classes.pkl
16
+ numb_data_path: data/numb.pkl
17
+ tags_path: data/tags.pkl
18
+ dics_path: data/
19
+ dataset_path: data/dataset
20
+ inflect_templates_path: data/inflect_templates.pkl
21
+ export_path: export
22
+ save_path: checkpoints/
23
+ bad_path: data/
24
+
25
+ test_results_paths:
26
+ - latest_release
27
+
28
+ publish_net_paths:
29
+ - ../../cs/DeepMorphy/NeuralNet/
30
+ - latest_release
31
+
32
+ publish_gramm_paths:
33
+ - ../../cs/DeepMorphy/
34
+ - latest_release
35
+
36
+ publish_tags_paths:
37
+ - ../../cs/DeepMorphy/
38
+ - latest_release
39
+
40
+ publish_numbers_paths:
41
+ - ../../cs/DeepMorphy/Numb
42
+ - latest_release
43
+
44
+ publish_dictionary_paths:
45
+ - ../../cs/DeepMorphy/WordDict
46
+ - latest_release
47
+
48
+ publish_dataset_info_paths:
49
+ - latest_release
50
+
51
+ public_inflect_templates_paths:
52
+ - latest_release
53
+ - ../../cs/DeepMorphy/
54
+
55
+ publish_tests_path: ../../cs/IntegrationTester/
56
+ publish_gram_doc_path: ../../../gram.md
57
+ publish_inflect_doc_path: ../../../inflect.md
58
+
59
+ train_devices:
60
+ # - /cpu:0
61
+ - /gpu:0
62
+ - /gpu:1
63
+ - /gpu:2
64
+ filler: "############################################################################"
65
+ start_token: 36
66
+ end_token: 35
67
+ main_class_k: 4
68
+
69
+ train_steps:
70
+ #- mood
71
+ #- voic
72
+ #- post
73
+ #- gndr
74
+ #- nmbr
75
+ #- case
76
+ #- tens
77
+ #- pers
78
+ #- main
79
+ #- lemm
80
+ - inflect
81
+
82
+
83
+ ignore_restore: []
84
+ #- lemm
85
+ #- inflect
86
+
87
+ graph_part_configs:
88
+ default:
89
+ #learn params
90
+ stop_main_metric_delta: 0.1
91
+ learn_rate: 0.00001
92
+ learn_rate_decay_step: 0.1
93
+ min_learn_rate: 0.00000001
94
+ return_step: 2
95
+ keep_drop: 1
96
+ use_weights: true
97
+ clip_grads: true
98
+ test_batch_size: 2048
99
+ train_batch_size: 4096
100
+
101
+ # common params
102
+ char_vector_size: 32
103
+ rnn_state_size: 32
104
+ rnn_layers_count: 2
105
+ rnn_bidirectional: True
106
+ use_residual: false
107
+ main_metric_type: Loss
108
+
109
+ #seq2seq params
110
+ max_length: 33
111
+ encoder:
112
+ gram_vector_size: 7
113
+ ad_cls_vector_size: 8
114
+ char_vector_size: 64
115
+ rnn_state_size: 64
116
+ rnn_layers_count: 2
117
+ rnn_bidirectional: true
118
+ use_residual: false
119
+ decoder:
120
+ gram_vector_size: 14
121
+ ad_cls_vector_size: 16
122
+ rnn_state_size: 128
123
+ rnn_layers_count: 1
124
+ keep_drop: 1.0
125
+ use_residual: false
126
+
127
+ pers:
128
+ test_batch_size: 256
129
+ train_batch_size: 1024
130
+ char_vector_size: 8
131
+ rnn_state_size: 8
132
+
133
+ mood:
134
+ char_vector_size: 16
135
+ rnn_state_size: 16
136
+
137
+ main:
138
+ char_vector_size: 64
139
+ rnn_state_size: 64
140
+ test_batch_size: 4096
141
+ train_batch_size: 4096
142
+
143
+ lemm:
144
+ char_vector_size: 64
145
+ batch_size: 8192
146
+ main_metric_type: Accuracy
147
+ stop_main_metric_delta: 0.00001
148
+
149
+ inflect:
150
+ transfer_init: false
151
+ char_vector_size: 64
152
+ batch_size: 8192
153
+ main_metric_type: Accuracy
154
+ stop_main_metric_delta: 0.000001
155
+
156
+ chars:
157
+ - UNDEFINED # неизвестный символ
158
+ - а
159
+ - б
160
+ - в
161
+ - г
162
+ - д
163
+ - е
164
+ - ё
165
+ - ж
166
+ - з
167
+ - и
168
+ - й
169
+ - к
170
+ - л
171
+ - м
172
+ - н
173
+ - о
174
+ - п
175
+ - р
176
+ - с
177
+ - т
178
+ - у
179
+ - ф
180
+ - х
181
+ - ч
182
+ - ц
183
+ - ш
184
+ - щ
185
+ - ъ
186
+ - ы
187
+ - ь
188
+ - э
189
+ - ю
190
+ - я
191
+ - "-"
192
+
193
+ lemma_same_word:
194
+ - post: noun
195
+ nmbr: sing
196
+ case: nomn
197
+ gndr: masc
198
+
199
+ - post: noun
200
+ nmbr: sing
201
+ case: nomn
202
+ gndr: neut
203
+
204
+ - post: noun
205
+ nmbr: sing
206
+ case: nomn
207
+ gndr: femn
208
+
209
+ - post: noun
210
+ nmbr: sing
211
+ case: nomn
212
+ gndr: msf
213
+
214
+ - post: adjf
215
+ nmbr: sing
216
+ gndr: masc
217
+ case: nomn
218
+
219
+ - post: infn
220
+
221
+ - post: advb
222
+
223
+ - post: prep
224
+
225
+ - post: conj
226
+
227
+ - post: prcl
228
+
229
+ - post: intj
230
+
231
+ - post: prep
232
+
233
+ - post: pred
234
+ tens: pres
235
+
236
+
237
+
238
+ other_post_types:
239
+ punct:
240
+ key_ru: пункт
241
+ name_ru: пунктуация
242
+ desc_ru: пунктуация
243
+
244
+ int:
245
+ key_ru: цифра
246
+ name_ru: цифра
247
+ desc_ru: цифра
248
+
249
+ romn:
250
+ key_ru: рим_цифр
251
+ name_ru: римская цифра
252
+ desc_ru: римская цифра
253
+
254
+ unkn:
255
+ key_ru: неизв
256
+ name_ru: неизвестный токен
257
+ desc_ru: неизвестный токен
258
+
259
+
260
+ dict_post_types:
261
+ npro:
262
+ key_ru: мест
263
+ power: 2
264
+ name_ru: местоимение
265
+ desc_ru: местоимение
266
+ keys:
267
+ - NPRO
268
+
269
+ prep:
270
+ key_ru: предл
271
+ power: 2
272
+ name_ru: предлог
273
+ desc_ru: предлог
274
+ keys:
275
+ - PREP
276
+
277
+ conj:
278
+ key_ru: союз
279
+ power: 2
280
+ name_ru: союз
281
+ desc_ru: союз
282
+ keys:
283
+ - CONJ
284
+
285
+ prcl:
286
+ key_ru: част
287
+ power: 0
288
+ name_ru: частица
289
+ desc_ru: частица
290
+ keys:
291
+ - PRCL
292
+
293
+ intj:
294
+ key_ru: межд
295
+ power: 0.5
296
+ name_ru: междометие
297
+ desc_ru: междометие
298
+ keys:
299
+ - INTJ
300
+
301
+ numb:
302
+ key_ru: числ
303
+ power: 2
304
+ name_ru: числительное
305
+ desc_ru: числительное
306
+ keys:
307
+ - NUMR
308
+
309
+ pred:
310
+ key_ru: предик
311
+ power: 0
312
+ name_ru: предикатив
313
+ desc_ru: предикатив
314
+ keys:
315
+ - PRED
316
+
317
+ grammemes_types:
318
+ post:
319
+ index: 0
320
+ key_ru: чр
321
+ name: часть речи
322
+ classes:
323
+ noun:
324
+ index: 0
325
+ key_ru: сущ
326
+ name_ru: существительное
327
+ desc_ru: существительное
328
+ keys:
329
+ - NOUN
330
+
331
+ adjf:
332
+ index: 1
333
+ key_ru: прил
334
+ name_ru: полное прилагательное
335
+ desc_ru: полное прилагательное
336
+ keys:
337
+ - ADJF
338
+
339
+ adjs:
340
+ index: 2
341
+ key_ru: кр_прил
342
+ name_ru: краткое прилагательное
343
+ desc_ru: краткое прилагательное
344
+ keys:
345
+ - ADJS
346
+
347
+ verb:
348
+ index: 3
349
+ key_ru: гл
350
+ name_ru: личная форма глагола
351
+ desc_ru: личная форма глагола
352
+ keys:
353
+ - VERB
354
+
355
+ infn:
356
+ index: 4
357
+ key_ru: инф_гл
358
+ name_ru: инфинитив глагола
359
+ desc_ru: инфинитив глагола
360
+ keys:
361
+ - INFN
362
+
363
+ comp:
364
+ index: 5
365
+ key_ru: комп
366
+ name_ru: компаратив
367
+ desc_ru: компаратив
368
+ keys:
369
+ - COMP
370
+
371
+ prtf:
372
+ index: 6
373
+ key_ru: прич
374
+ name_ru: причастие
375
+ desc_ru: причастие
376
+ keys:
377
+ - PRTF
378
+
379
+ prts:
380
+ index: 7
381
+ key_ru: кр_прич
382
+ name_ru: краткое причастие
383
+ desc_ru: краткое причастие
384
+ keys:
385
+ - PRTS
386
+
387
+ grnd:
388
+ index: 8
389
+ key_ru: деепр
390
+ name_ru: деепричастие
391
+ desc_ru: деепричастие
392
+ keys:
393
+ - GRND
394
+
395
+ advb:
396
+ index: 9
397
+ key_ru: нареч
398
+ name_ru: наречие
399
+ desc_ru: наречие
400
+ keys:
401
+ - ADVB
402
+
403
+ gndr:
404
+ index: 1
405
+ key_ru: род
406
+ name: род
407
+ classes:
408
+ masc:
409
+ index: 0
410
+ key_ru: муж
411
+ name_ru: мужской
412
+ desc_ru: мужской род
413
+ keys:
414
+ - masc
415
+
416
+ femn:
417
+ index: 1
418
+ key_ru: жен
419
+ name_ru: женский
420
+ desc_ru: женский род
421
+ keys:
422
+ - femn
423
+
424
+ neut:
425
+ index: 2
426
+ key_ru: ср
427
+ name_ru: средний
428
+ desc_ru: средний род
429
+ keys:
430
+ - neut
431
+
432
+ msf:
433
+ index: 3
434
+ key_ru: общ
435
+ name_ru: общий
436
+ desc_ru: общий род
437
+ keys:
438
+ - ms-f
439
+
440
+ nmbr:
441
+ index: 2
442
+ key_ru: число
443
+ name: число
444
+ classes:
445
+ sing:
446
+ index: 0
447
+ key_ru: ед
448
+ name_ru: единственное
449
+ desc_ru: единственное число
450
+ keys:
451
+ - sing
452
+
453
+ plur:
454
+ index: 1
455
+ key_ru: мн
456
+ name_ru: множественное
457
+ desc_ru: множественное число
458
+ keys:
459
+ - plur
460
+
461
+ case:
462
+ index: 3
463
+ key_ru: падеж
464
+ name: падеж
465
+ classes:
466
+ nomn:
467
+ index: 0
468
+ key_ru: им
469
+ name_ru: именительный
470
+ desc_ru: именительный падеж
471
+ keys:
472
+ - nomn
473
+
474
+ gent:
475
+ index: 1
476
+ key_ru: рд
477
+ name_ru: родительный
478
+ desc_ru: родительный падеж
479
+ keys:
480
+ - gent
481
+
482
+ datv:
483
+ index: 2
484
+ key_ru: дт
485
+ name_ru: дательный
486
+ desc_ru: дательный падеж
487
+ keys:
488
+ - datv
489
+
490
+ accs:
491
+ index: 3
492
+ key_ru: вн
493
+ name_ru: винительный
494
+ desc_ru: винительный падеж
495
+ keys:
496
+ - accs
497
+
498
+ ablt:
499
+ index: 4
500
+ key_ru: тв
501
+ name_ru: творительный
502
+ desc_ru: творительный падеж
503
+ keys:
504
+ - ablt
505
+
506
+ loct:
507
+ index: 5
508
+ key_ru: пр
509
+ name_ru: предложный
510
+ desc_ru: предложный падеж
511
+ keys:
512
+ - loct
513
+
514
+ gent1:
515
+ index: 6
516
+ key_ru: рд1
517
+ name_ru: первый родительный
518
+ desc_ru: первый родительный падеж
519
+ keys:
520
+ - gen1
521
+
522
+ gent2:
523
+ index: 7
524
+ key_ru: рд2
525
+ name_ru: второй родительный
526
+ desc_ru: второй родительный падеж
527
+ keys:
528
+ - gen2
529
+
530
+ voct:
531
+ index: 8
532
+ key_ru: зв
533
+ name_ru: звательный
534
+ desc_ru: звательный падеж
535
+ keys:
536
+ - voct
537
+
538
+ accs2:
539
+ index: 9
540
+ key_ru: вн2
541
+ name_ru: второй винительный
542
+ desc_ru: второй винительный падеж
543
+ keys:
544
+ - acc2
545
+
546
+ loct1:
547
+ index: 10
548
+ key_ru: пр1
549
+ name_ru: первый предложный
550
+ desc_ru: первый предложный падеж
551
+ keys:
552
+ - loc1
553
+
554
+ loct2:
555
+ index: 11
556
+ key_ru: пр2
557
+ name_ru: второй предложный
558
+ desc_ru: второй предложный падеж
559
+ keys:
560
+ - loc2
561
+
562
+ pers:
563
+ index: 4
564
+ key_ru: лицо
565
+ name: лицо
566
+ classes:
567
+ 1per:
568
+ index: 0
569
+ key_ru: 1л
570
+ name_ru: 1 лицо
571
+ desc_ru: 1 лицо
572
+ keys:
573
+ - 1per
574
+
575
+ 2per:
576
+ index: 1
577
+ key_ru: 2л
578
+ name_ru: 2 лицо
579
+ desc_ru: 2 лицо
580
+ keys:
581
+ - 2per
582
+
583
+ 3per:
584
+ index: 2
585
+ key_ru: 3л
586
+ name_ru: 3 лицо
587
+ desc_ru: 3 лицо
588
+ keys:
589
+ - 3per
590
+
591
+ tens:
592
+ index: 5
593
+ key_ru: время
594
+ name: время
595
+ classes:
596
+ pres:
597
+ index: 0
598
+ key_ru: наст
599
+ name_ru: настоящее
600
+ desc_ru: настоящее время
601
+ keys:
602
+ - pres
603
+
604
+ past:
605
+ index: 1
606
+ key_ru: прош
607
+ name_ru: прошедшее
608
+ desc_ru: прошедшее время
609
+ keys:
610
+ - past
611
+
612
+ futr:
613
+ index: 2
614
+ key_ru: буд
615
+ name_ru: будущее
616
+ desc_ru: будущее время
617
+ keys:
618
+ - futr
619
+
620
+ mood:
621
+ index: 6
622
+ key_ru: накл
623
+ name: наклонение
624
+ classes:
625
+ indc:
626
+ index: 0
627
+ key_ru: изъяв
628
+ name_ru: изъявительное
629
+ desc_ru: изъявительное наклонение
630
+ keys:
631
+ - indc
632
+
633
+ impr:
634
+ index: 1
635
+ key_ru: повел
636
+ name_ru: повелительное
637
+ desc_ru: повелительное наклонение
638
+ keys:
639
+ - impr
640
+
641
+ voic:
642
+ index: 7
643
+ key_ru: залог
644
+ name: залог
645
+ classes:
646
+ actv:
647
+ index: 0
648
+ key_ru: дейст
649
+ name_ru: действительный
650
+ desc_ru: действительный залог
651
+ keys:
652
+ - actv
653
+
654
+ pssv:
655
+ index: 1
656
+ key_ru: страд
657
+ name_ru: страдательный
658
+ desc_ru: страдательный залог
659
+ keys:
660
+ - pssv
661
+
662
+ ad_tags:
663
+ - incl
664
+ - excl
665
+ - abbr
666
+ - surn
667
+ - patr
668
+ - orgn
669
+ - trad
670
+ - cmp2
671
+ - geox
672
+
673
+ inflect_ignore_tags:
674
+ - incl
675
+ - cmp2
676
+
677
+ dict_ignore_tags:
678
+ - patr
679
+ - orgn
680
+ - surn
681
+
graph/base.py ADDED
@@ -0,0 +1,248 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import tensorflow as tf
3
+ import tf_utils as tfu
4
+ from tqdm import tqdm
5
+ from abc import ABC, abstractmethod
6
+ from utils import RANDOM
7
+
8
+
9
class TfContext:
    """Mutable bundle of TensorFlow session-level objects shared by graph parts.

    Holds the live session, the checkpoint saver, the learning-rate
    placeholder, and the current epoch counter (advanced by the training loop).
    """

    def __init__(self, sess, saver, learn_rate_op):
        # Epoch numbering starts at zero; GraphPartBase.train() increments it.
        self.epoch = 0
        self.sess = sess
        self.saver = saver
        self.learn_rate_op = learn_rate_op
18
+
19
+
20
class GraphPartBase(ABC):
    """Base class for one trainable part of the morphology model graph.

    Implements the generic train/validation loop with early stopping,
    learning-rate decay, optimiser reset, and best-checkpoint restoration.
    Subclasses build the TensorFlow ops (``__build_graph_for_device__``),
    load batches (``__load_dataset__``) and fill the per-device feed
    dictionary (``__update_feed_dict__``).
    """

    def __init__(self, for_usage, global_settings, current_settings, optimiser, reset_optimiser, key, metric_names):
        # for_usage=True means an inference-only graph: subclasses skip
        # gradients/placeholders that exist purely for training.
        self.key = key
        self.global_settings = global_settings
        self.filler = global_settings['filler']
        # Metric used for model selection / early stopping ('Loss' or 'Accuracy').
        self.main_metric_name = current_settings['main_metric_type']
        self.settings = current_settings
        self.for_usage = for_usage
        self.optimiser = optimiser
        self.reset_optimiser = reset_optimiser
        self.metric_names = metric_names
        self.max_word_size = global_settings['max_word_size']
        self.checkpoints_keep = 10000
        # +1 reserves an extra char id beyond the configured alphabet.
        self.chars_count = len(global_settings['chars']) + 1
        self.dataset_path = global_settings['dataset_path']
        self.grammemes_count = len(global_settings['grammemes_types'])
        self.main_classes = global_settings['main_classes']
        self.main_classes_count = len(self.main_classes)
        self.metrics_reset = []      # ops that zero the streaming metrics
        self.metrics_update = []     # ops that accumulate the streaming metrics
        # Per-device metric tensors keyed by metric name; averaged in build_graph_end.
        self.devices_metrics = {metr: [] for metr in self.metric_names}
        self.main_scope_name = key.title()
        self.save_path = global_settings['save_path']
        self.dev_grads = []          # per-device gradient lists
        self.losses = []             # per-device loss tensors
        self.devices = global_settings['train_devices']
        self.devices_count = len(self.devices)
        self.dataset_path = global_settings['dataset_path']  # NOTE(review): duplicate of the assignment above
        self.xs = []                 # per-device input char-id placeholders
        self.x_seq_lens = []         # per-device sequence-length placeholders
        self.prints = []             # optional debug print ops added by subclasses
        self.main_cls_dic = self.global_settings['main_classes']
        self.learn_rate_val = self.settings['learn_rate']
        self.best_model_metric = None
        self.best_epoch = None
        self.init_checkpoint = None

    def train(self, tc):
        """Run the full training loop.

        Args:
            tc: TfContext with the live session, saver and learning-rate op.

        Returns:
            Tuple (best_epoch, best_model_metric) of the best checkpoint found.
        """
        return_step = 0
        trains = self.__load_dataset__('train')
        valids = self.__load_dataset__('valid')
        # Baseline metric from the initial weights, before any training.
        self.best_model_metric = self.__valid_loop__(tc, valids)
        self.best_epoch = -1
        while True:
            tqdm.write(self.filler)
            tqdm.write(self.filler)
            tqdm.write(self.main_scope_name)

            train_main_metric = self.__train_loop__(tc, trains)
            valid_main_metric = self.__valid_loop__(tc, valids)

            tqdm.write(f"Epoch {tc.epoch} Train {self.main_metric_name}: {train_main_metric} Validation {self.main_metric_name}: {valid_main_metric}")
            need_decay = False
            # delta > 0 means the current epoch is no worse than the best one.
            delta = self.__calc_metric_delta__(self.best_model_metric, valid_main_metric)

            if delta > 0:
                if delta < self.settings['stop_main_metric_delta']:
                    # Improvement too small to count as progress: schedule a decay step.
                    tqdm.write(f"{self.main_metric_name} delta is less then min value")
                    need_decay = True
                else:
                    # Real improvement: record and checkpoint this epoch.
                    return_step = 0
                    self.best_model_metric = valid_main_metric
                    self.best_epoch = tc.epoch
                    tc.saver.save(tc.sess, self.save_path, tc.epoch)
                tc.epoch += 1
            else:
                # Regression: reset optimiser slots and roll back to the best weights.
                tqdm.write("Best epoch is better then current")
                tc.sess.run(self.reset_optimiser)
                need_decay = True
                self.__restore_best_epoch__(tc)

            if not need_decay:
                continue

            if return_step == self.settings['return_step']:
                # Too many non-improving rounds in a row: decay the learning rate,
                # and stop entirely once it falls below the configured minimum.
                self.__decay_params__()
                if self.learn_rate_val < self.settings['min_learn_rate']:
                    tqdm.write(f"Learning rate {self.learn_rate_val} is less then min learning rate")
                    finish = self.__before_finish__()
                    if finish:
                        break
                return_step = 0
            else:
                # Reshuffle and retry a few times before actually decaying.
                RANDOM.shuffle(trains)
                tqdm.write(f"Return step increased")
                return_step += 1

        return self.best_epoch, self.best_model_metric

    def __train_loop__(self, tc, trains):
        """One optimisation pass over the train batches; returns the main train metric."""
        tc.sess.run(self.metrics_reset)
        for item in tqdm(trains, desc=f"Train, epoch {tc.epoch}"):
            launch = [self.optimize]
            launch.extend(self.metrics_update)
            if len(self.prints):
                launch.extend(self.prints)

            feed_dic = self.__create_feed_dict__('train', item)
            feed_dic[tc.learn_rate_op] = self.learn_rate_val
            tc.sess.run(launch, feed_dic)

        train_main_metric = self.__write_metrics_report__(tc.sess, "Train")
        return train_main_metric

    def __valid_loop__(self, tc, valids):
        """Metrics-only pass over the validation batches (no optimiser step)."""
        tc.sess.run(self.metrics_reset)
        for item in tqdm(valids, desc=f"Validation, epoch {tc.epoch}"):
            launch = []
            launch.extend(self.metrics_update)
            if len(self.prints):
                launch.extend(self.prints)
            feed_dic = self.__create_feed_dict__('valid', item)
            tc.sess.run(launch, feed_dic)

        valid_main_metric = self.__write_metrics_report__(tc.sess, "Valid")
        return valid_main_metric

    def __calc_metric_delta__(self, best_model_metric, cur_model_metric):
        # For Loss, lower is better (best - cur > 0 means improvement);
        # for any other metric the sign is flipped so higher is better.
        delta = best_model_metric - cur_model_metric
        if self.main_metric_name != "Loss":
            delta = -delta
        return delta

    def __before_finish__(self):
        # Hook: subclasses may veto the stop (return False) to keep training.
        return True

    def __decay_params__(self):
        """Multiply the learning rate by the configured decay factor."""
        self.learn_rate_val = self.learn_rate_val * self.settings['learn_rate_decay_step']
        tqdm.write(f"Learning rate decayed. New value: {self.learn_rate_val}")

    def __restore_best_epoch__(self, tc):
        """Restore the weights of the best epoch seen so far (or the initial checkpoint)."""
        if self.best_epoch == -1 and tc.epoch == 0:
            # No epoch has improved yet: fall back to the pre-training checkpoint.
            # NOTE(review): the message interpolates best_epoch (-1), not the
            # checkpoint path — looks like a logging slip; verify intent.
            tqdm.write(f"Restoring from init_checkpoint {self.best_epoch}")
            self.restore(tc.sess, self.init_checkpoint)
        elif self.best_epoch == -1:
            tqdm.write(f"Restoring best epoch {tc.epoch}")
            self.restore(tc.sess, os.path.join(self.save_path, f"-{tc.epoch}"))
        else:
            tqdm.write(f"Restoring best epoch {self.best_epoch}")
            self.restore(tc.sess, os.path.join(self.save_path, f"-{self.best_epoch}"))

    def build_graph_end(self):
        """Finalise the multi-device graph: average metrics and build the optimise op."""
        with tf.variable_scope(self.main_scope_name, reuse=tf.AUTO_REUSE) as scope:
            self.metrics = {
                metr: tf.reduce_mean(self.devices_metrics[metr], name=metr)
                for metr in self.devices_metrics
            }
            if not self.for_usage:
                self.grads = tfu.average_gradients(self.dev_grads)
                if self.settings['clip_grads']:
                    self.grads = [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in self.grads]

                self.optimize = self.optimiser.apply_gradients(self.grads, name='Optimize')
                self.loss = tf.reduce_sum(self.losses, name='GlobalLoss')

    def build_graph_for_device(self, *args):
        """Wrap the subclass graph builder in this part's variable scope."""
        with tf.variable_scope(self.main_scope_name, reuse=tf.AUTO_REUSE) as scope:
            self.__build_graph_for_device__(*args)

    def restore(self, sess, check_point):
        """Restore this part's variables (Adam slots excluded) from a checkpoint.

        Failures are logged rather than raised, so a missing checkpoint
        does not abort a multi-part build.
        """
        try:
            vars = [
                var
                for var in tf.global_variables(f"{self.main_scope_name}/")
                if "Adam" not in var.name
            ]
            saver = tf.train.Saver(var_list=vars)
            saver.restore(sess, check_point)
            self.init_checkpoint = check_point
            tqdm.write(f"Restoration for graph part '{self.key}', scope {self.main_scope_name} success")
        except Exception as ex:
            tqdm.write(f"Restoration for graph part '{self.key}', scope {self.main_scope_name} failed. Error: {ex}")

    def __write_metrics_report__(self, sess, step_name):
        """Evaluate and pretty-print all metrics; returns the main metric value."""
        tqdm.write('')
        launch_results = sess.run(self.metrics)
        result = [f"{step_name} metrics: "]

        # Relies on dict insertion order matching metric_names order
        # (holds because self.metrics is built from devices_metrics,
        # which is built from metric_names).
        for index, metr in enumerate(self.metrics):
            result.append('{:>8}'.format(self.metric_names[index]))
            result.append("=")
            result.append("{0:.7f}".format(launch_results[metr]))
            result.append(" ")

        result = "".join(result)
        tqdm.write(result)
        return launch_results[self.main_metric_name]

    def create_mean_metric(self, metric_index, values):
        """Register a resettable streaming-mean metric for this device."""
        metr_epoch_loss, metr_update, metr_reset = tfu.create_reset_metric(
            tf.metrics.mean,
            self.metric_names[metric_index],
            values
        )
        self.metrics_reset.append(metr_reset)
        self.metrics_update.append(metr_update)
        self.devices_metrics[self.metric_names[metric_index]].append(metr_epoch_loss)

    def create_accuracy_metric(self, metric_index, labels, predictions):
        """Register a resettable streaming-accuracy metric for this device."""
        metr_epoch_loss, metr_update, metr_reset = tfu.create_reset_metric(
            tf.metrics.accuracy,
            self.metric_names[metric_index],
            labels=labels,
            predictions=predictions
        )
        self.metrics_reset.append(metr_reset)
        self.metrics_update.append(metr_update)
        self.devices_metrics[self.metric_names[metric_index]].append(metr_epoch_loss)

    def __create_feed_dict__(self, op_name, item):
        """Build the feed dict for one multi-device batch (one sub-batch per device)."""
        feed_dic = {}
        for dev_num, batch in enumerate(item):
            feed_dic[self.xs[dev_num]] = batch['x']
            feed_dic[self.x_seq_lens[dev_num]] = batch['x_seq_len']
            self.__update_feed_dict__(op_name, feed_dic, batch, dev_num)

        return feed_dic

    @abstractmethod
    def __update_feed_dict__(self, op_name, feed_dict, batch, dev_num):
        """Add part-specific placeholders for one device's batch."""
        pass

    @abstractmethod
    def __build_graph_for_device__(self, *args):
        """Build this part's ops for a single device (called inside its scope)."""
        pass

    @abstractmethod
    def __load_dataset__(self, operation_name):
        """Return the list of batches for 'train' / 'valid' / 'test'."""
        return []
graph/gram_cls.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import tensorflow as tf
2
+ import tf_utils as tfu
3
+ from graph.base import GraphPartBase
4
+
5
+
6
class GramCls(GraphPartBase):
    """RNN classifier for a single grammeme category (case, gender, tense, ...).

    ``key`` selects the grammeme from global_settings['grammemes_types'];
    the output layer has one logit per class of that category.
    """

    def __init__(self, key, for_usage, global_settings, current_settings, optimiser, reset_optimiser):
        super().__init__(for_usage,
                         global_settings,
                         current_settings,
                         optimiser,
                         reset_optimiser,
                         key,
                         ['Loss', 'Accuracy'])
        self.gram = key
        self.grammemes = global_settings['grammemes_types']
        self.classes = self.grammemes[key]['classes']
        self.classes_count = len(self.classes)
        self.checks = []        # numeric sanity-check ops (training only)
        self.weights = []       # per-device sample-weight placeholders
        self.keep_drops = []    # per-device dropout keep-prob placeholders
        self.dev_grads = []
        self.probs = []
        self.results = []
        self.ys = []
        self.losses = []

    def __build_graph_for_device__(self, x, seq_len):
        """Build the per-device classification graph: embed -> RNN -> softmax top."""
        self.xs.append(x)
        self.x_seq_lens.append(seq_len)

        if self.for_usage:
            # Inference graph: dropout permanently disabled.
            keep_drop = tf.constant(1, dtype=tf.float32, name='KeepDrop')
        else:
            keep_drop = tf.placeholder(dtype=tf.float32, name='KeepDrop')

        self.keep_drops.append(keep_drop)

        # y is a one-hot (or multi-hot) class matrix, cast to float for the loss.
        y = tf.placeholder(dtype=tf.int32,
                           shape=(None, self.classes_count),
                           name='Y')
        weights = tf.placeholder(dtype=tf.float32, shape=(None,), name='Weight')

        x_emd_init = tf.random_normal((self.chars_count, self.settings['char_vector_size']))
        x_emb = tf.get_variable("Embeddings", initializer=x_emd_init)

        rnn_input = tf.nn.embedding_lookup(x_emb, x)
        rnn_logits = tfu.build_rnn(rnn_input,
                                   keep_drop,
                                   seq_len,
                                   self.settings,
                                   for_usage=self.for_usage)

        if not self.for_usage:
            self.checks.append(tf.check_numerics(rnn_logits, "RnnLogitsNullCheck"))

        logits = tfu.rnn_top('RnnTop',
                             rnn_logits,
                             self.settings,
                             self.classes_count)

        if not self.for_usage:
            self.checks.append(tf.check_numerics(logits, "LogitsNullCheck"))

        # Per-class sigmoid cross-entropy, summed over classes and weighted per sample.
        float_y = tf.cast(y, tf.float32)
        errors = tf.nn.sigmoid_cross_entropy_with_logits(logits=logits,
                                                         labels=float_y)
        errors = tf.reduce_sum(errors, axis=1)
        errors = errors * weights

        if not self.for_usage:
            self.checks.append(tf.check_numerics(errors, "ErrorNullCheck"))

        probs = tf.nn.softmax(logits)
        result = tf.argmax(probs, axis=1, name="Results")
        loss = tf.reduce_sum(errors)

        if not self.for_usage:
            # NOTE(review): named "LossNullCheck" but checks `errors`, not `loss` — verify.
            self.checks.append(tf.check_numerics(errors, "LossNullCheck"))

        grads = self.optimiser.compute_gradients(loss)
        self.losses.append(loss)
        self.probs.append(probs)
        self.results.append(result)
        self.ys.append(y)
        self.weights.append(weights)
        self.dev_grads.append(grads)

        # metrics
        self.create_mean_metric(0, loss)
        labels = tf.math.argmax(y, axis=1)
        predictions = tf.math.argmax(probs, axis=1)
        self.create_accuracy_metric(1, labels, predictions)

    def __update_feed_dict__(self, op_name, feed_dict, batch, dev_num):
        # Dropout is disabled during test runs.
        feed_dict[self.keep_drops[dev_num]] = 1 if op_name == 'test' else self.settings['keep_drop']
        feed_dict[self.ys[dev_num]] = batch['y']
        feed_dict[self.weights[dev_num]] = batch['weight']

    def __load_dataset__(self, operation_name):
        return list(
            tfu.load_cls_dataset(
                self.dataset_path,
                self.devices_count,
                operation_name,
                self.settings['train_batch_size'],
                self.settings['use_weights'],
                self.gram
            )
        )
111
+
112
+
graph/inflect.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import tensorflow as tf
2
+ import tf_utils as tfu
3
+ from graph.base import GraphPartBase
4
+
5
+
6
class Inflect(GraphPartBase):
    """Seq2seq graph part that inflects a word from one grammatical form to another.

    Unlike Lemm, the encoder is conditioned on the source form class (x_cls)
    and the decoder on the separate target form class (y_cls).
    """

    def __init__(self, for_usage, global_settings, current_settings, optimiser, reset_optimiser):
        super().__init__(for_usage, global_settings, current_settings, optimiser, reset_optimiser, 'inflect', ["Loss", "AccuracyByChar", "Accuracy"])
        # +1 adds the start/end token id on top of the char alphabet.
        self.chars_count = self.chars_count + 1
        self.start_char_index = global_settings['start_token']
        self.end_char_index = global_settings['end_token']
        self.results = []
        self.x_cls = []               # per-device source-form class placeholders
        self.ys = []
        self.y_seq_lens = []
        self.y_cls = []               # per-device target-form class placeholders
        self.keep_drops = []
        self.decoder_keep_drops = []

    def __build_graph_for_device__(self, x, x_seq_len, batch_size, cls=None):
        """Create placeholders for one device and delegate to tfu.seq2seq."""
        self.xs.append(x)
        self.x_seq_lens.append(x_seq_len)

        x_cls = tf.placeholder(dtype=tf.int32, shape=(None,), name='XClass')
        self.x_cls.append(x_cls)

        if batch_size is None:
            batch_size = self.settings['batch_size']

        y = tf.placeholder(dtype=tf.int32, shape=(None, None), name='Y')
        self.ys.append(y)

        y_seq_len = tf.placeholder(dtype=tf.int32, shape=(None,), name='YSeqLen')
        self.y_seq_lens.append(y_seq_len)

        y_cls = tf.placeholder(dtype=tf.int32, shape=(None,), name='YClass')
        self.y_cls.append(y_cls)

        # seq2seq populates this part's losses/results/keep_drops in place.
        tfu.seq2seq(self,
                    batch_size,
                    x,
                    y_cls,
                    x_seq_len,
                    y,
                    x_cls,
                    y_seq_len)

    def __update_feed_dict__(self, op_name, feed_dict, batch, dev_num):
        feed_dict[self.x_cls[dev_num]] = batch['x_cls']
        feed_dict[self.y_cls[dev_num]] = batch['y_cls']
        feed_dict[self.ys[dev_num]] = batch['y']
        feed_dict[self.y_seq_lens[dev_num]] = batch['y_seq_len']
        feed_dict[self.keep_drops[dev_num]] = self.settings['keep_drop']
        feed_dict[self.decoder_keep_drops[dev_num]] = self.settings['decoder']['keep_drop']

    def __load_dataset__(self, operation_name):
        items = list(tfu.load_inflect_dataset(
            self.dataset_path,
            self.devices_count,
            operation_name,
            self.settings['batch_size']
        ))
        return items

    def transfer_learning_init(self, sess):
        """Initialise this part's weights by copying them from the trained Lemm scope.

        Variable names are matched by stripping each scope prefix; Adam
        optimiser slots are excluded from the copy.
        """
        my_prefix = f"{self.main_scope_name}/"
        vars = {
            var.name[len(my_prefix):]: var
            for var in tf.global_variables(my_prefix)
            if "Adam" not in var.name
        }

        lem_prefix = f"Lemm/"
        lem_vars = {
            var.name[len(lem_prefix):]: var
            for var in tf.global_variables(lem_prefix)
            if "Adam" not in var.name
        }

        for key in vars:
            my_var = vars[key]
            lem_var = lem_vars[key]
            value = sess.run(lem_var)
            sess.run(my_var.assign(value))
        # NOTE(review): stray debug print — emits an empty line; likely leftover.
        print()
graph/lemm.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import tensorflow as tf
3
+ import tf_utils as tfu
4
+ from tqdm import tqdm
5
+ from graph.base import GraphPartBase
6
+
7
+
8
class Lemm(GraphPartBase):
    """Seq2seq graph part that lemmatises a word (maps any form to its lemma).

    Both encoder and decoder are conditioned on the same source class
    (x_cls is passed twice to tfu.seq2seq), unlike Inflect which uses a
    separate target class.
    """

    def __init__(self, for_usage, global_settings, current_settings, optimiser, reset_optimiser):
        super().__init__(for_usage, global_settings, current_settings, optimiser, reset_optimiser, 'lemm', ["Loss", "AccuracyByChar", "Accuracy"])
        # +1 adds the start/end token id on top of the char alphabet.
        self.chars_count = self.chars_count + 1
        self.start_char_index = global_settings['start_token']
        self.end_char_index = global_settings['end_token']
        self.results = []
        self.ys = []
        self.y_seq_lens = []
        self.cls = []                 # per-device word-class placeholders
        self.keep_drops = []
        self.decoder_keep_drops = []

    def __build_graph_for_device__(self, x, x_seq_len, batch_size, x_cls=None):
        """Create placeholders for one device and delegate to tfu.seq2seq.

        When x_cls is supplied by the caller (usage graph) no placeholder
        is created for it.
        """
        self.xs.append(x)
        self.x_seq_lens.append(x_seq_len)

        if x_cls is None:
            x_cls = tf.placeholder(dtype=tf.int32, shape=(None,), name='XClass')
        self.cls.append(x_cls)

        if batch_size is None:
            batch_size = self.settings['batch_size']

        y = tf.placeholder(dtype=tf.int32, shape=(None, None), name='Y')
        self.ys.append(y)

        y_seq_len = tf.placeholder(dtype=tf.int32, shape=(None,), name='YSeqLen')
        self.y_seq_lens.append(y_seq_len)

        # seq2seq populates this part's losses/results/keep_drops in place.
        tfu.seq2seq(self,
                    batch_size,
                    x,
                    x_cls,
                    x_seq_len,
                    y,
                    x_cls,
                    y_seq_len)

    def __update_feed_dict__(self, op_name, feed_dict, batch, dev_num):
        feed_dict[self.cls[dev_num]] = batch['x_cls']
        feed_dict[self.ys[dev_num]] = batch['y']
        feed_dict[self.y_seq_lens[dev_num]] = batch['y_seq_len']
        feed_dict[self.keep_drops[dev_num]] = self.settings['keep_drop']
        feed_dict[self.decoder_keep_drops[dev_num]] = self.settings['decoder']['keep_drop']

    def __load_dataset__(self, operation_name):
        items = list(tfu.load_lemma_dataset(
            self.dataset_path,
            self.devices_count,
            operation_name,
            self.settings['batch_size']
        ))
        return items
graph/main_cls.py ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import tensorflow as tf
2
+ import tf_utils as tfu
3
+ from graph.base import GraphPartBase
4
+
5
+
6
class MainCls(GraphPartBase):
    """Top-level classifier over the combined 'main' morphological classes.

    Its RNN initial state is derived from the concatenated probability
    outputs of the per-grammeme classifiers, projected to the RNN state size.
    Results are the top-k most probable classes.
    """

    def __init__(self, for_usage, global_settings, current_settings, optimiser, reset_optimiser):
        super().__init__(for_usage, global_settings, current_settings, optimiser, reset_optimiser, 'main', ['Loss', 'Accuracy'])
        self.checks = []
        self.weights = []
        self.keep_drops = []
        self.losses = []
        self.probs = []
        self.results = []
        self.ys = []
        self.drops = []               # per-device gram-classifier dropout placeholders
        self.top_k = global_settings['main_class_k']

    def __build_graph_for_device__(self, x, seq_len, gram_probs, gram_drop):
        """Build the per-device graph conditioned on the grammeme classifiers' outputs."""
        self.xs.append(x)
        self.x_seq_lens.append(seq_len)
        self.drops.append(gram_drop)

        y = tf.placeholder(dtype=tf.int32, shape=(None, self.main_classes_count), name='Y')
        weights = tf.placeholder(dtype=tf.float32, shape=(None,), name='Weight')
        x_emd_init = tf.random_normal((self.chars_count, self.settings['char_vector_size']))
        x_emb = tf.get_variable("Embeddings", initializer=x_emd_init)
        rnn_input = tf.nn.embedding_lookup(x_emb, x)

        if self.for_usage:
            # Inference graph: dropout permanently disabled.
            cls_keep_drop = tf.constant(1, dtype=tf.float32, name='KeepDrop')
        else:
            cls_keep_drop = tf.placeholder(dtype=tf.float32, name='KeepDrop')
        self.keep_drops.append(cls_keep_drop)

        # Project concatenated grammeme probabilities to the RNN state size
        # and use the result as the RNN's initial state.
        init_state = tf.concat(gram_probs, 1)
        with tf.variable_scope("InitRnnState", reuse=tf.AUTO_REUSE) as scope:
            rez_size = self.settings['rnn_state_size']
            w_softmax = tf.get_variable("W", (init_state.shape[1], rez_size))
            b_softmax = tf.get_variable("b", [rez_size])
            init_state = tf.matmul(init_state, w_softmax) + b_softmax

        rnn_logits = tfu.build_rnn(rnn_input,
                                   cls_keep_drop,
                                   seq_len,
                                   self.settings,
                                   init_state,
                                   init_state,
                                   self.for_usage)

        logits = tfu.rnn_top('RnnTop',
                             rnn_logits,
                             self.settings,
                             self.main_classes_count)

        # Per-class sigmoid cross-entropy, summed over classes and weighted per sample.
        float_y = tf.cast(y, tf.float32)
        errors = tf.nn.sigmoid_cross_entropy_with_logits(logits=logits,
                                                         labels=float_y)
        errors = tf.reduce_sum(errors, axis=1)
        errors = errors * weights
        if not self.for_usage:
            self.checks.append(tf.check_numerics(errors, "ErrorNullCheck"))

        probs = tf.nn.softmax(logits)
        result = tf.math.top_k(probs, self.top_k, name="Results")
        loss = tf.reduce_sum(errors)

        if not self.for_usage:
            # NOTE(review): named "LossNullCheck" but checks `errors`, not `loss` — verify.
            self.checks.append(tf.check_numerics(errors, "LossNullCheck"))

        # Only train this part's own variables; the grammeme classifiers are frozen here.
        vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=self.main_scope_name)
        grads = self.optimiser.compute_gradients(loss, var_list=vars)

        self.losses.append(loss)
        self.probs.append(probs)
        self.results.append(result)
        self.ys.append(y)
        self.dev_grads.append(grads)
        self.weights.append(weights)

        # metrics
        self.create_mean_metric(0, loss)
        labels = tf.math.argmax(y, axis=1)
        predictions = tf.math.argmax(probs, axis=1)
        self.create_accuracy_metric(1, labels, predictions)

    def __update_feed_dict__(self, op_name, feed_dict, batch, dev_num):
        # Upstream grammeme-classifier dropout is always disabled here.
        for gram_drop in self.drops[dev_num]:
            feed_dict[gram_drop] = 1
        feed_dict[self.keep_drops[dev_num]] = 1 if op_name == 'test' else self.settings['keep_drop']
        feed_dict[self.ys[dev_num]] = batch['y']
        feed_dict[self.weights[dev_num]] = batch['weight']

    def __load_dataset__(self, operation_name):
        return list(
            tfu.load_cls_dataset(
                self.dataset_path,
                self.devices_count,
                operation_name,
                self.settings['train_batch_size'],
                self.settings['use_weights']
            )
        )
latest_release/dataset_info.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ dictionary
2
+ version=0.92
3
+ revision=412889
latest_release/dict.txt.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1decc4fb2b25659511d1700be8d7c84358ee15c638ffb7c4c0bf00ba832a9d90
3
+ size 8530
latest_release/dict_correction.txt.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e730df5fcbcdc077eb17cf78bad394944ac01f3b36c90b5952cd7da75512c03d
3
+ size 1015441
latest_release/dict_correction_index.txt.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c490b6acc83ef744164af6fc297110e5ed3986e0921099bc4f862a7e2ba29de0
3
+ size 1662293
latest_release/dict_index.txt.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:851335c946f2d2606942a6a7a2b75d40d10138acf3a369925a1272aa1ebecb4f
3
+ size 7699
latest_release/frozen_model_small.pb ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:32ee280767da16f99400f7c401b434fcd20613abc140a2fe9a88c44c4e5fa8a5
3
+ size 4423913
latest_release/grams.xml ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ <?xml version='1.0' encoding='utf-8'?>
2
+ <Grams><G index="0" key_en="post" key_ru="чр"><C key_en="noun" key_ru="сущ" nn_index="0" /><C key_en="adjf" key_ru="прил" nn_index="1" /><C key_en="adjs" key_ru="кр_прил" nn_index="2" /><C key_en="verb" key_ru="гл" nn_index="3" /><C key_en="infn" key_ru="инф_гл" nn_index="4" /><C key_en="comp" key_ru="комп" nn_index="5" /><C key_en="prtf" key_ru="прич" nn_index="6" /><C key_en="prts" key_ru="кр_прич" nn_index="7" /><C key_en="grnd" key_ru="деепр" nn_index="8" /><C key_en="advb" key_ru="нареч" nn_index="9" /><C key_en="npro" key_ru="мест" /><C key_en="prep" key_ru="предл" /><C key_en="conj" key_ru="союз" /><C key_en="prcl" key_ru="част" /><C key_en="intj" key_ru="межд" /><C key_en="numb" key_ru="числ" /><C key_en="pred" key_ru="предик" /><C key_en="punct" key_ru="пункт" /><C key_en="int" key_ru="цифра" /><C key_en="romn" key_ru="рим_цифр" /><C key_en="unkn" key_ru="неизв" /></G><G index="1" key_en="gndr" key_ru="род"><C key_en="masc" key_ru="муж" nn_index="0" /><C key_en="femn" key_ru="жен" nn_index="1" /><C key_en="neut" key_ru="ср" nn_index="2" /><C key_en="msf" key_ru="общ" nn_index="3" /></G><G index="2" key_en="nmbr" key_ru="число"><C key_en="sing" key_ru="ед" nn_index="0" /><C key_en="plur" key_ru="мн" nn_index="1" /></G><G index="3" key_en="case" key_ru="падеж"><C key_en="nomn" key_ru="им" nn_index="0" /><C key_en="gent" key_ru="рд" nn_index="1" /><C key_en="datv" key_ru="дт" nn_index="2" /><C key_en="accs" key_ru="вн" nn_index="3" /><C key_en="ablt" key_ru="тв" nn_index="4" /><C key_en="loct" key_ru="пр" nn_index="5" /><C key_en="gent1" key_ru="рд1" nn_index="6" /><C key_en="gent2" key_ru="рд2" nn_index="7" /><C key_en="voct" key_ru="зв" nn_index="8" /><C key_en="accs2" key_ru="вн2" nn_index="9" /><C key_en="loct1" key_ru="пр1" nn_index="10" /><C key_en="loct2" key_ru="пр2" nn_index="11" /></G><G index="4" key_en="pers" key_ru="лицо"><C key_en="1per" key_ru="1л" nn_index="0" /><C key_en="2per" key_ru="2л" nn_index="1" /><C key_en="3per" key_ru="3л" 
nn_index="2" /></G><G index="5" key_en="tens" key_ru="время"><C key_en="pres" key_ru="наст" nn_index="0" /><C key_en="past" key_ru="прош" nn_index="1" /><C key_en="futr" key_ru="буд" nn_index="2" /></G><G index="6" key_en="mood" key_ru="накл"><C key_en="indc" key_ru="изъяв" nn_index="0" /><C key_en="impr" key_ru="повел" nn_index="1" /></G><G index="7" key_en="voic" key_ru="залог"><C key_en="actv" key_ru="дейст" nn_index="0" /><C key_en="pssv" key_ru="страд" nn_index="1" /></G></Grams>
latest_release/numbers.xml ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ <?xml version='1.0' encoding='utf-8'?>
2
+ <NumbData l="261,230,231,232,279" reg="^((?&lt;_1000000000&gt;миллиард|миллиардах|миллиардная|миллиардною|миллиардном|миллиарда|миллиардами|миллиардной|миллиардов|миллиарду|миллиардное|миллиардные|миллиардом|миллиардный|миллиардам|миллиарде|миллиардную|миллиарды|миллиардных|миллиардными|миллиардного|миллиардному|миллиардным)|(?&lt;_1000000&gt;миллиона|миллионную|миллионные|миллионного|миллионными|миллион|миллионам|миллионных|миллионами|миллионном|миллионный|миллиону|миллионе|миллионной|миллионному|миллионом|миллионным|миллионное|миллионною|миллионах|миллионная|миллионов)|(?&lt;_1000&gt;тысячные|тысяч|тысячными|тысячном|тысячью|тысячных|тысячи|тысячу|тысячная|тысячного|тысячную|тысячами|тысяче|тысячей|тысячным|тысячный|тысячной|тысячам|тысячною|тысячах|тысячному|тысяча|тысячное)|(?&lt;_900&gt;девятисотая|девятисотого|девятисотом|девятисот|девятисотых|девятисотыми|девятистах|девятисотые|девятисотому|девятисотым|девятисотое|девятьюстами|девятисотую|девятисотой|девятистам|девятьсот|девятисотый)|(?&lt;_800&gt;восьмисотым|восьмистах|восьмисотая|восьмисотых|восьмисотое|восьмисотую|восьмисотые|восьмистам|восьмисотому|восьмисотой|восьмисот|восьмисотый|восемьсот|восемьюстами|восьмисотыми|восьмисотом|восьмисотого)|(?&lt;_700&gt;семисотого|семисотое|семисотая|семисотому|семисотой|семисотые|семисот|семистах|семисотую|семисотым|семьюстами|семисотыми|семисотый|семисотом|семистам|семисотых|семьсот)|(?&lt;_600&gt;шестисотые|шестисотой|шестьсот|шестистах|шестисотое|шестисотая|шестисотый|шестисотых|шестисотую|шестисотому|шестисот|шестистам|шестьюстами|шестисотого|шестисотым|шестисотом|шестисотыми)|(?&lt;_500&gt;пятьюстами|пятисотого|пятисотый|пятьсот|пятисотом|пятистах|пятисотой|пятисотое|пятисотая|пятисотые|пятисотых|пятисотыми|пятистам|пятисотому|пятисотым|пятисотую|пятисот)|(?&lt;_400&gt;четырьмястами|четыремстам|четырёхсотый|четырёхсотая|четырёхсотой|четырёхсотых|четырёхсотом|четырёхсотым|четыреста|четырёхсотого|четырехсот|четырёхсотому|четырёхсотое|четырёхсотыми|четырехстах|чет
ырёхсотую|четырёхсотые)|(?&lt;_300&gt;тремястами|триста|трёхсотую|трёхсотые|трёхсотой|трехсот|тремстам|трёхсотый|трехстах|трёхсотому|трёхсотого|трёхсотом|трёхсотая|трёхсотым|трёхсотыми|трёхсотое|трёхсотых)|(?&lt;_200&gt;двухсотым|двухстах|двухсотое|двумстам|двухсотой|двухсотыми|двухсотого|двухсотый|двухсотом|двухсотую|двухсотая|двухсотые|двухсот|двухсотому|двухсотых|двести|двумястами)|(?&lt;_100&gt;сотням|сотней|сотый|сотнею|сотом|сотнями|сот|сотых|сотни|сотая|сотого|сотому|сотые|сотню|сотыми|сотым|сотнях|сотую|ста|сто|сотня|сотен|сотое|сотой|сотне)|(?&lt;_90&gt;девяностом|девяностое|девяностого|девяностый|девяностым|девяностые|девяностой|девяностую|девяностыми|девяностых|девяностая|девяностому)|(?&lt;_80&gt;восьмидесятую|восьмидесятое|восьмидесятая|восьмидесятых|восьмидесятом|восьмидесяти|восьмидесятому|восьмидесятой|восемьюдесятью|восьмидесятого|восьмьюдесятью|восемьдесят|восьмидесятый|восьмидесятым|восьмидесятыми|восьмидесятые)|(?&lt;_70&gt;семидесятые|семидесятого|семидесятый|семидесятой|семидесятым|семидесяти|семидесятыми|семидесятом|семидесятых|семидесятую|семидесятое|семидесятому|семьдесят|семьюдесятью|семидесятая)|(?&lt;_60&gt;шестидесятое|шестидесятого|шестидесятый|шестидесятых|шестидесятом|шестидесяти|шестидесятому|шестидесятую|шестидесятые|шестидесятыми|шестидесятой|шестидесятая|шестьдесят|шестьюдесятью|шестидесятым)|(?&lt;_50&gt;пятидесятом|пятидесятые|пятидесяти|пятьдесят|пятидесятого|пятидесятая|пятидесятыми|пятидесятых|пятидесятым|полсотни|пятьюдесятью|пятидесятую|пятидесятое|пятидесятой|пятидесятому|пятидесятый)|(?&lt;_40&gt;сороковым|сороковому|сороковые|сорок|сорока|сорокового|сороковых|сороковое|сороковом|сороковую|сороковой|сороковая|сороковыми)|(?&lt;_30&gt;тридцатой|тридцатый|тридцатых|тридцати|тридцатому|тридцатую|тридцать|тридцатью|тридцатыми|тридцатое|тридцатые|тридцатая|тридцатом|тридцатым|тридцатого)|(?&lt;_20&gt;двадцатое|двадцатый|двадцатого|двадцатых|двадцать|двадцатому|двадцатым|двадцатую|двадцатью|двадцатыми|двадцатые|двадцати|двадцат
ой|двадцатая|двадцатом)|(?&lt;_19&gt;девятнадцатой|девятнадцатым|девятнадцатый|девятнадцатые|девятнадцатому|девятнадцатью|девятнадцатого|девятнадцатых|девятнадцатыми|девятнадцати|девятнадцать|девятнадцатую|девятнадцатом|девятнадцатая|девятнадцатое)|(?&lt;_18&gt;восемнадцатом|восемнадцатый|восемнадцатую|восемнадцатому|восемнадцатая|восемнадцать|восемнадцатого|восемнадцатые|восемнадцати|восемнадцатыми|восемнадцатым|восемнадцатое|восемнадцатых|восемнадцатью|восемнадцатой)|(?&lt;_17&gt;семнадцатые|семнадцатому|семнадцатый|семнадцатой|семнадцатом|семнадцатая|семнадцатым|семнадцатое|семнадцатью|семнадцатыми|семнадцати|семнадцатую|семнадцатых|семнадцать|семнадцатого)|(?&lt;_16&gt;шестнадцатую|шестнадцатыми|шестнадцатые|шестнадцать|шестнадцатом|шестнадцатым|шестнадцатое|шестнадцатью|шестнадцатого|шестнадцатый|шестнадцатых|шестнадцати|шестнадцатой|шестнадцатая|шестнадцатому)|(?&lt;_15&gt;пятнадцати|пятнадцатом|пятнадцатого|пятнадцатью|пятнадцатое|пятнадцатую|пятнадцатым|пятнадцатый|пятнадцатая|пятнадцатых|пятнадцатые|пятнадцатой|пятнадцать|пятнадцатыми|пятнадцатому)|(?&lt;_14&gt;четырнадцатью|четырнадцатый|четырнадцатые|четырнадцатой|четырнадцатого|четырнадцатых|четырнадцатыми|четырнадцатом|четырнадцатому|четырнадцатую|четырнадцати|четырнадцать|четырнадцатое|четырнадцатая|четырнадцатым)|(?&lt;_13&gt;тринадцатого|тринадцатое|тринадцатому|тринадцатым|тринадцатью|тринадцатых|тринадцатыми|тринадцатые|тринадцатую|тринадцать|тринадцатой|тринадцати|тринадцатая|тринадцатый|тринадцатом)|(?&lt;_12&gt;дюжину|двенадцатому|двенадцати|дюжин|дюжины|двенадцатым|дюжинах|двенадцатое|двенадцатые|дюжинами|дюжине|двенадцатых|двенадцатыми|двенадцатом|двенадцатую|дюжиной|двенадцатью|двенадцатого|дюжина|двенадцатая|двенадцатой|дюжиною|двенадцать|дюжинам|двенадцатый)|(?&lt;_11&gt;одиннадцати|одиннадцатому|одиннадцатом|одиннадцатью|одиннадцатых|одиннадцатая|одиннадцатые|одиннадцатый|одиннадцатыми|одиннадцатым|одиннадцатой|одиннадцать|одиннадцатую|одиннадцатого|одиннадцатое)|(?&lt;_10&gt;десятый|десят
ая|десятыми|десяти|десятое|десятым|десятью|десятому|десять|десятого|десятую|десятые|десятых|десятой|десятом)|(?&lt;_9&gt;девятое|девять|девятого|девятая|девятому|девятую|девятый|девятом|девятых|девяти|девятью|девятым|девятые|девятой|девятыми)|(?&lt;_8&gt;восьмыми|восьмую|восьмью|восьмой|восьмом|восьми|восьмое|восьмому|восемь|восемью|восьмых|восьмого|восьмые|восьмая|восьмым)|(?&lt;_7&gt;седьмом|седьмая|седьмое|седьмую|семь|седьмых|седьмые|седьмой|седьмым|седьмыми|семью|семи|седьмому|седьмого)|(?&lt;_6&gt;шестых|шестое|шестые|шестую|шестью|шестого|шестым|шестом|шести|шестому|шестой|шестыми|шесть|шестая)|(?&lt;_5&gt;пятых|пятой|пяти|пятую|пятью|пятое|пятая|пятого|пятым|пятому|пятыми|пять|пятом|пятый|пятые)|(?&lt;_4&gt;четвёртом|четвёртого|четвёртые|четвёртым|четвёртая|четырех|четвёртый|четвёртую|четвёртому|четвёртой|четыре|четырем|четвёртое|четырьмя|четвёртых|четвёртыми)|(?&lt;_3&gt;третьего|третьему|третьим|третьих|третье|третий|третьем|третьей|третью|три|трем|трех|третьи|третьею|тремя|третьими|третья)|(?&lt;_2&gt;двух|вторыми|вторых|вторую|двумя|второму|две|двум|вторая|второй|два|вторым|второе|вторые|второго|втором)|(?&lt;_1&gt;одна|одною|первыми|первая|первому|одну|первом|первым|одному|первого|одно|одним|одни|первых|одном|первое|одного|одной|одних|первой|одними|первый|первые|первую|один)|(?&lt;_0&gt;нулём|нулевой|нулевую|нолями|нулевыми|нулевого|нулю|нулевое|нулевая|нолям|нолю|нули|нулям|нулевых|ноле|ноль|нулевым|нолём|нолей|нулями|нулевом|нуле|нуль|нулевые|нуля|нулей|ноля|нолях|ноли|нулевому|нулях|нулевою))+$"><N v="0"><W i="230" k="p" t="нулевой" /><W i="231" k="p" t="нулевая" /><W i="232" k="p" t="нулевое" /><W i="233" k="p" t="нулевые" /><W i="234" k="p" t="нулевого" /><W i="235" k="p" t="нулевой" /><W i="236" k="p" t="нулевого" /><W i="237" k="p" t="нулевых" /><W i="238" k="p" t="нулевому" /><W i="239" k="p" t="нулевой" /><W i="240" k="p" t="нулевому" /><W i="241" k="p" t="нулевым" /><W i="242" k="p" t="нулевого" /><W i="242" k="p" t="нулевой" /><W i="243" 
k="p" t="нулевую" /><W i="244" k="p" t="нулевое" /><W i="245" k="p" t="нулевых" /><W i="245" k="p" t="нулевые" /><W i="246" k="p" t="нулевым" /><W i="247" k="p" t="нулевой" /><W i="247" k="p" t="нулевою" /><W i="248" k="p" t="нулевым" /><W i="249" k="p" t="нулевыми" /><W i="250" k="p" t="нулевом" /><W i="251" k="p" t="нулевой" /><W i="252" k="p" t="нулевом" /><W i="253" k="p" t="нулевых" /><W i="230" k="o" t="ноль" /><W i="254" k="o" t="ноли" /><W i="234" k="o" t="ноля" /><W i="255" k="o" t="нолей" /><W i="238" k="o" t="нолю" /><W i="256" k="o" t="нолям" /><W i="242" k="o" t="ноль" /><W i="257" k="o" t="ноли" /><W i="246" k="o" t="нолём" /><W i="258" k="o" t="нолями" /><W i="250" k="o" t="ноле" /><W i="259" k="o" t="нолях" /><W i="230" k="o1" t="нуль" /><W i="254" k="o1" t="нули" /><W i="234" k="o1" t="нул��" /><W i="255" k="o1" t="нулей" /><W i="238" k="o1" t="нулю" /><W i="256" k="o1" t="нулям" /><W i="242" k="o1" t="нуль" /><W i="257" k="o1" t="нули" /><W i="246" k="o1" t="нулём" /><W i="258" k="o1" t="нулями" /><W i="250" k="o1" t="нуле" /><W i="259" k="o1" t="нулях" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="х" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="1"><W i="230" k="p" t="первый" /><W i="231" k="p" t="первая" /><W i="232" k="p" t="первое" /><W i="233" k="p" t="первые" /><W i="234" k="p" t="первого" /><W i="235" k="p" t="первой" /><W i="236" k="p" t="первого" /><W i="237" k="p" t="первых" /><W i="238" k="p" t="первому" /><W i="239" k="p" t="первой" /><W i="240" k="p" t="первому" /><W i="241" k="p" t="первым" /><W i="242" k="p" t="первого" /><W i="242" k="p" t="первый" /><W 
i="243" k="p" t="первую" /><W i="244" k="p" t="первое" /><W i="245" k="p" t="первые" /><W i="245" k="p" t="первых" /><W i="246" k="p" t="первым" /><W i="247" k="p" t="первой" /><W i="248" k="p" t="первым" /><W i="249" k="p" t="первыми" /><W i="250" k="p" t="первом" /><W i="251" k="p" t="первой" /><W i="252" k="p" t="первом" /><W i="253" k="p" t="первых" /><W i="230" k="o" t="один" /><W i="231" k="o" t="одна" /><W i="232" k="o" t="одно" /><W i="233" k="o" t="одни" /><W i="234" k="o" t="одного" /><W i="235" k="o" t="одной" /><W i="236" k="o" t="одного" /><W i="237" k="o" t="одних" /><W i="238" k="o" t="одному" /><W i="239" k="o" t="одной" /><W i="240" k="o" t="одному" /><W i="241" k="o" t="одним" /><W i="243" k="o" t="одного" /><W i="242" k="o" t="один" /><W i="243" k="o" t="одну" /><W i="244" k="o" t="одно" /><W i="245" k="o" t="одни" /><W i="245" k="o" t="одних" /><W i="246" k="o" t="одним" /><W i="247" k="o" t="одной" /><W i="248" k="o" t="одним" /><W i="249" k="o" t="одними" /><W i="250" k="o" t="одном" /><W i="251" k="o" t="одной" /><W i="252" k="o" t="одном" /><W i="253" k="o" t="одних" /><W i="260" k="o" t="один" /><W i="261" k="o" t="одни" /><W i="260" k="o" t="одна" /><W i="262" k="o" t="одно" /><W i="263" k="o" t="одних" /><W i="264" k="o" t="одного" /><W i="264" k="o" t="одной" /><W i="265" k="o" t="одного" /><W i="266" k="o" t="одним" /><W i="267" k="o" t="одному" /><W i="267" k="o" t="одной" /><W i="268" k="o" t="одному" /><W i="269" k="o" t="одни" /><W i="269" k="o" t="одних" /><W i="270" k="o" t="один" /><W i="270" k="o" t="одного" /><W i="271" k="o" t="одну" /><W i="272" k="o" t="одно" /><W i="273" k="o" t="одними" /><W i="274" k="o" t="одним" /><W i="275" k="o" t="одной" /><W i="275" k="o" t="одною" /><W i="276" k="o" t="одним" /><W i="277" k="o" t="одном" /><W i="277" k="o" t="одной" /><W i="278" k="o" t="одном" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E 
i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="2"><W i="230" k="p" t="второй" /><W i="231" k="p" t="вторая" /><W i="232" k="p" t="второе" /><W i="233" k="p" t="вторые" /><W i="234" k="p" t="второго" /><W i="235" k="p" t="второй" /><W i="236" k="p" t="второго" /><W i="237" k="p" t="вторых" /><W i="238" k="p" t="второму" /><W i="239" k="p" t="второй" /><W i="240" k="p" t="второму" /><W i="241" k="p" t="вторым" /><W i="243" k="p" t="второго" /><W i="242" k="p" t="второй" /><W i="243" k="p" t="вторую" /><W i="244" k="p" t="второе" /><W i="245" k="p" t="вторые" /><W i="245" k="p" t="вторых" /><W i="246" k="p" t="вторым" /><W i="247" k="p" t="второй" /><W i="248" k="p" t="вторым" /><W i="249" k="p" t="вторыми" /><W i="250" k="p" t="втором" /><W i="251" k="p" t="второй" /><W i="252" k="p" t="втором" /><W i="253" k="p" t="вторых" /><W i="279" k="o" t="два" /><W i="260" k="o" t="две" /><W i="262" k="o" t="два" /><W i="280" k="o" t="двух" /><W i="264" k="o" t="двух" /><W i="265" k="o" t="двух" /><W i="281" k="o" t="двум" /><W i="267" k="o" t="двум" /><W i="268" k="o" t="двум" /><W i="270" k="o" t="два" /><W i="270" k="o" t="двух" /><W i="271" k="o" t="две" /><W i="271" k="o" t="двух" /><W i="272" k="o" t="два" /><W i="274" k="o" t="двумя" /><W i="275" k="o" t="двумя" /><W i="276" k="o" t="двумя" /><W i="282" k="o" t="двух" /><W i="277" k="o" t="двух" /><W i="278" k="o" t="двух" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="243" t="го" /><E i="242" t="й" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" 
t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="3"><W i="230" k="p" t="третий" /><W i="231" k="p" t="третья" /><W i="232" k="p" t="третье" /><W i="233" k="p" t="третьи" /><W i="234" k="p" t="третьего" /><W i="235" k="p" t="третьей" /><W i="236" k="p" t="третьего" /><W i="237" k="p" t="третьих" /><W i="238" k="p" t="третьему" /><W i="239" k="p" t="третьей" /><W i="240" k="p" t="третьему" /><W i="241" k="p" t="третьим" /><W i="242" k="p" t="третьего" /><W i="242" k="p" t="третий" /><W i="243" k="p" t="третью" /><W i="244" k="p" t="третье" /><W i="245" k="p" t="третьи" /><W i="245" k="p" t="третьих" /><W i="246" k="p" t="третьим" /><W i="247" k="p" t="третьей" /><W i="247" k="p" t="третьею" /><W i="248" k="p" t="третьим" /><W i="249" k="p" t="третьими" /><W i="250" k="p" t="третьем" /><W i="251" k="p" t="третьей" /><W i="252" k="p" t="третьем" /><W i="253" k="p" t="третьих" /><W i="261" k="o" t="три" /><W i="263" k="o" t="трех" /><W i="266" k="o" t="трем" /><W i="269" k="o" t="трех" /><W i="269" k="o" t="три" /><W i="273" k="o" t="тремя" /><W i="283" k="o" t="трех" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="и" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="и" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="4"><W i="230" k="p" t="четвёртый" /><W i="231" k="p" t="четвёртая" /><W i="232" k="p" t="четвёртое" /><W i="233" k="p" t="четвёртые" /><W i="234" k="p" t="четвёртого" /><W i="235" k="p" t="четвёртой" /><W i="236" k="p" t="четвёртого" /><W i="237" k="p" t="четвёртых" /><W i="238" k="p" t="четвёртому" /><W i="239" k="p" t="четвёртой" /><W 
i="240" k="p" t="четвёртому" /><W i="241" k="p" t="четвёртым" /><W i="242" k="p" t="четвёртого" /><W i="242" k="p" t="четвёртый" /><W i="243" k="p" t="четвёртую" /><W i="244" k="p" t="четвёртое" /><W i="245" k="p" t="четвёртые" /><W i="245" k="p" t="четвёртых" /><W i="246" k="p" t="четвёртым" /><W i="247" k="p" t="четвёртой" /><W i="248" k="p" t="четвёртым" /><W i="249" k="p" t="четвёртыми" /><W i="250" k="p" t="четвёртом" /><W i="251" k="p" t="четвёртой" /><W i="252" k="p" t="четвёртом" /><W i="253" k="p" t="четвёртых" /><W i="261" k="o" t="четыре" /><W i="263" k="o" t="четырех" /><W i="266" k="o" t="четырем" /><W i="269" k="o" t="четыре" /><W i="269" k="o" t="четырех" /><W i="273" k="o" t="четырьмя" /><W i="283" k="o" t="четырех" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="5"><W i="230" k="p" t="пятый" /><W i="231" k="p" t="пятая" /><W i="232" k="p" t="пятое" /><W i="233" k="p" t="пятые" /><W i="234" k="p" t="пятого" /><W i="235" k="p" t="пятой" /><W i="236" k="p" t="пятого" /><W i="237" k="p" t="пятых" /><W i="238" k="p" t="пятому" /><W i="239" k="p" t="пятой" /><W i="240" k="p" t="пятому" /><W i="241" k="p" t="пятым" /><W i="243" k="p" t="пятого" /><W i="242" k="p" t="пятый" /><W i="243" k="p" t="пятую" /><W i="244" k="p" t="пятое" /><W i="245" k="p" t="пятые" /><W i="245" k="p" t="пятых" /><W i="246" k="p" t="пятым" /><W i="247" k="p" t="пятой" /><W i="248" k="p" t="пятым" /><W i="249" k="p" t="пятыми" /><W i="250" k="p" t="пятом" /><W i="251" k="p" t="пятой" /><W i="252" k="p" t="пятом" /><W i="253" k="p" t="пятых" /><W i="261" k="o" 
t="пять" /><W i="263" k="o" t="пяти" /><W i="266" k="o" t="пяти" /><W i="269" k="o" t="пять" /><W i="273" k="o" t="пятью" /><W i="283" k="o" t="пяти" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="243" t="го" /><E i="242" t="й" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="6"><W i="230" k="p" t="шестой" /><W i="231" k="p" t="шестая" /><W i="232" k="p" t="шестое" /><W i="233" k="p" t="шестые" /><W i="234" k="p" t="шестого" /><W i="235" k="p" t="шестой" /><W i="236" k="p" t="шестого" /><W i="237" k="p" t="шестых" /><W i="238" k="p" t="шестому" /><W i="239" k="p" t="шестой" /><W i="240" k="p" t="шестому" /><W i="241" k="p" t="шестым" /><W i="242" k="p" t="шестого" /><W i="242" k="p" t="шестой" /><W i="243" k="p" t="шестую" /><W i="244" k="p" t="шестое" /><W i="245" k="p" t="шестые" /><W i="245" k="p" t="шестых" /><W i="246" k="p" t="шестым" /><W i="247" k="p" t="шестой" /><W i="248" k="p" t="шестым" /><W i="249" k="p" t="шестыми" /><W i="250" k="p" t="шестом" /><W i="251" k="p" t="шестой" /><W i="252" k="p" t="шестом" /><W i="253" k="p" t="шестых" /><W i="261" k="o" t="шесть" /><W i="263" k="o" t="шести" /><W i="266" k="o" t="шести" /><W i="269" k="o" t="шесть" /><W i="273" k="o" t="шестью" /><W i="283" k="o" t="шести" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E 
i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="7"><W i="230" k="p" t="седьмой" /><W i="231" k="p" t="седьмая" /><W i="232" k="p" t="седьмое" /><W i="233" k="p" t="седьмые" /><W i="234" k="p" t="седьмого" /><W i="235" k="p" t="седьмой" /><W i="236" k="p" t="седьмого" /><W i="237" k="p" t="седьмых" /><W i="238" k="p" t="седьмому" /><W i="239" k="p" t="седьмой" /><W i="240" k="p" t="седьмому" /><W i="241" k="p" t="седьмым" /><W i="242" k="p" t="седьмого" /><W i="242" k="p" t="седьмой" /><W i="243" k="p" t="седьмую" /><W i="244" k="p" t="седьмое" /><W i="245" k="p" t="седьмые" /><W i="245" k="p" t="седьмых" /><W i="246" k="p" t="седьмым" /><W i="247" k="p" t="седьмой" /><W i="248" k="p" t="седьмым" /><W i="249" k="p" t="седьмыми" /><W i="250" k="p" t="седьмом" /><W i="251" k="p" t="седьмой" /><W i="252" k="p" t="седьмом" /><W i="253" k="p" t="седьмых" /><W i="261" k="o" t="семь" /><W i="263" k="o" t="семи" /><W i="266" k="o" t="семи" /><W i="269" k="o" t="семь" /><W i="273" k="o" t="семью" /><W i="283" k="o" t="семи" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="8"><W i="230" k="p" t="восьмой" /><W i="231" k="p" t="восьмая" /><W i="232" k="p" t="восьмое" /><W i="233" k="p" t="восьмые" /><W i="234" k="p" t="восьмого" /><W i="235" k="p" t="восьмой" /><W i="236" k="p" t="восьмого" /><W i="237" k="p" t="восьмых" /><W i="238" k="p" t="восьмому" /><W i="239" k="p" t="восьмой" /><W i="240" k="p" t="восьмому" /><W i="241" k="p" t="восьмым" /><W i="242" k="p" t="восьмого" /><W i="242" k="p" t="восьмой" /><W i="243" k="p" t="восьмую" /><W 
i="244" k="p" t="восьмое" /><W i="245" k="p" t="восьмые" /><W i="245" k="p" t="восьмых" /><W i="246" k="p" t="восьмым" /><W i="247" k="p" t="восьмой" /><W i="248" k="p" t="восьмым" /><W i="249" k="p" t="восьмыми" /><W i="250" k="p" t="восьмом" /><W i="251" k="p" t="восьмой" /><W i="252" k="p" t="восьмом" /><W i="253" k="p" t="восьмых" /><W i="261" k="o" t="восемь" /><W i="263" k="o" t="восьми" /><W i="266" k="o" t="восьми" /><W i="269" k="o" t="восемь" /><W i="273" k="o" t="восемью" /><W i="273" k="o" t="восьмью" /><W i="283" k="o" t="восьми" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="9"><W i="230" k="p" t="девятый" /><W i="231" k="p" t="девятая" /><W i="232" k="p" t="девятое" /><W i="233" k="p" t="девятые" /><W i="234" k="p" t="девятого" /><W i="235" k="p" t="девятой" /><W i="236" k="p" t="девятого" /><W i="237" k="p" t="девятых" /><W i="238" k="p" t="девятому" /><W i="239" k="p" t="девятой" /><W i="240" k="p" t="девятому" /><W i="241" k="p" t="девятым" /><W i="242" k="p" t="девятого" /><W i="242" k="p" t="девятый" /><W i="243" k="p" t="девятую" /><W i="244" k="p" t="девятое" /><W i="245" k="p" t="девятые" /><W i="245" k="p" t="девятых" /><W i="246" k="p" t="девятым" /><W i="247" k="p" t="девятой" /><W i="248" k="p" t="девятым" /><W i="249" k="p" t="девятыми" /><W i="250" k="p" t="девятом" /><W i="251" k="p" t="девятой" /><W i="252" k="p" t="девятом" /><W i="253" k="p" t="девятых" /><W i="261" k="o" t="девять" /><W i="263" k="o" t="девяти" /><W i="266" k="o" t="девяти" /><W i="269" k="o" t="девять" /><W i="273" k="o" t="девятью" /><W i="283" 
k="o" t="девяти" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="10"><W i="230" k="p" t="десятый" /><W i="231" k="p" t="десятая" /><W i="232" k="p" t="десятое" /><W i="233" k="p" t="десятые" /><W i="234" k="p" t="десятого" /><W i="235" k="p" t="десятой" /><W i="236" k="p" t="десятого" /><W i="237" k="p" t="десятых" /><W i="238" k="p" t="десятому" /><W i="239" k="p" t="десятой" /><W i="240" k="p" t="десятому" /><W i="241" k="p" t="десятым" /><W i="242" k="p" t="десятого" /><W i="242" k="p" t="десятый" /><W i="243" k="p" t="десятую" /><W i="244" k="p" t="десятое" /><W i="245" k="p" t="десятые" /><W i="245" k="p" t="десятых" /><W i="246" k="p" t="десятым" /><W i="247" k="p" t="десятой" /><W i="248" k="p" t="десятым" /><W i="249" k="p" t="десятыми" /><W i="250" k="p" t="десятом" /><W i="251" k="p" t="десятой" /><W i="252" k="p" t="десятом" /><W i="253" k="p" t="десятых" /><W i="261" k="o" t="десять" /><W i="263" k="o" t="десяти" /><W i="266" k="o" t="десяти" /><W i="269" k="o" t="десять" /><W i="273" k="o" t="десятью" /><W i="283" k="o" t="десяти" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="11"><W i="230" k="p" t="одиннадцатый" 
/><W i="231" k="p" t="одиннадцатая" /><W i="232" k="p" t="одиннадцатое" /><W i="233" k="p" t="одиннадцатые" /><W i="234" k="p" t="одиннадцатого" /><W i="235" k="p" t="одиннадцатой" /><W i="236" k="p" t="одиннадцатого" /><W i="237" k="p" t="одиннадцатых" /><W i="238" k="p" t="одиннадцатому" /><W i="239" k="p" t="одиннадцатой" /><W i="240" k="p" t="одиннадцатому" /><W i="241" k="p" t="одиннадцат��м" /><W i="242" k="p" t="одиннадцатого" /><W i="242" k="p" t="одиннадцатый" /><W i="243" k="p" t="одиннадцатую" /><W i="244" k="p" t="одиннадцатое" /><W i="245" k="p" t="одиннадцатые" /><W i="245" k="p" t="одиннадцатых" /><W i="246" k="p" t="одиннадцатым" /><W i="247" k="p" t="одиннадцатой" /><W i="248" k="p" t="одиннадцатым" /><W i="249" k="p" t="одиннадцатыми" /><W i="250" k="p" t="одиннадцатом" /><W i="251" k="p" t="одиннадцатой" /><W i="252" k="p" t="одиннадцатом" /><W i="253" k="p" t="одиннадцатых" /><W i="261" k="o" t="одиннадцать" /><W i="263" k="o" t="одиннадцати" /><W i="266" k="o" t="одиннадцати" /><W i="269" k="o" t="одиннадцать" /><W i="273" k="o" t="одиннадцатью" /><W i="283" k="o" t="одиннадцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="12"><W i="231" k="ad" t="дюжина" /><W i="284" k="ad" t="дюжины" /><W i="235" k="ad" t="дюжины" /><W i="285" k="ad" t="дюжин" /><W i="239" k="ad" t="дюжине" /><W i="286" k="ad" t="дюжинам" /><W i="243" k="ad" t="дюжину" /><W i="287" k="ad" t="дюжины" /><W i="247" k="ad" t="дюжиной" /><W i="247" k="ad" t="дюжиною" /><W i="288" k="ad" t="дюжинами" /><W i="251" k="ad" t="дюжине" /><W i="289" k="ad" 
t="дюжинах" /><W i="230" k="p" t="двенадцатый" /><W i="231" k="p" t="двенадцатая" /><W i="232" k="p" t="двенадцатое" /><W i="233" k="p" t="двенадцатые" /><W i="234" k="p" t="двенадцатого" /><W i="235" k="p" t="двенадцатой" /><W i="236" k="p" t="двенадцатого" /><W i="237" k="p" t="двенадцатых" /><W i="238" k="p" t="двенадцатому" /><W i="239" k="p" t="двенадцатой" /><W i="240" k="p" t="двенадцатому" /><W i="241" k="p" t="двенадцатым" /><W i="242" k="p" t="двенадцатого" /><W i="242" k="p" t="двенадцатый" /><W i="243" k="p" t="двенадцатую" /><W i="244" k="p" t="двенадцатое" /><W i="245" k="p" t="двенадцатые" /><W i="245" k="p" t="двенадцатых" /><W i="246" k="p" t="двенадцатым" /><W i="247" k="p" t="двенадцатой" /><W i="248" k="p" t="двенадцатым" /><W i="249" k="p" t="двенадцатыми" /><W i="250" k="p" t="двенадцатом" /><W i="251" k="p" t="двенадцатой" /><W i="252" k="p" t="двенадцатом" /><W i="253" k="p" t="двенадцатых" /><W i="261" k="o" t="двенадцать" /><W i="263" k="o" t="двенадцати" /><W i="266" k="o" t="двенадцати" /><W i="269" k="o" t="двенадцать" /><W i="273" k="o" t="двенадцатью" /><W i="283" k="o" t="двенадцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="13"><W i="230" k="p" t="тринадцатый" /><W i="231" k="p" t="тринадцатая" /><W i="232" k="p" t="тринадцатое" /><W i="233" k="p" t="тринадцатые" /><W i="234" k="p" t="тринадцатого" /><W i="235" k="p" t="тринадцатой" /><W i="236" k="p" t="тринадцатого" /><W i="237" k="p" t="тринадцатых" /><W i="238" k="p" t="тринадцатому" /><W i="239" k="p" t="тринадцатой" /><W i="240" k="p" t="тринадцатому" 
/><W i="241" k="p" t="тринадцатым" /><W i="242" k="p" t="тринадцатого" /><W i="242" k="p" t="тринадцатый" /><W i="243" k="p" t="тринадцатую" /><W i="244" k="p" t="тринадцатое" /><W i="245" k="p" t="тринадцатые" /><W i="245" k="p" t="тринадцатых" /><W i="246" k="p" t="тринадцатым" /><W i="247" k="p" t="тринадцатой" /><W i="248" k="p" t="тринадцатым" /><W i="249" k="p" t="тринадцатыми" /><W i="250" k="p" t="тринадцатом" /><W i="251" k="p" t="тринадцатой" /><W i="252" k="p" t="тринадцатом" /><W i="253" k="p" t="тринадцатых" /><W i="261" k="o" t="тринадцать" /><W i="263" k="o" t="тринадцати" /><W i="266" k="o" t="тринадцати" /><W i="269" k="o" t="тринадцать" /><W i="273" k="o" t="тринадцатью" /><W i="283" k="o" t="тринадцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="14"><W i="230" k="p" t="четырнадцатый" /><W i="231" k="p" t="четырнадцатая" /><W i="232" k="p" t="четырнадцатое" /><W i="233" k="p" t="четырнадцатые" /><W i="234" k="p" t="четырнадцатого" /><W i="235" k="p" t="четырнадцатой" /><W i="236" k="p" t="четырнадцатого" /><W i="237" k="p" t="четырнадцатых" /><W i="238" k="p" t="четырнадцатому" /><W i="239" k="p" t="четырнадцатой" /><W i="240" k="p" t="четырнадцатому" /><W i="241" k="p" t="четырнадцатым" /><W i="242" k="p" t="четырнадцатого" /><W i="242" k="p" t="четырнадцатый" /><W i="243" k="p" t="четырнадцатую" /><W i="244" k="p" t="четырнадцатое" /><W i="245" k="p" t="четырнадцатые" /><W i="245" k="p" t="четырнадцатых" /><W i="246" k="p" t="четырнадцатым" /><W i="247" k="p" t="четырнадцатой" /><W i="248" k="p" t="четырнадцатым" /><W 
i="249" k="p" t="четырнадцатыми" /><W i="250" k="p" t="четырнадцатом" /><W i="251" k="p" t="четырнадцатой" /><W i="252" k="p" t="четырнадцатом" /><W i="253" k="p" t="четырнадцатых" /><W i="261" k="o" t="четырнадцать" /><W i="263" k="o" t="четырнадцати" /><W i="266" k="o" t="четырнадцати" /><W i="269" k="o" t="четырнадцать" /><W i="273" k="o" t="четырнадцатью" /><W i="283" k="o" t="четырнадцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="15"><W i="230" k="p" t="пятнадцатый" /><W i="231" k="p" t="пятнадцатая" /><W i="232" k="p" t="пятнадцатое" /><W i="233" k="p" t="пятнадцатые" /><W i="234" k="p" t="пятнадцатого" /><W i="235" k="p" t="пятнадцатой" /><W i="236" k="p" t="пятнадцатого" /><W i="237" k="p" t="пятнадцатых" /><W i="238" k="p" t="пятнадцатому" /><W i="239" k="p" t="пятнадцатой" /><W i="240" k="p" t="пятнадцатому" /><W i="241" k="p" t="пятнадцатым" /><W i="242" k="p" t="пятнадцатого" /><W i="242" k="p" t="пятнадцатый" /><W i="243" k="p" t="пятнадцатую" /><W i="244" k="p" t="пятнадцатое" /><W i="245" k="p" t="пятнадцатые" /><W i="245" k="p" t="пятнадцатых" /><W i="247" k="p" t="пятнадцатым" /><W i="247" k="p" t="пятнадцатой" /><W i="248" k="p" t="пятнадцатым" /><W i="249" k="p" t="пятнадцатыми" /><W i="251" k="p" t="пятнадцатом" /><W i="251" k="p" t="пятнадцатой" /><W i="252" k="p" t="пятнадцатом" /><W i="253" k="p" t="пятнадцатых" /><W i="261" k="o" t="пятнадцать" /><W i="263" k="o" t="пятнадцати" /><W i="266" k="o" t="пятнадцати" /><W i="269" k="o" t="пятнадцать" /><W i="273" k="o" t="пятнадцатью" /><W i="283" k="o" t="пятнадцати" /><E 
i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="247" t="м" /><E i="248" t="м" /><E i="249" t="ми" /><E i="251" t="м" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="16"><W i="230" k="p" t="шестнадцатый" /><W i="231" k="p" t="шестнадцатая" /><W i="232" k="p" t="шестнадцатое" /><W i="233" k="p" t="шестнадцатые" /><W i="234" k="p" t="шестнадцатого" /><W i="235" k="p" t="шестнадцатой" /><W i="236" k="p" t="шестнадцатого" /><W i="237" k="p" t="шестнадцатых" /><W i="238" k="p" t="шестнадцатому" /><W i="239" k="p" t="шестнадцатой" /><W i="240" k="p" t="шестнадцатому" /><W i="241" k="p" t="шестнадцатым" /><W i="242" k="p" t="шестнадцатого" /><W i="242" k="p" t="шестнадцатый" /><W i="243" k="p" t="шестнадцатую" /><W i="244" k="p" t="шестнадцатое" /><W i="245" k="p" t="шестнадцатые" /><W i="245" k="p" t="шестнадцатых" /><W i="246" k="p" t="шестнадцатым" /><W i="247" k="p" t="шестнадцатой" /><W i="248" k="p" t="шестнадцатым" /><W i="249" k="p" t="шестнадцатыми" /><W i="250" k="p" t="шестнадцатом" /><W i="251" k="p" t="шестнадцатой" /><W i="252" k="p" t="шестнадцатом" /><W i="253" k="p" t="шестнадцатых" /><W i="261" k="o" t="шестнадцать" /><W i="263" k="o" t="шестнадцати" /><W i="266" k="o" t="шестнадцати" /><W i="269" k="o" t="шестнадцать" /><W i="273" k="o" t="шестнадцатью" /><W i="283" k="o" t="шестнадцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E 
i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="17"><W i="230" k="p" t="семнадцатый" /><W i="231" k="p" t="семнадцатая" /><W i="232" k="p" t="семнадцатое" /><W i="233" k="p" t="семнадцатые" /><W i="234" k="p" t="семнадцатого" /><W i="235" k="p" t="семнадцатой" /><W i="236" k="p" t="семнадцатого" /><W i="237" k="p" t="семнадцатых" /><W i="238" k="p" t="семнадцатому" /><W i="239" k="p" t="семнадцатой" /><W i="240" k="p" t="семнадцатому" /><W i="241" k="p" t="семнадцатым" /><W i="242" k="p" t="семнадцатого" /><W i="242" k="p" t="семнадцатый" /><W i="243" k="p" t="семнадцатую" /><W i="244" k="p" t="семнадцатое" /><W i="245" k="p" t="семнадцатые" /><W i="245" k="p" t="семнадцатых" /><W i="246" k="p" t="семнадцатым" /><W i="247" k="p" t="семнадцатой" /><W i="248" k="p" t="семнадцатым" /><W i="249" k="p" t="семнадцатыми" /><W i="250" k="p" t="семнадцатом" /><W i="251" k="p" t="семнадцатой" /><W i="252" k="p" t="семнадцатом" /><W i="253" k="p" t="семнадцатых" /><W i="261" k="o" t="семнадцать" /><W i="263" k="o" t="семнадцати" /><W i="266" k="o" t="семнадцати" /><W i="269" k="o" t="семнадцать" /><W i="273" k="o" t="семнадцатью" /><W i="283" k="o" t="семнадцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="18"><W i="230" k="p" t="восемнадцатый" /><W i="231" k="p" t="восемнадцатая" /><W i="232" k="p" t="восемнадцатое" /><W i="233" k="p" t="восемнадцатые" /><W i="234" k="p" t="восемнадцатого" /><W i="235" k="p" t="восемнадцатой" /><W i="236" k="p" t="восемнадцатого" /><W i="237" k="p" t="восемнадцатых" /><W i="238" k="p" t="восемнадцатому" 
/><W i="239" k="p" t="восемнадцатой" /><W i="240" k="p" t="восемнадцатому" /><W i="241" k="p" t="восемнадцатым" /><W i="242" k="p" t="восемнадцатого" /><W i="242" k="p" t="восемнадцатый" /><W i="243" k="p" t="восемнадцатую" /><W i="244" k="p" t="восемнадцатое" /><W i="245" k="p" t="восемнадцатые" /><W i="245" k="p" t="восемнадцатых" /><W i="246" k="p" t="восемнадцатым" /><W i="247" k="p" t="восемнадцатой" /><W i="248" k="p" t="восемнадцатым" /><W i="249" k="p" t="восемнадцатыми" /><W i="250" k="p" t="восемнадцатом" /><W i="251" k="p" t="восемнадцатой" /><W i="252" k="p" t="восемнадцатом" /><W i="253" k="p" t="восемнадцатых" /><W i="261" k="o" t="восемнадцать" /><W i="263" k="o" t="восемнадцати" /><W i="266" k="o" t="восемнадцати" /><W i="269" k="o" t="восемнадцать" /><W i="273" k="o" t="восемнадцатью" /><W i="283" k="o" t="восемнадцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="19"><W i="230" k="p" t="девятнадцатый" /><W i="231" k="p" t="девятнадцатая" /><W i="232" k="p" t="девятнадцатое" /><W i="233" k="p" t="девятнадцатые" /><W i="234" k="p" t="девятнадцатого" /><W i="235" k="p" t="девятнадцатой" /><W i="236" k="p" t="девятнадцатого" /><W i="237" k="p" t="девятнадцатых" /><W i="238" k="p" t="девятнадцатому" /><W i="239" k="p" t="девятнадцатой" /><W i="240" k="p" t="девятнадцатому" /><W i="241" k="p" t="девятнадцатым" /><W i="242" k="p" t="девятнадцатого" /><W i="242" k="p" t="девятнадцатый" /><W i="243" k="p" t="девятнадцатую" /><W i="244" k="p" t="девятнадцатое" /><W i="245" k="p" t="девятнадцатые" /><W i="245" k="p" t="девятнадцатых" 
/><W i="246" k="p" t="девятнадцатым" /><W i="247" k="p" t="девятнадцатой" /><W i="248" k="p" t="девятнадцатым" /><W i="249" k="p" t="девятнадцатыми" /><W i="250" k="p" t="девятнадцатом" /><W i="251" k="p" t="девятнадцатой" /><W i="252" k="p" t="девятнадцатом" /><W i="253" k="p" t="девятнадцатых" /><W i="261" k="o" t="девятнадцать" /><W i="263" k="o" t="девятнадцати" /><W i="266" k="o" t="девятнадцати" /><W i="269" k="o" t="девятнадцать" /><W i="273" k="o" t="девятнадцатью" /><W i="283" k="o" t="девятнадцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="20"><W i="230" k="p" t="двадцатый" /><W i="231" k="p" t="двадцатая" /><W i="232" k="p" t="двадцатое" /><W i="233" k="p" t="двадцатые" /><W i="234" k="p" t="двадцатого" /><W i="235" k="p" t="двадцатой" /><W i="236" k="p" t="двадцатого" /><W i="237" k="p" t="двадцатых" /><W i="238" k="p" t="двадцатому" /><W i="239" k="p" t="двадцатой" /><W i="240" k="p" t="двадцатому" /><W i="241" k="p" t="двадцатым" /><W i="242" k="p" t="двадцатого" /><W i="242" k="p" t="двадцатый" /><W i="243" k="p" t="двадцатую" /><W i="244" k="p" t="двадцатое" /><W i="245" k="p" t="двадцатые" /><W i="245" k="p" t="двадцатых" /><W i="246" k="p" t="двадцатым" /><W i="247" k="p" t="двадцатой" /><W i="248" k="p" t="двадцатым" /><W i="249" k="p" t="двадцатыми" /><W i="250" k="p" t="двадцатом" /><W i="251" k="p" t="двадцатой" /><W i="252" k="p" t="двадцатом" /><W i="253" k="p" t="двадцатых" /><W i="261" k="o" t="двадцать" /><W i="263" k="o" t="двадцати" /><W i="266" k="o" t="двадцати" /><W i="269" k="o" t="двадцать" /><W i="273" 
k="o" t="двадцатью" /><W i="283" k="o" t="двадцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="30"><W i="230" k="p" t="тридцатый" /><W i="231" k="p" t="тридцатая" /><W i="232" k="p" t="тридцатое" /><W i="233" k="p" t="тридцатые" /><W i="234" k="p" t="тридцатого" /><W i="235" k="p" t="тридцатой" /><W i="236" k="p" t="тридцатого" /><W i="237" k="p" t="тридцатых" /><W i="238" k="p" t="тридцатому" /><W i="239" k="p" t="тридцатой" /><W i="240" k="p" t="тридцатому" /><W i="241" k="p" t="тридцатым" /><W i="242" k="p" t="тридцатого" /><W i="242" k="p" t="тридцатый" /><W i="243" k="p" t="тридцатую" /><W i="244" k="p" t="тридцатое" /><W i="245" k="p" t="тридцатые" /><W i="245" k="p" t="тридцатых" /><W i="246" k="p" t="тридцатым" /><W i="247" k="p" t="тридцатой" /><W i="248" k="p" t="тридцатым" /><W i="249" k="p" t="тридцатыми" /><W i="250" k="p" t="тридцатом" /><W i="251" k="p" t="тридцатой" /><W i="252" k="p" t="тридцатом" /><W i="253" k="p" t="тридцатых" /><W i="261" k="o" t="тридцать" /><W i="263" k="o" t="тридцати" /><W i="266" k="o" t="тридцати" /><W i="269" k="o" t="тридцать" /><W i="273" k="o" t="тридцатью" /><W i="283" k="o" t="тридцати" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E 
i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="40"><W i="230" k="p" t="сороковой" /><W i="231" k="p" t="сороковая" /><W i="232" k="p" t="сороковое" /><W i="233" k="p" t="сороковые" /><W i="234" k="p" t="сорокового" /><W i="235" k="p" t="сороковой" /><W i="236" k="p" t="сорокового" /><W i="237" k="p" t="сороковых" /><W i="238" k="p" t="сороковому" /><W i="239" k="p" t="сороковой" /><W i="240" k="p" t="сороковому" /><W i="241" k="p" t="сороковым" /><W i="242" k="p" t="сорокового" /><W i="242" k="p" t="сороковой" /><W i="243" k="p" t="сороковую" /><W i="244" k="p" t="сороковое" /><W i="245" k="p" t="сороковые" /><W i="245" k="p" t="сороковых" /><W i="246" k="p" t="сороковым" /><W i="247" k="p" t="сороковой" /><W i="248" k="p" t="сороковым" /><W i="249" k="p" t="сороковыми" /><W i="250" k="p" t="сороковом" /><W i="251" k="p" t="сороковой" /><W i="252" k="p" t="сороковом" /><W i="253" k="p" t="сороковых" /><W i="261" k="o" t="сорок" /><W i="263" k="o" t="сорока" /><W i="266" k="o" t="сорока" /><W i="269" k="o" t="сорок" /><W i="273" k="o" t="сорока" /><W i="283" k="o" t="сорока" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="50"><W i="232" k="ad" t="полсотни" /><W i="244" k="ad" t="полсотни" /><W i="230" k="p" t="пятидесятый" /><W i="231" k="p" t="пятидесятая" /><W i="232" k="p" t="пятидесятое" /><W i="233" k="p" t="пятидесятые" /><W i="234" k="p" t="пятидесятого" /><W i="235" k="p" t="пятидесятой" /><W i="236" k="p" t="пятидесятого" /><W i="237" k="p" t="пятидесятых" /><W i="238" k="p" t="пятидесятому" /><W i="239" k="p" 
t="пятидесятой" /><W i="240" k="p" t="пятидесятому" /><W i="241" k="p" t="пятидесятым" /><W i="242" k="p" t="пятидесятого" /><W i="242" k="p" t="пятидесятый" /><W i="243" k="p" t="пятидесятую" /><W i="244" k="p" t="пятидесятое" /><W i="245" k="p" t="пятидесятые" /><W i="245" k="p" t="пятидесятых" /><W i="246" k="p" t="пятидесятым" /><W i="247" k="p" t="пятидесятой" /><W i="248" k="p" t="пятидесятым" /><W i="249" k="p" t="пятидесятыми" /><W i="250" k="p" t="пятидесятом" /><W i="251" k="p" t="пятидесятой" /><W i="252" k="p" t="пятидесятом" /><W i="253" k="p" t="пятидесятых" /><W i="261" k="o" t="пятьдесят" /><W i="263" k="o" t="пятидесяти" /><W i="266" k="o" t="пятидесяти" /><W i="269" k="o" t="пятьдесят" /><W i="273" k="o" t="пятьюдесятью" /><W i="283" k="o" t="пятидесяти" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="60"><W i="230" k="p" t="шестидесятый" /><W i="231" k="p" t="шестидесятая" /><W i="232" k="p" t="шестидесятое" /><W i="233" k="p" t="шестидесятые" /><W i="234" k="p" t="шестидесятого" /><W i="235" k="p" t="шестидесятой" /><W i="236" k="p" t="шестидесятого" /><W i="237" k="p" t="шестидесятых" /><W i="238" k="p" t="шестидесятому" /><W i="239" k="p" t="шестидесятой" /><W i="240" k="p" t="шестидесятому" /><W i="241" k="p" t="шестидесятым" /><W i="242" k="p" t="шестидесятого" /><W i="242" k="p" t="шестидесятый" /><W i="243" k="p" t="шестидесятую" /><W i="244" k="p" t="шестидесятое" /><W i="245" k="p" t="шестидесятые" /><W i="245" k="p" t="шестидесятых" /><W i="246" k="p" t="шестидесятым" /><W i="247" k="p" t="шестидесятой" /><W i="248" 
k="p" t="шестидесятым" /><W i="249" k="p" t="шестидесятыми" /><W i="250" k="p" t="шестидесятом" /><W i="251" k="p" t="шестидесятой" /><W i="252" k="p" t="шестидесятом" /><W i="253" k="p" t="шестидесятых" /><W i="261" k="o" t="шестьдесят" /><W i="263" k="o" t="шестидесяти" /><W i="266" k="o" t="шестидесяти" /><W i="269" k="o" t="шестьдесят" /><W i="273" k="o" t="шестьюдесятью" /><W i="283" k="o" t="шестидесяти" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="70"><W i="230" k="p" t="семидесятый" /><W i="231" k="p" t="семидесятая" /><W i="232" k="p" t="семидесятое" /><W i="233" k="p" t="семидесятые" /><W i="234" k="p" t="семидесятого" /><W i="235" k="p" t="семидесятой" /><W i="236" k="p" t="семидесятого" /><W i="237" k="p" t="семидесятых" /><W i="238" k="p" t="семидесятому" /><W i="239" k="p" t="семидесятой" /><W i="240" k="p" t="семидесятому" /><W i="241" k="p" t="семидесятым" /><W i="242" k="p" t="семидесятого" /><W i="242" k="p" t="семидесятый" /><W i="243" k="p" t="семидесятую" /><W i="244" k="p" t="семидесятое" /><W i="245" k="p" t="семидесятые" /><W i="245" k="p" t="семидесятых" /><W i="246" k="p" t="семидесятым" /><W i="247" k="p" t="семидесятой" /><W i="248" k="p" t="семидесятым" /><W i="249" k="p" t="семидесятыми" /><W i="250" k="p" t="семидесятом" /><W i="251" k="p" t="семидесятой" /><W i="252" k="p" t="семидесятом" /><W i="253" k="p" t="семидесятых" /><W i="261" k="o" t="семьдесят" /><W i="263" k="o" t="семидесяти" /><W i="266" k="o" t="семидесяти" /><W i="269" k="o" t="семьдесят" /><W i="273" k="o" t="семьюдесятью" /><W i="283" k="o" 
t="семидесяти" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="80"><W i="230" k="p" t="восьмидесятый" /><W i="231" k="p" t="восьмидесятая" /><W i="232" k="p" t="восьмидесятое" /><W i="233" k="p" t="восьмидесятые" /><W i="234" k="p" t="восьмидесятого" /><W i="235" k="p" t="восьмидесятой" /><W i="236" k="p" t="восьмидесятого" /><W i="237" k="p" t="восьмидесятых" /><W i="238" k="p" t="восьмидесятому" /><W i="239" k="p" t="восьмидесятой" /><W i="240" k="p" t="восьмидесятому" /><W i="241" k="p" t="восьмидесятым" /><W i="242" k="p" t="восьмидесятого" /><W i="242" k="p" t="восьмидесятый" /><W i="243" k="p" t="восьмидесятую" /><W i="244" k="p" t="восьмидесятое" /><W i="245" k="p" t="восьмидесятые" /><W i="245" k="p" t="восьмидесятых" /><W i="246" k="p" t="восьмидесятым" /><W i="247" k="p" t="восьмидесятой" /><W i="248" k="p" t="восьмидесятым" /><W i="249" k="p" t="восьмидесятыми" /><W i="250" k="p" t="восьмидесятом" /><W i="251" k="p" t="восьмидесятой" /><W i="252" k="p" t="восьмидесятом" /><W i="253" k="p" t="восьмидесятых" /><W i="261" k="o" t="восемьдесят" /><W i="263" k="o" t="восьмидесяти" /><W i="266" k="o" t="восьмидесяти" /><W i="269" k="o" t="восемьдесят" /><W i="273" k="o" t="восемьюдесятью" /><W i="273" k="o" t="восьмьюдесятью" /><W i="283" k="o" t="восьмидесяти" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" 
t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="90"><W i="230" k="p" t="девяностый" /><W i="231" k="p" t="девяностая" /><W i="232" k="p" t="девяностое" /><W i="233" k="p" t="девяностые" /><W i="234" k="p" t="девяностого" /><W i="235" k="p" t="девяностой" /><W i="236" k="p" t="девяностого" /><W i="237" k="p" t="девяностых" /><W i="238" k="p" t="девяностому" /><W i="239" k="p" t="девяностой" /><W i="240" k="p" t="девяностому" /><W i="241" k="p" t="девяностым" /><W i="242" k="p" t="девяностого" /><W i="242" k="p" t="девяностый" /><W i="243" k="p" t="девяностую" /><W i="244" k="p" t="девяностое" /><W i="245" k="p" t="девяностые" /><W i="245" k="p" t="девяностых" /><W i="246" k="p" t="девяностым" /><W i="247" k="p" t="девяностой" /><W i="248" k="p" t="девяностым" /><W i="249" k="p" t="девяностыми" /><W i="250" k="p" t="девяностом" /><W i="251" k="p" t="девяностой" /><W i="252" k="p" t="девяностом" /><W i="253" k="p" t="девяностых" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="100"><W i="231" k="ad" t="сотня" /><W i="284" k="ad" t="сотни" /><W i="235" k="ad" t="сотни" /><W i="285" k="ad" t="сот" /><W i="285" k="ad" t="сотен" /><W i="239" k="ad" t="сотне" /><W i="286" k="ad" t="сотням" /><W i="243" k="ad" t="сотню" /><W i="287" k="ad" t="сотни" /><W i="247" k="ad" t="сотней" /><W i="247" k="ad" t="сотнею" /><W i="288" k="ad" t="сотнями" /><W i="251" k="ad" t="сотне" /><W i="289" k="ad" t="сотнях" /><W i="230" 
k="p" t="сотый" /><W i="231" k="p" t="сотая" /><W i="232" k="p" t="сотое" /><W i="233" k="p" t="сотые" /><W i="234" k="p" t="сотого" /><W i="235" k="p" t="сотой" /><W i="236" k="p" t="сотого" /><W i="237" k="p" t="сотых" /><W i="238" k="p" t="сотому" /><W i="239" k="p" t="сотой" /><W i="240" k="p" t="сотому" /><W i="241" k="p" t="сотым" /><W i="242" k="p" t="сотого" /><W i="242" k="p" t="сотый" /><W i="243" k="p" t="сотую" /><W i="244" k="p" t="сотое" /><W i="245" k="p" t="сотые" /><W i="245" k="p" t="сотых" /><W i="246" k="p" t="сотым" /><W i="247" k="p" t="сотой" /><W i="248" k="p" t="сотым" /><W i="249" k="p" t="сотыми" /><W i="250" k="p" t="сотом" /><W i="251" k="p" t="сотой" /><W i="252" k="p" t="сотом" /><W i="253" k="p" t="сотых" /><W i="261" k="o" t="сто" /><W i="263" k="o" t="ста" /><W i="266" k="o" t="ста" /><W i="269" k="o" t="сто" /><W i="273" k="o" t="ста" /><W i="283" k="o" t="ста" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="200"><W i="230" k="p" t="двухсотый" /><W i="231" k="p" t="двухсотая" /><W i="232" k="p" t="двухсотое" /><W i="233" k="p" t="двухсотые" /><W i="234" k="p" t="двухсотого" /><W i="235" k="p" t="двухсотой" /><W i="236" k="p" t="двухсотого" /><W i="237" k="p" t="двухсотых" /><W i="238" k="p" t="двухсотому" /><W i="239" k="p" t="двухсотой" /><W i="240" k="p" t="двухсотому" /><W i="241" k="p" t="двухсотым" /><W i="242" k="p" t="двухсотого" /><W i="242" k="p" t="двухсотый" /><W i="243" k="p" t="двухсотую" /><W i="244" k="p" t="двухсотое" /><W i="245" k="p" t="двухсотые" /><W i="245" k="p" t="двухсотых" /><W i="246" 
k="p" t="двухсотым" /><W i="247" k="p" t="двухсотой" /><W i="248" k="p" t="двухсотым" /><W i="249" k="p" t="двухсотыми" /><W i="250" k="p" t="двухсотом" /><W i="251" k="p" t="двухсотой" /><W i="252" k="p" t="двухсотом" /><W i="253" k="p" t="двухсотых" /><W i="261" k="o" t="двести" /><W i="263" k="o" t="двухсот" /><W i="266" k="o" t="двумстам" /><W i="269" k="o" t="двести" /><W i="273" k="o" t="двумястами" /><W i="283" k="o" t="двухстах" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="300"><W i="230" k="p" t="трёхсотый" /><W i="231" k="p" t="трёхсотая" /><W i="232" k="p" t="трёхсотое" /><W i="233" k="p" t="трёхсотые" /><W i="234" k="p" t="трёхсотого" /><W i="235" k="p" t="трёхсотой" /><W i="236" k="p" t="трёхсотого" /><W i="237" k="p" t="трёхсотых" /><W i="238" k="p" t="трёхсотому" /><W i="239" k="p" t="трёхсотой" /><W i="240" k="p" t="трёхсотому" /><W i="241" k="p" t="трёхсотым" /><W i="242" k="p" t="трёхсотого" /><W i="242" k="p" t="трёхсотый" /><W i="243" k="p" t="трёхсотую" /><W i="244" k="p" t="трёхсотое" /><W i="245" k="p" t="трёхсотые" /><W i="245" k="p" t="трёхсотых" /><W i="246" k="p" t="трёхсотым" /><W i="247" k="p" t="трёхсотой" /><W i="248" k="p" t="трёхсотым" /><W i="249" k="p" t="трёхсотыми" /><W i="250" k="p" t="трёхсотом" /><W i="251" k="p" t="трёхсотой" /><W i="252" k="p" t="трёхсотом" /><W i="253" k="p" t="трёхсотых" /><W i="261" k="o" t="триста" /><W i="263" k="o" t="трехсот" /><W i="266" k="o" t="тремстам" /><W i="269" k="o" t="триста" /><W i="273" k="o" t="тремястами" /><W i="283" k="o" t="трехстах" /><E i="230" t="й" /><E 
i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="400"><W i="230" k="p" t="четырёхсотый" /><W i="231" k="p" t="четырёхсотая" /><W i="232" k="p" t="четырёхсотое" /><W i="233" k="p" t="четырёхсотые" /><W i="234" k="p" t="четырёхсотого" /><W i="235" k="p" t="четырёхсотой" /><W i="236" k="p" t="четырёхсотого" /><W i="237" k="p" t="четырёхсотых" /><W i="238" k="p" t="четырёхсотому" /><W i="239" k="p" t="четырёхсотой" /><W i="240" k="p" t="четырёхсотому" /><W i="241" k="p" t="четырёхсотым" /><W i="242" k="p" t="четырёхсотого" /><W i="242" k="p" t="четырёхсотый" /><W i="243" k="p" t="четырёхсотую" /><W i="244" k="p" t="четырёхсотое" /><W i="245" k="p" t="четырёхсотые" /><W i="245" k="p" t="четырёхсотых" /><W i="246" k="p" t="четырёхсотым" /><W i="247" k="p" t="четырёхсотой" /><W i="248" k="p" t="четырёхсотым" /><W i="249" k="p" t="четырёхсотыми" /><W i="250" k="p" t="четырёхсотом" /><W i="251" k="p" t="четырёхсотой" /><W i="252" k="p" t="четырёхсотом" /><W i="253" k="p" t="четырёхсотых" /><W i="261" k="o" t="четыреста" /><W i="263" k="o" t="четырехсот" /><W i="266" k="o" t="четыремстам" /><W i="269" k="o" t="четыреста" /><W i="273" k="o" t="четырьмястами" /><W i="283" k="o" t="четырехстах" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E 
i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="500"><W i="230" k="p" t="пятисотый" /><W i="231" k="p" t="пятисотая" /><W i="232" k="p" t="пятисотое" /><W i="233" k="p" t="пятисотые" /><W i="234" k="p" t="пятисотого" /><W i="235" k="p" t="пятисотой" /><W i="236" k="p" t="пятисотого" /><W i="237" k="p" t="пятисотых" /><W i="238" k="p" t="пятисотому" /><W i="239" k="p" t="пятисотой" /><W i="240" k="p" t="пятисотому" /><W i="241" k="p" t="пятисотым" /><W i="242" k="p" t="пятисотого" /><W i="242" k="p" t="пятисотый" /><W i="243" k="p" t="пятисотую" /><W i="244" k="p" t="пятисотое" /><W i="245" k="p" t="пятисотые" /><W i="245" k="p" t="пятисотых" /><W i="246" k="p" t="пятисотым" /><W i="247" k="p" t="пятисотой" /><W i="248" k="p" t="пятисотым" /><W i="249" k="p" t="пятисотыми" /><W i="250" k="p" t="пятисотом" /><W i="251" k="p" t="пятисотой" /><W i="252" k="p" t="пятисотом" /><W i="253" k="p" t="пятисотых" /><W i="261" k="o" t="пятьсот" /><W i="263" k="o" t="пятисот" /><W i="266" k="o" t="пятистам" /><W i="269" k="o" t="пятьсот" /><W i="273" k="o" t="пятьюстами" /><W i="283" k="o" t="пятистах" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="600"><W i="230" k="p" t="шестисотый" /><W i="231" k="p" t="шестисотая" /><W i="232" k="p" t="шестисотое" /><W i="233" k="p" t="шестисотые" /><W i="234" k="p" t="шестисотого" /><W i="235" k="p" t="шестисотой" /><W i="236" k="p" t="шестисотого" /><W i="237" k="p" t="шестисотых" /><W i="238" k="p" t="шестисотому" /><W i="239" k="p" t="шестисотой" /><W i="240" k="p" t="шестисотому" 
/><W i="241" k="p" t="шестисотым" /><W i="242" k="p" t="шестисотого" /><W i="242" k="p" t="шестисотый" /><W i="243" k="p" t="шестисотую" /><W i="244" k="p" t="шестисотое" /><W i="245" k="p" t="шестисотые" /><W i="245" k="p" t="шестисотых" /><W i="246" k="p" t="шестисотым" /><W i="247" k="p" t="шестисотой" /><W i="248" k="p" t="шестисотым" /><W i="249" k="p" t="шестисотыми" /><W i="250" k="p" t="шестисотом" /><W i="251" k="p" t="шестисотой" /><W i="252" k="p" t="шестисотом" /><W i="253" k="p" t="шестисотых" /><W i="261" k="o" t="шестьсот" /><W i="263" k="o" t="шестисот" /><W i="266" k="o" t="шестистам" /><W i="269" k="o" t="шестьсот" /><W i="273" k="o" t="шестьюстами" /><W i="283" k="o" t="шестистах" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="700"><W i="230" k="p" t="семисотый" /><W i="231" k="p" t="семисотая" /><W i="232" k="p" t="семисотое" /><W i="233" k="p" t="семисотые" /><W i="234" k="p" t="семисотого" /><W i="235" k="p" t="семисотой" /><W i="236" k="p" t="семисотого" /><W i="237" k="p" t="семисотых" /><W i="238" k="p" t="семисотому" /><W i="239" k="p" t="семисотой" /><W i="240" k="p" t="семисотому" /><W i="241" k="p" t="семисотым" /><W i="242" k="p" t="семисотого" /><W i="242" k="p" t="семисотый" /><W i="243" k="p" t="семисотую" /><W i="244" k="p" t="семисотое" /><W i="245" k="p" t="семисотые" /><W i="245" k="p" t="семисотых" /><W i="246" k="p" t="семисотым" /><W i="247" k="p" t="семисотой" /><W i="248" k="p" t="сем��сотым" /><W i="249" k="p" t="семисотыми" /><W i="250" k="p" t="семисотом" /><W i="251" k="p" t="семисотой" /><W i="252" 
k="p" t="семисотом" /><W i="253" k="p" t="семисотых" /><W i="261" k="o" t="семьсот" /><W i="263" k="o" t="семисот" /><W i="266" k="o" t="семистам" /><W i="269" k="o" t="семьсот" /><W i="273" k="o" t="семьюстами" /><W i="283" k="o" t="семистах" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="800"><W i="230" k="p" t="восьмисотый" /><W i="231" k="p" t="восьмисотая" /><W i="232" k="p" t="восьмисотое" /><W i="233" k="p" t="восьмисотые" /><W i="234" k="p" t="восьмисотого" /><W i="235" k="p" t="восьмисотой" /><W i="236" k="p" t="восьмисотого" /><W i="237" k="p" t="восьмисотых" /><W i="238" k="p" t="восьмисотому" /><W i="239" k="p" t="восьмисотой" /><W i="240" k="p" t="восьмисотому" /><W i="241" k="p" t="восьмисотым" /><W i="242" k="p" t="восьмисотого" /><W i="242" k="p" t="восьмисотый" /><W i="243" k="p" t="восьмисотую" /><W i="244" k="p" t="восьмисотое" /><W i="245" k="p" t="восьмисотые" /><W i="245" k="p" t="восьмисотых" /><W i="246" k="p" t="восьмисотым" /><W i="247" k="p" t="восьмисотой" /><W i="248" k="p" t="восьмисотым" /><W i="249" k="p" t="восьмисотыми" /><W i="250" k="p" t="восьмисотом" /><W i="251" k="p" t="восьмисотой" /><W i="252" k="p" t="восьмисотом" /><W i="253" k="p" t="восьмисотых" /><W i="261" k="o" t="восемьсот" /><W i="263" k="o" t="восьмисот" /><W i="266" k="o" t="восьмистам" /><W i="269" k="o" t="восемьсот" /><W i="273" k="o" t="восемьюстами" /><W i="283" k="o" t="восьмистах" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E 
i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="900"><W i="230" k="p" t="девятисотый" /><W i="231" k="p" t="девятисотая" /><W i="232" k="p" t="девятисотое" /><W i="233" k="p" t="девятисотые" /><W i="234" k="p" t="девятисотого" /><W i="235" k="p" t="девятисотой" /><W i="236" k="p" t="девятисотого" /><W i="237" k="p" t="девятисотых" /><W i="238" k="p" t="девятисотому" /><W i="239" k="p" t="девятисотой" /><W i="240" k="p" t="девятисотому" /><W i="241" k="p" t="девятисотым" /><W i="242" k="p" t="девятисотого" /><W i="242" k="p" t="девятисотый" /><W i="243" k="p" t="девятисотую" /><W i="244" k="p" t="девятисотое" /><W i="245" k="p" t="девятисотые" /><W i="245" k="p" t="девятисотых" /><W i="246" k="p" t="девятисотым" /><W i="247" k="p" t="девятисотой" /><W i="248" k="p" t="девятисотым" /><W i="249" k="p" t="девятисотыми" /><W i="250" k="p" t="девятисотом" /><W i="251" k="p" t="девятисотой" /><W i="252" k="p" t="девятисотом" /><W i="253" k="p" t="девятисотых" /><W i="261" k="o" t="девятьсот" /><W i="263" k="o" t="девятисот" /><W i="266" k="o" t="девятистам" /><W i="269" k="o" t="девятьсот" /><W i="273" k="o" t="девятьюстами" /><W i="283" k="o" t="девятистах" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="1000"><W i="230" k="p" t="тысячный" /><W i="231" k="p" t="тысячная" /><W i="232" k="p" 
t="тысячное" /><W i="233" k="p" t="тысячные" /><W i="234" k="p" t="тысячного" /><W i="235" k="p" t="тысячной" /><W i="236" k="p" t="тысячного" /><W i="237" k="p" t="тысячных" /><W i="238" k="p" t="тысячному" /><W i="239" k="p" t="тысячной" /><W i="240" k="p" t="тысячному" /><W i="241" k="p" t="тысячным" /><W i="242" k="p" t="тысячного" /><W i="242" k="p" t="тысячный" /><W i="243" k="p" t="тысячную" /><W i="244" k="p" t="тысячное" /><W i="245" k="p" t="тысячные" /><W i="245" k="p" t="тысячных" /><W i="246" k="p" t="тысячным" /><W i="247" k="p" t="тысячной" /><W i="247" k="p" t="тысячною" /><W i="248" k="p" t="тысячным" /><W i="249" k="p" t="тысячными" /><W i="250" k="p" t="тысячном" /><W i="251" k="p" t="тысячной" /><W i="252" k="p" t="тысячном" /><W i="253" k="p" t="тысячных" /><W i="261" k="o" t="тысяча" /><W i="261" k="o" t="тысячи" /><W i="263" k="o" t="тысяч" /><W i="263" k="o" t="тысячи" /><W i="266" k="o" t="тысячам" /><W i="266" k="o" t="тысяче" /><W i="269" k="o" t="тысячи" /><W i="269" k="o" t="тысячу" /><W i="273" k="o" t="тысячами" /><W i="273" k="o" t="тысячей" /><W i="273" k="o" t="тысячью" /><W i="283" k="o" t="тысячах" /><W i="283" k="o" t="тысяче" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="1000000"><W i="230" k="p" t="миллионный" /><W i="231" k="p" t="миллионная" /><W i="232" k="p" t="миллионное" /><W i="233" k="p" t="миллионные" /><W i="234" k="p" t="миллионного" /><W i="235" k="p" t="миллионной" /><W i="236" k="p" t="миллионного" /><W i="237" k="p" t="миллионных" /><W i="238" k="p" t="миллионному" /><W i="239" k="p" 
t="миллионной" /><W i="240" k="p" t="миллионному" /><W i="241" k="p" t="миллионным" /><W i="242" k="p" t="миллионного" /><W i="242" k="p" t="миллионный" /><W i="243" k="p" t="миллионную" /><W i="244" k="p" t="миллионное" /><W i="245" k="p" t="миллионные" /><W i="245" k="p" t="миллионных" /><W i="246" k="p" t="миллионным" /><W i="247" k="p" t="миллионной" /><W i="247" k="p" t="миллионною" /><W i="248" k="p" t="миллионным" /><W i="249" k="p" t="миллионными" /><W i="250" k="p" t="миллионном" /><W i="251" k="p" t="миллионной" /><W i="252" k="p" t="миллионном" /><W i="253" k="p" t="миллионных" /><W i="261" k="o" t="миллион" /><W i="263" k="o" t="миллиона" /><W i="263" k="o" t="миллионов" /><W i="266" k="o" t="миллионам" /><W i="266" k="o" t="миллиону" /><W i="269" k="o" t="миллион" /><W i="273" k="o" t="миллионами" /><W i="273" k="o" t="миллионом" /><W i="283" k="o" t="миллионах" /><W i="283" k="o" t="миллионе" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="х" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="241" t="м" /><E i="242" t="го" /><E i="243" t="ю" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N><N v="1000000000"><W i="230" k="p" t="миллиардный" /><W i="231" k="p" t="миллиардная" /><W i="232" k="p" t="миллиардное" /><W i="233" k="p" t="миллиардные" /><W i="234" k="p" t="миллиардного" /><W i="235" k="p" t="миллиардной" /><W i="236" k="p" t="миллиардного" /><W i="237" k="p" t="миллиардным" /><W i="237" k="p" t="миллиардных" /><W i="238" k="p" t="миллиардному" /><W i="239" k="p" t="миллиардной" /><W i="240" k="p" t="миллиардному" /><W i="242" k="p" t="миллиардного" /><W i="243" k="p" t="миллиардный" /><W i="243" k="p" t="миллиардную" /><W i="244" k="p" t="миллиардное" /><W i="245" k="p" 
t="миллиардные" /><W i="245" k="p" t="миллиардных" /><W i="246" k="p" t="миллиардным" /><W i="247" k="p" t="миллиардной" /><W i="247" k="p" t="миллиардною" /><W i="248" k="p" t="миллиардным" /><W i="249" k="p" t="миллиардными" /><W i="250" k="p" t="миллиардном" /><W i="251" k="p" t="миллиардной" /><W i="252" k="p" t="миллиардном" /><W i="253" k="p" t="миллиардных" /><W i="261" k="o" t="миллиард" /><W i="261" k="o" t="миллиарды" /><W i="263" k="o" t="миллиарда" /><W i="263" k="o" t="миллиардов" /><W i="266" k="o" t="миллиардам" /><W i="266" k="o" t="миллиарду" /><W i="269" k="o" t="миллиард" /><W i="269" k="o" t="миллиарды" /><W i="273" k="o" t="миллиардами" /><W i="273" k="o" t="миллиардом" /><W i="283" k="o" t="миллиардах" /><W i="283" k="o" t="миллиарде" /><E i="230" t="й" /><E i="231" t="я" /><E i="232" t="е" /><E i="233" t="е" /><E i="234" t="го" /><E i="235" t="й" /><E i="236" t="го" /><E i="237" t="м" /><E i="238" t="му" /><E i="239" t="й" /><E i="240" t="му" /><E i="242" t="го" /><E i="243" t="й" /><E i="244" t="е" /><E i="245" t="е" /><E i="246" t="м" /><E i="247" t="й" /><E i="248" t="м" /><E i="249" t="ми" /><E i="250" t="м" /><E i="251" t="й" /><E i="252" t="м" /><E i="253" t="х" /></N></NumbData>
latest_release/release_small.xml ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ <?xml version='1.0' encoding='utf-8'?>
2
+ <Root batch_size="BatchSize" inflect_result="Inflect/Decoder_1/decoder/transpose_1" inflect_x_cls="Inflect/XClass" inflect_y_cls="Inflect/YClass" lem_cls_result="Reshape_1" lem_result="ExpandDims" lem_x_cls="Lemm_1/XClass" prob_case="Case/Softmax" prob_gndr="Gndr/Softmax" prob_mood="Mood/Softmax" prob_nmbr="Nmbr/Softmax" prob_pers="Pers/Softmax" prob_post="Post/Softmax" prob_tens="Tens/Softmax" prob_voic="Voic/Softmax" res_case="Case/Results" res_gndr="Gndr/Results" res_indexes="Main/Results" res_mood="Mood/Results" res_nmbr="Nmbr/Results" res_pers="Pers/Results" res_post="Post/Results" res_tens="Tens/Results" res_values="Main/Results" res_voic="Voic/Results" seq_len="SeqLen" x_ind="XIndexes" x_shape="XShape" x_val="XValues"><Chars end_char="35" start_char="36"><Char index="0" value="UNDEFINED" /><Char index="1" value="а" /><Char index="2" value="б" /><Char index="3" value="в" /><Char index="4" value="г" /><Char index="5" value="д" /><Char index="6" value="е" /><Char index="7" value="ё" /><Char index="8" value="ж" /><Char index="9" value="з" /><Char index="10" value="и" /><Char index="11" value="й" /><Char index="12" value="к" /><Char index="13" value="л" /><Char index="14" value="м" /><Char index="15" value="н" /><Char index="16" value="о" /><Char index="17" value="п" /><Char index="18" value="р" /><Char index="19" value="с" /><Char index="20" value="т" /><Char index="21" value="у" /><Char index="22" value="ф" /><Char index="23" value="х" /><Char index="24" value="ч" /><Char index="25" value="ц" /><Char index="26" value="ш" /><Char index="27" value="щ" /><Char index="28" value="ъ" /><Char index="29" value="ы" /><Char index="30" value="ь" /><Char index="31" value="э" /><Char index="32" value="ю" /><Char index="33" value="я" /><Char index="34" value="-" /></Chars><Grams><G key="post" op="Post/Softmax" /><G key="gndr" op="Gndr/Softmax" /><G key="nmbr" op="Nmbr/Softmax" /><G key="case" op="Case/Softmax" /><G key="pers" op="Pers/Softmax" /><G key="tens" 
op="Tens/Softmax" /><G key="mood" op="Mood/Softmax" /><G key="voic" op="Voic/Softmax" /></Grams><Inflect><Im i="48"><I i="173" /><I i="83" /><I i="75" /><I i="214" /><I i="203" /><I i="49" /><I i="213" /><I i="85" /><I i="29" /><I i="82" /><I i="212" /><I i="207" /><I i="57" /><I i="28" /><I i="38" /><I i="0" /></Im><Im i="58"><I i="8" /><I i="78" /><I i="6" /><I i="68" /><I i="10" /><I i="216" /><I i="144" /><I i="5" /><I i="217" /><I i="1" /><I i="7" /><I i="205" /><I i="4" /><I i="47" /></Im><Im i="135"><I i="177" /><I i="105" /><I i="123" /><I i="79" /><I i="220" /><I i="180" /><I i="215" /><I i="141" /><I i="34" /><I i="59" /><I i="101" /><I i="61" /><I i="131" /><I i="138" /><I i="197" /><I i="176" /><I i="86" /><I i="35" /><I i="161" /><I i="3" /><I i="183" /><I i="104" /><I i="115" /><I i="195" /><I i="143" /><I i="111" /><I i="120" /><I i="108" /><I i="127" /><I i="134" /><I i="133" /><I i="158" /><I i="51" /><I i="218" /><I i="56" /><I i="26" /><I i="185" /><I i="102" /><I i="149" /><I i="11" /><I i="192" /><I i="94" /><I i="174" /><I i="46" /><I i="53" /><I i="52" /><I i="107" /><I i="76" /><I i="124" /><I i="121" /><I i="97" /><I i="151" /><I i="159" /><I i="50" /><I i="87" /><I i="9" /><I i="19" /><I i="27" /><I i="194" /><I i="226" /><I i="55" /><I i="25" /><I i="125" /><I i="71" /><I i="132" /><I i="130" /><I i="100" /><I i="160" /><I i="179" /><I i="150" /><I i="129" /><I i="118" /><I i="198" /><I i="178" /><I i="162" /><I i="65" /><I i="21" /><I i="122" /><I i="80" /><I i="201" /><I i="202" /><I i="72" /><I i="190" /><I i="126" /><I i="20" /><I i="181" /><I i="99" /><I i="184" /><I i="66" /><I i="175" /><I i="44" /><I i="60" /><I i="199" /><I i="74" /><I i="204" /><I i="95" /><I i="110" /><I i="24" /><I i="92" /><I i="116" /><I i="88" /><I i="77" /><I i="191" /><I i="43" /><I i="45" /><I i="30" /><I i="117" /><I i="54" /><I i="148" /><I i="62" /><I i="137" /><I i="64" /><I i="93" /><I i="23" /><I i="196" /><I i="22" /><I i="73" /><I i="67" /><I 
i="2" /><I i="114" /><I i="140" /><I i="139" /><I i="128" /><I i="109" /><I i="186" /><I i="200" /><I i="187" /><I i="193" /></Im><Im i="18"><I i="12" /><I i="63" /><I i="32" /><I i="90" /><I i="152" /><I i="36" /><I i="39" /><I i="41" /><I i="96" /><I i="98" /><I i="31" /><I i="40" /><I i="189" /><I i="103" /><I i="14" /><I i="17" /><I i="13" /><I i="89" /><I i="42" /><I i="16" /><I i="119" /><I i="70" /><I i="221" /><I i="15" /><I i="33" /><I i="69" /><I i="182" /><I i="37" /><I i="91" /></Im><Im i="112"><I i="136" /><I i="228" /><I i="157" /><I i="142" /><I i="113" /><I i="146" /><I i="156" /><I i="147" /><I i="153" /><I i="224" /><I i="227" /><I i="145" /><I i="155" /><I i="222" /><I i="154" /></Im><Im i="163"><I i="165" /><I i="168" /><I i="172" /><I i="81" /><I i="171" /><I i="164" /><I i="106" /><I i="167" /><I i="219" /><I i="166" /><I i="169" /><I i="170" /></Im></Inflect></Root>
latest_release/tags.xml ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ <?xml version='1.0' encoding='utf-8'?>
2
+ <Tags><T i="0" o="219" p="nn" v="noun,masc,sing,datv,,,," /><T i="1" o="194" p="nn" v="noun,femn,plur,gent,,,," /><T i="2" o="21" p="nn" v="prtf,neut,sing,accs,,past,,actv" /><T i="3" o="33" p="nn" v="prtf,neut,sing,nomn,,past,,actv" /><T i="4" o="202" p="nn" v="noun,femn,sing,datv,,,," /><T i="5" o="195" p="nn" v="noun,femn,plur,nomn,,,," /><T i="6" o="199" p="nn" v="noun,femn,sing,loct,,,," /><T i="7" o="203" p="nn" v="noun,femn,sing,gent,,,," /><T i="8" o="192" p="nn" v="noun,femn,plur,accs,,,," /><T i="9" o="113" p="nn" v="verb,masc,sing,,,past,indc," /><T i="10" o="190" p="nn" v="noun,femn,plur,loct,,,," /><T i="11" o="131" p="nn" v="verb,,sing,,,,impr," /><T i="12" o="152" p="nn" v="adjf,masc,sing,datv,,,," /><T i="13" o="140" p="nn" v="adjf,neut,sing,datv,,,," /><T i="14" o="141" p="nn" v="adjf,neut,sing,gent,,,," /><T i="15" o="153" p="nn" v="adjf,masc,sing,gent,,,," /><T i="16" o="151" p="nn" v="adjf,masc,sing,accs,,,," /><T i="17" o="144" p="nn" v="adjf,femn,sing,ablt,,,," /><T i="18" l="1" o="154" p="nn" v="adjf,masc,sing,nomn,,,," /><T i="19" o="71" p="nn" v="prtf,masc,sing,accs,,pres,,actv" /><T i="20" o="83" p="nn" v="prtf,masc,sing,nomn,,pres,,actv" /><T i="21" o="46" p="nn" v="prtf,femn,sing,accs,,pres,,pssv" /><T i="22" o="75" p="nn" v="prtf,masc,sing,datv,,pres,,actv" /><T i="23" o="27" p="nn" v="prtf,neut,sing,datv,,pres,,actv" /><T i="24" o="51" p="nn" v="prtf,femn,sing,datv,,pres,,actv" /><T i="25" o="55" p="nn" v="prtf,femn,sing,gent,,pres,,actv" /><T i="26" o="43" p="nn" v="prtf,femn,sing,ablt,,pres,,actv" /><T i="27" o="39" p="nn" v="prtf,femn,sing,loct,,pres,,actv" /><T i="28" o="210" p="nn" v="noun,masc,plur,nomn,,,," /><T i="29" o="207" p="nn" v="noun,masc,plur,accs,,,," /><T i="30" o="121" p="nn" v="verb,,plur,,,past,indc," /><T i="31" o="158" p="nn" v="adjf,,plur,datv,,,," /><T i="32" o="138" p="nn" v="adjf,neut,sing,ablt,,,," /><T i="33" o="150" p="nn" v="adjf,masc,sing,ablt,,,," /><T i="34" o="8" p="nn" 
v="prts,masc,sing,,,past,,pssv" /><T i="35" o="115" p="nn" v="verb,,plur,,3per,futr,indc," /><T i="36" o="139" p="nn" v="adjf,neut,sing,accs,,,," /><T i="37" o="142" p="nn" v="adjf,neut,sing,nomn,,,," /><T i="38" o="209" p="nn" v="noun,masc,plur,gent,,,," /><T i="39" o="148" p="nn" v="adjf,femn,sing,nomn,,,," /><T i="40" o="143" p="nn" v="adjf,femn,sing,loct,,,," /><T i="41" o="146" p="nn" v="adjf,femn,sing,datv,,,," /><T i="42" o="147" p="nn" v="adjf,femn,sing,gent,,,," /><T i="43" o="103" p="nn" v="prtf,,plur,gent,,pres,,actv" /><T i="44" o="87" p="nn" v="prtf,,plur,loct,,pres,,actv" /><T i="45" o="95" p="nn" v="prtf,,plur,accs,,pres,,actv" /><T i="46" o="2" p="nn" v="grnd,,,,,past,," /><T i="47" o="200" p="nn" v="noun,femn,sing,ablt,,,," /><T i="48" l="1" o="221" p="nn" v="noun,masc,sing,nomn,,,," /><T i="49" o="218" p="nn" v="noun,masc,sing,accs,,,," /><T i="50" o="99" p="nn" v="prtf,,plur,datv,,pres,,actv" /><T i="51" o="67" p="nn" v="prtf,masc,sing,ablt,,pres,,actv" /><T i="52" o="19" p="nn" v="prtf,neut,sing,ablt,,pres,,actv" /><T i="53" o="53" p="nn" v="prtf,femn,sing,gent,,past,,actv" /><T i="54" o="37" p="nn" v="prtf,femn,sing,loct,,past,,actv" /><T i="55" o="49" p="nn" v="prtf,femn,sing,datv,,past,,actv" /><T i="56" o="41" p="nn" v="prtf,femn,sing,ablt,,past,,actv" /><T i="57" o="220" p="nn" v="noun,masc,sing,gent,,,," /><T i="58" l="1" o="204" p="nn" v="noun,femn,sing,nomn,,,," /><T i="59" o="111" p="nn" v="verb,femn,sing,,,past,indc," /><T i="60" o="65" p="nn" v="prtf,masc,sing,ablt,,past,,actv" /><T i="61" o="17" p="nn" v="prtf,neut,sing,ablt,,past,,actv" /><T i="62" o="97" p="nn" v="prtf,,plur,datv,,past,,actv" /><T i="63" o="145" p="nn" v="adjf,femn,sing,accs,,,," /><T i="64" o="72" p="nn" v="prtf,masc,sing,datv,,past,,pssv" /><T i="65" o="24" p="nn" v="prtf,neut,sing,datv,,past,,pssv" /><T i="66" o="25" p="nn" v="prtf,neut,sing,datv,,past,,actv" /><T i="67" o="73" p="nn" v="prtf,masc,sing,datv,,past,,actv" /><T i="68" o="193" p="nn" 
v="noun,femn,plur,datv,,,," /><T i="69" o="137" p="nn" v="adjf,neut,sing,loct,,,," /><T i="70" o="149" p="nn" v="adjf,masc,sing,loct,,,," /><T i="71" o="119" p="nn" v="verb,,plur,,1per,futr,indc," /><T i="72" o="107" p="nn" v="prtf,,plur,nomn,,pres,,actv" /><T i="73" o="10" p="nn" v="prts,,plur,,,past,,pssv" /><T i="74" o="56" p="nn" v="prtf,femn,sing,nomn,,past,,pssv" /><T i="75" o="216" p="nn" v="noun,masc,sing,loct,,,," /><T i="76" o="92" p="nn" v="prtf,,plur,accs,,past,,pssv" /><T i="77" o="104" p="nn" v="prtf,,plur,nomn,,past,,pssv" /><T i="78" o="201" p="nn" v="noun,femn,sing,accs,,,," /><T i="79" o="117" p="nn" v="verb,,plur,,2per,futr,indc," /><T i="80" o="122" p="nn" v="verb,,plur,,,,impr," /><T i="81" o="164" p="nn" v="noun,msf,plur,datv,,,," /><T i="82" o="217" p="nn" v="noun,masc,sing,ablt,,,," /><T i="83" o="208" p="nn" v="noun,masc,plur,datv,,,," /><T i="84" o="223" p="nn" v="noun,,plur,loct,,,," /><T i="85" o="205" p="nn" v="noun,masc,plur,loct,,,," /><T i="86" o="60" p="nn" v="prtf,masc,sing,loct,,past,,pssv" /><T i="87" o="12" p="nn" v="prtf,neut,sing,loct,,past,,pssv" /><T i="88" o="4" p="nn" v="prts,neut,sing,,,past,,pssv" /><T i="89" o="159" p="nn" v="adjf,,plur,gent,,,," /><T i="90" o="155" p="nn" v="adjf,,plur,loct,,,," /><T i="91" o="157" p="nn" v="adjf,,plur,accs,,,," /><T i="92" o="81" p="nn" v="prtf,masc,sing,nomn,,past,,actv" /><T i="93" o="69" p="nn" v="prtf,masc,sing,accs,,past,,actv" /><T i="94" o="63" p="nn" v="prtf,masc,sing,loct,,pres,,actv" /><T i="95" o="15" p="nn" v="prtf,neut,sing,loct,,pres,,actv" /><T i="96" o="160" p="nn" v="adjf,,plur,nomn,,,," /><T i="97" o="7" p="nn" v="prts,femn,sing,,,pres,,pssv" /><T i="98" o="108" p="nn" v="comp,,,,,,," /><T i="99" o="61" p="nn" v="prtf,masc,sing,loct,,past,,actv" /><T i="100" o="13" p="nn" v="prtf,neut,sing,loct,,past,,actv" /><T i="101" o="40" p="nn" v="prtf,femn,sing,ablt,,past,,pssv" /><T i="102" o="47" p="nn" v="prtf,femn,sing,accs,,pres,,actv" /><T i="103" o="156" p="nn" 
v="adjf,,plur,ablt,,,," /><T i="104" o="35" p="nn" v="prtf,neut,sing,nomn,,pres,,actv" /><T i="105" o="23" p="nn" v="prtf,neut,sing,accs,,pres,,actv" /><T i="106" o="162" p="nn" v="noun,msf,plur,ablt,,,," /><T i="107" o="70" p="nn" v="prtf,masc,sing,accs,,pres,,pssv" /><T i="108" o="78" p="nn" v="prtf,masc,sing,gent,,pres,,pssv" /><T i="109" o="30" p="nn" v="prtf,neut,sing,gent,,pres,,pssv" /><T i="110" o="77" p="nn" v="prtf,masc,sing,gent,,past,,actv" /><T i="111" o="29" p="nn" v="prtf,neut,sing,gent,,past,,actv" /><T i="112" l="1" o="189" p="nn" v="noun,neut,sing,nomn,,,," /><T i="113" o="186" p="nn" v="noun,neut,sing,accs,,,," /><T i="114" o="31" p="nn" v="prtf,neut,sing,gent,,pres,,actv" /><T i="115" o="79" p="nn" v="prtf,masc,sing,gent,,pres,,actv" /><T i="116" o="124" p="nn" v="verb,,sing,,3per,pres,indc," /><T i="117" o="118" p="nn" v="verb,,plur,,2per,pres,indc," /><T i="118" o="59" p="nn" v="prtf,femn,sing,nomn,,pres,,actv" /><T i="119" o="135" p="nn" v="adjs,masc,sing,,,,," /><T i="120" o="101" p="nn" v="prtf,,plur,gent,,past,,actv" /><T i="121" o="85" p="nn" v="prtf,,plur,loct,,past,,actv" /><T i="122" o="93" p="nn" v="prtf,,plur,accs,,past,,actv" /><T i="123" o="128" p="nn" v="verb,,sing,,1per,pres,indc," /><T i="124" o="98" p="nn" v="prtf,,plur,datv,,pres,,pssv" /><T i="125" o="18" p="nn" v="prtf,neut,sing,ablt,,pres,,pssv" /><T i="126" o="66" p="nn" v="prtf,masc,sing,ablt,,pres,,pssv" /><T i="127" o="89" p="nn" v="prtf,,plur,ablt,,past,,actv" /><T i="128" o="94" p="nn" v="prtf,,plur,accs,,pres,,pssv" /><T i="129" o="86" p="nn" v="prtf,,plur,loct,,pres,,pssv" /><T i="130" o="102" p="nn" v="prtf,,plur,gent,,pres,,pssv" /><T i="131" o="14" p="nn" v="prtf,neut,sing,loct,,pres,,pssv" /><T i="132" o="62" p="nn" v="prtf,masc,sing,loct,,pres,,pssv" /><T i="133" o="116" p="nn" v="verb,,plur,,3per,pres,indc," /><T i="134" o="88" p="nn" v="prtf,,plur,ablt,,past,,pssv" /><T i="135" l="1" o="109" p="nn" v="infn,,,,,,," /><T i="136" o="184" p="nn" 
v="noun,neut,sing,loct,,,," /><T i="137" o="68" p="nn" v="prtf,masc,sing,accs,,past,,pssv" /><T i="138" o="28" p="nn" v="prtf,neut,sing,gent,,past,,pssv" /><T i="139" o="76" p="nn" v="prtf,masc,sing,gent,,past,,pssv" /><T i="140" o="105" p="nn" v="prtf,,plur,nomn,,past,,actv" /><T i="141" o="42" p="nn" v="prtf,femn,sing,ablt,,pres,,pssv" /><T i="142" o="187" p="nn" v="noun,neut,sing,datv,,,," /><T i="143" o="9" p="nn" v="prts,masc,sing,,,pres,,pssv" /><T i="144" o="191" p="nn" v="noun,femn,plur,ablt,,,," /><T i="145" o="179" p="nn" v="noun,neut,plur,nomn,,,," /><T i="146" o="188" p="nn" v="noun,neut,sing,gent,,,," /><T i="147" o="176" p="nn" v="noun,neut,plur,accs,,,," /><T i="148" o="96" p="nn" v="prtf,,plur,datv,,past,,pssv" /><T i="149" o="16" p="nn" v="prtf,neut,sing,ablt,,past,,pssv" /><T i="150" o="64" p="nn" v="prtf,masc,sing,ablt,,past,,pssv" /><T i="151" o="58" p="nn" v="prtf,femn,sing,nomn,,pres,,pssv" /><T i="152" o="136" p="nn" v="adjs,,plur,,,,," /><T i="153" o="185" p="nn" v="noun,neut,sing,ablt,,,," /><T i="154" o="177" p="nn" v="noun,neut,plur,datv,,,," /><T i="155" o="175" p="nn" v="noun,neut,plur,ablt,,,," /><T i="156" o="174" p="nn" v="noun,neut,plur,loct,,,," /><T i="157" o="178" p="nn" v="noun,neut,plur,gent,,,," /><T i="158" o="100" p="nn" v="prtf,,plur,gent,,past,,pssv" /><T i="159" o="84" p="nn" v="prtf,,plur,loct,,past,,pssv" /><T i="160" o="20" p="nn" v="prtf,neut,sing,accs,,past,,pssv" /><T i="161" o="32" p="nn" v="prtf,neut,sing,nomn,,past,,pssv" /><T i="162" o="110" p="nn" v="verb,neut,sing,,,past,indc," /><T i="163" l="1" o="173" p="nn" v="noun,msf,sing,nomn,,,," /><T i="164" o="169" p="nn" v="noun,msf,sing,ablt,,,," /><T i="165" o="168" p="nn" v="noun,msf,sing,loct,,,," /><T i="166" o="170" p="nn" v="noun,msf,sing,accs,,,," /><T i="167" o="165" p="nn" v="noun,msf,plur,gent,,,," /><T i="168" o="166" p="nn" v="noun,msf,plur,nomn,,,," /><T i="169" o="171" p="nn" v="noun,msf,sing,datv,,,," /><T i="170" o="161" p="nn" 
v="noun,msf,plur,loct,,,," /><T i="171" o="163" p="nn" v="noun,msf,plur,accs,,,," /><T i="172" o="172" p="nn" v="noun,msf,sing,gent,,,," /><T i="173" o="206" p="nn" v="noun,masc,plur,ablt,,,," /><T i="174" o="120" p="nn" v="verb,,plur,,1per,pres,indc," /><T i="175" o="45" p="nn" v="prtf,femn,sing,accs,,past,,actv" /><T i="176" o="123" p="nn" v="verb,,sing,,3per,futr,indc," /><T i="177" o="52" p="nn" v="prtf,femn,sing,gent,,past,,pssv" /><T i="178" o="36" p="nn" v="prtf,femn,sing,loct,,past,,pssv" /><T i="179" o="48" p="nn" v="prtf,femn,sing,datv,,past,,pssv" /><T i="180" o="11" p="nn" v="prts,,plur,,,pres,,pssv" /><T i="181" o="44" p="nn" v="prtf,femn,sing,accs,,past,,pssv" /><T i="182" o="132" p="nn" v="adjs,neut,sing,,,,," /><T i="183" o="80" p="nn" v="prtf,masc,sing,nomn,,past,,pssv" /><T i="184" o="3" p="nn" v="grnd,,,,,pres,," /><T i="185" o="91" p="nn" v="prtf,,plur,ablt,,pres,,actv" /><T i="186" o="26" p="nn" v="prtf,neut,sing,datv,,pres,,pssv" /><T i="187" o="74" p="nn" v="prtf,masc,sing,datv,,pres,,pssv" /><T i="188" l="1" o="1" p="nn" v="advb,,,,,,," /><T i="189" o="133" p="nn" v="adjs,femn,sing,,,,," /><T i="190" o="125" p="nn" v="verb,,sing,,2per,futr,indc," /><T i="191" o="126" p="nn" v="verb,,sing,,2per,pres,indc," /><T i="192" o="54" p="nn" v="prtf,femn,sing,gent,,pres,,pssv" /><T i="193" o="50" p="nn" v="prtf,femn,sing,datv,,pres,,pssv" /><T i="194" o="38" p="nn" v="prtf,femn,sing,loct,,pres,,pssv" /><T i="195" o="127" p="nn" v="verb,,sing,,1per,futr,indc," /><T i="196" o="22" p="nn" v="prtf,neut,sing,accs,,pres,,pssv" /><T i="197" o="34" p="nn" v="prtf,neut,sing,nomn,,pres,,pssv" /><T i="198" o="57" p="nn" v="prtf,femn,sing,nomn,,past,,actv" /><T i="199" o="106" p="nn" v="prtf,,plur,nomn,,pres,,pssv" /><T i="200" o="90" p="nn" v="prtf,,plur,ablt,,pres,,pssv" /><T i="201" o="5" p="nn" v="prts,neut,sing,,,pres,,pssv" /><T i="202" o="82" p="nn" v="prtf,masc,sing,nomn,,pres,,pssv" /><T i="203" o="211" p="nn" v="noun,masc,sing,loct2,,,," /><T i="204" 
o="6" p="nn" v="prts,femn,sing,,,past,,pssv" /><T i="205" o="198" p="nn" v="noun,femn,sing,voct,,,," /><T i="206" o="227" p="nn" v="noun,,plur,gent,,,," /><T i="207" o="214" p="nn" v="noun,masc,sing,gent2,,,," /><T i="208" o="226" p="nn" v="noun,,plur,datv,,,," /><T i="209" o="225" p="nn" v="noun,,plur,accs,,,," /><T i="210" o="228" p="nn" v="noun,,plur,nomn,,,," /><T i="211" o="224" p="nn" v="noun,,plur,ablt,,,," /><T i="212" o="215" p="nn" v="noun,masc,sing,gent1,,,," /><T i="213" o="213" p="nn" v="noun,masc,sing,voct,,,," /><T i="214" o="212" p="nn" v="noun,masc,sing,loct1,,,," /><T i="215" o="130" p="nn" v="verb,,sing,,,pres,indc," /><T i="216" o="197" p="nn" v="noun,femn,sing,loct1,,,," /><T i="217" o="196" p="nn" v="noun,femn,sing,loct2,,,," /><T i="218" o="114" p="nn" v="verb,masc,sing,,,,impr," /><T i="219" o="167" p="nn" v="noun,msf,sing,voct,,,," /><T i="220" o="129" p="nn" v="verb,,sing,,,futr,indc," /><T i="221" o="134" p="nn" v="adjs,masc,sing,gent,,,," /><T i="222" o="180" p="nn" v="noun,neut,sing,loct2,,,," /><T i="223" o="222" p="nn" v="noun,,plur,voct,,,," /><T i="224" o="182" p="nn" v="noun,neut,sing,gent2,,,," /><T i="225" o="0" p="nn" v="advb,neut,sing,,,,," /><T i="226" o="112" p="nn" v="verb,femn,sing,,,,impr," /><T i="227" o="183" p="nn" v="noun,neut,sing,gent1,,,," /><T i="228" o="181" p="nn" v="noun,neut,sing,loct1,,,," /><T i="230" l="1" o="275" p="numb" v="numb,masc,sing,nomn,,,," /><T i="231" l="1" o="257" p="numb" v="numb,femn,sing,nomn,,,," /><T i="232" l="1" o="239" p="numb" v="numb,neut,sing,nomn,,,," /><T i="233" o="287" p="numb" v="numb,,plur,nomn,,,," /><T i="234" o="274" p="numb" v="numb,masc,sing,gent,,,," /><T i="235" o="256" p="numb" v="numb,femn,sing,gent,,,," /><T i="236" o="238" p="numb" v="numb,neut,sing,gent,,,," /><T i="237" o="286" p="numb" v="numb,,plur,gent,,,," /><T i="238" o="273" p="numb" v="numb,masc,sing,datv,,,," /><T i="239" o="255" p="numb" v="numb,femn,sing,datv,,,," /><T i="240" o="237" p="numb" 
v="numb,neut,sing,datv,,,," /><T i="241" o="285" p="numb" v="numb,,plur,datv,,,," /><T i="242" o="272" p="numb" v="numb,masc,sing,accs,,,," /><T i="243" o="254" p="numb" v="numb,femn,sing,accs,,,," /><T i="244" o="236" p="numb" v="numb,neut,sing,accs,,,," /><T i="245" o="284" p="numb" v="numb,,plur,accs,,,," /><T i="246" o="271" p="numb" v="numb,masc,sing,ablt,,,," /><T i="247" o="253" p="numb" v="numb,femn,sing,ablt,,,," /><T i="248" o="235" p="numb" v="numb,neut,sing,ablt,,,," /><T i="249" o="283" p="numb" v="numb,,plur,ablt,,,," /><T i="250" o="270" p="numb" v="numb,masc,sing,loct,,,," /><T i="251" o="252" p="numb" v="numb,femn,sing,loct,,,," /><T i="252" o="234" p="numb" v="numb,neut,sing,loct,,,," /><T i="253" o="282" p="numb" v="numb,,plur,loct,,,," /><T i="254" o="269" p="numb" v="numb,masc,plur,nomn,,,," /><T i="255" o="268" p="numb" v="numb,masc,plur,gent,,,," /><T i="256" o="267" p="numb" v="numb,masc,plur,datv,,,," /><T i="257" o="266" p="numb" v="numb,masc,plur,accs,,,," /><T i="258" o="265" p="numb" v="numb,masc,plur,ablt,,,," /><T i="259" o="264" p="numb" v="numb,masc,plur,loct,,,," /><T i="260" o="263" p="numb" v="numb,femn,,nomn,,,," /><T i="261" l="1" o="293" p="numb" v="numb,,,nomn,,,," /><T i="262" o="245" p="numb" v="numb,neut,,nomn,,,," /><T i="263" o="292" p="numb" v="numb,,,gent,,,," /><T i="264" o="262" p="numb" v="numb,femn,,gent,,,," /><T i="265" o="244" p="numb" v="numb,neut,,gent,,,," /><T i="266" o="291" p="numb" v="numb,,,datv,,,," /><T i="267" o="261" p="numb" v="numb,femn,,datv,,,," /><T i="268" o="243" p="numb" v="numb,neut,,datv,,,," /><T i="269" o="290" p="numb" v="numb,,,accs,,,," /><T i="270" o="278" p="numb" v="numb,masc,,accs,,,," /><T i="271" o="260" p="numb" v="numb,femn,,accs,,,," /><T i="272" o="242" p="numb" v="numb,neut,,accs,,,," /><T i="273" o="289" p="numb" v="numb,,,ablt,,,," /><T i="274" o="277" p="numb" v="numb,masc,,ablt,,,," /><T i="275" o="259" p="numb" v="numb,femn,,ablt,,,," /><T i="276" o="241" p="numb" 
v="numb,neut,,ablt,,,," /><T i="277" o="258" p="numb" v="numb,femn,,loct,,,," /><T i="278" o="240" p="numb" v="numb,neut,,loct,,,," /><T i="279" l="1" o="281" p="numb" v="numb,masc,,nomn,,,," /><T i="280" o="280" p="numb" v="numb,masc,,gent,,,," /><T i="281" o="279" p="numb" v="numb,masc,,datv,,,," /><T i="282" o="276" p="numb" v="numb,masc,,loct,,,," /><T i="283" o="288" p="numb" v="numb,,,loct,,,," /><T i="284" o="251" p="numb" v="numb,femn,plur,nomn,,,," /><T i="285" o="250" p="numb" v="numb,femn,plur,gent,,,," /><T i="286" o="249" p="numb" v="numb,femn,plur,datv,,,," /><T i="287" o="248" p="numb" v="numb,femn,plur,accs,,,," /><T i="288" o="247" p="numb" v="numb,femn,plur,ablt,,,," /><T i="289" o="246" p="numb" v="numb,femn,plur,loct,,,," /><T i="290" l="1" o="232" p="reg" v="punct,,,,,,," /><T i="291" l="1" o="231" p="reg" v="int,,,,,,," /><T i="292" l="1" o="230" p="reg" v="romn,,,,,,," /><T i="294" l="1" o="229" p="reg" v="unkn,,,,,,," /><T i="295" l="1" o="374" p="dict" v="npro,,sing,nomn,1per,,," /><T i="296" l="1" o="294" p="dict" v="intj,,,,,,," /><T i="297" l="1" o="296" p="dict" v="conj,,,,,,," /><T i="298" o="325" p="dict" v="npro,masc,sing,ablt,,,," /><T i="299" l="1" o="309" p="dict" v="npro,neut,sing,nomn,,,," /><T i="300" l="1" o="321" p="dict" v="npro,femn,sing,nomn,,,," /><T i="301" l="1" o="297" p="dict" v="prep,,,,,,," /><T i="302" l="1" o="295" p="dict" v="prcl,,,,,,," /><T i="303" o="299" p="dict" v="npro,neut,sing,loct,,,," /><T i="304" o="329" p="dict" v="npro,masc,sing,datv,,,," /><T i="305" o="327" p="dict" v="npro,masc,sing,accs,,,," /><T i="306" l="1" o="233" p="dict" v="pred,,,,,pres,," /><T i="307" o="303" p="dict" v="npro,neut,sing,accs,,,," /><T i="308" l="1" o="333" p="dict" v="npro,masc,sing,nomn,,,," /><T i="309" o="368" p="dict" v="npro,,sing,datv,1per,,," /><T i="310" o="372" p="dict" v="npro,,sing,gent,,,," /><T i="311" o="328" p="dict" v="npro,masc,sing,datv,3per,,," /><T i="312" o="312" p="dict" 
v="npro,femn,sing,ablt,3per,,," /><T i="313" o="371" p="dict" v="npro,,sing,gent,1per,,," /><T i="314" o="361" p="dict" v="npro,,sing,ablt,2per,,," /><T i="315" o="301" p="dict" v="npro,neut,sing,ablt,,,," /><T i="316" o="311" p="dict" v="npro,femn,sing,loct,,,," /><T i="317" o="323" p="dict" v="npro,masc,sing,loct,,,," /><T i="318" o="315" p="dict" v="npro,femn,sing,accs,,,," /><T i="319" o="363" p="dict" v="npro,,sing,ablt,,,," /><T i="320" o="313" p="dict" v="npro,femn,sing,ablt,,,," /><T i="321" o="342" p="dict" v="npro,,plur,accs,3per,,," /><T i="322" o="339" p="dict" v="npro,,plur,ablt,2per,,," /><T i="323" l="1" o="356" p="dict" v="npro,,plur,nomn,1per,,," /><T i="324" o="305" p="dict" v="npro,neut,sing,datv,,,," /><T i="325" l="1" o="320" p="dict" v="npro,femn,sing,nomn,3per,,," /><T i="326" o="347" p="dict" v="npro,,plur,datv,2per,,," /><T i="327" l="1" o="355" p="dict" v="npro,,plur,nomn,2per,,," /><T i="328" o="318" p="dict" v="npro,femn,sing,gent,3per,,," /><T i="329" o="351" p="dict" v="npro,,plur,gent,2per,,," /><T i="330" o="369" p="dict" v="npro,,sing,datv,,,," /><T i="331" o="344" p="dict" v="npro,,plur,accs,1per,,," /><T i="332" o="307" p="dict" v="npro,neut,sing,gent,,,," /><T i="333" o="348" p="dict" v="npro,,plur,datv,1per,,," /><T i="334" o="304" p="dict" v="npro,neut,sing,datv,3per,,," /><T i="335" o="349" p="dict" v="npro,,plur,datv,,,," /><T i="336" o="300" p="dict" v="npro,neut,sing,ablt,3per,,," /><T i="337" o="326" p="dict" v="npro,masc,sing,accs,3per,,," /><T i="338" o="314" p="dict" v="npro,femn,sing,accs,3per,,," /><T i="339" o="324" p="dict" v="npro,masc,sing,ablt,3per,,," /><T i="340" o="340" p="dict" v="npro,,plur,ablt,1per,,," /><T i="341" o="317" p="dict" v="npro,femn,sing,datv,,,," /><T i="342" o="310" p="dict" v="npro,femn,sing,loct,3per,,," /><T i="343" l="1" o="357" p="dict" v="npro,,plur,nomn,,,," /><T i="344" o="367" p="dict" v="npro,,sing,datv,2per,,," /><T i="345" o="337" p="dict" v="npro,,plur,loct,,,," /><T i="346" 
l="1" o="373" p="dict" v="npro,,sing,nomn,2per,,," /><T i="347" o="370" p="dict" v="npro,,sing,gent,2per,,," /><T i="348" o="341" p="dict" v="npro,,plur,ablt,,,," /><T i="349" o="352" p="dict" v="npro,,plur,gent,1per,,," /><T i="350" o="362" p="dict" v="npro,,sing,ablt,1per,,," /><T i="351" o="306" p="dict" v="npro,neut,sing,gent,3per,,," /><T i="352" o="331" p="dict" v="npro,masc,sing,gent,,,," /><T i="353" o="353" p="dict" v="npro,,plur,gent,,,," /><T i="354" o="338" p="dict" v="npro,,plur,ablt,3per,,," /><T i="355" o="360" p="dict" v="npro,,sing,loct,,,," /><T i="356" l="1" o="332" p="dict" v="npro,masc,sing,nomn,3per,,," /><T i="357" o="345" p="dict" v="npro,,plur,accs,,,," /><T i="358" o="366" p="dict" v="npro,,sing,accs,,,," /><T i="359" o="302" p="dict" v="npro,neut,sing,accs,3per,,," /><T i="360" o="365" p="dict" v="npro,,sing,accs,1per,,," /><T i="361" l="1" o="354" p="dict" v="npro,,plur,nomn,3per,,," /><T i="362" l="1" o="308" p="dict" v="npro,neut,sing,nomn,3per,,," /><T i="363" o="335" p="dict" v="npro,,plur,loct,2per,,," /><T i="364" o="334" p="dict" v="npro,,plur,loct,3per,,," /><T i="365" o="319" p="dict" v="npro,femn,sing,gent,,,," /><T i="366" o="330" p="dict" v="npro,masc,sing,gent,3per,,," /><T i="367" o="336" p="dict" v="npro,,plur,loct,1per,,," /><T i="368" o="346" p="dict" v="npro,,plur,datv,3per,,," /><T i="369" o="358" p="dict" v="npro,,sing,loct,2per,,," /><T i="370" o="350" p="dict" v="npro,,plur,gent,3per,,," /><T i="371" o="298" p="dict" v="npro,neut,sing,loct,3per,,," /><T i="372" o="322" p="dict" v="npro,masc,sing,loct,3per,,," /><T i="373" o="359" p="dict" v="npro,,sing,loct,1per,,," /><T i="374" l="1" o="375" p="dict" v="npro,,sing,nomn,,,," /><T i="375" o="316" p="dict" v="npro,femn,sing,datv,3per,,," /><T i="376" o="364" p="dict" v="npro,,sing,accs,2per,,," /><T i="377" o="343" p="dict" v="npro,,plur,accs,2per,,," /></Tags>
latest_release/test_info.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ post. full_cls_acc: 0.987678530574893; part_cls_acc: 0.9878190630048466
2
+ gndr. full_cls_acc: 0.9604920851889414; part_cls_acc: 0.9672658190851966
3
+ nmbr. full_cls_acc: 0.9931398234969664; part_cls_acc: 0.9935646606086085
4
+ case. full_cls_acc: 0.9806930486635252; part_cls_acc: 0.9876671968294527
5
+ pers. full_cls_acc: 1.0; part_cls_acc: 1.0
6
+ tens. full_cls_acc: 1.0; part_cls_acc: 1.0
7
+ mood. full_cls_acc: 0.9986893840104849; part_cls_acc: 0.9987577639751553
8
+ voic. full_cls_acc: 1.0; part_cls_acc: 1.0
9
+ main. full_cls_acc: 0.9551451187335093; part_cls_acc: 0.9647049460825275
10
+ lemma_acc: 0.9955360766171577
11
+ inflect_acc: 0.9919720286154625
model.py ADDED
@@ -0,0 +1,264 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import shutil
3
+ import tensorflow as tf
4
+ from graph.gram_cls import GramCls
5
+ from graph.main_cls import MainCls
6
+ from graph.lemm import Lemm
7
+ from graph.inflect import Inflect
8
+ from graph.base import TfContext
9
+ from utils import MyDefaultDict, CONFIG
10
+ from tensorflow.python.tools import freeze_graph
11
+
12
+
13
+ class RNN:
14
+ def __init__(self, for_usage):
15
+ self.config = CONFIG
16
+ self.filler = self.config['filler']
17
+ self.checkpoints_keep = 200000
18
+ self.for_usage = for_usage
19
+ self.default_config = self.config['graph_part_configs']['default']
20
+ self.key_configs = MyDefaultDict(
21
+ lambda key: self.default_config,
22
+ {
23
+ key: MyDefaultDict(lambda prop_key: self.default_config[prop_key], self.config['graph_part_configs'][key])
24
+ for key in self.config['graph_part_configs']
25
+ if key != 'default'
26
+ }
27
+ )
28
+ self.export_path = self.config['export_path']
29
+ self.save_path = self.config['save_path']
30
+ self.model_key = self.config['model_key']
31
+ self.miss_steps = self.config['miss_steps'] if 'miss_steps' in self.config else []
32
+ self.start_char = self.config['start_token']
33
+ self.end_char = self.config['end_token']
34
+ self.gram_keys = [
35
+ key
36
+ for key in sorted(self.config['grammemes_types'], key=lambda x: self.config['grammemes_types'][x]['index'])
37
+ ]
38
+ self.main_class_k = self.config['main_class_k']
39
+ self.train_steps = self.config['train_steps']
40
+
41
+ if for_usage:
42
+ self.devices = ['/cpu:0']
43
+ else:
44
+ self.devices = self.config['train_devices']
45
+
46
+ if not os.path.exists(self.save_path):
47
+ os.mkdir(self.save_path)
48
+
49
+ self._build_graph()
50
+
51
+ def _build_graph(self):
52
+ self.graph = tf.Graph()
53
+ self.checks = []
54
+ self.xs = []
55
+ self.x_seq_lens = []
56
+
57
+ self.x_inds = []
58
+ self.x_vals = []
59
+ self.x_shape = []
60
+
61
+ self.prints = []
62
+
63
+ with self.graph.as_default(), tf.device('/cpu:0'):
64
+ self.is_training = tf.placeholder(tf.bool, name="IsTraining")
65
+ self.learn_rate = tf.placeholder(tf.float32, name="LearningRate")
66
+ self.batch_size = tf.placeholder(tf.int32, [], name="BatchSize") if self.for_usage else None
67
+ self.optimiser = tf.train.AdamOptimizer(self.learn_rate)
68
+ self.reset_optimizer = tf.variables_initializer(self.optimiser.variables())
69
+ self.gram_graph_parts = {
70
+ gram: GramCls(gram, self.for_usage, self.config, self.key_configs[gram], self.optimiser, self.reset_optimizer)
71
+ for gram in self.gram_keys
72
+ }
73
+ self.lem_graph_part = Lemm(self.for_usage, self.config, self.key_configs["lemm"], self.optimiser, self.reset_optimizer)
74
+ self.main_graph_part = MainCls(self.for_usage, self.config, self.key_configs["main"], self.optimiser, self.reset_optimizer)
75
+ self.inflect_graph_part = Inflect(self.for_usage, self.config, self.key_configs['inflect'], self.optimiser, self.reset_optimizer)
76
+
77
+ for device_index, device_name in enumerate(self.devices):
78
+ with tf.device(device_name):
79
+ if self.for_usage:
80
+ x_ind_pl = tf.placeholder(dtype=tf.int32, shape=(None, None), name='XIndexes')
81
+ x_val_pl = tf.placeholder(dtype=tf.int32, shape=(None,), name='XValues')
82
+ x_shape_pl = tf.placeholder(dtype=tf.int32, shape=(2,), name='XShape')
83
+ x_ind = tf.dtypes.cast(x_ind_pl, dtype=tf.int64)
84
+ x_val = tf.dtypes.cast(x_val_pl, dtype=tf.int64)
85
+ x_shape = tf.dtypes.cast(x_shape_pl, dtype=tf.int64)
86
+
87
+ x_sparse = tf.sparse.SparseTensor(x_ind, x_val, x_shape)
88
+ x = tf.sparse.to_dense(x_sparse, default_value=self.end_char)
89
+ self.x_inds.append(x_ind_pl)
90
+ self.x_vals.append(x_val_pl)
91
+ self.x_shape.append(x_shape_pl)
92
+ else:
93
+ x = tf.placeholder(dtype=tf.int32, shape=(None, None), name='X')
94
+ self.xs.append(x)
95
+
96
+ x_seq_len = tf.placeholder(dtype=tf.int32, shape=(None,), name='SeqLen')
97
+ self.x_seq_lens.append(x_seq_len)
98
+
99
+ for gram in self.gram_keys:
100
+ self.gram_graph_parts[gram].build_graph_for_device(x, x_seq_len)
101
+
102
+ gram_probs = [self.gram_graph_parts[gram].probs[-1] for gram in self.gram_keys]
103
+ gram_keep_drops = [self.gram_graph_parts[gram].keep_drops[-1] for gram in self.gram_keys]
104
+ self.main_graph_part.build_graph_for_device(x, x_seq_len, gram_probs, gram_keep_drops)
105
+ self.prints.append(tf.print("main_result", self.main_graph_part.results[0].indices))
106
+ if self.for_usage:
107
+ x_tiled = tf.contrib.seq2seq.tile_batch(x, multiplier=self.main_class_k)
108
+ seq_len_tiled = tf.contrib.seq2seq.tile_batch(x_seq_len, multiplier=self.main_class_k)
109
+ cls = tf.reshape(self.main_graph_part.results[0].indices, (-1,))
110
+ batch_size_tiled = self.batch_size * self.main_class_k
111
+ self.lem_graph_part.build_graph_for_device(x_tiled,
112
+ seq_len_tiled,
113
+ batch_size_tiled,
114
+ cls)
115
+ self.lem_cls_result = tf.reshape(self.lem_graph_part.results[0],
116
+ (self.batch_size, self.main_class_k, -1))
117
+ self.lem_graph_part.build_graph_for_device(x,
118
+ x_seq_len,
119
+ self.batch_size)
120
+ self.lem_result = tf.expand_dims(self.lem_graph_part.results[1], 1)
121
+ self.lem_class_pl = self.lem_graph_part.cls[0]
122
+ else:
123
+ self.lem_graph_part.build_graph_for_device(x,
124
+ x_seq_len,
125
+ self.batch_size)
126
+
127
+ self.inflect_graph_part.build_graph_for_device(x, x_seq_len, self.batch_size)
128
+ if self.for_usage:
129
+ self.inflect_result = self.inflect_graph_part.results[0]
130
+ self.inflect_x_class_pl = self.inflect_graph_part.x_cls[0]
131
+ self.inflect_y_class_pl = self.inflect_graph_part.y_cls[0]
132
+
133
+ for gram in self.gram_keys:
134
+ self.gram_graph_parts[gram].build_graph_end()
135
+ self.main_graph_part.build_graph_end()
136
+ self.lem_graph_part.build_graph_end()
137
+ self.inflect_graph_part.build_graph_end()
138
+ self.saver = tf.train.Saver(tf.global_variables(), max_to_keep=self.checkpoints_keep)
139
+
140
+ def restore(self, sess):
141
+ latest_checkpoint = tf.train.latest_checkpoint(self.save_path)
142
+ for gram in self.gram_keys:
143
+ if gram not in self.config['ignore_restore']:
144
+ self.gram_graph_parts[gram].restore(sess, latest_checkpoint)
145
+ if self.main_graph_part.key not in self.config['ignore_restore']:
146
+ self.main_graph_part.restore(sess, latest_checkpoint)
147
+ if self.lem_graph_part.key not in self.config['ignore_restore']:
148
+ self.lem_graph_part.restore(sess, latest_checkpoint)
149
+ if self.inflect_graph_part not in self.config['ignore_restore']:
150
+ self.inflect_graph_part.restore(sess, latest_checkpoint)
151
+
152
+ if self.inflect_graph_part.settings['transfer_init']:
153
+ self.inflect_graph_part.transfer_learning_init(sess)
154
+
155
+ def train(self):
156
+ config = tf.ConfigProto(allow_soft_placement=True)
157
+ if not os.path.isdir(self.save_path):
158
+ os.mkdir(self.save_path)
159
+
160
+ with tf.Session(config = config, graph=self.graph) as sess:
161
+ sess.run(tf.global_variables_initializer())
162
+ sess.run(tf.local_variables_initializer())
163
+
164
+ tc = TfContext(sess, self.saver, self.learn_rate)
165
+ self.restore(sess)
166
+ sess.run(self.reset_optimizer)
167
+
168
+ for gram in self.gram_keys:
169
+ if gram in self.train_steps:
170
+ self.gram_graph_parts[gram].train(tc)
171
+
172
+ if self.main_graph_part.key in self.train_steps:
173
+ self.main_graph_part.train(tc)
174
+
175
+ if self.lem_graph_part.key in self.train_steps:
176
+ self.lem_graph_part.train(tc)
177
+
178
+ if self.inflect_graph_part.key in self.train_steps:
179
+ self.inflect_graph_part.train(tc)
180
+
181
+ def release(self):
182
+ with tf.Session(graph=self.graph) as sess:
183
+ sess.run(tf.global_variables_initializer())
184
+ sess.run(tf.local_variables_initializer())
185
+
186
+ # Loading checkpoint
187
+ latest_checkpoint = tf.train.latest_checkpoint(self.save_path)
188
+ if latest_checkpoint:
189
+ self.restore(sess)
190
+
191
+ if os.path.isdir(self.export_path):
192
+ shutil.rmtree(self.export_path)
193
+
194
+ output_dic = {}
195
+ gram_op_dic = {}
196
+ for gram in self.gram_keys:
197
+ res = self.gram_graph_parts[gram].results[0]
198
+ prob = self.gram_graph_parts[gram].probs[0]
199
+ output_dic[f'res_{gram}'] = res
200
+ output_dic[f'prob_{gram}'] = prob
201
+ gram_op_dic[gram] = {
202
+ 'res': res.op.name,
203
+ 'prob': prob.op.name
204
+ }
205
+
206
+ output_dic['res_values'] = self.main_graph_part.results[0].values
207
+ output_dic['res_indexes'] = self.main_graph_part.results[0].indices
208
+
209
+ output_dic['lem_cls_result'] = self.lem_cls_result
210
+ output_dic['lem_result'] = self.lem_result
211
+
212
+ output_dic['inflect_result'] = self.inflect_graph_part.results[0]
213
+
214
+ # Saving model
215
+ tf.saved_model.simple_save(sess,
216
+ self.export_path,
217
+ inputs={
218
+ 'x_ind': self.x_inds[0],
219
+ 'x_val': self.x_vals[0],
220
+ 'x_shape': self.x_shape[0],
221
+ 'seq_len': self.x_seq_lens[0],
222
+ 'batch_size': self.batch_size,
223
+ 'lem_x_cls': self.lem_class_pl,
224
+ 'inflect_x_cls': self.inflect_x_class_pl,
225
+ 'inflect_y_cls': self.inflect_y_class_pl
226
+ },
227
+ outputs=output_dic)
228
+
229
+ # Freezing graph
230
+ input_graph = 'graph.pbtxt'
231
+ tf.train.write_graph(sess.graph.as_graph_def(), self.export_path, input_graph, as_text=True)
232
+ input_graph = os.path.join(self.export_path, input_graph)
233
+ frozen_path = os.path.join(self.export_path, 'frozen_model.pb')
234
+ output_ops = [output_dic[key].op.name for key in output_dic]
235
+ output_ops = ",".join(output_ops)
236
+ freeze_graph.freeze_graph(input_graph,
237
+ "",
238
+ False,
239
+ latest_checkpoint,
240
+ output_ops,
241
+ "",
242
+ "",
243
+ frozen_path,
244
+ True,
245
+ "",
246
+ input_saved_model_dir=self.export_path)
247
+
248
+ op_dic = {
249
+ key: output_dic[key].op.name
250
+ for key in output_dic
251
+ }
252
+
253
+ op_dic['x_ind'] = self.x_inds[0].op.name
254
+ op_dic['x_val'] = self.x_vals[0].op.name
255
+ op_dic['x_shape'] = self.x_shape[0].op.name
256
+ op_dic['seq_len'] = self.main_graph_part.x_seq_lens[0].op.name
257
+ op_dic['batch_size'] = self.batch_size.op.name
258
+ op_dic['lem_x_cls'] = self.lem_class_pl.op.name
259
+ op_dic['inflect_x_cls'] = self.inflect_x_class_pl.op.name
260
+ op_dic['inflect_y_cls'] = self.inflect_y_class_pl.op.name
261
+
262
+ return frozen_path, \
263
+ gram_op_dic , \
264
+ op_dic
numb.yml ADDED
@@ -0,0 +1,1532 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 0:
2
+ p:
3
+ - {case: nomn, gndr: masc, nmbr: sing, text: нулевой}
4
+ - {case: nomn, gndr: femn, nmbr: sing, text: нулевая}
5
+ - {case: nomn, gndr: neut, nmbr: sing, text: нулевое}
6
+ - {case: nomn, nmbr: plur, text: нулевые}
7
+ - {case: gent, gndr: masc, nmbr: sing, text: нулевого}
8
+ - {case: gent, gndr: femn, nmbr: sing, text: нулевой}
9
+ - {case: gent, gndr: neut, nmbr: sing, text: нулевого}
10
+ - {case: gent, nmbr: plur, text: нулевых}
11
+ - {case: datv, gndr: masc, nmbr: sing, text: нулевому}
12
+ - {case: datv, gndr: femn, nmbr: sing, text: нулевой}
13
+ - {case: datv, gndr: neut, nmbr: sing, text: нулевому}
14
+ - {case: datv, nmbr: plur, text: нулевым}
15
+ - {case: accs, gndr: masc, nmbr: sing, text: нулевого}
16
+ - {case: accs, gndr: masc, nmbr: sing, text: нулевой}
17
+ - {case: accs, gndr: femn, nmbr: sing, text: нулевую}
18
+ - {case: accs, gndr: neut, nmbr: sing, text: нулевое}
19
+ - {case: accs, nmbr: plur, text: нулевых}
20
+ - {case: accs, nmbr: plur, text: нулевые}
21
+ - {case: ablt, gndr: masc, nmbr: sing, text: нулевым}
22
+ - {case: ablt, gndr: femn, nmbr: sing, text: нулевой}
23
+ - {case: ablt, gndr: femn, nmbr: sing, text: нулевою}
24
+ - {case: ablt, gndr: neut, nmbr: sing, text: нулевым}
25
+ - {case: ablt, nmbr: plur, text: нулевыми}
26
+ - {case: loct, gndr: masc, nmbr: sing, text: нулевом}
27
+ - {case: loct, gndr: femn, nmbr: sing, text: нулевой}
28
+ - {case: loct, gndr: neut, nmbr: sing, text: нулевом}
29
+ - {case: loct, nmbr: plur, text: нулевых}
30
+ o:
31
+ - {case: nomn, gndr: masc, nmbr: sing, text: ноль}
32
+ - {case: nomn, gndr: masc, nmbr: plur, text: ноли}
33
+ - {case: gent, gndr: masc, nmbr: sing, text: ноля}
34
+ - {case: gent, gndr: masc, nmbr: plur, text: нолей}
35
+ - {case: datv, gndr: masc, nmbr: sing, text: нолю}
36
+ - {case: datv, gndr: masc, nmbr: plur, text: нолям}
37
+ - {case: accs, gndr: masc, nmbr: sing, text: ноль}
38
+ - {case: accs, gndr: masc, nmbr: plur, text: ноли}
39
+ - {case: ablt, gndr: masc, nmbr: sing, text: нолём}
40
+ - {case: ablt, gndr: masc, nmbr: plur, text: нолями}
41
+ - {case: loct, gndr: masc, nmbr: sing, text: ноле}
42
+ - {case: loct, gndr: masc, nmbr: plur, text: нолях}
43
+ o1:
44
+ - {case: nomn, gndr: masc, nmbr: sing, text: нуль}
45
+ - {case: nomn, gndr: masc, nmbr: plur, text: нули}
46
+ - {case: gent, gndr: masc, nmbr: sing, text: нуля}
47
+ - {case: gent, gndr: masc, nmbr: plur, text: нулей}
48
+ - {case: datv, gndr: masc, nmbr: sing, text: нулю}
49
+ - {case: datv, gndr: masc, nmbr: plur, text: нулям}
50
+ - {case: accs, gndr: masc, nmbr: sing, text: нуль}
51
+ - {case: accs, gndr: masc, nmbr: plur, text: нули}
52
+ - {case: ablt, gndr: masc, nmbr: sing, text: нулём}
53
+ - {case: ablt, gndr: masc, nmbr: plur, text: нулями}
54
+ - {case: loct, gndr: masc, nmbr: sing, text: нуле}
55
+ - {case: loct, gndr: masc, nmbr: plur, text: нулях}
56
+ 1:
57
+ p:
58
+ - {case: nomn, gndr: masc, nmbr: sing, text: первый}
59
+ - {case: nomn, gndr: femn, nmbr: sing, text: первая}
60
+ - {case: nomn, gndr: neut, nmbr: sing, text: первое}
61
+ - {case: nomn, nmbr: plur, text: первые}
62
+ - {case: gent, gndr: masc, nmbr: sing, text: первого}
63
+ - {case: gent, gndr: femn, nmbr: sing, text: первой}
64
+ - {case: gent, gndr: neut, nmbr: sing, text: первого}
65
+ - {case: gent, nmbr: plur, text: первых}
66
+ - {case: datv, gndr: masc, nmbr: sing, text: первому}
67
+ - {case: datv, gndr: femn, nmbr: sing, text: первой}
68
+ - {case: datv, gndr: neut, nmbr: sing, text: первому}
69
+ - {case: datv, nmbr: plur, text: первым}
70
+ - {case: accs, gndr: masc, nmbr: sing, text: первого}
71
+ - {case: accs, gndr: masc, nmbr: sing, text: первый}
72
+ - {case: accs, gndr: femn, nmbr: sing, text: первую}
73
+ - {case: accs, gndr: neut, nmbr: sing, text: первое}
74
+ - {case: accs, nmbr: plur, text: первые}
75
+ - {case: accs, nmbr: plur, text: первых}
76
+ - {case: ablt, gndr: masc, nmbr: sing, text: первым}
77
+ - {case: ablt, gndr: femn, nmbr: sing, text: первой}
78
+ - {case: ablt, gndr: neut, nmbr: sing, text: первым}
79
+ - {case: ablt, nmbr: plur, text: первыми}
80
+ - {case: loct, gndr: masc, nmbr: sing, text: первом}
81
+ - {case: loct, gndr: femn, nmbr: sing, text: первой}
82
+ - {case: loct, gndr: neut, nmbr: sing, text: первом}
83
+ - {case: loct, nmbr: plur, text: первых}
84
+ o:
85
+ - {case: nomn, gndr: masc, nmbr: sing, text: один}
86
+ - {case: nomn, gndr: femn, nmbr: sing, text: одна}
87
+ - {case: nomn, gndr: neut, nmbr: sing, text: одно}
88
+ - {case: nomn, nmbr: plur, text: одни}
89
+ - {case: gent, gndr: masc, nmbr: sing, text: одного}
90
+ - {case: gent, gndr: femn, nmbr: sing, text: одной}
91
+ - {case: gent, gndr: neut, nmbr: sing, text: одного}
92
+ - {case: gent, nmbr: plur, text: одних}
93
+ - {case: datv, gndr: masc, nmbr: sing, text: одному}
94
+ - {case: datv, gndr: femn, nmbr: sing, text: одной}
95
+ - {case: datv, gndr: neut, nmbr: sing, text: одному}
96
+ - {case: datv, nmbr: plur, text: одним}
97
+ - {case: accs, gndr: femn, nmbr: sing, text: одного}
98
+ - {case: accs, gndr: masc, nmbr: sing, text: один}
99
+ - {case: accs, gndr: femn, nmbr: sing, text: одну}
100
+ - {case: accs, gndr: neut, nmbr: sing, text: одно}
101
+ - {case: accs, nmbr: plur, text: одни}
102
+ - {case: accs, nmbr: plur, text: одних}
103
+ - {case: ablt, gndr: masc, nmbr: sing, text: одним}
104
+ - {case: ablt, gndr: femn, nmbr: sing, text: одной}
105
+ - {case: ablt, gndr: neut, nmbr: sing, text: одним}
106
+ - {case: ablt, nmbr: plur, text: одними}
107
+ - {case: loct, gndr: masc, nmbr: sing, text: одном}
108
+ - {case: loct, gndr: femn, nmbr: sing, text: одной}
109
+ - {case: loct, gndr: neut, nmbr: sing, text: одном}
110
+ - {case: loct, nmbr: plur, text: одних}
111
+ - {case: nomn, gndr: femn, text: один}
112
+ - {case: nomn, text: одни}
113
+ - {case: nomn, gndr: femn, text: одна}
114
+ - {case: nomn, gndr: neut, text: одно}
115
+ - {case: gent, text: одних}
116
+ - {case: gent, gndr: femn, text: одного}
117
+ - {case: gent, gndr: femn, text: одной}
118
+ - {case: gent, gndr: neut, text: одного}
119
+ - {case: datv, text: одним}
120
+ - {case: datv, gndr: femn, text: одному}
121
+ - {case: datv, gndr: femn, text: одной}
122
+ - {case: datv, gndr: neut, text: одному}
123
+ - {case: accs, text: одни}
124
+ - {case: accs, text: одних}
125
+ - {case: accs, gndr: masc, text: один}
126
+ - {case: accs, gndr: masc, text: одного}
127
+ - {case: accs, gndr: femn, text: одну}
128
+ - {case: accs, gndr: neut, text: одно}
129
+ - {case: ablt, text: одними}
130
+ - {case: ablt, gndr: masc, text: одним}
131
+ - {case: ablt, gndr: femn, text: одной}
132
+ - {case: ablt, gndr: femn, text: одною}
133
+ - {case: ablt, gndr: neut, text: одним}
134
+ - {case: loct, gndr: femn, text: одном}
135
+ - {case: loct, gndr: femn, text: одной}
136
+ - {case: loct, gndr: neut, text: одном}
137
+ 2:
138
+ p:
139
+ - {case: nomn, gndr: masc, nmbr: sing, text: второй}
140
+ - {case: nomn, gndr: femn, nmbr: sing, text: вторая}
141
+ - {case: nomn, gndr: neut, nmbr: sing, text: второе}
142
+ - {case: nomn, nmbr: plur, text: вторые}
143
+ - {case: gent, gndr: masc, nmbr: sing, text: второго}
144
+ - {case: gent, gndr: femn, nmbr: sing, text: второй}
145
+ - {case: gent, gndr: neut, nmbr: sing, text: второго}
146
+ - {case: gent, nmbr: plur, text: вторых}
147
+ - {case: datv, gndr: masc, nmbr: sing, text: второму}
148
+ - {case: datv, gndr: femn, nmbr: sing, text: второй}
149
+ - {case: datv, gndr: neut, nmbr: sing, text: второму}
150
+ - {case: datv, nmbr: plur, text: вторым}
151
+ - {case: accs, gndr: femn, nmbr: sing, text: второго}
152
+ - {case: accs, gndr: masc, nmbr: sing, text: второй}
153
+ - {case: accs, gndr: femn, nmbr: sing, text: вторую}
154
+ - {case: accs, gndr: neut, nmbr: sing, text: второе}
155
+ - {case: accs, nmbr: plur, text: вторые}
156
+ - {case: accs, nmbr: plur, text: вторых}
157
+ - {case: ablt, gndr: masc, nmbr: sing, text: вторым}
158
+ - {case: ablt, gndr: femn, nmbr: sing, text: второй}
159
+ - {case: ablt, gndr: neut, nmbr: sing, text: вторым}
160
+ - {case: ablt, nmbr: plur, text: вторыми}
161
+ - {case: loct, gndr: masc, nmbr: sing, text: втором}
162
+ - {case: loct, gndr: femn, nmbr: sing, text: второй}
163
+ - {case: loct, gndr: neut, nmbr: sing, text: втором}
164
+ - {case: loct, nmbr: plur, text: вторых}
165
+ o:
166
+ - {case: nomn, gndr: masc, text: два}
167
+ - {case: nomn, gndr: femn, text: две}
168
+ - {case: nomn, gndr: neut, text: два}
169
+ - {case: gent, gndr: masc, text: двух}
170
+ - {case: gent, gndr: femn, text: двух}
171
+ - {case: gent, gndr: neut, text: двух}
172
+ - {case: datv, gndr: masc, text: двум}
173
+ - {case: datv, gndr: femn, text: двум}
174
+ - {case: datv, gndr: neut, text: двум}
175
+ - {case: accs, gndr: masc, text: два}
176
+ - {case: accs, gndr: masc, text: двух}
177
+ - {case: accs, gndr: femn, text: две}
178
+ - {case: accs, gndr: femn, text: двух}
179
+ - {case: accs, gndr: neut, text: два}
180
+ - {case: ablt, gndr: masc, text: двумя}
181
+ - {case: ablt, gndr: femn, text: двумя}
182
+ - {case: ablt, gndr: neut, text: двумя}
183
+ - {case: loct, gndr: masc, text: двух}
184
+ - {case: loct, gndr: femn, text: двух}
185
+ - {case: loct, gndr: neut, text: двух}
186
+ 3:
187
+ p:
188
+ - {case: nomn, gndr: masc, nmbr: sing, text: третий}
189
+ - {case: nomn, gndr: femn, nmbr: sing, text: третья}
190
+ - {case: nomn, gndr: neut, nmbr: sing, text: третье}
191
+ - {case: nomn, nmbr: plur, text: третьи}
192
+ - {case: gent, gndr: masc, nmbr: sing, text: третьего}
193
+ - {case: gent, gndr: femn, nmbr: sing, text: третьей}
194
+ - {case: gent, gndr: neut, nmbr: sing, text: третьего}
195
+ - {case: gent, nmbr: plur, text: третьих}
196
+ - {case: datv, gndr: masc, nmbr: sing, text: третьему}
197
+ - {case: datv, gndr: femn, nmbr: sing, text: третьей}
198
+ - {case: datv, gndr: neut, nmbr: sing, text: третьему}
199
+ - {case: datv, nmbr: plur, text: третьим}
200
+ - {case: accs, gndr: masc, nmbr: sing, text: третьего}
201
+ - {case: accs, gndr: masc, nmbr: sing, text: третий}
202
+ - {case: accs, gndr: femn, nmbr: sing, text: третью}
203
+ - {case: accs, gndr: neut, nmbr: sing, text: третье}
204
+ - {case: accs, nmbr: plur, text: третьи}
205
+ - {case: accs, nmbr: plur, text: третьих}
206
+ - {case: ablt, gndr: masc, nmbr: sing, text: третьим}
207
+ - {case: ablt, gndr: femn, nmbr: sing, text: третьей}
208
+ - {case: ablt, gndr: femn, nmbr: sing, text: третьею}
209
+ - {case: ablt, gndr: neut, nmbr: sing, text: третьим}
210
+ - {case: ablt, nmbr: plur, text: третьими}
211
+ - {case: loct, gndr: masc, nmbr: sing, text: третьем}
212
+ - {case: loct, gndr: femn, nmbr: sing, text: третьей}
213
+ - {case: loct, gndr: neut, nmbr: sing, text: третьем}
214
+ - {case: loct, nmbr: plur, text: третьих}
215
+ o:
216
+ - {case: nomn, text: три}
217
+ - {case: gent, text: трех}
218
+ - {case: datv, text: трем}
219
+ - {case: accs, text: трех}
220
+ - {case: accs, text: три}
221
+ - {case: ablt, text: тремя}
222
+ - {case: loct, text: трех}
223
+ 4:
224
+ p:
225
+ - {case: nomn, gndr: masc, nmbr: sing, text: четвёртый}
226
+ - {case: nomn, gndr: femn, nmbr: sing, text: четвёртая}
227
+ - {case: nomn, gndr: neut, nmbr: sing, text: четвёртое}
228
+ - {case: nomn, nmbr: plur, text: четвёртые}
229
+ - {case: gent, gndr: masc, nmbr: sing, text: четвёртого}
230
+ - {case: gent, gndr: femn, nmbr: sing, text: четвёртой}
231
+ - {case: gent, gndr: neut, nmbr: sing, text: четвёртого}
232
+ - {case: gent, nmbr: plur, text: четвёртых}
233
+ - {case: datv, gndr: masc, nmbr: sing, text: четвёртому}
234
+ - {case: datv, gndr: femn, nmbr: sing, text: четвёртой}
235
+ - {case: datv, gndr: neut, nmbr: sing, text: четвёртому}
236
+ - {case: datv, nmbr: plur, text: четвёртым}
237
+ - {case: accs, gndr: masc, nmbr: sing, text: четвёртого}
238
+ - {case: accs, gndr: masc, nmbr: sing, text: четвёртый}
239
+ - {case: accs, gndr: femn, nmbr: sing, text: четвёртую}
240
+ - {case: accs, gndr: neut, nmbr: sing, text: четвёртое}
241
+ - {case: accs, nmbr: plur, text: четвёртые}
242
+ - {case: accs, nmbr: plur, text: четвёртых}
243
+ - {case: ablt, gndr: masc, nmbr: sing, text: четвёртым}
244
+ - {case: ablt, gndr: femn, nmbr: sing, text: четвёртой}
245
+ - {case: ablt, gndr: neut, nmbr: sing, text: четвёртым}
246
+ - {case: ablt, nmbr: plur, text: четвёртыми}
247
+ - {case: loct, gndr: masc, nmbr: sing, text: четвёртом}
248
+ - {case: loct, gndr: femn, nmbr: sing, text: четвёртой}
249
+ - {case: loct, gndr: neut, nmbr: sing, text: четвёртом}
250
+ - {case: loct, nmbr: plur, text: четвёртых}
251
+ o:
252
+ - {case: nomn, text: четыре}
253
+ - {case: gent, text: четырех}
254
+ - {case: datv, text: четырем}
255
+ - {case: accs, text: четыре}
256
+ - {case: accs, text: четырех}
257
+ - {case: ablt, text: четырьмя}
258
+ - {case: loct, text: четырех}
259
+ 5:
260
+ p:
261
+ - {case: nomn, gndr: masc, nmbr: sing, text: пятый}
262
+ - {case: nomn, gndr: femn, nmbr: sing, text: пятая}
263
+ - {case: nomn, gndr: neut, nmbr: sing, text: пятое}
264
+ - {case: nomn, nmbr: plur, text: пятые}
265
+ - {case: gent, gndr: masc, nmbr: sing, text: пятого}
266
+ - {case: gent, gndr: femn, nmbr: sing, text: пятой}
267
+ - {case: gent, gndr: neut, nmbr: sing, text: пятого}
268
+ - {case: gent, nmbr: plur, text: пятых}
269
+ - {case: datv, gndr: masc, nmbr: sing, text: пятому}
270
+ - {case: datv, gndr: femn, nmbr: sing, text: пятой}
271
+ - {case: datv, gndr: neut, nmbr: sing, text: пятому}
272
+ - {case: datv, nmbr: plur, text: пятым}
273
+ - {case: accs, gndr: femn, nmbr: sing, text: пятого}
274
+ - {case: accs, gndr: masc, nmbr: sing, text: пятый}
275
+ - {case: accs, gndr: femn, nmbr: sing, text: пятую}
276
+ - {case: accs, gndr: neut, nmbr: sing, text: пятое}
277
+ - {case: accs, nmbr: plur, text: пятые}
278
+ - {case: accs, nmbr: plur, text: пятых}
279
+ - {case: ablt, gndr: masc, nmbr: sing, text: пятым}
280
+ - {case: ablt, gndr: femn, nmbr: sing, text: пятой}
281
+ - {case: ablt, gndr: neut, nmbr: sing, text: пятым}
282
+ - {case: ablt, nmbr: plur, text: пятыми}
283
+ - {case: loct, gndr: masc, nmbr: sing, text: пятом}
284
+ - {case: loct, gndr: femn, nmbr: sing, text: пятой}
285
+ - {case: loct, gndr: neut, nmbr: sing, text: пятом}
286
+ - {case: loct, nmbr: plur, text: пятых}
287
+ o:
288
+ - {case: nomn, text: пять}
289
+ - {case: gent, text: пяти}
290
+ - {case: datv, text: пяти}
291
+ - {case: accs, text: пять}
292
+ - {case: ablt, text: пятью}
293
+ - {case: loct, text: пяти}
294
+ 6:
295
+ p:
296
+ - {case: nomn, gndr: masc, nmbr: sing, text: шестой}
297
+ - {case: nomn, gndr: femn, nmbr: sing, text: шестая}
298
+ - {case: nomn, gndr: neut, nmbr: sing, text: шестое}
299
+ - {case: nomn, nmbr: plur, text: шестые}
300
+ - {case: gent, gndr: masc, nmbr: sing, text: шестого}
301
+ - {case: gent, gndr: femn, nmbr: sing, text: шестой}
302
+ - {case: gent, gndr: neut, nmbr: sing, text: шестого}
303
+ - {case: gent, nmbr: plur, text: шестых}
304
+ - {case: datv, gndr: masc, nmbr: sing, text: шестому}
305
+ - {case: datv, gndr: femn, nmbr: sing, text: шестой}
306
+ - {case: datv, gndr: neut, nmbr: sing, text: шестому}
307
+ - {case: datv, nmbr: plur, text: шестым}
308
+ - {case: accs, gndr: masc, nmbr: sing, text: шестого}
309
+ - {case: accs, gndr: masc, nmbr: sing, text: шестой}
310
+ - {case: accs, gndr: femn, nmbr: sing, text: шестую}
311
+ - {case: accs, gndr: neut, nmbr: sing, text: шестое}
312
+ - {case: accs, nmbr: plur, text: шестые}
313
+ - {case: accs, nmbr: plur, text: шестых}
314
+ - {case: ablt, gndr: masc, nmbr: sing, text: шестым}
315
+ - {case: ablt, gndr: femn, nmbr: sing, text: шестой}
316
+ - {case: ablt, gndr: neut, nmbr: sing, text: шестым}
317
+ - {case: ablt, nmbr: plur, text: шестыми}
318
+ - {case: loct, gndr: masc, nmbr: sing, text: шестом}
319
+ - {case: loct, gndr: femn, nmbr: sing, text: шестой}
320
+ - {case: loct, gndr: neut, nmbr: sing, text: шестом}
321
+ - {case: loct, nmbr: plur, text: шестых}
322
+ o:
323
+ - {case: nomn, text: шесть}
324
+ - {case: gent, text: шести}
325
+ - {case: datv, text: шести}
326
+ - {case: accs, text: шесть}
327
+ - {case: ablt, text: шестью}
328
+ - {case: loct, text: шести}
329
+ 7:
330
+ p:
331
+ - {case: nomn, gndr: masc, nmbr: sing, text: седьмой}
332
+ - {case: nomn, gndr: femn, nmbr: sing, text: седьмая}
333
+ - {case: nomn, gndr: neut, nmbr: sing, text: седьмое}
334
+ - {case: nomn, nmbr: plur, text: седьмые}
335
+ - {case: gent, gndr: masc, nmbr: sing, text: седьмого}
336
+ - {case: gent, gndr: femn, nmbr: sing, text: седьмой}
337
+ - {case: gent, gndr: neut, nmbr: sing, text: седьмого}
338
+ - {case: gent, nmbr: plur, text: седьмых}
339
+ - {case: datv, gndr: masc, nmbr: sing, text: седьмому}
340
+ - {case: datv, gndr: femn, nmbr: sing, text: седьмой}
341
+ - {case: datv, gndr: neut, nmbr: sing, text: седьмому}
342
+ - {case: datv, nmbr: plur, text: седьмым}
343
+ - {case: accs, gndr: masc, nmbr: sing, text: седьмого}
344
+ - {case: accs, gndr: masc, nmbr: sing, text: седьмой}
345
+ - {case: accs, gndr: femn, nmbr: sing, text: седьмую}
346
+ - {case: accs, gndr: neut, nmbr: sing, text: седьмое}
347
+ - {case: accs, nmbr: plur, text: седьмые}
348
+ - {case: accs, nmbr: plur, text: седьмых}
349
+ - {case: ablt, gndr: masc, nmbr: sing, text: седьмым}
350
+ - {case: ablt, gndr: femn, nmbr: sing, text: седьмой}
351
+ - {case: ablt, gndr: neut, nmbr: sing, text: седьмым}
352
+ - {case: ablt, nmbr: plur, text: седьмыми}
353
+ - {case: loct, gndr: masc, nmbr: sing, text: седьмом}
354
+ - {case: loct, gndr: femn, nmbr: sing, text: седьмой}
355
+ - {case: loct, gndr: neut, nmbr: sing, text: седьмом}
356
+ - {case: loct, nmbr: plur, text: седьмых}
357
+ o:
358
+ - {case: nomn, text: семь}
359
+ - {case: gent, text: семи}
360
+ - {case: datv, text: семи}
361
+ - {case: accs, text: семь}
362
+ - {case: ablt, text: семью}
363
+ - {case: loct, text: семи}
364
+ 8:
365
+ p:
366
+ - {case: nomn, gndr: masc, nmbr: sing, text: восьмой}
367
+ - {case: nomn, gndr: femn, nmbr: sing, text: восьмая}
368
+ - {case: nomn, gndr: neut, nmbr: sing, text: восьмое}
369
+ - {case: nomn, nmbr: plur, text: восьмые}
370
+ - {case: gent, gndr: masc, nmbr: sing, text: восьмого}
371
+ - {case: gent, gndr: femn, nmbr: sing, text: восьмой}
372
+ - {case: gent, gndr: neut, nmbr: sing, text: восьмого}
373
+ - {case: gent, nmbr: plur, text: восьмых}
374
+ - {case: datv, gndr: masc, nmbr: sing, text: восьмому}
375
+ - {case: datv, gndr: femn, nmbr: sing, text: восьмой}
376
+ - {case: datv, gndr: neut, nmbr: sing, text: восьмому}
377
+ - {case: datv, nmbr: plur, text: восьмым}
378
+ - {case: accs, gndr: masc, nmbr: sing, text: восьмого}
379
+ - {case: accs, gndr: masc, nmbr: sing, text: восьмой}
380
+ - {case: accs, gndr: femn, nmbr: sing, text: восьмую}
381
+ - {case: accs, gndr: neut, nmbr: sing, text: восьмое}
382
+ - {case: accs, nmbr: plur, text: восьмые}
383
+ - {case: accs, nmbr: plur, text: восьмых}
384
+ - {case: ablt, gndr: masc, nmbr: sing, text: восьмым}
385
+ - {case: ablt, gndr: femn, nmbr: sing, text: восьмой}
386
+ - {case: ablt, gndr: neut, nmbr: sing, text: восьмым}
387
+ - {case: ablt, nmbr: plur, text: восьмыми}
388
+ - {case: loct, gndr: masc, nmbr: sing, text: восьмом}
389
+ - {case: loct, gndr: femn, nmbr: sing, text: восьмой}
390
+ - {case: loct, gndr: neut, nmbr: sing, text: восьмом}
391
+ - {case: loct, nmbr: plur, text: восьмых}
392
+ o:
393
+ - {case: nomn, text: восемь}
394
+ - {case: gent, text: восьми}
395
+ - {case: datv, text: восьми}
396
+ - {case: accs, text: восемь}
397
+ - {case: ablt, text: восемью}
398
+ - {case: ablt, text: восьмью}
399
+ - {case: loct, text: восьми}
400
+ 9:
401
+ p:
402
+ - {case: nomn, gndr: masc, nmbr: sing, text: девятый}
403
+ - {case: nomn, gndr: femn, nmbr: sing, text: девятая}
404
+ - {case: nomn, gndr: neut, nmbr: sing, text: девятое}
405
+ - {case: nomn, nmbr: plur, text: девятые}
406
+ - {case: gent, gndr: masc, nmbr: sing, text: девятого}
407
+ - {case: gent, gndr: femn, nmbr: sing, text: девятой}
408
+ - {case: gent, gndr: neut, nmbr: sing, text: девятого}
409
+ - {case: gent, nmbr: plur, text: девятых}
410
+ - {case: datv, gndr: masc, nmbr: sing, text: девятому}
411
+ - {case: datv, gndr: femn, nmbr: sing, text: девятой}
412
+ - {case: datv, gndr: neut, nmbr: sing, text: девятому}
413
+ - {case: datv, nmbr: plur, text: девятым}
414
+ - {case: accs, gndr: masc, nmbr: sing, text: девятого}
415
+ - {case: accs, gndr: masc, nmbr: sing, text: девятый}
416
+ - {case: accs, gndr: femn, nmbr: sing, text: девятую}
417
+ - {case: accs, gndr: neut, nmbr: sing, text: девятое}
418
+ - {case: accs, nmbr: plur, text: девятые}
419
+ - {case: accs, nmbr: plur, text: девятых}
420
+ - {case: ablt, gndr: masc, nmbr: sing, text: девятым}
421
+ - {case: ablt, gndr: femn, nmbr: sing, text: девятой}
422
+ - {case: ablt, gndr: neut, nmbr: sing, text: девятым}
423
+ - {case: ablt, nmbr: plur, text: девятыми}
424
+ - {case: loct, gndr: masc, nmbr: sing, text: девятом}
425
+ - {case: loct, gndr: femn, nmbr: sing, text: девятой}
426
+ - {case: loct, gndr: neut, nmbr: sing, text: девятом}
427
+ - {case: loct, nmbr: plur, text: девятых}
428
+ o:
429
+ - {case: nomn, text: девять}
430
+ - {case: gent, text: девяти}
431
+ - {case: datv, text: девяти}
432
+ - {case: accs, text: девять}
433
+ - {case: ablt, text: девятью}
434
+ - {case: loct, text: девяти}
435
+ 10:
436
+ p:
437
+ - {case: nomn, gndr: masc, nmbr: sing, text: десятый}
438
+ - {case: nomn, gndr: femn, nmbr: sing, text: десятая}
439
+ - {case: nomn, gndr: neut, nmbr: sing, text: десятое}
440
+ - {case: nomn, nmbr: plur, text: десятые}
441
+ - {case: gent, gndr: masc, nmbr: sing, text: десятого}
442
+ - {case: gent, gndr: femn, nmbr: sing, text: десятой}
443
+ - {case: gent, gndr: neut, nmbr: sing, text: десятого}
444
+ - {case: gent, nmbr: plur, text: десятых}
445
+ - {case: datv, gndr: masc, nmbr: sing, text: десятому}
446
+ - {case: datv, gndr: femn, nmbr: sing, text: десятой}
447
+ - {case: datv, gndr: neut, nmbr: sing, text: десятому}
448
+ - {case: datv, nmbr: plur, text: десятым}
449
+ - {case: accs, gndr: masc, nmbr: sing, text: десятого}
450
+ - {case: accs, gndr: masc, nmbr: sing, text: десятый}
451
+ - {case: accs, gndr: femn, nmbr: sing, text: десятую}
452
+ - {case: accs, gndr: neut, nmbr: sing, text: десятое}
453
+ - {case: accs, nmbr: plur, text: десятые}
454
+ - {case: accs, nmbr: plur, text: десятых}
455
+ - {case: ablt, gndr: masc, nmbr: sing, text: десятым}
456
+ - {case: ablt, gndr: femn, nmbr: sing, text: десятой}
457
+ - {case: ablt, gndr: neut, nmbr: sing, text: десятым}
458
+ - {case: ablt, nmbr: plur, text: десятыми}
459
+ - {case: loct, gndr: masc, nmbr: sing, text: десятом}
460
+ - {case: loct, gndr: femn, nmbr: sing, text: десятой}
461
+ - {case: loct, gndr: neut, nmbr: sing, text: десятом}
462
+ - {case: loct, nmbr: plur, text: десятых}
463
+ o:
464
+ - {case: nomn, text: десять}
465
+ - {case: gent, text: десяти}
466
+ - {case: datv, text: десяти}
467
+ - {case: accs, text: десять}
468
+ - {case: ablt, text: десятью}
469
+ - {case: loct, text: десяти}
470
+ 11:
471
+ p:
472
+ - {case: nomn, gndr: masc, nmbr: sing, text: одиннадцатый}
473
+ - {case: nomn, gndr: femn, nmbr: sing, text: одиннадцатая}
474
+ - {case: nomn, gndr: neut, nmbr: sing, text: одиннадцатое}
475
+ - {case: nomn, nmbr: plur, text: одиннадцатые}
476
+ - {case: gent, gndr: masc, nmbr: sing, text: одиннадцатого}
477
+ - {case: gent, gndr: femn, nmbr: sing, text: одиннадцатой}
478
+ - {case: gent, gndr: neut, nmbr: sing, text: одиннадцатого}
479
+ - {case: gent, nmbr: plur, text: одиннадцатых}
480
+ - {case: datv, gndr: masc, nmbr: sing, text: одиннадцатому}
481
+ - {case: datv, gndr: femn, nmbr: sing, text: одиннадцатой}
482
+ - {case: datv, gndr: neut, nmbr: sing, text: одиннадцатому}
483
+ - {case: datv, nmbr: plur, text: одиннадцатым}
484
+ - {case: accs, gndr: masc, nmbr: sing, text: одиннадцатого}
485
+ - {case: accs, gndr: masc, nmbr: sing, text: одиннадцатый}
486
+ - {case: accs, gndr: femn, nmbr: sing, text: одиннадцатую}
487
+ - {case: accs, gndr: neut, nmbr: sing, text: одиннадцатое}
488
+ - {case: accs, nmbr: plur, text: одиннадцатые}
489
+ - {case: accs, nmbr: plur, text: одиннадцатых}
490
+ - {case: ablt, gndr: masc, nmbr: sing, text: одиннадцатым}
491
+ - {case: ablt, gndr: femn, nmbr: sing, text: одиннадцатой}
492
+ - {case: ablt, gndr: neut, nmbr: sing, text: одиннадцатым}
493
+ - {case: ablt, nmbr: plur, text: одиннадцатыми}
494
+ - {case: loct, gndr: masc, nmbr: sing, text: одиннадцатом}
495
+ - {case: loct, gndr: femn, nmbr: sing, text: одиннадцатой}
496
+ - {case: loct, gndr: neut, nmbr: sing, text: одиннадцатом}
497
+ - {case: loct, nmbr: plur, text: одиннадцатых}
498
+ o:
499
+ - {case: nomn, text: одиннадцать}
500
+ - {case: gent, text: одиннадцати}
501
+ - {case: datv, text: одиннадцати}
502
+ - {case: accs, text: одиннадцать}
503
+ - {case: ablt, text: одиннадцатью}
504
+ - {case: loct, text: одиннадцати}
505
+ 12:
506
+ ad:
507
+ - {case: nomn, gndr: femn, nmbr: sing, text: дюжина}
508
+ - {case: nomn, gndr: femn, nmbr: plur, text: дюжины}
509
+ - {case: gent, gndr: femn, nmbr: sing, text: дюжины}
510
+ - {case: gent, gndr: femn, nmbr: plur, text: дюжин}
511
+ - {case: datv, gndr: femn, nmbr: sing, text: дюжине}
512
+ - {case: datv, gndr: femn, nmbr: plur, text: дюжинам}
513
+ - {case: accs, gndr: femn, nmbr: sing, text: дюжину}
514
+ - {case: accs, gndr: femn, nmbr: plur, text: дюжины}
515
+ - {case: ablt, gndr: femn, nmbr: sing, text: дюжиной}
516
+ - {case: ablt, gndr: femn, nmbr: sing, text: дюжиною}
517
+ - {case: ablt, gndr: femn, nmbr: plur, text: дюжинами}
518
+ - {case: loct, gndr: femn, nmbr: sing, text: дюжине}
519
+ - {case: loct, gndr: femn, nmbr: plur, text: дюжинах}
520
+ p:
521
+ - {case: nomn, gndr: masc, nmbr: sing, text: двенадцатый}
522
+ - {case: nomn, gndr: femn, nmbr: sing, text: двенадцатая}
523
+ - {case: nomn, gndr: neut, nmbr: sing, text: двенадцатое}
524
+ - {case: nomn, nmbr: plur, text: двенадцатые}
525
+ - {case: gent, gndr: masc, nmbr: sing, text: двенадцатого}
526
+ - {case: gent, gndr: femn, nmbr: sing, text: двенадцатой}
527
+ - {case: gent, gndr: neut, nmbr: sing, text: двенадцатого}
528
+ - {case: gent, nmbr: plur, text: двенадцатых}
529
+ - {case: datv, gndr: masc, nmbr: sing, text: двенадцатому}
530
+ - {case: datv, gndr: femn, nmbr: sing, text: двенадцатой}
531
+ - {case: datv, gndr: neut, nmbr: sing, text: двенадцатому}
532
+ - {case: datv, nmbr: plur, text: двенадцатым}
533
+ - {case: accs, gndr: masc, nmbr: sing, text: двенадцатого}
534
+ - {case: accs, gndr: masc, nmbr: sing, text: двенадцатый}
535
+ - {case: accs, gndr: femn, nmbr: sing, text: двенадцатую}
536
+ - {case: accs, gndr: neut, nmbr: sing, text: двенадцатое}
537
+ - {case: accs, nmbr: plur, text: двенадцатые}
538
+ - {case: accs, nmbr: plur, text: двенадцатых}
539
+ - {case: ablt, gndr: masc, nmbr: sing, text: двенадцатым}
540
+ - {case: ablt, gndr: femn, nmbr: sing, text: двенадцатой}
541
+ - {case: ablt, gndr: neut, nmbr: sing, text: двенадцатым}
542
+ - {case: ablt, nmbr: plur, text: двенадцатыми}
543
+ - {case: loct, gndr: masc, nmbr: sing, text: двенадцатом}
544
+ - {case: loct, gndr: femn, nmbr: sing, text: двенадцатой}
545
+ - {case: loct, gndr: neut, nmbr: sing, text: двенадцатом}
546
+ - {case: loct, nmbr: plur, text: двенадцатых}
547
+ o:
548
+ - {case: nomn, text: двенадцать}
549
+ - {case: gent, text: двенадцати}
550
+ - {case: datv, text: двенадцати}
551
+ - {case: accs, text: двенадцать}
552
+ - {case: ablt, text: двенадцатью}
553
+ - {case: loct, text: двенадцати}
554
+ 13:
555
+ p:
556
+ - {case: nomn, gndr: masc, nmbr: sing, text: тринадцатый}
557
+ - {case: nomn, gndr: femn, nmbr: sing, text: тринадцатая}
558
+ - {case: nomn, gndr: neut, nmbr: sing, text: тринадцатое}
559
+ - {case: nomn, nmbr: plur, text: тринадцатые}
560
+ - {case: gent, gndr: masc, nmbr: sing, text: тринадцатого}
561
+ - {case: gent, gndr: femn, nmbr: sing, text: тринадцатой}
562
+ - {case: gent, gndr: neut, nmbr: sing, text: тринадцатого}
563
+ - {case: gent, nmbr: plur, text: тринадцатых}
564
+ - {case: datv, gndr: masc, nmbr: sing, text: тринадцатому}
565
+ - {case: datv, gndr: femn, nmbr: sing, text: тринадцатой}
566
+ - {case: datv, gndr: neut, nmbr: sing, text: тринадцатому}
567
+ - {case: datv, nmbr: plur, text: тринадцатым}
568
+ - {case: accs, gndr: masc, nmbr: sing, text: тринадцатого}
569
+ - {case: accs, gndr: masc, nmbr: sing, text: тринадцатый}
570
+ - {case: accs, gndr: femn, nmbr: sing, text: тринадцатую}
571
+ - {case: accs, gndr: neut, nmbr: sing, text: тринадцатое}
572
+ - {case: accs, nmbr: plur, text: тринадцатые}
573
+ - {case: accs, nmbr: plur, text: тринадцатых}
574
+ - {case: ablt, gndr: masc, nmbr: sing, text: тринадцатым}
575
+ - {case: ablt, gndr: femn, nmbr: sing, text: тринадцатой}
576
+ - {case: ablt, gndr: neut, nmbr: sing, text: тринадцатым}
577
+ - {case: ablt, nmbr: plur, text: тринадцатыми}
578
+ - {case: loct, gndr: masc, nmbr: sing, text: тринадцатом}
579
+ - {case: loct, gndr: femn, nmbr: sing, text: тринадцатой}
580
+ - {case: loct, gndr: neut, nmbr: sing, text: тринадцатом}
581
+ - {case: loct, nmbr: plur, text: тринадцатых}
582
+ o:
583
+ - {case: nomn, text: тринадцать}
584
+ - {case: gent, text: тринадцати}
585
+ - {case: datv, text: тринадцати}
586
+ - {case: accs, text: тринадцать}
587
+ - {case: ablt, text: тринадцатью}
588
+ - {case: loct, text: тринадцати}
589
+ 14:
590
+ p:
591
+ - {case: nomn, gndr: masc, nmbr: sing, text: четырнадцатый}
592
+ - {case: nomn, gndr: femn, nmbr: sing, text: четырнадцатая}
593
+ - {case: nomn, gndr: neut, nmbr: sing, text: четырнадцатое}
594
+ - {case: nomn, nmbr: plur, text: четырнадцатые}
595
+ - {case: gent, gndr: masc, nmbr: sing, text: четырнадцатого}
596
+ - {case: gent, gndr: femn, nmbr: sing, text: четырнадцатой}
597
+ - {case: gent, gndr: neut, nmbr: sing, text: четырнадцатого}
598
+ - {case: gent, nmbr: plur, text: четырнадцатых}
599
+ - {case: datv, gndr: masc, nmbr: sing, text: четырнадцатому}
600
+ - {case: datv, gndr: femn, nmbr: sing, text: четырнадцатой}
601
+ - {case: datv, gndr: neut, nmbr: sing, text: четырнадцатому}
602
+ - {case: datv, nmbr: plur, text: четырнадцатым}
603
+ - {case: accs, gndr: masc, nmbr: sing, text: четырнадцатого}
604
+ - {case: accs, gndr: masc, nmbr: sing, text: четырнадцатый}
605
+ - {case: accs, gndr: femn, nmbr: sing, text: четырнадцатую}
606
+ - {case: accs, gndr: neut, nmbr: sing, text: четырнадцатое}
607
+ - {case: accs, nmbr: plur, text: четырнадцатые}
608
+ - {case: accs, nmbr: plur, text: четырнадцатых}
609
+ - {case: ablt, gndr: masc, nmbr: sing, text: четырнадцатым}
610
+ - {case: ablt, gndr: femn, nmbr: sing, text: четырнадцатой}
611
+ - {case: ablt, gndr: neut, nmbr: sing, text: четырнадцатым}
612
+ - {case: ablt, nmbr: plur, text: четырнадцатыми}
613
+ - {case: loct, gndr: masc, nmbr: sing, text: четырнадцатом}
614
+ - {case: loct, gndr: femn, nmbr: sing, text: четырнадцатой}
615
+ - {case: loct, gndr: neut, nmbr: sing, text: четырнадцатом}
616
+ - {case: loct, nmbr: plur, text: четырнадцатых}
617
+ o:
618
+ - {case: nomn, text: четырнадцать}
619
+ - {case: gent, text: четырнадцати}
620
+ - {case: datv, text: четырнадцати}
621
+ - {case: accs, text: четырнадцать}
622
+ - {case: ablt, text: четырнадцатью}
623
+ - {case: loct, text: четырнадцати}
624
+ 15:
625
+ p:
626
+ - {case: nomn, gndr: masc, nmbr: sing, text: пятнадцатый}
627
+ - {case: nomn, gndr: femn, nmbr: sing, text: пятнадцатая}
628
+ - {case: nomn, gndr: neut, nmbr: sing, text: пятнадцатое}
629
+ - {case: nomn, nmbr: plur, text: пятнадцатые}
630
+ - {case: gent, gndr: masc, nmbr: sing, text: пятнадцатого}
631
+ - {case: gent, gndr: femn, nmbr: sing, text: пятнадцатой}
632
+ - {case: gent, gndr: neut, nmbr: sing, text: пятнадцатого}
633
+ - {case: gent, nmbr: plur, text: пятнадцатых}
634
+ - {case: datv, gndr: masc, nmbr: sing, text: пятнадцатому}
635
+ - {case: datv, gndr: femn, nmbr: sing, text: пятнадцатой}
636
+ - {case: datv, gndr: neut, nmbr: sing, text: пятнадцатому}
637
+ - {case: datv, nmbr: plur, text: пятнадцатым}
638
+ - {case: accs, gndr: masc, nmbr: sing, text: пятнадцатого}
639
+ - {case: accs, gndr: masc, nmbr: sing, text: пятнадцатый}
640
+ - {case: accs, gndr: femn, nmbr: sing, text: пятнадцатую}
641
+ - {case: accs, gndr: neut, nmbr: sing, text: пятнадцатое}
642
+ - {case: accs, nmbr: plur, text: пятнадцатые}
643
+ - {case: accs, nmbr: plur, text: пятнадцатых}
644
+ - {case: ablt, gndr: masc, nmbr: sing, text: пятнадцатым}
645
+ - {case: ablt, gndr: femn, nmbr: sing, text: пятнадцатой}
646
+ - {case: ablt, gndr: neut, nmbr: sing, text: пятнадцатым}
647
+ - {case: ablt, nmbr: plur, text: пятнадцатыми}
648
+ - {case: loct, gndr: masc, nmbr: sing, text: пятнадцатом}
649
+ - {case: loct, gndr: femn, nmbr: sing, text: пятнадцатой}
650
+ - {case: loct, gndr: neut, nmbr: sing, text: пятнадцатом}
651
+ - {case: loct, nmbr: plur, text: пятнадцатых}
652
+ o:
653
+ - {case: nomn, text: пятнадцать}
654
+ - {case: gent, text: пятнадцати}
655
+ - {case: datv, text: пятнадцати}
656
+ - {case: accs, text: пятнадцать}
657
+ - {case: ablt, text: пятнадцатью}
658
+ - {case: loct, text: пятнадцати}
659
+ 16:
660
+ p:
661
+ - {case: nomn, gndr: masc, nmbr: sing, text: шестнадцатый}
662
+ - {case: nomn, gndr: femn, nmbr: sing, text: шестнадцатая}
663
+ - {case: nomn, gndr: neut, nmbr: sing, text: шестнадцатое}
664
+ - {case: nomn, nmbr: plur, text: шестнадцатые}
665
+ - {case: gent, gndr: masc, nmbr: sing, text: шестнадцатого}
666
+ - {case: gent, gndr: femn, nmbr: sing, text: шестнадцатой}
667
+ - {case: gent, gndr: neut, nmbr: sing, text: шестнадцатого}
668
+ - {case: gent, nmbr: plur, text: шестнадцатых}
669
+ - {case: datv, gndr: masc, nmbr: sing, text: шестнадцатому}
670
+ - {case: datv, gndr: femn, nmbr: sing, text: шестнадцатой}
671
+ - {case: datv, gndr: neut, nmbr: sing, text: шестнадцатому}
672
+ - {case: datv, nmbr: plur, text: шестнадцатым}
673
+ - {case: accs, gndr: masc, nmbr: sing, text: шестнадцатого}
674
+ - {case: accs, gndr: masc, nmbr: sing, text: шестнадцатый}
675
+ - {case: accs, gndr: femn, nmbr: sing, text: шестнадцатую}
676
+ - {case: accs, gndr: neut, nmbr: sing, text: шестнадцатое}
677
+ - {case: accs, nmbr: plur, text: шестнадцатые}
678
+ - {case: accs, nmbr: plur, text: шестнадцатых}
679
+ - {case: ablt, gndr: masc, nmbr: sing, text: шестнадцатым}
680
+ - {case: ablt, gndr: femn, nmbr: sing, text: шестнадцатой}
681
+ - {case: ablt, gndr: neut, nmbr: sing, text: шестнадцатым}
682
+ - {case: ablt, nmbr: plur, text: шестнадцатыми}
683
+ - {case: loct, gndr: masc, nmbr: sing, text: шестнадцатом}
684
+ - {case: loct, gndr: femn, nmbr: sing, text: шестнадцатой}
685
+ - {case: loct, gndr: neut, nmbr: sing, text: шестнадцатом}
686
+ - {case: loct, nmbr: plur, text: шестнадцатых}
687
+ o:
688
+ - {case: nomn, text: шестнадцать}
689
+ - {case: gent, text: шестнадцати}
690
+ - {case: datv, text: шестнадцати}
691
+ - {case: accs, text: шестнадцать}
692
+ - {case: ablt, text: шестнадцатью}
693
+ - {case: loct, text: шестнадцати}
694
+ 17:
695
+ p:
696
+ - {case: nomn, gndr: masc, nmbr: sing, text: семнадцатый}
697
+ - {case: nomn, gndr: femn, nmbr: sing, text: семнадцатая}
698
+ - {case: nomn, gndr: neut, nmbr: sing, text: семнадцатое}
699
+ - {case: nomn, nmbr: plur, text: семнадцатые}
700
+ - {case: gent, gndr: masc, nmbr: sing, text: семнадцатого}
701
+ - {case: gent, gndr: femn, nmbr: sing, text: семнадцатой}
702
+ - {case: gent, gndr: neut, nmbr: sing, text: семнадцатого}
703
+ - {case: gent, nmbr: plur, text: семнадцатых}
704
+ - {case: datv, gndr: masc, nmbr: sing, text: семнадцатому}
705
+ - {case: datv, gndr: femn, nmbr: sing, text: семнадцатой}
706
+ - {case: datv, gndr: neut, nmbr: sing, text: семнадцатому}
707
+ - {case: datv, nmbr: plur, text: семнадцатым}
708
+ - {case: accs, gndr: masc, nmbr: sing, text: семнадцатого}
709
+ - {case: accs, gndr: masc, nmbr: sing, text: семнадцатый}
710
+ - {case: accs, gndr: femn, nmbr: sing, text: семнадцатую}
711
+ - {case: accs, gndr: neut, nmbr: sing, text: семнадцатое}
712
+ - {case: accs, nmbr: plur, text: семнадцатые}
713
+ - {case: accs, nmbr: plur, text: семнадцатых}
714
+ - {case: ablt, gndr: masc, nmbr: sing, text: семнадцатым}
715
+ - {case: ablt, gndr: femn, nmbr: sing, text: семнадцатой}
716
+ - {case: ablt, gndr: neut, nmbr: sing, text: семнадцатым}
717
+ - {case: ablt, nmbr: plur, text: семнадцатыми}
718
+ - {case: loct, gndr: masc, nmbr: sing, text: семнадцатом}
719
+ - {case: loct, gndr: femn, nmbr: sing, text: семнадцатой}
720
+ - {case: loct, gndr: neut, nmbr: sing, text: семнадцатом}
721
+ - {case: loct, nmbr: plur, text: семнадцатых}
722
+ o:
723
+ - {case: nomn, text: семнадцать}
724
+ - {case: gent, text: семнадцати}
725
+ - {case: datv, text: семнадцати}
726
+ - {case: accs, text: семнадцать}
727
+ - {case: ablt, text: семнадцатью}
728
+ - {case: loct, text: семнадцати}
729
+ 18:
730
+ p:
731
+ - {case: nomn, gndr: masc, nmbr: sing, text: восемнадцатый}
732
+ - {case: nomn, gndr: femn, nmbr: sing, text: восемнадцатая}
733
+ - {case: nomn, gndr: neut, nmbr: sing, text: восемнадцатое}
734
+ - {case: nomn, nmbr: plur, text: восемнадцатые}
735
+ - {case: gent, gndr: masc, nmbr: sing, text: восемнадцатого}
736
+ - {case: gent, gndr: femn, nmbr: sing, text: восемнадцатой}
737
+ - {case: gent, gndr: neut, nmbr: sing, text: восемнадцатого}
738
+ - {case: gent, nmbr: plur, text: восемнадцатых}
739
+ - {case: datv, gndr: masc, nmbr: sing, text: восемнадцатому}
740
+ - {case: datv, gndr: femn, nmbr: sing, text: восемнадцатой}
741
+ - {case: datv, gndr: neut, nmbr: sing, text: восемнадцатому}
742
+ - {case: datv, nmbr: plur, text: восемнадцатым}
743
+ - {case: accs, gndr: masc, nmbr: sing, text: восемнадцатого}
744
+ - {case: accs, gndr: masc, nmbr: sing, text: восемнадцатый}
745
+ - {case: accs, gndr: femn, nmbr: sing, text: восемнадцатую}
746
+ - {case: accs, gndr: neut, nmbr: sing, text: восемнадцатое}
747
+ - {case: accs, nmbr: plur, text: восемнадцатые}
748
+ - {case: accs, nmbr: plur, text: восемнадцатых}
749
+ - {case: ablt, gndr: masc, nmbr: sing, text: восемнадцатым}
750
+ - {case: ablt, gndr: femn, nmbr: sing, text: восемнадцатой}
751
+ - {case: ablt, gndr: neut, nmbr: sing, text: восемнадцатым}
752
+ - {case: ablt, nmbr: plur, text: восемнадцатыми}
753
+ - {case: loct, gndr: masc, nmbr: sing, text: восемнадцатом}
754
+ - {case: loct, gndr: femn, nmbr: sing, text: восемнадцатой}
755
+ - {case: loct, gndr: neut, nmbr: sing, text: восемнадцатом}
756
+ - {case: loct, nmbr: plur, text: восемнадцатых}
757
+ o:
758
+ - {case: nomn, text: восемнадцать}
759
+ - {case: gent, text: восемнадцати}
760
+ - {case: datv, text: восемнадцати}
761
+ - {case: accs, text: восемнадцать}
762
+ - {case: ablt, text: восемнадцатью}
763
+ - {case: loct, text: восемнадцати}
764
+ 19:
765
+ p:
766
+ - {case: nomn, gndr: masc, nmbr: sing, text: девятнадцатый}
767
+ - {case: nomn, gndr: femn, nmbr: sing, text: девятнадцатая}
768
+ - {case: nomn, gndr: neut, nmbr: sing, text: девятнадцатое}
769
+ - {case: nomn, nmbr: plur, text: девятнадцатые}
770
+ - {case: gent, gndr: masc, nmbr: sing, text: девятнадцатого}
771
+ - {case: gent, gndr: femn, nmbr: sing, text: девятнадцатой}
772
+ - {case: gent, gndr: neut, nmbr: sing, text: девятнадцатого}
773
+ - {case: gent, nmbr: plur, text: девятнадцатых}
774
+ - {case: datv, gndr: masc, nmbr: sing, text: девятнадцатому}
775
+ - {case: datv, gndr: femn, nmbr: sing, text: девятнадцатой}
776
+ - {case: datv, gndr: neut, nmbr: sing, text: девятнадцатому}
777
+ - {case: datv, nmbr: plur, text: девятнадцатым}
778
+ - {case: accs, gndr: masc, nmbr: sing, text: девятнадцатого}
779
+ - {case: accs, gndr: masc, nmbr: sing, text: девятнадцатый}
780
+ - {case: accs, gndr: femn, nmbr: sing, text: девятнадцатую}
781
+ - {case: accs, gndr: neut, nmbr: sing, text: девятнадцатое}
782
+ - {case: accs, nmbr: plur, text: девятнадцатые}
783
+ - {case: accs, nmbr: plur, text: девятнадцатых}
784
+ - {case: ablt, gndr: masc, nmbr: sing, text: девятнадцатым}
785
+ - {case: ablt, gndr: femn, nmbr: sing, text: девятнадцатой}
786
+ - {case: ablt, gndr: neut, nmbr: sing, text: девятнадцатым}
787
+ - {case: ablt, nmbr: plur, text: девятнадцатыми}
788
+ - {case: loct, gndr: masc, nmbr: sing, text: девятнадцатом}
789
+ - {case: loct, gndr: femn, nmbr: sing, text: девятнадцатой}
790
+ - {case: loct, gndr: neut, nmbr: sing, text: девятнадцатом}
791
+ - {case: loct, nmbr: plur, text: девятнадцатых}
792
+ o:
793
+ - {case: nomn, text: девятнадцать}
794
+ - {case: gent, text: девятнадцати}
795
+ - {case: datv, text: девятнадцати}
796
+ - {case: accs, text: девятнадцать}
797
+ - {case: ablt, text: девятнадцатью}
798
+ - {case: loct, text: девятнадцати}
799
+ 20:
800
+ p:
801
+ - {case: nomn, gndr: masc, nmbr: sing, text: двадцатый}
802
+ - {case: nomn, gndr: femn, nmbr: sing, text: двадцатая}
803
+ - {case: nomn, gndr: neut, nmbr: sing, text: двадцатое}
804
+ - {case: nomn, nmbr: plur, text: двадцатые}
805
+ - {case: gent, gndr: masc, nmbr: sing, text: двадцатого}
806
+ - {case: gent, gndr: femn, nmbr: sing, text: двадцатой}
807
+ - {case: gent, gndr: neut, nmbr: sing, text: двадцатого}
808
+ - {case: gent, nmbr: plur, text: двадцатых}
809
+ - {case: datv, gndr: masc, nmbr: sing, text: двадцатому}
810
+ - {case: datv, gndr: femn, nmbr: sing, text: двадцатой}
811
+ - {case: datv, gndr: neut, nmbr: sing, text: двадцатому}
812
+ - {case: datv, nmbr: plur, text: двадцатым}
813
+ - {case: accs, gndr: masc, nmbr: sing, text: двадцатого}
814
+ - {case: accs, gndr: masc, nmbr: sing, text: двадцатый}
815
+ - {case: accs, gndr: femn, nmbr: sing, text: двадцатую}
816
+ - {case: accs, gndr: neut, nmbr: sing, text: двадцатое}
817
+ - {case: accs, nmbr: plur, text: двадцатые}
818
+ - {case: accs, nmbr: plur, text: двадцатых}
819
+ - {case: ablt, gndr: masc, nmbr: sing, text: двадцатым}
820
+ - {case: ablt, gndr: femn, nmbr: sing, text: двадцатой}
821
+ - {case: ablt, gndr: neut, nmbr: sing, text: двадцатым}
822
+ - {case: ablt, nmbr: plur, text: двадцатыми}
823
+ - {case: loct, gndr: masc, nmbr: sing, text: двадцатом}
824
+ - {case: loct, gndr: femn, nmbr: sing, text: двадцатой}
825
+ - {case: loct, gndr: neut, nmbr: sing, text: двадцатом}
826
+ - {case: loct, nmbr: plur, text: двадцатых}
827
+ o:
828
+ - {case: nomn, text: двадцать}
829
+ - {case: gent, text: двадцати}
830
+ - {case: datv, text: двадцати}
831
+ - {case: accs, text: двадцать}
832
+ - {case: ablt, text: двадцатью}
833
+ - {case: loct, text: двадцати}
834
+ 30:
835
+ p:
836
+ - {case: nomn, gndr: masc, nmbr: sing, text: тридцатый}
837
+ - {case: nomn, gndr: femn, nmbr: sing, text: тридцатая}
838
+ - {case: nomn, gndr: neut, nmbr: sing, text: тридцатое}
839
+ - {case: nomn, nmbr: plur, text: тридцатые}
840
+ - {case: gent, gndr: masc, nmbr: sing, text: тридцатого}
841
+ - {case: gent, gndr: femn, nmbr: sing, text: тридцатой}
842
+ - {case: gent, gndr: neut, nmbr: sing, text: тридцатого}
843
+ - {case: gent, nmbr: plur, text: тридцатых}
844
+ - {case: datv, gndr: masc, nmbr: sing, text: тридцатому}
845
+ - {case: datv, gndr: femn, nmbr: sing, text: тридцатой}
846
+ - {case: datv, gndr: neut, nmbr: sing, text: тридцатому}
847
+ - {case: datv, nmbr: plur, text: тридцатым}
848
+ - {case: accs, gndr: masc, nmbr: sing, text: тридцатого}
849
+ - {case: accs, gndr: masc, nmbr: sing, text: тридцатый}
850
+ - {case: accs, gndr: femn, nmbr: sing, text: тридцатую}
851
+ - {case: accs, gndr: neut, nmbr: sing, text: тридцатое}
852
+ - {case: accs, nmbr: plur, text: тридцатые}
853
+ - {case: accs, nmbr: plur, text: тридцатых}
854
+ - {case: ablt, gndr: masc, nmbr: sing, text: тридцатым}
855
+ - {case: ablt, gndr: femn, nmbr: sing, text: тридцатой}
856
+ - {case: ablt, gndr: neut, nmbr: sing, text: тридцатым}
857
+ - {case: ablt, nmbr: plur, text: тридцатыми}
858
+ - {case: loct, gndr: masc, nmbr: sing, text: тридцатом}
859
+ - {case: loct, gndr: femn, nmbr: sing, text: тридцатой}
860
+ - {case: loct, gndr: neut, nmbr: sing, text: тридцатом}
861
+ - {case: loct, nmbr: plur, text: тридцатых}
862
+ o:
863
+ - {case: nomn, text: тридцать}
864
+ - {case: gent, text: тридцати}
865
+ - {case: datv, text: тридцати}
866
+ - {case: accs, text: тридцать}
867
+ - {case: ablt, text: тридцатью}
868
+ - {case: loct, text: тридцати}
869
+ 40:
870
+ p:
871
+ - {case: nomn, gndr: masc, nmbr: sing, text: сороковой}
872
+ - {case: nomn, gndr: femn, nmbr: sing, text: сороковая}
873
+ - {case: nomn, gndr: neut, nmbr: sing, text: сороковое}
874
+ - {case: nomn, nmbr: plur, text: сороковые}
875
+ - {case: gent, gndr: masc, nmbr: sing, text: сорокового}
876
+ - {case: gent, gndr: femn, nmbr: sing, text: сороковой}
877
+ - {case: gent, gndr: neut, nmbr: sing, text: сорокового}
878
+ - {case: gent, nmbr: plur, text: сороковых}
879
+ - {case: datv, gndr: masc, nmbr: sing, text: сороковому}
880
+ - {case: datv, gndr: femn, nmbr: sing, text: сороковой}
881
+ - {case: datv, gndr: neut, nmbr: sing, text: сороковому}
882
+ - {case: datv, nmbr: plur, text: сороковым}
883
+ - {case: accs, gndr: masc, nmbr: sing, text: сорокового}
884
+ - {case: accs, gndr: masc, nmbr: sing, text: сороковой}
885
+ - {case: accs, gndr: femn, nmbr: sing, text: сороковую}
886
+ - {case: accs, gndr: neut, nmbr: sing, text: сороковое}
887
+ - {case: accs, nmbr: plur, text: сороковые}
888
+ - {case: accs, nmbr: plur, text: сороковых}
889
+ - {case: ablt, gndr: masc, nmbr: sing, text: сороковым}
890
+ - {case: ablt, gndr: femn, nmbr: sing, text: сороковой}
891
+ - {case: ablt, gndr: neut, nmbr: sing, text: сороковым}
892
+ - {case: ablt, nmbr: plur, text: сороковыми}
893
+ - {case: loct, gndr: masc, nmbr: sing, text: сороковом}
894
+ - {case: loct, gndr: femn, nmbr: sing, text: сороковой}
895
+ - {case: loct, gndr: neut, nmbr: sing, text: сороковом}
896
+ - {case: loct, nmbr: plur, text: сороковых}
897
+ o:
898
+ - {case: nomn, text: сорок}
899
+ - {case: gent, text: сорока}
900
+ - {case: datv, text: сорока}
901
+ - {case: accs, text: сорок}
902
+ - {case: ablt, text: сорока}
903
+ - {case: loct, text: сорока}
904
+ 50:
905
+ ad:
906
+ - {case: nomn, gndr: neut, nmbr: sing, text: полсотни}
907
+ - {case: accs, gndr: neut, nmbr: sing, text: полсотни}
908
+ p:
909
+ - {case: nomn, gndr: masc, nmbr: sing, text: пятидесятый}
910
+ - {case: nomn, gndr: femn, nmbr: sing, text: пятидесятая}
911
+ - {case: nomn, gndr: neut, nmbr: sing, text: пятидесятое}
912
+ - {case: nomn, nmbr: plur, text: пятидесятые}
913
+ - {case: gent, gndr: masc, nmbr: sing, text: пятидесятого}
914
+ - {case: gent, gndr: femn, nmbr: sing, text: пятидесятой}
915
+ - {case: gent, gndr: neut, nmbr: sing, text: пятидесятого}
916
+ - {case: gent, nmbr: plur, text: пятидесятых}
917
+ - {case: datv, gndr: masc, nmbr: sing, text: пятидесятому}
918
+ - {case: datv, gndr: femn, nmbr: sing, text: пятидесятой}
919
+ - {case: datv, gndr: neut, nmbr: sing, text: пятидесятому}
920
+ - {case: datv, nmbr: plur, text: пятидесятым}
921
+ - {case: accs, gndr: masc, nmbr: sing, text: пятидесятого}
922
+ - {case: accs, gndr: masc, nmbr: sing, text: пятидесятый}
923
+ - {case: accs, gndr: femn, nmbr: sing, text: пятидесятую}
924
+ - {case: accs, gndr: neut, nmbr: sing, text: пятидесятое}
925
+ - {case: accs, nmbr: plur, text: пятидесятые}
926
+ - {case: accs, nmbr: plur, text: пятидесятых}
927
+ - {case: ablt, gndr: masc, nmbr: sing, text: пятидесятым}
928
+ - {case: ablt, gndr: femn, nmbr: sing, text: пятидесятой}
929
+ - {case: ablt, gndr: neut, nmbr: sing, text: пятидесятым}
930
+ - {case: ablt, nmbr: plur, text: пятидесятыми}
931
+ - {case: loct, gndr: masc, nmbr: sing, text: пятидесятом}
932
+ - {case: loct, gndr: femn, nmbr: sing, text: пятидесятой}
933
+ - {case: loct, gndr: neut, nmbr: sing, text: пятидесятом}
934
+ - {case: loct, nmbr: plur, text: пятидесятых}
935
+ o:
936
+ - {case: nomn, text: пятьдесят}
937
+ - {case: gent, text: пятидесяти}
938
+ - {case: datv, text: пятидесяти}
939
+ - {case: accs, text: пятьдесят}
940
+ - {case: ablt, text: пятьюдесятью}
941
+ - {case: loct, text: пятидесяти}
942
+ 60:
943
+ p:
944
+ - {case: nomn, gndr: masc, nmbr: sing, text: шестидесятый}
945
+ - {case: nomn, gndr: femn, nmbr: sing, text: шестидесятая}
946
+ - {case: nomn, gndr: neut, nmbr: sing, text: шестидесятое}
947
+ - {case: nomn, nmbr: plur, text: шестидесятые}
948
+ - {case: gent, gndr: masc, nmbr: sing, text: шестидесятого}
949
+ - {case: gent, gndr: femn, nmbr: sing, text: шестидесятой}
950
+ - {case: gent, gndr: neut, nmbr: sing, text: шестидесятого}
951
+ - {case: gent, nmbr: plur, text: шестидесятых}
952
+ - {case: datv, gndr: masc, nmbr: sing, text: шестидесятому}
953
+ - {case: datv, gndr: femn, nmbr: sing, text: шестидесятой}
954
+ - {case: datv, gndr: neut, nmbr: sing, text: шестидесятому}
955
+ - {case: datv, nmbr: plur, text: шестидесятым}
956
+ - {case: accs, gndr: masc, nmbr: sing, text: шестидесятого}
957
+ - {case: accs, gndr: masc, nmbr: sing, text: шестидесятый}
958
+ - {case: accs, gndr: femn, nmbr: sing, text: шестидесятую}
959
+ - {case: accs, gndr: neut, nmbr: sing, text: шестидесятое}
960
+ - {case: accs, nmbr: plur, text: шестидесятые}
961
+ - {case: accs, nmbr: plur, text: шестидесятых}
962
+ - {case: ablt, gndr: masc, nmbr: sing, text: шестидесятым}
963
+ - {case: ablt, gndr: femn, nmbr: sing, text: шестидесятой}
964
+ - {case: ablt, gndr: neut, nmbr: sing, text: шестидесятым}
965
+ - {case: ablt, nmbr: plur, text: шестидесятыми}
966
+ - {case: loct, gndr: masc, nmbr: sing, text: шестидесятом}
967
+ - {case: loct, gndr: femn, nmbr: sing, text: шестидесятой}
968
+ - {case: loct, gndr: neut, nmbr: sing, text: шестидесятом}
969
+ - {case: loct, nmbr: plur, text: шестидесятых}
970
+ o:
971
+ - {case: nomn, text: шестьдесят}
972
+ - {case: gent, text: шестидесяти}
973
+ - {case: datv, text: шестидесяти}
974
+ - {case: accs, text: шестьдесят}
975
+ - {case: ablt, text: шестьюдесятью}
976
+ - {case: loct, text: шестидесяти}
977
+ 70:
978
+ p:
979
+ - {case: nomn, gndr: masc, nmbr: sing, text: семидесятый}
980
+ - {case: nomn, gndr: femn, nmbr: sing, text: семидесятая}
981
+ - {case: nomn, gndr: neut, nmbr: sing, text: семидесятое}
982
+ - {case: nomn, nmbr: plur, text: семидесятые}
983
+ - {case: gent, gndr: masc, nmbr: sing, text: семидесятого}
984
+ - {case: gent, gndr: femn, nmbr: sing, text: семидесятой}
985
+ - {case: gent, gndr: neut, nmbr: sing, text: семидесятого}
986
+ - {case: gent, nmbr: plur, text: семидесятых}
987
+ - {case: datv, gndr: masc, nmbr: sing, text: семидесятому}
988
+ - {case: datv, gndr: femn, nmbr: sing, text: семидесятой}
989
+ - {case: datv, gndr: neut, nmbr: sing, text: семидесятому}
990
+ - {case: datv, nmbr: plur, text: семидесятым}
991
+ - {case: accs, gndr: masc, nmbr: sing, text: семидесятого}
992
+ - {case: accs, gndr: masc, nmbr: sing, text: семидесятый}
993
+ - {case: accs, gndr: femn, nmbr: sing, text: семидесятую}
994
+ - {case: accs, gndr: neut, nmbr: sing, text: семидесятое}
995
+ - {case: accs, nmbr: plur, text: семидесятые}
996
+ - {case: accs, nmbr: plur, text: семидесятых}
997
+ - {case: ablt, gndr: masc, nmbr: sing, text: семидесятым}
998
+ - {case: ablt, gndr: femn, nmbr: sing, text: семидесятой}
999
+ - {case: ablt, gndr: neut, nmbr: sing, text: семидесятым}
1000
+ - {case: ablt, nmbr: plur, text: семидесятыми}
1001
+ - {case: loct, gndr: masc, nmbr: sing, text: семидесятом}
1002
+ - {case: loct, gndr: femn, nmbr: sing, text: семидесятой}
1003
+ - {case: loct, gndr: neut, nmbr: sing, text: семидесятом}
1004
+ - {case: loct, nmbr: plur, text: семидесятых}
1005
+ o:
1006
+ - {case: nomn, text: семьдесят}
1007
+ - {case: gent, text: семидесяти}
1008
+ - {case: datv, text: семидесяти}
1009
+ - {case: accs, text: семьдесят}
1010
+ - {case: ablt, text: семьюдесятью}
1011
+ - {case: loct, text: семидесяти}
1012
+ 80:
1013
+ p:
1014
+ - {case: nomn, gndr: masc, nmbr: sing, text: восьмидесятый}
1015
+ - {case: nomn, gndr: femn, nmbr: sing, text: восьмидесятая}
1016
+ - {case: nomn, gndr: neut, nmbr: sing, text: восьмидесятое}
1017
+ - {case: nomn, nmbr: plur, text: восьмидесятые}
1018
+ - {case: gent, gndr: masc, nmbr: sing, text: восьмидесятого}
1019
+ - {case: gent, gndr: femn, nmbr: sing, text: восьмидесятой}
1020
+ - {case: gent, gndr: neut, nmbr: sing, text: восьмидесятого}
1021
+ - {case: gent, nmbr: plur, text: восьмидесятых}
1022
+ - {case: datv, gndr: masc, nmbr: sing, text: восьмидесятому}
1023
+ - {case: datv, gndr: femn, nmbr: sing, text: восьмидесятой}
1024
+ - {case: datv, gndr: neut, nmbr: sing, text: восьмидесятому}
1025
+ - {case: datv, nmbr: plur, text: восьмидесятым}
1026
+ - {case: accs, gndr: masc, nmbr: sing, text: восьмидесятого}
1027
+ - {case: accs, gndr: masc, nmbr: sing, text: восьмидесятый}
1028
+ - {case: accs, gndr: femn, nmbr: sing, text: восьмидесятую}
1029
+ - {case: accs, gndr: neut, nmbr: sing, text: восьмидесятое}
1030
+ - {case: accs, nmbr: plur, text: восьмидесятые}
1031
+ - {case: accs, nmbr: plur, text: восьмидесятых}
1032
+ - {case: ablt, gndr: masc, nmbr: sing, text: восьмидесятым}
1033
+ - {case: ablt, gndr: femn, nmbr: sing, text: восьмидесятой}
1034
+ - {case: ablt, gndr: neut, nmbr: sing, text: восьмидесятым}
1035
+ - {case: ablt, nmbr: plur, text: восьмидесятыми}
1036
+ - {case: loct, gndr: masc, nmbr: sing, text: восьмидесятом}
1037
+ - {case: loct, gndr: femn, nmbr: sing, text: восьмидесятой}
1038
+ - {case: loct, gndr: neut, nmbr: sing, text: восьмидесятом}
1039
+ - {case: loct, nmbr: plur, text: восьмидесятых}
1040
+ o:
1041
+ - {case: nomn, text: восемьдесят}
1042
+ - {case: gent, text: восьмидесяти}
1043
+ - {case: datv, text: восьмидесяти}
1044
+ - {case: accs, text: восемьдесят}
1045
+ - {case: ablt, text: восемьюдесятью}
1046
+ - {case: ablt, text: восьмьюдесятью}
1047
+ - {case: loct, text: восьмидесяти}
1048
+ 90:
1049
+ p:
1050
+ - {case: nomn, gndr: masc, nmbr: sing, text: девяностый}
1051
+ - {case: nomn, gndr: femn, nmbr: sing, text: девяностая}
1052
+ - {case: nomn, gndr: neut, nmbr: sing, text: девяностое}
1053
+ - {case: nomn, nmbr: plur, text: девяностые}
1054
+ - {case: gent, gndr: masc, nmbr: sing, text: девяностого}
1055
+ - {case: gent, gndr: femn, nmbr: sing, text: девяностой}
1056
+ - {case: gent, gndr: neut, nmbr: sing, text: девяностого}
1057
+ - {case: gent, nmbr: plur, text: девяностых}
1058
+ - {case: datv, gndr: masc, nmbr: sing, text: девяностому}
1059
+ - {case: datv, gndr: femn, nmbr: sing, text: девяностой}
1060
+ - {case: datv, gndr: neut, nmbr: sing, text: девяностому}
1061
+ - {case: datv, nmbr: plur, text: девяностым}
1062
+ - {case: accs, gndr: masc, nmbr: sing, text: девяностого}
1063
+ - {case: accs, gndr: masc, nmbr: sing, text: девяностый}
1064
+ - {case: accs, gndr: femn, nmbr: sing, text: девяностую}
1065
+ - {case: accs, gndr: neut, nmbr: sing, text: девяностое}
1066
+ - {case: accs, nmbr: plur, text: девяностые}
1067
+ - {case: accs, nmbr: plur, text: девяностых}
1068
+ - {case: ablt, gndr: masc, nmbr: sing, text: девяностым}
1069
+ - {case: ablt, gndr: femn, nmbr: sing, text: девяностой}
1070
+ - {case: ablt, gndr: neut, nmbr: sing, text: девяностым}
1071
+ - {case: ablt, nmbr: plur, text: девяностыми}
1072
+ - {case: loct, gndr: masc, nmbr: sing, text: девяностом}
1073
+ - {case: loct, gndr: femn, nmbr: sing, text: девяностой}
1074
+ - {case: loct, gndr: neut, nmbr: sing, text: девяностом}
1075
+ - {case: loct, nmbr: plur, text: девяностых}
1076
+ o:
+ - {case: nomn, text: девяносто}
+ - {case: gent, text: девяноста}
+ - {case: datv, text: девяноста}
+ - {case: accs, text: девяносто}
+ - {case: ablt, text: девяноста}
+ - {case: loct, text: девяноста}
1077
+ 100:
1078
+ ad:
1079
+ - {case: nomn, gndr: femn, nmbr: sing, text: сотня}
1080
+ - {case: nomn, gndr: femn, nmbr: plur, text: сотни}
1081
+ - {case: gent, gndr: femn, nmbr: sing, text: сотни}
1082
+ - {case: gent, gndr: femn, nmbr: plur, text: сот}
1083
+ - {case: gent, gndr: femn, nmbr: plur, text: сотен}
1084
+ - {case: datv, gndr: femn, nmbr: sing, text: сотне}
1085
+ - {case: datv, gndr: femn, nmbr: plur, text: сотням}
1086
+ - {case: accs, gndr: femn, nmbr: sing, text: сотню}
1087
+ - {case: accs, gndr: femn, nmbr: plur, text: сотни}
1088
+ - {case: ablt, gndr: femn, nmbr: sing, text: сотней}
1089
+ - {case: ablt, gndr: femn, nmbr: sing, text: сотнею}
1090
+ - {case: ablt, gndr: femn, nmbr: plur, text: сотнями}
1091
+ - {case: loct, gndr: femn, nmbr: sing, text: сотне}
1092
+ - {case: loct, gndr: femn, nmbr: plur, text: сотнях}
1093
+ p:
1094
+ - {case: nomn, gndr: masc, nmbr: sing, text: сотый}
1095
+ - {case: nomn, gndr: femn, nmbr: sing, text: сотая}
1096
+ - {case: nomn, gndr: neut, nmbr: sing, text: сотое}
1097
+ - {case: nomn, nmbr: plur, text: сотые}
1098
+ - {case: gent, gndr: masc, nmbr: sing, text: сотого}
1099
+ - {case: gent, gndr: femn, nmbr: sing, text: сотой}
1100
+ - {case: gent, gndr: neut, nmbr: sing, text: сотого}
1101
+ - {case: gent, nmbr: plur, text: сотых}
1102
+ - {case: datv, gndr: masc, nmbr: sing, text: сотому}
1103
+ - {case: datv, gndr: femn, nmbr: sing, text: сотой}
1104
+ - {case: datv, gndr: neut, nmbr: sing, text: сотому}
1105
+ - {case: datv, nmbr: plur, text: сотым}
1106
+ - {case: accs, gndr: masc, nmbr: sing, text: сотого}
1107
+ - {case: accs, gndr: masc, nmbr: sing, text: сотый}
1108
+ - {case: accs, gndr: femn, nmbr: sing, text: сотую}
1109
+ - {case: accs, gndr: neut, nmbr: sing, text: сотое}
1110
+ - {case: accs, nmbr: plur, text: сотые}
1111
+ - {case: accs, nmbr: plur, text: сотых}
1112
+ - {case: ablt, gndr: masc, nmbr: sing, text: сотым}
1113
+ - {case: ablt, gndr: femn, nmbr: sing, text: сотой}
1114
+ - {case: ablt, gndr: neut, nmbr: sing, text: сотым}
1115
+ - {case: ablt, nmbr: plur, text: сотыми}
1116
+ - {case: loct, gndr: masc, nmbr: sing, text: сотом}
1117
+ - {case: loct, gndr: femn, nmbr: sing, text: сотой}
1118
+ - {case: loct, gndr: neut, nmbr: sing, text: сотом}
1119
+ - {case: loct, nmbr: plur, text: сотых}
1120
+ o:
1121
+ - {case: nomn, text: сто}
1122
+ - {case: gent, text: ста}
1123
+ - {case: datv, text: ста}
1124
+ - {case: accs, text: сто}
1125
+ - {case: ablt, text: ста}
1126
+ - {case: loct, text: ста}
1127
+ 200:
1128
+ p:
1129
+ - {case: nomn, gndr: masc, nmbr: sing, text: двухсотый}
1130
+ - {case: nomn, gndr: femn, nmbr: sing, text: двухсотая}
1131
+ - {case: nomn, gndr: neut, nmbr: sing, text: двухсотое}
1132
+ - {case: nomn, nmbr: plur, text: двухсотые}
1133
+ - {case: gent, gndr: masc, nmbr: sing, text: двухсотого}
1134
+ - {case: gent, gndr: femn, nmbr: sing, text: двухсотой}
1135
+ - {case: gent, gndr: neut, nmbr: sing, text: двухсотого}
1136
+ - {case: gent, nmbr: plur, text: двухсотых}
1137
+ - {case: datv, gndr: masc, nmbr: sing, text: двухсотому}
1138
+ - {case: datv, gndr: femn, nmbr: sing, text: двухсотой}
1139
+ - {case: datv, gndr: neut, nmbr: sing, text: двухсотому}
1140
+ - {case: datv, nmbr: plur, text: двухсотым}
1141
+ - {case: accs, gndr: masc, nmbr: sing, text: двухсотого}
1142
+ - {case: accs, gndr: masc, nmbr: sing, text: двухсотый}
1143
+ - {case: accs, gndr: femn, nmbr: sing, text: двухсотую}
1144
+ - {case: accs, gndr: neut, nmbr: sing, text: двухсотое}
1145
+ - {case: accs, nmbr: plur, text: двухсотые}
1146
+ - {case: accs, nmbr: plur, text: двухсотых}
1147
+ - {case: ablt, gndr: masc, nmbr: sing, text: двухсотым}
1148
+ - {case: ablt, gndr: femn, nmbr: sing, text: двухсотой}
1149
+ - {case: ablt, gndr: neut, nmbr: sing, text: двухсотым}
1150
+ - {case: ablt, nmbr: plur, text: двухсотыми}
1151
+ - {case: loct, gndr: masc, nmbr: sing, text: двухсотом}
1152
+ - {case: loct, gndr: femn, nmbr: sing, text: двухсотой}
1153
+ - {case: loct, gndr: neut, nmbr: sing, text: двухсотом}
1154
+ - {case: loct, nmbr: plur, text: двухсотых}
1155
+ o:
1156
+ - {case: nomn, text: двести}
1157
+ - {case: gent, text: двухсот}
1158
+ - {case: datv, text: двумстам}
1159
+ - {case: accs, text: двести}
1160
+ - {case: ablt, text: двумястами}
1161
+ - {case: loct, text: двухстах}
1162
+ 300:
1163
+ p:
1164
+ - {case: nomn, gndr: masc, nmbr: sing, text: трёхсотый}
1165
+ - {case: nomn, gndr: femn, nmbr: sing, text: трёхсотая}
1166
+ - {case: nomn, gndr: neut, nmbr: sing, text: трёхсотое}
1167
+ - {case: nomn, nmbr: plur, text: трёхсотые}
1168
+ - {case: gent, gndr: masc, nmbr: sing, text: трёхсотого}
1169
+ - {case: gent, gndr: femn, nmbr: sing, text: трёхсотой}
1170
+ - {case: gent, gndr: neut, nmbr: sing, text: трёхсотого}
1171
+ - {case: gent, nmbr: plur, text: трёхсотых}
1172
+ - {case: datv, gndr: masc, nmbr: sing, text: трёхсотому}
1173
+ - {case: datv, gndr: femn, nmbr: sing, text: трёхсотой}
1174
+ - {case: datv, gndr: neut, nmbr: sing, text: трёхсотому}
1175
+ - {case: datv, nmbr: plur, text: трёхсотым}
1176
+ - {case: accs, gndr: masc, nmbr: sing, text: трёхсотого}
1177
+ - {case: accs, gndr: masc, nmbr: sing, text: трёхсотый}
1178
+ - {case: accs, gndr: femn, nmbr: sing, text: трёхсотую}
1179
+ - {case: accs, gndr: neut, nmbr: sing, text: трёхсотое}
1180
+ - {case: accs, nmbr: plur, text: трёхсотые}
1181
+ - {case: accs, nmbr: plur, text: трёхсотых}
1182
+ - {case: ablt, gndr: masc, nmbr: sing, text: трёхсотым}
1183
+ - {case: ablt, gndr: femn, nmbr: sing, text: трёхсотой}
1184
+ - {case: ablt, gndr: neut, nmbr: sing, text: трёхсотым}
1185
+ - {case: ablt, nmbr: plur, text: трёхсотыми}
1186
+ - {case: loct, gndr: masc, nmbr: sing, text: трёхсотом}
1187
+ - {case: loct, gndr: femn, nmbr: sing, text: трёхсотой}
1188
+ - {case: loct, gndr: neut, nmbr: sing, text: трёхсотом}
1189
+ - {case: loct, nmbr: plur, text: трёхсотых}
1190
+ o:
1191
+ - {case: nomn, text: триста}
1192
+ - {case: gent, text: трехсот}
1193
+ - {case: datv, text: тремстам}
1194
+ - {case: accs, text: триста}
1195
+ - {case: ablt, text: тремястами}
1196
+ - {case: loct, text: трехстах}
1197
+ 400:
1198
+ p:
1199
+ - {case: nomn, gndr: masc, nmbr: sing, text: четырёхсотый}
1200
+ - {case: nomn, gndr: femn, nmbr: sing, text: четырёхсотая}
1201
+ - {case: nomn, gndr: neut, nmbr: sing, text: четырёхсотое}
1202
+ - {case: nomn, nmbr: plur, text: четырёхсотые}
1203
+ - {case: gent, gndr: masc, nmbr: sing, text: четырёхсотого}
1204
+ - {case: gent, gndr: femn, nmbr: sing, text: четырёхсотой}
1205
+ - {case: gent, gndr: neut, nmbr: sing, text: четырёхсотого}
1206
+ - {case: gent, nmbr: plur, text: четырёхсотых}
1207
+ - {case: datv, gndr: masc, nmbr: sing, text: четырёхсотому}
1208
+ - {case: datv, gndr: femn, nmbr: sing, text: четырёхсотой}
1209
+ - {case: datv, gndr: neut, nmbr: sing, text: четырёхсотому}
1210
+ - {case: datv, nmbr: plur, text: четырёхсотым}
1211
+ - {case: accs, gndr: masc, nmbr: sing, text: четырёхсотого}
1212
+ - {case: accs, gndr: masc, nmbr: sing, text: четырёхсотый}
1213
+ - {case: accs, gndr: femn, nmbr: sing, text: четырёхсотую}
1214
+ - {case: accs, gndr: neut, nmbr: sing, text: четырёхсотое}
1215
+ - {case: accs, nmbr: plur, text: четырёхсотые}
1216
+ - {case: accs, nmbr: plur, text: четырёхсотых}
1217
+ - {case: ablt, gndr: masc, nmbr: sing, text: четырёхсотым}
1218
+ - {case: ablt, gndr: femn, nmbr: sing, text: четырёхсотой}
1219
+ - {case: ablt, gndr: neut, nmbr: sing, text: четырёхсотым}
1220
+ - {case: ablt, nmbr: plur, text: четырёхсотыми}
1221
+ - {case: loct, gndr: masc, nmbr: sing, text: четырёхсотом}
1222
+ - {case: loct, gndr: femn, nmbr: sing, text: четырёхсотой}
1223
+ - {case: loct, gndr: neut, nmbr: sing, text: четырёхсотом}
1224
+ - {case: loct, nmbr: plur, text: четырёхсотых}
1225
+ o:
1226
+ - {case: nomn, text: четыреста}
1227
+ - {case: gent, text: четырехсот}
1228
+ - {case: datv, text: четыремстам}
1229
+ - {case: accs, text: четыреста}
1230
+ - {case: ablt, text: четырьмястами}
1231
+ - {case: loct, text: четырехстах}
1232
+ 500:
1233
+ p:
1234
+ - {case: nomn, gndr: masc, nmbr: sing, text: пятисотый}
1235
+ - {case: nomn, gndr: femn, nmbr: sing, text: пятисотая}
1236
+ - {case: nomn, gndr: neut, nmbr: sing, text: пятисотое}
1237
+ - {case: nomn, nmbr: plur, text: пятисотые}
1238
+ - {case: gent, gndr: masc, nmbr: sing, text: пятисотого}
1239
+ - {case: gent, gndr: femn, nmbr: sing, text: пятисотой}
1240
+ - {case: gent, gndr: neut, nmbr: sing, text: пятисотого}
1241
+ - {case: gent, nmbr: plur, text: пятисотых}
1242
+ - {case: datv, gndr: masc, nmbr: sing, text: пятисотому}
1243
+ - {case: datv, gndr: femn, nmbr: sing, text: пятисотой}
1244
+ - {case: datv, gndr: neut, nmbr: sing, text: пятисотому}
1245
+ - {case: datv, nmbr: plur, text: пятисотым}
1246
+ - {case: accs, gndr: masc, nmbr: sing, text: пятисотого}
1247
+ - {case: accs, gndr: masc, nmbr: sing, text: пятисотый}
1248
+ - {case: accs, gndr: femn, nmbr: sing, text: пятисотую}
1249
+ - {case: accs, gndr: neut, nmbr: sing, text: пятисотое}
1250
+ - {case: accs, nmbr: plur, text: пятисотые}
1251
+ - {case: accs, nmbr: plur, text: пятисотых}
1252
+ - {case: ablt, gndr: masc, nmbr: sing, text: пятисотым}
1253
+ - {case: ablt, gndr: femn, nmbr: sing, text: пятисотой}
1254
+ - {case: ablt, gndr: neut, nmbr: sing, text: пятисотым}
1255
+ - {case: ablt, nmbr: plur, text: пятисотыми}
1256
+ - {case: loct, gndr: masc, nmbr: sing, text: пятисотом}
1257
+ - {case: loct, gndr: femn, nmbr: sing, text: пятисотой}
1258
+ - {case: loct, gndr: neut, nmbr: sing, text: пятисотом}
1259
+ - {case: loct, nmbr: plur, text: пятисотых}
1260
+ o:
1261
+ - {case: nomn, text: пятьсот}
1262
+ - {case: gent, text: пятисот}
1263
+ - {case: datv, text: пятистам}
1264
+ - {case: accs, text: пятьсот}
1265
+ - {case: ablt, text: пятьюстами}
1266
+ - {case: loct, text: пятистах}
1267
+ 600:
1268
+ p:
1269
+ - {case: nomn, gndr: masc, nmbr: sing, text: шестисотый}
1270
+ - {case: nomn, gndr: femn, nmbr: sing, text: шестисотая}
1271
+ - {case: nomn, gndr: neut, nmbr: sing, text: шестисотое}
1272
+ - {case: nomn, nmbr: plur, text: шестисотые}
1273
+ - {case: gent, gndr: masc, nmbr: sing, text: шестисотого}
1274
+ - {case: gent, gndr: femn, nmbr: sing, text: шестисотой}
1275
+ - {case: gent, gndr: neut, nmbr: sing, text: шестисотого}
1276
+ - {case: gent, nmbr: plur, text: шестисотых}
1277
+ - {case: datv, gndr: masc, nmbr: sing, text: шестисотому}
1278
+ - {case: datv, gndr: femn, nmbr: sing, text: шестисотой}
1279
+ - {case: datv, gndr: neut, nmbr: sing, text: шестисотому}
1280
+ - {case: datv, nmbr: plur, text: шестисотым}
1281
+ - {case: accs, gndr: masc, nmbr: sing, text: шестисотого}
1282
+ - {case: accs, gndr: masc, nmbr: sing, text: шестисотый}
1283
+ - {case: accs, gndr: femn, nmbr: sing, text: шестисотую}
1284
+ - {case: accs, gndr: neut, nmbr: sing, text: шестисотое}
1285
+ - {case: accs, nmbr: plur, text: шестисотые}
1286
+ - {case: accs, nmbr: plur, text: шестисотых}
1287
+ - {case: ablt, gndr: masc, nmbr: sing, text: шестисотым}
1288
+ - {case: ablt, gndr: femn, nmbr: sing, text: шестисотой}
1289
+ - {case: ablt, gndr: neut, nmbr: sing, text: шестисотым}
1290
+ - {case: ablt, nmbr: plur, text: шестисотыми}
1291
+ - {case: loct, gndr: masc, nmbr: sing, text: шестисотом}
1292
+ - {case: loct, gndr: femn, nmbr: sing, text: шестисотой}
1293
+ - {case: loct, gndr: neut, nmbr: sing, text: шестисотом}
1294
+ - {case: loct, nmbr: plur, text: шестисотых}
1295
+ o:
1296
+ - {case: nomn, text: шестьсот}
1297
+ - {case: gent, text: шестисот}
1298
+ - {case: datv, text: шестистам}
1299
+ - {case: accs, text: шестьсот}
1300
+ - {case: ablt, text: шестьюстами}
1301
+ - {case: loct, text: шестистах}
1302
+ 700:
1303
+ p:
1304
+ - {case: nomn, gndr: masc, nmbr: sing, text: семисотый}
1305
+ - {case: nomn, gndr: femn, nmbr: sing, text: семисотая}
1306
+ - {case: nomn, gndr: neut, nmbr: sing, text: семисотое}
1307
+ - {case: nomn, nmbr: plur, text: семисотые}
1308
+ - {case: gent, gndr: masc, nmbr: sing, text: семисотого}
1309
+ - {case: gent, gndr: femn, nmbr: sing, text: семисотой}
1310
+ - {case: gent, gndr: neut, nmbr: sing, text: семисотого}
1311
+ - {case: gent, nmbr: plur, text: семисотых}
1312
+ - {case: datv, gndr: masc, nmbr: sing, text: семисотому}
1313
+ - {case: datv, gndr: femn, nmbr: sing, text: семисотой}
1314
+ - {case: datv, gndr: neut, nmbr: sing, text: семисотому}
1315
+ - {case: datv, nmbr: plur, text: семисотым}
1316
+ - {case: accs, gndr: masc, nmbr: sing, text: семисотого}
1317
+ - {case: accs, gndr: masc, nmbr: sing, text: семисотый}
1318
+ - {case: accs, gndr: femn, nmbr: sing, text: семисотую}
1319
+ - {case: accs, gndr: neut, nmbr: sing, text: семисотое}
1320
+ - {case: accs, nmbr: plur, text: семисотые}
1321
+ - {case: accs, nmbr: plur, text: семисотых}
1322
+ - {case: ablt, gndr: masc, nmbr: sing, text: семисотым}
1323
+ - {case: ablt, gndr: femn, nmbr: sing, text: семисотой}
1324
+ - {case: ablt, gndr: neut, nmbr: sing, text: семисотым}
1325
+ - {case: ablt, nmbr: plur, text: семисотыми}
1326
+ - {case: loct, gndr: masc, nmbr: sing, text: семисотом}
1327
+ - {case: loct, gndr: femn, nmbr: sing, text: семисотой}
1328
+ - {case: loct, gndr: neut, nmbr: sing, text: семисотом}
1329
+ - {case: loct, nmbr: plur, text: семисотых}
1330
+ o:
1331
+ - {case: nomn, text: семьсот}
1332
+ - {case: gent, text: семисот}
1333
+ - {case: datv, text: семистам}
1334
+ - {case: accs, text: семьсот}
1335
+ - {case: ablt, text: семьюстами}
1336
+ - {case: loct, text: семистах}
1337
+ 800:
1338
+ p:
1339
+ - {case: nomn, gndr: masc, nmbr: sing, text: восьмисотый}
1340
+ - {case: nomn, gndr: femn, nmbr: sing, text: восьмисотая}
1341
+ - {case: nomn, gndr: neut, nmbr: sing, text: восьмисотое}
1342
+ - {case: nomn, nmbr: plur, text: восьмисотые}
1343
+ - {case: gent, gndr: masc, nmbr: sing, text: восьмисотого}
1344
+ - {case: gent, gndr: femn, nmbr: sing, text: восьмисотой}
1345
+ - {case: gent, gndr: neut, nmbr: sing, text: восьмисотого}
1346
+ - {case: gent, nmbr: plur, text: восьмисотых}
1347
+ - {case: datv, gndr: masc, nmbr: sing, text: восьмисотому}
1348
+ - {case: datv, gndr: femn, nmbr: sing, text: восьмисотой}
1349
+ - {case: datv, gndr: neut, nmbr: sing, text: восьмисотому}
1350
+ - {case: datv, nmbr: plur, text: восьмисотым}
1351
+ - {case: accs, gndr: masc, nmbr: sing, text: восьмисотого}
1352
+ - {case: accs, gndr: masc, nmbr: sing, text: восьмисотый}
1353
+ - {case: accs, gndr: femn, nmbr: sing, text: восьмисотую}
1354
+ - {case: accs, gndr: neut, nmbr: sing, text: восьмисотое}
1355
+ - {case: accs, nmbr: plur, text: восьмисотые}
1356
+ - {case: accs, nmbr: plur, text: восьмисотых}
1357
+ - {case: ablt, gndr: masc, nmbr: sing, text: восьмисотым}
1358
+ - {case: ablt, gndr: femn, nmbr: sing, text: восьмисотой}
1359
+ - {case: ablt, gndr: neut, nmbr: sing, text: восьмисотым}
1360
+ - {case: ablt, nmbr: plur, text: восьмисотыми}
1361
+ - {case: loct, gndr: masc, nmbr: sing, text: восьмисотом}
1362
+ - {case: loct, gndr: femn, nmbr: sing, text: восьмисотой}
1363
+ - {case: loct, gndr: neut, nmbr: sing, text: восьмисотом}
1364
+ - {case: loct, nmbr: plur, text: восьмисотых}
1365
+ o:
1366
+ - {case: nomn, text: восемьсот}
1367
+ - {case: gent, text: восьмисот}
1368
+ - {case: datv, text: восьмистам}
1369
+ - {case: accs, text: восемьсот}
1370
+ - {case: ablt, text: восемьюстами}
1371
+ - {case: loct, text: восьмистах}
1372
+ 900:
1373
+ p:
1374
+ - {case: nomn, gndr: masc, nmbr: sing, text: девятисотый}
1375
+ - {case: nomn, gndr: femn, nmbr: sing, text: девятисотая}
1376
+ - {case: nomn, gndr: neut, nmbr: sing, text: девятисотое}
1377
+ - {case: nomn, nmbr: plur, text: девятисотые}
1378
+ - {case: gent, gndr: masc, nmbr: sing, text: девятисотого}
1379
+ - {case: gent, gndr: femn, nmbr: sing, text: девятисотой}
1380
+ - {case: gent, gndr: neut, nmbr: sing, text: девятисотого}
1381
+ - {case: gent, nmbr: plur, text: девятисотых}
1382
+ - {case: datv, gndr: masc, nmbr: sing, text: девятисотому}
1383
+ - {case: datv, gndr: femn, nmbr: sing, text: девятисотой}
1384
+ - {case: datv, gndr: neut, nmbr: sing, text: девятисотому}
1385
+ - {case: datv, nmbr: plur, text: девятисотым}
1386
+ - {case: accs, gndr: masc, nmbr: sing, text: девятисотого}
1387
+ - {case: accs, gndr: masc, nmbr: sing, text: девятисотый}
1388
+ - {case: accs, gndr: femn, nmbr: sing, text: девятисотую}
1389
+ - {case: accs, gndr: neut, nmbr: sing, text: девятисотое}
1390
+ - {case: accs, nmbr: plur, text: девятисотые}
1391
+ - {case: accs, nmbr: plur, text: девятисотых}
1392
+ - {case: ablt, gndr: masc, nmbr: sing, text: девятисотым}
1393
+ - {case: ablt, gndr: femn, nmbr: sing, text: девятисотой}
1394
+ - {case: ablt, gndr: neut, nmbr: sing, text: девятисотым}
1395
+ - {case: ablt, nmbr: plur, text: девятисотыми}
1396
+ - {case: loct, gndr: masc, nmbr: sing, text: девятисотом}
1397
+ - {case: loct, gndr: femn, nmbr: sing, text: девятисотой}
1398
+ - {case: loct, gndr: neut, nmbr: sing, text: девятисотом}
1399
+ - {case: loct, nmbr: plur, text: девятисотых}
1400
+ o:
1401
+ - {case: nomn, text: девятьсот}
1402
+ - {case: gent, text: девятисот}
1403
+ - {case: datv, text: девятистам}
1404
+ - {case: accs, text: девятьсот}
1405
+ - {case: ablt, text: девятьюстами}
1406
+ - {case: loct, text: девятистах}
1407
+ 1000:
1408
+ p:
1409
+ - {case: nomn, gndr: masc, nmbr: sing, text: тысячный}
1410
+ - {case: nomn, gndr: femn, nmbr: sing, text: тысячная}
1411
+ - {case: nomn, gndr: neut, nmbr: sing, text: тысячное}
1412
+ - {case: nomn, nmbr: plur, text: тысячные}
1413
+ - {case: gent, gndr: masc, nmbr: sing, text: тысячного}
1414
+ - {case: gent, gndr: femn, nmbr: sing, text: тысячной}
1415
+ - {case: gent, gndr: neut, nmbr: sing, text: тысячного}
1416
+ - {case: gent, nmbr: plur, text: тысячных}
1417
+ - {case: datv, gndr: masc, nmbr: sing, text: тысячному}
1418
+ - {case: datv, gndr: femn, nmbr: sing, text: тысячной}
1419
+ - {case: datv, gndr: neut, nmbr: sing, text: тысячному}
1420
+ - {case: datv, nmbr: plur, text: тысячным}
1421
+ - {case: accs, gndr: masc, nmbr: sing, text: тысячного}
1422
+ - {case: accs, gndr: masc, nmbr: sing, text: тысячный}
1423
+ - {case: accs, gndr: femn, nmbr: sing, text: тысячную}
1424
+ - {case: accs, gndr: neut, nmbr: sing, text: тысячное}
1425
+ - {case: accs, nmbr: plur, text: тысячные}
1426
+ - {case: accs, nmbr: plur, text: тысячных}
1427
+ - {case: ablt, gndr: masc, nmbr: sing, text: тысячным}
1428
+ - {case: ablt, gndr: femn, nmbr: sing, text: тысячной}
1429
+ - {case: ablt, gndr: femn, nmbr: sing, text: тысячною}
1430
+ - {case: ablt, gndr: neut, nmbr: sing, text: тысячным}
1431
+ - {case: ablt, nmbr: plur, text: тысячными}
1432
+ - {case: loct, gndr: masc, nmbr: sing, text: тысячном}
1433
+ - {case: loct, gndr: femn, nmbr: sing, text: тысячной}
1434
+ - {case: loct, gndr: neut, nmbr: sing, text: тысячном}
1435
+ - {case: loct, nmbr: plur, text: тысячных}
1436
+ o:
1437
+ - {case: nomn, text: тысяча}
1438
+ - {case: nomn, text: тысячи}
1439
+ - {case: gent, text: тысяч}
1440
+ - {case: gent, text: тысячи}
1441
+ - {case: datv, text: тысячам}
1442
+ - {case: datv, text: тысяче}
1443
+ - {case: accs, text: тысячи}
1444
+ - {case: accs, text: тысячу}
1445
+ - {case: ablt, text: тысячами}
1446
+ - {case: ablt, text: тысячей}
1447
+ - {case: ablt, text: тысячью}
1448
+ - {case: loct, text: тысячах}
1449
+ - {case: loct, text: тысяче}
1450
+ 1000000:
1451
+ p:
1452
+ - {case: nomn, gndr: masc, nmbr: sing, text: миллионный}
1453
+ - {case: nomn, gndr: femn, nmbr: sing, text: миллионная}
1454
+ - {case: nomn, gndr: neut, nmbr: sing, text: миллионное}
1455
+ - {case: nomn, nmbr: plur, text: миллионные}
1456
+ - {case: gent, gndr: masc, nmbr: sing, text: миллионного}
1457
+ - {case: gent, gndr: femn, nmbr: sing, text: миллионной}
1458
+ - {case: gent, gndr: neut, nmbr: sing, text: миллионного}
1459
+ - {case: gent, nmbr: plur, text: миллионных}
1460
+ - {case: datv, gndr: masc, nmbr: sing, text: миллионному}
1461
+ - {case: datv, gndr: femn, nmbr: sing, text: миллионной}
1462
+ - {case: datv, gndr: neut, nmbr: sing, text: миллионному}
1463
+ - {case: datv, nmbr: plur, text: миллионным}
1464
+ - {case: accs, gndr: masc, nmbr: sing, text: миллионного}
1465
+ - {case: accs, gndr: masc, nmbr: sing, text: миллионный}
1466
+ - {case: accs, gndr: femn, nmbr: sing, text: миллионную}
1467
+ - {case: accs, gndr: neut, nmbr: sing, text: миллионное}
1468
+ - {case: accs, nmbr: plur, text: миллионные}
1469
+ - {case: accs, nmbr: plur, text: миллионных}
1470
+ - {case: ablt, gndr: masc, nmbr: sing, text: миллионным}
1471
+ - {case: ablt, gndr: femn, nmbr: sing, text: миллионной}
1472
+ - {case: ablt, gndr: femn, nmbr: sing, text: миллионною}
1473
+ - {case: ablt, gndr: neut, nmbr: sing, text: миллионным}
1474
+ - {case: ablt, nmbr: plur, text: миллионными}
1475
+ - {case: loct, gndr: masc, nmbr: sing, text: миллионном}
1476
+ - {case: loct, gndr: femn, nmbr: sing, text: миллионной}
1477
+ - {case: loct, gndr: neut, nmbr: sing, text: миллионном}
1478
+ - {case: loct, nmbr: plur, text: миллионных}
1479
+ o:
1480
+ - {case: nomn, text: миллион}
1481
+ - {case: gent, text: миллиона}
1482
+ - {case: gent, text: миллионов}
1483
+ - {case: datv, text: миллионам}
1484
+ - {case: datv, text: миллиону}
1485
+ - {case: accs, text: миллион}
1486
+ - {case: ablt, text: миллионами}
1487
+ - {case: ablt, text: миллионом}
1488
+ - {case: loct, text: миллионах}
1489
+ - {case: loct, text: миллионе}
1490
+
1491
+ 1000000000:
1492
+ p:
1493
+ - {case: nomn, gndr: masc, nmbr: sing, text: миллиардный}
1494
+ - {case: nomn, gndr: femn, nmbr: sing, text: миллиардная}
1495
+ - {case: nomn, gndr: neut, nmbr: sing, text: миллиардное}
1496
+ - {case: nomn, nmbr: plur, text: миллиардные}
1497
+ - {case: gent, gndr: masc, nmbr: sing, text: миллиардного}
1498
+ - {case: gent, gndr: femn, nmbr: sing, text: миллиардной}
1499
+ - {case: gent, gndr: neut, nmbr: sing, text: миллиардного}
1500
+ - {case: datv, nmbr: plur, text: миллиардным}
1501
+ - {case: gent, nmbr: plur, text: миллиардных}
1502
+ - {case: datv, gndr: masc, nmbr: sing, text: миллиардному}
1503
+ - {case: datv, gndr: femn, nmbr: sing, text: миллиардной}
1504
+ - {case: datv, gndr: neut, nmbr: sing, text: миллиардному}
1505
+ - {case: accs, gndr: masc, nmbr: sing, text: миллиардного}
1506
+ - {case: accs, gndr: masc, nmbr: sing, text: миллиардный}
1507
+ - {case: accs, gndr: femn, nmbr: sing, text: миллиардную}
1508
+ - {case: accs, gndr: neut, nmbr: sing, text: миллиардное}
1509
+ - {case: accs, nmbr: plur, text: миллиардные}
1510
+ - {case: accs, nmbr: plur, text: миллиардных}
1511
+ - {case: ablt, gndr: masc, nmbr: sing, text: миллиардным}
1512
+ - {case: ablt, gndr: femn, nmbr: sing, text: миллиардной}
1513
+ - {case: ablt, gndr: femn, nmbr: sing, text: миллиардною}
1514
+ - {case: ablt, gndr: neut, nmbr: sing, text: миллиардным}
1515
+ - {case: ablt, nmbr: plur, text: миллиардными}
1516
+ - {case: loct, gndr: masc, nmbr: sing, text: миллиардном}
1517
+ - {case: loct, gndr: femn, nmbr: sing, text: миллиардной}
1518
+ - {case: loct, gndr: neut, nmbr: sing, text: миллиардном}
1519
+ - {case: loct, nmbr: plur, text: миллиардных}
1520
+ o:
1521
+ - {case: nomn, text: миллиард}
1522
+ - {case: nomn, text: миллиарды}
1523
+ - {case: gent, text: миллиарда}
1524
+ - {case: gent, text: миллиардов}
1525
+ - {case: datv, text: миллиардам}
1526
+ - {case: datv, text: миллиарду}
1527
+ - {case: accs, text: миллиард}
1528
+ - {case: accs, text: миллиарды}
1529
+ - {case: ablt, text: миллиардами}
1530
+ - {case: ablt, text: миллиардом}
1531
+ - {case: loct, text: миллиардах}
1532
+ - {case: loct, text: миллиарде}
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ scikit-learn==0.20.3
2
+ tqdm==4.31.1
3
+ numpy==1.16.2
4
+ PyYAML==5.1
5
+ tensorflow-gpu==1.13.1
6
+ lxml==4.3.3
tester.py ADDED
@@ -0,0 +1,256 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os, pickle
2
+ import numpy as np
3
+ import tensorflow as tf
4
+ from tqdm import tqdm
5
+ from model import RNN
6
+ from utils import CONFIG, decode_word, load_datasets
7
+
8
+
9
class Tester:
    """Evaluates the restored RNN model on the test split: per-grammeme
    classification, main classification, lemmatization and inflection."""

    def __init__(self):
        self.config = CONFIG
        # Lemmatization is fed the main class through a placeholder at test
        # time instead of predicting it inside the graph.
        self.config['graph_part_configs']['lemm']['use_cls_placeholder'] = True
        self.rnn = RNN(True)
        # char -> index lookup used to encode words for the network
        self.chars = {c: index for index, c in enumerate(self.config['chars'])}
        self.batch_size = 65536
        self.show_bad_items = False

    def test(self):
        """Run every evaluation pass and return the joined report text."""
        config = tf.ConfigProto(allow_soft_placement=True)
        results = []
        with tf.Session(config=config, graph=self.rnn.graph) as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.local_variables_initializer())
            self.rnn.restore(sess)

            for gram in self.rnn.gram_keys:
                full_cls_acc, part_cls_acc, _ = self.__test_classification__(
                    sess, gram, self.rnn.gram_graph_parts[gram], 'test')
                result = f"{gram}. full_cls_acc: {full_cls_acc}; part_cls_acc: {part_cls_acc}"
                results.append(result)
                tqdm.write(result)

            full_cls_acc, part_cls_acc, _ = self.__test_classification__(
                sess, 'main', self.rnn.main_graph_part, 'test')
            result = f"main. full_cls_acc: {full_cls_acc}; part_cls_acc: {part_cls_acc}"
            results.append(result)
            tqdm.write(result)

            lemm_acc, _ = self.__test_lemmas__(sess, 'test')
            result = f"lemma_acc: {lemm_acc}"
            results.append(result)
            tqdm.write(result)

            inflect_acc, _ = self.__test_inflect__(sess, 'test')
            result = f"inflect_acc: {inflect_acc}"
            results.append(result)
            # Bug fix: the original called tqdm.write(result) twice here,
            # duplicating the inflection line in the console output.
            tqdm.write(result)

        return "\n".join(results)

    def __get_classification_items__(self, sess, items, graph_part):
        """Feed `items` through a classification head in large batches.

        Returns (probability vectors, one-hot etalon vectors), index-aligned
        with `items`."""
        wi = 0
        pbar = tqdm(total=len(items), desc='Getting classification info')
        results = []
        etalon = []

        while wi < len(items):
            bi = 0
            xs = []        # flattened char indices for the whole batch
            indexes = []   # [row, char position] pairs for the sparse tensor
            seq_lens = []
            max_len = 0

            while bi < self.batch_size and wi < len(items):
                word = items[wi]['src']
                etalon.append(items[wi]['y'])
                for c_index, char in enumerate(word):
                    # unknown characters map to the special UNDEFINED slot
                    xs.append(self.chars[char] if char in self.chars else self.chars['UNDEFINED'])
                    indexes.append([bi, c_index])
                cur_len = len(word)
                if cur_len > max_len:
                    max_len = cur_len
                seq_lens.append(cur_len)
                bi += 1
                wi += 1
                pbar.update(1)

            nn_results = sess.run(
                [graph_part.probs[0]],
                {
                    self.rnn.batch_size: bi,
                    self.rnn.x_seq_lens[0]: np.asarray(seq_lens),
                    self.rnn.x_vals[0]: np.asarray(xs),
                    self.rnn.x_inds[0]: np.asarray(indexes),
                    self.rnn.x_shape[0]: np.asarray([bi, max_len])
                }
            )
            results.extend(nn_results[0])

        return results, etalon

    def __get_lemma_items__(self, sess, items):
        """Yield the decoded lemma the network produces for every item."""
        wi = 0
        pbar = tqdm(total=len(items))
        while wi < len(items):
            bi = 0
            xs = []
            clss = []
            indexes = []
            seq_lens = []
            max_len = 0

            while bi < self.batch_size and wi < len(items):
                item = items[wi]
                word = item['x_src']
                x_cls = item['main_cls']
                for c_index, char in enumerate(word):
                    xs.append(self.chars[char])
                    indexes.append([bi, c_index])
                cur_len = len(word)
                clss.append(x_cls)
                if cur_len > max_len:
                    max_len = cur_len
                seq_lens.append(cur_len)
                bi += 1
                wi += 1
                pbar.update(1)

            results = sess.run(
                [self.rnn.lem_result],
                {
                    self.rnn.batch_size: bi,
                    self.rnn.x_seq_lens[0]: np.asarray(seq_lens),
                    self.rnn.x_vals[0]: np.asarray(xs),
                    self.rnn.x_inds[0]: np.asarray(indexes),
                    self.rnn.lem_class_pl: np.asarray(clss),
                    self.rnn.x_shape[0]: np.asarray([bi, max_len])
                }
            )
            for word_src in results[0]:
                yield decode_word(word_src[0])

    def __get_inflect_items__(self, sess, items):
        """Yield the decoded inflected form for every (word, x_cls, y_cls) item."""
        wi = 0
        pbar = tqdm(total=len(items))
        while wi < len(items):
            bi = 0
            xs = []
            x_clss = []
            y_clss = []
            indexes = []
            seq_lens = []
            max_len = 0

            while bi < self.batch_size and wi < len(items):
                item = items[wi]
                word = item['x_src']
                x_cls = item['x_cls']
                y_cls = item['y_cls']
                for c_index, char in enumerate(word):
                    xs.append(self.chars[char])
                    indexes.append([bi, c_index])
                cur_len = len(word)
                x_clss.append(x_cls)
                y_clss.append(y_cls)
                if cur_len > max_len:
                    max_len = cur_len
                seq_lens.append(cur_len)
                bi += 1
                wi += 1
                pbar.update(1)

            results = sess.run(
                [self.rnn.inflect_graph_part.results[0]],
                {
                    self.rnn.batch_size: bi,
                    self.rnn.x_seq_lens[0]: np.asarray(seq_lens),
                    self.rnn.x_vals[0]: np.asarray(xs),
                    self.rnn.x_inds[0]: np.asarray(indexes),
                    self.rnn.inflect_graph_part.x_cls[0]: np.asarray(x_clss),
                    self.rnn.inflect_graph_part.y_cls[0]: np.asarray(y_clss),
                    self.rnn.x_shape[0]: np.asarray([bi, max_len])
                }
            )

            for word_src in results[0]:
                yield decode_word(word_src)

    def __test_classification__(self, sess, key, graph_part, *ds_types):
        """Score one classification head on the given dataset splits.

        Returns (full accuracy — every class of a word predicted correctly,
        per-class accuracy, list of misclassified items)."""
        et_items = load_datasets(key, *ds_types)
        results, etalon = self.__get_classification_items__(sess, et_items, graph_part)
        total = len(etalon)
        total_classes = 0
        full_correct = 0
        part_correct = 0
        bad_items = []

        for index, et in enumerate(etalon):
            classes_count = et.sum()
            good_classes = np.argwhere(et == 1).ravel()
            # take exactly as many top-probability classes as the etalon has
            rez_classes = np.argsort(results[index])[-classes_count:]

            total_classes += classes_count
            correct = True
            for cls in rez_classes:
                if cls in good_classes:
                    part_correct += 1
                else:
                    correct = False

            if correct:
                full_correct += 1
            else:
                bad_items.append((et_items[index], rez_classes))

        full_acc = full_correct / total
        cls_correct = part_correct / total_classes
        return full_acc, cls_correct, bad_items

    def __test_lemmas__(self, sess, *ds_types):
        """Lemmatization accuracy over items whose characters are all known."""
        good_items = load_datasets("lemma", *ds_types)
        good_items = [
            word
            for word in good_items
            if all(c in self.config['chars'] for c in word['x_src'])
        ]
        results = list(self.__get_lemma_items__(sess, good_items))

        bad_words = []
        total = len(good_items)
        wrong = 0
        for index, rez in enumerate(results):
            et_word = good_items[index]
            if rez != et_word['y_src']:
                wrong += 1
                bad_words.append((et_word, rez))

        correct = total - wrong
        acc = correct / total
        return acc, bad_words

    def __test_inflect__(self, sess, *ds_types):
        """Inflection accuracy over items whose characters are all known."""
        good_items = load_datasets("inflect", *ds_types)
        good_items = [
            word
            for word in good_items
            if all(c in self.config['chars'] for c in word['x_src'])
        ]

        bad_items = []
        rez_words = list(self.__get_inflect_items__(sess, good_items))
        total = len(good_items)
        wrong = 0
        for index, rez in enumerate(rez_words):
            et_word = good_items[index]
            if rez != et_word['y_src']:
                wrong += 1
                bad_items.append((et_word, rez))

        correct = total - wrong
        acc = correct / total
        return acc, bad_items
252
+
253
+
254
if __name__ == "__main__":
    # Build the model and run the full evaluation report.
    Tester().test()
tf_utils.py ADDED
@@ -0,0 +1,495 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import pickle
3
+ import numpy as np
4
+ import tensorflow as tf
5
+
6
+
7
def rnn_top(scope_name,
            rnn_rez,
            settings,
            classes_number):
    """Project the final RNN state onto class logits with a dense layer."""
    with tf.variable_scope(scope_name, reuse=tf.AUTO_REUSE):
        state_size = settings['rnn_state_size']
        weights = tf.get_variable("W", (state_size, classes_number))
        bias = tf.get_variable("b", [classes_number])
        return tf.matmul(rnn_rez, weights) + bias
18
+
19
+
20
def rnn_cell_unit(settings, for_usage, keep_drop=None):
    """Build a single GRU cell, optionally wrapped with input dropout
    (training mode only) and a residual connection."""
    unit = tf.contrib.rnn.GRUCell(num_units=settings['rnn_state_size'])
    if keep_drop is not None and not for_usage:
        # dropout is applied only while training, never at inference
        unit = tf.contrib.rnn.DropoutWrapper(unit, input_keep_prob=keep_drop)
    if settings.get('use_residual'):
        unit = tf.nn.rnn_cell.ResidualWrapper(unit)
    return unit
34
+
35
+
36
def rnn_cell(settings, for_usage, keep_drop=None):
    """Return a GRU cell stack: a MultiRNNCell when `rnn_layers_count` > 1,
    otherwise a single cell."""
    layers = settings['rnn_layers_count']
    if layers <= 1:
        with tf.variable_scope('RnnUnit', reuse=tf.AUTO_REUSE):
            return rnn_cell_unit(settings, for_usage, keep_drop)

    cells = []
    for i in range(layers):
        with tf.variable_scope('RnnUnit_%s' % i, reuse=tf.AUTO_REUSE):
            cells.append(rnn_cell_unit(settings, for_usage, keep_drop))
    return tf.nn.rnn_cell.MultiRNNCell(cells)
49
+
50
+
51
def build_rnn(rnn_input, keep_drop, seq_len, settings, initial_state_fw=None, initial_state_bw=None, for_usage=False, with_seq=False, top_concat=False):
    """Build a (bi)directional GRU over `rnn_input`.

    Bidirectional mode (settings['rnn_bidirectional']) returns the combined
    final state, plus the per-step outputs when `with_seq` is True. The
    forward/backward results are either concatenated and re-projected to
    `rnn_state_size` (`top_concat=True`) or summed element-wise.
    """
    # For stacked cells, the same initial state is replicated for every layer.
    if settings['rnn_layers_count'] > 1 and initial_state_fw is not None:
        initial_state_fw = tuple([initial_state_fw for i in range(settings['rnn_layers_count'])])

    if settings['rnn_layers_count'] > 1 and initial_state_bw is not None:
        initial_state_bw = tuple([initial_state_bw for i in range(settings['rnn_layers_count'])])

    if settings['rnn_bidirectional']:
        with tf.variable_scope('Rnn', reuse=tf.AUTO_REUSE) as scope:
            with tf.variable_scope('FCell', reuse=tf.AUTO_REUSE) as scope:
                fw_cell = rnn_cell(settings, for_usage, keep_drop)
            with tf.variable_scope('BCell', reuse=tf.AUTO_REUSE) as scope:
                bw_cell = rnn_cell(settings, for_usage, keep_drop)
            seq_val, (final_fw, final_bw) = tf.nn.bidirectional_dynamic_rnn(cell_fw=fw_cell,
                                                                            cell_bw=bw_cell,
                                                                            sequence_length=seq_len,
                                                                            initial_state_fw = initial_state_fw,
                                                                            initial_state_bw = initial_state_bw,
                                                                            inputs=rnn_input,
                                                                            dtype=tf.float32)

            # Multi-layer state is a tuple; keep only the top layer's state.
            if settings['rnn_layers_count'] > 1:
                final_fw = final_fw[-1]
                final_bw = final_bw[-1]

            if top_concat:
                # Concatenate fw/bw then project back down to rnn_state_size.
                final_state = tf.concat([final_fw, final_bw], axis=1)
                final_state = tf.layers.dense(final_state, settings['rnn_state_size'])
                seq_val = tf.concat([seq_val[0], seq_val[1]], axis=2)
                seq_val = tf.layers.dense(seq_val, settings['rnn_state_size'])
            else:
                # Cheaper combination: element-wise sum of fw/bw results.
                final_state = tf.add(final_fw, final_bw)
                seq_val = tf.add(seq_val[0], seq_val[1])

            if with_seq:
                return final_state, seq_val
            else:
                return final_state

    else:
        # NOTE(review): in this unidirectional branch `with_seq`,
        # `initial_state_bw` and `top_concat` are ignored, and
        # `final_state[-1]` is returned. For a multi-layer cell that is the
        # top layer's state; for a single-layer cell `final_state` is a plain
        # tensor and [-1] indexes the batch dimension — confirm this path is
        # only used with rnn_layers_count > 1.
        with tf.variable_scope('Rnn', reuse=tf.AUTO_REUSE) as scope:
            cell = rnn_cell(settings, for_usage, keep_drop)
            rnn_rez, final_state = tf.nn.dynamic_rnn(cell=cell,
                                                     sequence_length=seq_len,
                                                     initial_state=initial_state_fw,
                                                     inputs=rnn_input,
                                                     dtype=tf.float32)
            return final_state[-1]
99
+
100
+
101
def average_gradients(tower_grads):
    """Average per-device gradients into a single (grad, var) list.

    `tower_grads` is a list over devices; each element is a list of
    (gradient, variable) pairs in identical variable order. Variables whose
    gradient is None on any tower are skipped entirely.
    """
    average_grads = []
    for grad_and_vars in zip(*tower_grads):

        # Skip variables that received no gradient on some tower.
        if any(x for x in grad_and_vars if x[0] is None):
            continue

        # Note that each grad_and_vars looks like the following:
        # ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
        grads = []
        for g, _ in grad_and_vars:
            # Add 0 dimension to the gradients to represent the tower.
            expanded_g = tf.expand_dims(g, 0)

            # Append on a 'tower' dimension which we will average over below.
            grads.append(expanded_g)

        # Average over the 'tower' dimension.
        grad = tf.concat(grads, 0)
        grad = tf.reduce_mean(grad, 0)

        # Keep in mind that the Variables are redundant because they are shared
        # across towers. So .. we will just return the first tower's pointer to
        # the Variable.
        v = grad_and_vars[0][1]
        grad_and_var = (grad, v)
        average_grads.append(grad_and_var)

    return average_grads
130
+
131
+
132
def create_reset_metric(metric, scope='reset_metrics', *args, **metric_args):
    """
    Wrap a streaming tf.metrics-style metric so its internal accumulator
    variables can be re-initialized between evaluation passes.

    Source: https://github.com/tensorflow/tensorflow/issues/4814#issuecomment-314801758

    Usage:

    epoch_loss, epoch_loss_update, epoch_loss_reset = create_reset_metric(
        tf.contrib.metrics.streaming_mean_squared_error, 'epoch_loss',
        predictions=output, labels=target)

    :param metric: metric factory returning (value_op, update_op)
    :param scope: variable scope isolating the metric's local variables
    :param metric_args: forwarded to the metric factory
    :return: (value_op, update_op, reset_op)
    """
    with tf.variable_scope(scope) as scope:
        metric_op, update_op = metric(*args, **metric_args)
        # Collect only this metric's local accumulators so resetting one
        # metric does not clear the others.
        vars = tf.contrib.framework.get_variables(
            scope, collection=tf.GraphKeys.LOCAL_VARIABLES)
        reset_op = tf.variables_initializer(vars)
        return metric_op, update_op, reset_op
152
+
153
+
154
def load_cls_dataset(dataset_path, devices_count, type, batch_size, use_weights, gram="main"):
    """Yield classification batches grouped per device.

    Reads `<gram>_<type>_dataset.pkl`, splits the items into `batch_size`
    chunks and yields lists of `devices_count` feed dicts with keys
    x / x_seq_len / y / weight. When `use_weights` is False every item gets
    a unit weight.
    """
    path = os.path.join(dataset_path, f"{gram}_{type}_dataset.pkl")
    with open(path, 'rb') as f:
        items = pickle.load(f)

    batches = [items[i:i + batch_size] for i in range(0, len(items), batch_size)]
    cur_step = []
    for batch in batches:
        x = np.stack([item['x'][0] for item in batch])
        # np.int was a deprecated alias of the builtin int and is removed in
        # NumPy >= 1.24; use int directly (identical behavior).
        seq_len = np.asarray([item['x'][1] for item in batch], int)
        y = np.asarray([item['y'] for item in batch])
        # Bug fix: the original used range(batch) — `batch` is a list, so the
        # use_weights=False path raised TypeError. Iterate over its length.
        weight = np.asarray([item['weight'] for item in batch]) if use_weights else [1 for _ in range(len(batch))]
        if len(cur_step) == devices_count:
            yield cur_step
            cur_step = []

        cur_step.append(dict(
            x=x,
            x_seq_len=seq_len,
            y=y,
            weight=weight
        ))

    # Emit the trailing (possibly partial) group only when it fills all devices.
    if len(cur_step) == devices_count:
        yield cur_step
180
+
181
+
182
def load_lemma_dataset(dataset_path, devices_count, type, batch_size):
    """Yield lemmatization batches grouped per device.

    Reads `lemma_<type>_dataset.pkl`, splits it into `batch_size` chunks and
    yields lists of `devices_count` feed dicts. The trailing group is emitted
    only when every batch in it is completely full (remainders are dropped).
    """
    path = os.path.join(dataset_path, f"lemma_{type}_dataset.pkl")
    with open(path, 'rb') as f:
        items = pickle.load(f)

    batches = [items[i:i + batch_size] for i in range(0, len(items), batch_size)]
    cur_step = []
    for batch in batches:
        x = np.stack([item['x'] for item in batch])
        # np.int is a removed alias of the builtin int (NumPy >= 1.24).
        x_seq_len = np.asarray([item['x_len'] for item in batch], int)
        x_cls = np.asarray([item['main_cls'] for item in batch], int)
        y_seq_len = np.asarray([item['y_len'] for item in batch], int)
        # Trim every target to the longest real target length in this batch.
        max_len = y_seq_len.max()
        y = np.asarray([item['y'][:max_len] for item in batch])

        x_src = [item['x_src'] for item in batch]
        y_src = [item['y_src'] for item in batch]

        if len(cur_step) == devices_count:
            yield cur_step
            cur_step = []

        cur_step.append(dict(
            x=x,
            x_seq_len=x_seq_len,
            x_cls=x_cls,
            y=y,
            y_seq_len=y_seq_len,
            x_src=x_src,
            y_src=y_src
        ))

    if len(cur_step) == devices_count and all(len(step['x']) == batch_size for step in cur_step):
        yield cur_step
216
+
217
+
218
def load_inflect_dataset(dataset_path, devices_count, type, batch_size):
    """Yield inflection batches grouped per device.

    Like load_lemma_dataset, but each item also carries the source (`x_cls`)
    and target (`y_cls`) morphological class. The trailing group is emitted
    only when every batch in it is completely full (remainders are dropped).
    """
    path = os.path.join(dataset_path, f"inflect_{type}_dataset.pkl")
    with open(path, 'rb') as f:
        items = pickle.load(f)

    batches = [items[i:i + batch_size] for i in range(0, len(items), batch_size)]
    cur_step = []
    for batch in batches:
        x = np.stack([item['x'] for item in batch])
        # np.int is a removed alias of the builtin int (NumPy >= 1.24).
        x_seq_len = np.asarray([item['x_len'] for item in batch], int)
        x_cls = np.asarray([item['x_cls'] for item in batch], int)
        y_seq_len = np.asarray([item['y_len'] for item in batch], int)
        y_cls = np.asarray([item['y_cls'] for item in batch], int)
        # Trim every target to the longest real target length in this batch.
        max_len = y_seq_len.max()
        y = np.asarray([item['y'][:max_len] for item in batch])
        x_src = [item['x_src'] for item in batch]
        y_src = [item['y_src'] for item in batch]

        if len(cur_step) == devices_count:
            yield cur_step
            cur_step = []

        cur_step.append(dict(
            x=x,
            x_seq_len=x_seq_len,
            x_cls=x_cls,
            y=y,
            y_seq_len=y_seq_len,
            y_cls=y_cls,
            x_src=x_src,
            y_src=y_src
        ))

    if len(cur_step) == devices_count and all(len(step['x']) == batch_size for step in cur_step):
        yield cur_step
253
+
254
+
255
def seq2seq(graph_part,
            batch_size,
            x,
            x_init,
            x_seq_len,
            y,
            y_init,
            y_seq_len):
    """Build an attention-based character-level encoder/decoder.

    The encoder/decoder initial states are produced from the morphological
    class ids (`x_init`, `y_init`) via ClsGramEmbedder. In usage mode
    (graph_part.for_usage) the decoder samples greedily; in training mode a
    TrainingHelper is used and loss/metrics/gradients are appended to
    graph_part's collections. The sampled ids are always appended to
    graph_part.results.
    """
    start_tokens = tf.fill([batch_size], graph_part.start_char_index)
    initializer = tf.random_normal_initializer()
    # +1 accounts for the end-of-word token appended to every target below.
    y_seq_len += 1

    with tf.variable_scope("Encoder", reuse=tf.AUTO_REUSE):
        encoder_char_embeddings = tf.get_variable(
            "CharEmbeddings",
            [graph_part.chars_count, graph_part.settings['char_vector_size']],
            initializer=initializer
        )

        # Initial encoder state is derived from the source word's class id.
        encoder_init_state = ClsGramEmbedder(graph_part.main_cls_dic,
                                             graph_part.settings['encoder']['gram_vector_size'],
                                             graph_part.settings['encoder']['ad_cls_vector_size'],
                                             graph_part.settings['encoder']['rnn_state_size'])(x_init, batch_size)
        encoder_input = tf.nn.embedding_lookup(encoder_char_embeddings, x)

    with tf.variable_scope("Decoder", reuse=tf.AUTO_REUSE):
        decoder_char_embeddings = tf.get_variable(
            "CharEmbeddings",
            [graph_part.chars_count, graph_part.settings['char_vector_size']],
            initializer=initializer
        )
        # Initial decoder state is derived from the target class id.
        decoder_init_state = ClsGramEmbedder(graph_part.main_cls_dic,
                                             graph_part.settings['decoder']['gram_vector_size'],
                                             graph_part.settings['decoder']['ad_cls_vector_size'],
                                             graph_part.settings['decoder']['rnn_state_size'])(y_init, batch_size)
        decoder_output = tf.nn.embedding_lookup(decoder_char_embeddings, y)

    # At inference dropout is disabled via constants; at training it is fed.
    if graph_part.for_usage:
        keep_drop = tf.constant(1, dtype=tf.float32, name='KeepDrop')
        decoder_keep_drop = tf.constant(1, dtype=tf.float32, name='DecoderKeepDrop')
    else:
        keep_drop = tf.placeholder(dtype=tf.float32, name='KeepDrop')
        decoder_keep_drop = tf.placeholder(dtype=tf.float32, name='DecoderKeepDrop')

    with tf.variable_scope('Encoder', reuse=tf.AUTO_REUSE) as scope:
        _, encoder_output = build_rnn(
            encoder_input,
            keep_drop,
            x_seq_len,
            graph_part.settings['encoder'],
            encoder_init_state,
            encoder_init_state,
            top_concat=False,
            for_usage=graph_part.for_usage,
            with_seq=True
        )

    with tf.variable_scope('Decoder', reuse=tf.AUTO_REUSE) as scope:
        if not graph_part.for_usage:
            # Training: prepend the start token and append the end token to
            # the teacher-forced decoder inputs/targets.
            start_tokens_emd = tf.nn.embedding_lookup(decoder_char_embeddings, start_tokens)
            start_tokens_emd = tf.reshape(start_tokens_emd, (batch_size, -1, graph_part.settings['char_vector_size']))
            decoder_output = tf.concat(values=[start_tokens_emd, decoder_output], axis=1)

            end_tokens = tf.fill([batch_size], graph_part.end_char_index)
            end_tokens_emd = tf.nn.embedding_lookup(decoder_char_embeddings, end_tokens)
            end_tokens_emd = tf.reshape(end_tokens_emd, (batch_size, -1, graph_part.settings['char_vector_size']))
            decoder_output = tf.concat([decoder_output, end_tokens_emd], axis=1)

            end_tokens = tf.reshape(end_tokens, (batch_size, 1))
            y = tf.concat([y, end_tokens], axis=1)

        if graph_part.for_usage:
            # Inference: feed back greedily sampled characters.
            helper = tf.contrib.seq2seq.GreedyEmbeddingHelper(decoder_char_embeddings,
                                                              start_tokens=start_tokens,
                                                              end_token=graph_part.end_char_index)
        else:
            helper = tf.contrib.seq2seq.TrainingHelper(decoder_output, y_seq_len)

        attention_mechanism = tf.contrib.seq2seq.BahdanauAttention(
            num_units=graph_part.settings['decoder']['rnn_state_size'],
            memory=encoder_output,
            memory_sequence_length=x_seq_len,
            normalize=False
        )

        cell = rnn_cell(graph_part.settings['decoder'],
                        graph_part.for_usage,
                        decoder_keep_drop)

        cell = tf.contrib.seq2seq.AttentionWrapper(
            cell,
            attention_mechanism,
            attention_layer_size=graph_part.settings['decoder']['rnn_state_size'] / 2
        )

        # Project each decoder step onto the character vocabulary.
        out_cell = tf.contrib.rnn.OutputProjectionWrapper(
            cell,
            graph_part.chars_count
        )

        init_state = cell.zero_state(dtype=tf.float32, batch_size=batch_size).clone(cell_state=decoder_init_state)

        decoder = tf.contrib.seq2seq.BasicDecoder(
            cell=out_cell,
            helper=helper,
            initial_state=init_state
        )

        # At inference decode up to a fixed maximum; at training up to the
        # longest target in the batch.
        max_len = graph_part.settings['max_length'] if graph_part.for_usage else tf.reduce_max(y_seq_len)
        outputs = tf.contrib.seq2seq.dynamic_decode(
            decoder=decoder,
            impute_finished=True,
            output_time_major=False,
            maximum_iterations=max_len
        )
        decoder_ids = outputs[0].sample_id

        if not graph_part.for_usage:
            decoder_logits = outputs[0].rnn_output
            masks = tf.sequence_mask(
                lengths=y_seq_len,
                dtype=tf.float32,
                maxlen=tf.reduce_max(y_seq_len)
            )
            seq_mask_int = tf.cast(masks, tf.int32)
            seq_mask_flat = tf.cast(tf.reshape(masks, (-1,)), tf.int32)

            # seq loss
            loss = tf.contrib.seq2seq.sequence_loss(
                decoder_logits,
                y,
                masks,
                name="SeqLoss"
            )
            vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=graph_part.main_scope_name)
            grads = graph_part.optimiser.compute_gradients(loss, var_list=vars)
            graph_part.create_mean_metric(0, loss)

            labels_flat = tf.reshape(y, (-1,))
            predictions_flat = tf.reshape(decoder_ids, (-1,))
            # char accuracy: only positions inside the real sequence count
            nonzero_indices = tf.where(tf.not_equal(seq_mask_flat, 0))
            labels_flat = tf.gather(labels_flat, nonzero_indices)
            labels_flat = tf.reshape(labels_flat, (-1,))
            predictions_flat = tf.gather(predictions_flat, nonzero_indices)
            predictions_flat = tf.reshape(predictions_flat, (-1,))

            # remove -1 items where no sampling took place
            sample_indexes = tf.where(tf.not_equal(predictions_flat, -1))
            labels_flat = tf.gather(labels_flat, sample_indexes)
            predictions_flat = tf.gather(predictions_flat, sample_indexes)

            graph_part.create_accuracy_metric(1, labels_flat, predictions_flat)

            # seq accuracy: a word counts as correct only when the squared
            # label/prediction difference over the whole sequence is zero.
            labels = y * seq_mask_int
            predictions = decoder_ids * seq_mask_int
            labels_flat = tf.reshape(labels, (-1,))
            predictions_flat = tf.reshape(predictions, (-1,))
            sample_zeros = tf.cast(tf.not_equal(predictions_flat, -1), tf.int32)
            predictions = predictions_flat * sample_zeros
            labels = labels_flat * sample_zeros
            predictions = tf.reshape(predictions, (batch_size, max_len))
            labels = tf.reshape(labels, (batch_size, max_len))
            delta = labels - predictions
            labels = tf.reduce_sum(delta * delta, 1)
            predictions = tf.zeros(batch_size)
            graph_part.create_accuracy_metric(2, labels, predictions)

            graph_part.dev_grads.append(grads)
            graph_part.losses.append(loss)
            graph_part.keep_drops.append(keep_drop)
            graph_part.decoder_keep_drops.append(decoder_keep_drop)

        graph_part.results.append(decoder_ids)
430
+
431
+
432
class ClsGramEmbedder:
    """Builds a dense initial-state vector for a morphological class id.

    Each class key (a tuple of grammeme values) is decomposed into
    per-position grammeme embeddings, concatenated with an additional
    free-form per-class embedding.
    """

    def __init__(self, cls_dic, gram_vector_size, ad_cls_vector_size, rez_size):
        # Maps (grammeme value, position in the class tuple) -> running index.
        gram_rez_dict = {}
        # Classes sorted by their numeric index so row i of the embedding
        # matrices corresponds to class index i.
        tpls = sorted([(key, cls_dic[key]) for key in cls_dic], key=lambda x: x[1])
        cls_vectors = []
        for cls_key, cls_index in tpls:
            cls_vector = []
            # NOTE(review): enumerate yields (position, value), so here `gram`
            # is the position inside the class tuple and `gram_index` is the
            # grammeme value — the names read swapped, but the logic is
            # internally consistent.
            for gram, gram_index in enumerate(list(cls_key)):
                gram_key = (gram_index, gram)
                if gram_key not in gram_rez_dict:
                    gram_rez_dict[gram_key] = len(gram_rez_dict)

                cls_vector.append(gram_rez_dict[gram_key])

            cls_vectors.append(cls_vector)

        tpls = sorted([(key, gram_rez_dict[key]) for key in gram_rez_dict], key=lambda x: x[1])
        gram_vectors = []
        for gram_key, gram_index in tpls:
            # Absent grammemes (None) get a fixed all-zero vector; present
            # ones start from a random initialization.
            if gram_key[0] is None:
                val = np.zeros(gram_vector_size, dtype=np.float32)
            else:
                val = np.random.rand(gram_vector_size).astype(np.float32)

            gram_vectors.append(val)

        self.rez_size = rez_size                      # target state size (unused here; kept for callers)
        self.classes_count = len(cls_dic)
        self.cls_vectors = np.asarray(cls_vectors, dtype=np.int)

        self.grams_count = len(gram_vectors)
        self.gram_vector_size = gram_vector_size
        self.gram_vectors = np.stack(gram_vectors)

        self.ad_cls_vector_size = ad_cls_vector_size

    def __call__(self, cls_pl, batch_size):
        """Build the TF sub-graph: class ids -> concatenated embedding rows."""
        # [self.grams_count, self.gram_vector_size],
        gram_embeddings = tf.get_variable(
            "GramEmbeddings",
            initializer=tf.constant(self.gram_vectors),
            dtype=tf.float32
        )
        # [self.classes_count, self.grams_count],
        cls_embeddings = tf.get_variable(
            "ClsEmbeddings",
            initializer=tf.constant(self.cls_vectors),
            dtype=tf.int64
        )

        # class id -> per-position grammeme indices -> grammeme vectors,
        # flattened into one row per batch element.
        gram_rez = tf.nn.embedding_lookup(cls_embeddings, cls_pl)
        gram_rez = tf.reshape(gram_rez, (-1, ))
        gram_rez = tf.nn.embedding_lookup(gram_embeddings, gram_rez)
        gram_rez = tf.reshape(gram_rez, (batch_size, -1))

        # Additional trainable per-class embedding appended to the grammeme part.
        ad_cls_embeddings = tf.get_variable(
            "AdClsEmbeddings",
            [self.classes_count, self.ad_cls_vector_size],
            initializer=tf.contrib.layers.xavier_initializer()
        )
        ad_cls_rez = tf.nn.embedding_lookup(ad_cls_embeddings, cls_pl)

        result = tf.concat([gram_rez, ad_cls_rez], axis=1)
        return result
utils.py ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import yaml
3
+ import random
4
+ import pickle
5
+ import logging
6
+ from collections import defaultdict
7
+ from tqdm import tqdm
8
+
9
+
10
def _get_config():
    """Load config.yml and attach the pickled main-class list when present.

    Returns the config dict; adds 'main_classes' and 'main_classes_count'
    only if the classes pickle already exists on disk.
    """
    with open('config.yml', 'r') as f:
        # yaml.load without an explicit Loader is deprecated since PyYAML 5.1
        # (and unsafe for untrusted input); the config is plain data, so
        # SafeLoader is sufficient.
        config = yaml.load(f, Loader=yaml.SafeLoader)

    classes_path = config['cls_classes_path']
    if os.path.exists(classes_path):
        with open(classes_path, 'rb') as f:
            config['main_classes'] = pickle.load(f)
        config['main_classes_count'] = len(config['main_classes'])

    return config
21
+
22
+
23
# Module-level configuration shared by all pipeline scripts.
CONFIG = _get_config()
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(levelname)s: %(message)s',
    datefmt='%d.%m.%Y %H:%M:%S'
)
DATASET_PATH = CONFIG['dataset_path']    # directory of train/valid/test pickles
DICS_PATH = CONFIG['dics_path']          # directory of dictionary pickles
RANDOM = random.Random(CONFIG['random_seed'])  # seeded RNG for reproducible shuffles
GRAMMEMES_TYPES = CONFIG['grammemes_types']
33
+
34
+
35
class MyDefaultDict(defaultdict):
    """defaultdict variant whose factory receives the missing key."""

    def __missing__(self, key):
        factory = self.default_factory
        if factory is None:
            raise KeyError(key)
        value = factory(key)
        self[key] = value
        return value
43
+
44
+
45
def get_grams_info(config):
    """Build lookup tables from the configured grammeme/POS definitions.

    Returns (src_convert, classes_indexes): src_convert maps every lowercased
    source tag to its (grammeme, class) pair; classes_indexes maps
    grammeme -> class -> numeric index.
    """
    src_convert = {}
    classes_indexes = {}

    for gram_key, gram in config['grammemes_types'].items():
        index_map = {}
        classes_indexes[gram_key] = index_map
        for cls_key, cls_obj in gram['classes'].items():
            index_map[cls_key] = cls_obj['index']
            for key in cls_obj['keys']:
                src_convert[key.lower()] = (gram_key, cls_key)

    dict_post_types = config['dict_post_types']
    for post_key in dict_post_types:
        cls_obj = dict_post_types[post_key.lower()]
        pair = ('post', post_key.lower())
        for key in cls_obj['keys']:
            src_convert[key.lower()] = pair

    return src_convert, classes_indexes
69
+
70
+
71
def decode_word(vect_mas):
    """Turn a sequence of character indices back into a string.

    Decoding stops at the configured end token; indices outside the alphabet
    are rendered as "0".
    """
    conf = CONFIG
    alphabet = conf['chars']
    end_token = conf['end_token']
    letters = []
    for index in vect_mas:
        if index == end_token:
            break
        letters.append(alphabet[index] if index < len(alphabet) else "0")
    return "".join(letters)
83
+
84
+
85
def select_uniform_items(items_dict, persent, ds_info):
    """Yield roughly `persent` percent of the items from every class bucket.

    Items are consumed from the front of each bucket list, so the buckets in
    `items_dict` are mutated (the yielded items are removed).
    """
    for cls in tqdm(items_dict, desc=f"Selecting {ds_info} dataset"):
        bucket = items_dict[cls]
        quota = persent * len(bucket) / 100
        taken = 0
        while taken <= quota and bucket:
            yield bucket.pop(0)
            taken += 1
96
+
97
+
98
def save_dataset(items_dict, file_prefix):
    """Split per-class item buckets into train/valid/test and pickle them.

    Test and validation shares are drawn uniformly from every class bucket
    (sizes come from CONFIG); whatever remains becomes the train set.
    """
    if not os.path.isdir(DATASET_PATH):
        os.mkdir(DATASET_PATH)

    total_count = sum(len(items_dict[key]) for key in items_dict)
    logging.info(f"Class '{file_prefix}': {total_count}")
    for key in tqdm(items_dict, desc=f"Shuffling {file_prefix} items"):
        # Consistency fix: use the module-level seeded RANDOM instead of the
        # unseeded global random module so the split is reproducible across
        # runs (RANDOM is created from CONFIG['random_seed']).
        RANDOM.shuffle(items_dict[key])

    test_items = list(select_uniform_items(items_dict, CONFIG['test_persent'], f"test {file_prefix}"))
    valid_items = list(select_uniform_items(items_dict, CONFIG['validation_persent'], f"valid {file_prefix}"))
    items = []
    for key in items_dict:
        items.extend(items_dict[key])
    RANDOM.shuffle(items)

    def _dump(ds_items, ds_name):
        # One pickle per split, e.g. "<prefix>_train_dataset.pkl".
        logging.info(f"Saving '{file_prefix}' {ds_name} dataset")
        with open(os.path.join(DATASET_PATH, f"{file_prefix}_{ds_name}_dataset.pkl"), 'wb+') as f:
            pickle.dump(ds_items, f)

    _dump(items, "train")
    _dump(valid_items, "valid")
    _dump(test_items, "test")
125
+
126
+
127
def get_dict_path(file_prefix):
    """Return the path of the pickled dictionary items for `file_prefix`."""
    file_name = f"{file_prefix}_dict_items.pkl"
    return os.path.join(DICS_PATH, file_name)
129
+
130
+
131
def save_dictionary_items(items, file_prefix):
    """Pickle dictionary items to the location given by get_dict_path."""
    target = get_dict_path(file_prefix)
    with open(target, 'wb+') as f:
        pickle.dump(items, f)
134
+
135
+
136
def create_cls_tuple(item):
    """Map the item's grammeme values into a fixed-order tuple.

    Grammemes absent from the item become None, so every tuple has the same
    length and ordering (the iteration order of GRAMMEMES_TYPES).
    """
    return tuple(item.get(key) for key in GRAMMEMES_TYPES)
141
+
142
+
143
def load_datasets(main_type, *ds_type):
    """Concatenate the pickled `<main_type>_<split>_dataset.pkl` files for
    every requested split and return the combined item list."""
    words = []
    for split in ds_type:
        path = os.path.join(CONFIG['dataset_path'], f"{main_type}_{split}_dataset.pkl")
        with open(path, 'rb') as f:
            words.extend(pickle.load(f))
    return words