Update app.py
Browse files
app.py
CHANGED
|
@@ -71,9 +71,9 @@ dict_tokenizer_tr = {
|
|
| 71 |
'en-sw': tokenizer_sw,
|
| 72 |
}
|
| 73 |
|
| 74 |
-
|
| 75 |
-
|
| 76 |
-
|
| 77 |
|
| 78 |
# print("dict", dict_reference_faiss['en-es']['input']['tokens'][1])
|
| 79 |
|
|
@@ -767,22 +767,22 @@ def first_function(w1, model):
|
|
| 767 |
# ---> preload faiss using the respective model with an initial dataset.
|
| 768 |
|
| 769 |
### to uncomment gg1 ###
|
| 770 |
-
|
| 771 |
-
|
| 772 |
-
|
| 773 |
-
|
| 774 |
-
|
| 775 |
-
|
| 776 |
-
|
| 777 |
-
|
| 778 |
-
|
| 779 |
-
|
| 780 |
-
|
| 781 |
-
|
| 782 |
-
|
| 783 |
-
|
| 784 |
-
|
| 785 |
-
|
| 786 |
|
| 787 |
## bertviz
|
| 788 |
# paramsbv, tgtbv = get_bertvis_data(w1, model)
|
|
@@ -793,10 +793,10 @@ def first_function(w1, model):
|
|
| 793 |
html_att_cross = params[4][1]
|
| 794 |
|
| 795 |
### to uncomment gg1 ###
|
| 796 |
-
|
| 797 |
### to uncomment gg1 ###
|
| 798 |
|
| 799 |
-
params = [params[0], params[1], [], params[2][0], params[3][0], params[4][0]]
|
| 800 |
# params.append([tgt, params['params'], params['html2'].data]
|
| 801 |
|
| 802 |
return [translated_text, params, html_att_enc, html_att_dec, html_att_cross]
|
|
|
|
| 71 |
'en-sw': tokenizer_sw,
|
| 72 |
}
|
| 73 |
|
| 74 |
+
dict_reference_faiss = {
|
| 75 |
+
'en-es': load_index('en-es'),
|
| 76 |
+
}
|
| 77 |
|
| 78 |
# print("dict", dict_reference_faiss['en-es']['input']['tokens'][1])
|
| 79 |
|
|
|
|
| 767 |
# ---> preload faiss using the respective model with an initial dataset.
|
| 768 |
|
| 769 |
### to uncomment gg1 ###
|
| 770 |
+
result_search = {}
|
| 771 |
+
result_search['input'] = build_search(input_embeddings, model, type='input')
|
| 772 |
+
result_search['output'] = build_search(output_embeddings, model, type='output')
|
| 773 |
+
|
| 774 |
+
json_out = {'input': {'tokens': {}, 'words': {}}, 'output': {'tokens': {}, 'words': {}}}
|
| 775 |
+
dict_projected = {}
|
| 776 |
+
for type in ['input', 'output']:
|
| 777 |
+
dict_projected[type] = {}
|
| 778 |
+
for key in ['tokens', 'words']:
|
| 779 |
+
similar_key = result_search[type][key]['similar']
|
| 780 |
+
vocab = result_search[type][key]['vocab_queries']
|
| 781 |
+
dict_projected[type][key] = filtered_projection(similar_key, vocab, model, type=type, key=key)
|
| 782 |
+
json_out[type][key]['similar_queries'] = similar_key
|
| 783 |
+
json_out[type][key]['tnse'] = dict_projected[type][key]
|
| 784 |
+
json_out[type][key]['key_text_list'] = result_search[type][key]['sentence_key_list']
|
| 785 |
+
## to uncomment gg1 ###
|
| 786 |
|
| 787 |
## bertviz
|
| 788 |
# paramsbv, tgtbv = get_bertvis_data(w1, model)
|
|
|
|
| 793 |
html_att_cross = params[4][1]
|
| 794 |
|
| 795 |
### to uncomment gg1 ###
|
| 796 |
+
params = [params[0], params[1], json_out, params[2][0], params[3][0], params[4][0]]
|
| 797 |
### to uncomment gg1 ###
|
| 798 |
|
| 799 |
+
# params = [params[0], params[1], [], params[2][0], params[3][0], params[4][0]]
|
| 800 |
# params.append([tgt, params['params'], params['html2'].data]
|
| 801 |
|
| 802 |
return [translated_text, params, html_att_enc, html_att_dec, html_att_cross]
|