import pickle
import tensorflow as tf

def load_tokenizers(en_path='tokenizers/en_tokenizer.pkl',
                    vi_path='tokenizers/vi_tokenizer.pkl'):
    """Load the English and Vietnamese Keras tokenizers from disk.

    Each pickle file is expected to contain the JSON string produced by
    Tokenizer.to_json(), not the Tokenizer object itself.
    """
    # Unpickle the serialized JSON strings.
    with open(en_path, 'rb') as f:
        en_json = pickle.load(f)
    with open(vi_path, 'rb') as f:
        vi_json = pickle.load(f)

    # Rebuild the Tokenizer objects from their JSON representations.
    en_tokenizer = tf.keras.preprocessing.text.tokenizer_from_json(en_json)
    vi_tokenizer = tf.keras.preprocessing.text.tokenizer_from_json(vi_json)

    return en_tokenizer, vi_tokenizer
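

# A minimal sketch of the save side this loader assumes: each tokenizer is
# serialized with Tokenizer.to_json() and the resulting JSON string is
# pickled. The save_tokenizers name and its arguments are hypothetical,
# for illustration only; they do not appear elsewhere in this repo.
def save_tokenizers(en_tokenizer, vi_tokenizer,
                    en_path='tokenizers/en_tokenizer.pkl',
                    vi_path='tokenizers/vi_tokenizer.pkl'):
    with open(en_path, 'wb') as f:
        pickle.dump(en_tokenizer.to_json(), f)
    with open(vi_path, 'wb') as f:
        pickle.dump(vi_tokenizer.to_json(), f)


# Example usage, assuming the pickle files above already exist on disk.
if __name__ == '__main__':
    en_tokenizer, vi_tokenizer = load_tokenizers()
    # Convert a sentence to the integer sequence a model would consume.
    print(en_tokenizer.texts_to_sequences(['hello world']))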