Spaces:
Running
Running
| import json | |
| from tensorflow.keras.preprocessing.text import Tokenizer | |
| from tensorflow.keras.preprocessing.text import tokenizer_from_json | |
def allowed_model(filename):
    """Return True if *filename* carries an accepted model extension.

    Accepted extensions (case-insensitive): ``keras`` and ``h5``.
    A name with no dot at all is rejected.
    """
    _, dot, extension = filename.rpartition('.')
    return bool(dot) and extension.lower() in {'keras', 'h5'}
def allowed_tokenizer(filename):
    """Return True if *filename* has a ``.json`` extension (case-insensitive).

    Names without any dot are rejected outright.
    """
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension.lower() in {'json'}
def load_doc(filename):
    """Read the entire contents of a text file and return it as a string.

    Uses a context manager so the file handle is closed even if the
    read raises (the original open/close pair leaked the handle on error).
    Encoding is left at the platform default to preserve prior behavior.

    Args:
        filename: Path of the text file to read.

    Returns:
        The full file contents as a single string.

    Raises:
        OSError: If the file cannot be opened or read.
    """
    with open(filename, 'r') as file:
        return file.read()
def load_tokenizer(tokenizer_path):
    """Load a Keras Tokenizer that was serialized to a JSON file.

    Fixes a dead assignment in the original: ``tokenizer = Tokenizer()``
    was created and immediately overwritten by ``tokenizer_from_json``,
    so the throwaway instance has been removed. The file is opened via a
    context manager so the handle is always closed.

    Args:
        tokenizer_path: Path to the JSON file produced when the tokenizer
            was saved (presumably the output of ``tokenizer.to_json()``
            passed through ``json.dump`` — TODO confirm against the saver).

    Returns:
        A reconstructed ``Tokenizer`` instance.
    """
    with open(tokenizer_path) as f:
        data = json.load(f)
    return tokenizer_from_json(data)