File size: 290 Bytes
dc1ed61
 
 
 
 
 
 
1
2
3
4
5
6
7
"""Set up BERT text preprocessing: a Hugging Face tokenizer plus the TF-Hub preprocess model."""
import tensorflow
import tensorflow_hub as tfhub
# NOTE(review): imported for its side effects — presumably registers the custom
# string ops the TF-Hub BERT preprocess SavedModel requires; confirm.
import tensorflow_text
from tensorflow import keras
from transformers import AutoTokenizer

# Hugging Face tokenizer for the uncased English BERT checkpoint.
# (Despite the name, this is a transformers tokenizer, not a TF object.)
tf_tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

# TF-Hub BERT preprocessing model; tfhub.load fetches the SavedModel at import time.
preprocess = tfhub.load('https://tfhub.dev/tensorflow/bert_en_uncased_preprocess/3')