# Source: Hugging Face Hub repo "aaljabari/arabic-relation-extraction-v1",
# file "relation_module/tokenizer.py" (renamed from "re/tokenizer.py",
# commit a9c8c8e, 247 bytes).
from transformers import PreTrainedTokenizerFast
from huggingface_hub import hf_hub_download
# Default Hub repository that hosts the tokenizer files.
DEFAULT_REPO_ID = "aaljabari/arabic-relation-extraction-v1"

# Kept for backward compatibility with code that imports `repo_id`.
repo_id = DEFAULT_REPO_ID


def load_tokenizer(repo: str = DEFAULT_REPO_ID) -> PreTrainedTokenizerFast:
    """Download ``tokenizer.json`` from *repo* and build a fast tokenizer.

    Parameters
    ----------
    repo:
        Hugging Face Hub repository id to fetch the tokenizer from.
        Defaults to the project's relation-extraction repo.

    Returns
    -------
    PreTrainedTokenizerFast
        Tokenizer constructed from the downloaded ``tokenizer.json``.

    Raises
    ------
    Whatever ``hf_hub_download`` raises on network, auth, or missing-file
    errors (e.g. ``huggingface_hub.utils.EntryNotFoundError``).
    """
    # hf_hub_download caches the file locally and returns its path.
    tokenizer_file = hf_hub_download(repo, "tokenizer.json")
    return PreTrainedTokenizerFast(tokenizer_file=tokenizer_file)


# Module-level instance kept for backward compatibility with existing
# `from ... import tokenizer` call sites. NOTE(review): this performs a
# network download at import time, as the original did.
tokenizer = load_tokenizer()