BART: Denoising Sequence-to-Sequence Pre-training for Natural Language Generation, Translation, and Comprehension
Paper
•
1910.13461
•
Published
•
6
This model is based on facebook/bart-large and was finetuned on SQuAD2.0. You can find the corresponding papers here (model) and here (data).
# Question-answering inference example for phiyodr/bart-large-finetuned-squad2.
# Use the public top-level API rather than the internal `transformers.pipelines`
# submodule path, which is not part of the documented interface.
from transformers import pipeline

model_name = "phiyodr/bart-large-finetuned-squad2"
# Builds a QA pipeline; downloads the model and tokenizer on first use.
nlp = pipeline('question-answering', model=model_name, tokenizer=model_name)
inputs = {
    'question': 'What discipline did Winkelmann create?',
    'context': 'Johann Joachim Winckelmann was a German art historian and archaeologist. He was a pioneering Hellenist who first articulated the difference between Greek, Greco-Roman and Roman art. "The prophet and founding hero of modern archaeology", Winckelmann was one of the founders of scientific archaeology and first applied the categories of style on a large, systematic basis to the history of art. '
}
# Returns a dict with keys such as 'score', 'start', 'end', and 'answer'.
nlp(inputs)
{
"base_model": "facebook/bart-large",
"do_lower_case": True,
"learning_rate": 3e-5,
"num_train_epochs": 4,
"max_seq_length": 384,
"doc_stride": 128,
"max_query_length": 64,
"batch_size": 96
}
{
"exact": 81.96748926134929,
"f1": 85.93825235371045,
"total": 11873,
"HasAns_exact": 78.71120107962213,
"HasAns_f1": 86.6641144054667,
"HasAns_total": 5928,
"NoAns_exact": 85.21446593776282,
"NoAns_f1": 85.21446593776282,
"NoAns_total": 5945
}