| from transformers import BartTokenizer, BartForConditionalGeneration |
|
|
# Module-level singletons: load the BART-large model fine-tuned on CNN/DailyMail
# for summarization. Loaded once at import time (downloads weights on first run,
# then reads from the local Hugging Face cache).
tokenizer = BartTokenizer.from_pretrained("facebook/bart-large-cnn")
model = BartForConditionalGeneration.from_pretrained("facebook/bart-large-cnn")
|
|
def generate_summary(text: str) -> str:
    """Generate an abstractive summary of *text* using BART-large-CNN.

    The input is truncated to the model's 1024-token limit, then decoded
    with beam search (4 beams) into a summary of 40-200 tokens.

    Args:
        text: The source document to summarize. May be arbitrarily long;
            anything beyond 1024 tokens is silently truncated.

    Returns:
        The decoded summary string, with special tokens stripped.
    """
    inputs = tokenizer([text], max_length=1024, truncation=True, return_tensors="pt")

    summary_ids = model.generate(
        inputs["input_ids"],
        # Fix: forward the attention mask so padded positions are ignored.
        # Omitting it makes generate() fall back to attending everywhere
        # (transformers warns that results may be unreliable without it).
        attention_mask=inputs["attention_mask"],
        num_beams=4,
        min_length=40,
        max_length=200,
        early_stopping=True,  # stop each beam as soon as EOS is produced
    )

    # Single input sentence -> single output sequence at index 0.
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)
|
|