Using "facebook/blenderbot-400M-distill" model
Browse files
app.py
CHANGED
|
@@ -1,20 +1,20 @@
|
|
| 1 |
|
| 2 |
from transformers import pipeline
|
| 3 |
-
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 4 |
import gradio as gr
|
| 5 |
import torch
|
| 6 |
|
| 7 |
# Use a pipeline as a high-level helper
|
| 8 |
|
| 9 |
-
pipe = pipeline("conversational", model="
|
| 10 |
|
| 11 |
title = "🤖AI ChatBot"
|
| 12 |
description = "Building open-domain chatbots is a challenging area for machine learning research."
|
| 13 |
examples = [["How are you?"]]
|
| 14 |
|
| 15 |
|
| 16 |
-
tokenizer = AutoTokenizer.from_pretrained("
|
| 17 |
-
model =
|
| 18 |
|
| 19 |
|
| 20 |
def predict(input, history=[]):
|
|
|
|
| 1 |
|
| 2 |
"""Gradio chatbot demo backed by the facebook/blenderbot-400M-distill checkpoint."""
from transformers import pipeline
from transformers import AutoModelForCausalLM, AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as gr
import torch

# Single source of truth for the checkpoint id — previously this string
# literal was repeated three times below.
MODEL_NAME = "facebook/blenderbot-400M-distill"

# Use a pipeline as a high-level helper.
# NOTE(review): the "conversational" pipeline task is deprecated and removed
# in transformers >= 4.42 — confirm the pinned transformers version still
# supports it, or migrate to the chat-enabled "text-generation" pipeline.
pipe = pipeline("conversational", model=MODEL_NAME)

# Metadata shown in the Gradio UI.
title = "🤖AI ChatBot"
description = "Building open-domain chatbots is a challenging area for machine learning research."
examples = [["How are you?"]]

# NOTE(review): this loads the same checkpoint a second time — the pipeline
# above already holds its own model/tokenizer pair — roughly doubling memory
# use. If `predict` only uses `tokenizer`/`model`, the `pipe = ...` line can
# be dropped; kept here because `pipe` may be referenced elsewhere.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
def predict(input, history=[]):
|