Update app.py
Browse files
app.py
CHANGED
@@ -1,11 +1,11 @@  (old version)
 1   import gradio as gr
 2   from transformers import pipeline
 3   from arabert.aragpt2.grover.modeling_gpt2 import GPT2LMHeadModel
 4 - from transformers import            <- removed line (text truncated in this view)
 5   import re
 6
 7   model_name = "Naseej/AskMe-Large"
 8 - tokenizer =                         <- removed line (text truncated in this view)
 9       eos_token='<|endoftext|>', pad_token='<|pad|>')
10   model = GPT2LMHeadModel.from_pretrained(model_name).cuda()
11   model.resize_token_embeddings(len(tokenizer))
|
@@ -52,4 +52,4 @@ demo = gr.Interface(  (old version)
52       examples=examples
53   )
54
55 - demo.launch(
@@ -1,11 +1,11 @@  (new version)
 1   import gradio as gr
 2   from transformers import pipeline
 3   from arabert.aragpt2.grover.modeling_gpt2 import GPT2LMHeadModel
 4 + from transformers import AutoTokenizer
 5   import re
 6
 7   model_name = "Naseej/AskMe-Large"
 8 + tokenizer = AutoTokenizer.from_pretrained('Naseej/AskMe-Large', bos_token='<|startoftext|>',
 9       eos_token='<|endoftext|>', pad_token='<|pad|>')
10   model = GPT2LMHeadModel.from_pretrained(model_name).cuda()
11   model.resize_token_embeddings(len(tokenizer))
@@ -52,4 +52,4 @@ demo = gr.Interface(  (new version)
52       examples=examples
53   )
54
55 + demo.launch()