Update app.py
Browse files
app.py
CHANGED
|
@@ -14,7 +14,7 @@ st.title('🦜Seon\'s Legal QA For Dummies 🔗 ')
|
|
| 14 |
|
| 15 |
model = AutoModelForCausalLM.from_pretrained("PyaeSoneK/pythia_70m_legalQA",
|
| 16 |
device_map='auto',
|
| 17 |
-
torch_dtype=torch.
|
| 18 |
use_auth_token= st.secrets['hf_access_token'],
|
| 19 |
)
|
| 20 |
|
|
@@ -31,7 +31,7 @@ from transformers import pipeline
|
|
| 31 |
pipe = pipeline("text-generation",
|
| 32 |
model=model,
|
| 33 |
tokenizer= tokenizer,
|
| 34 |
-
torch_dtype=torch.
|
| 35 |
device_map="auto",
|
| 36 |
max_new_tokens = 512,
|
| 37 |
do_sample=True,
|
|
|
|
| 14 |
|
| 15 |
model = AutoModelForCausalLM.from_pretrained("PyaeSoneK/pythia_70m_legalQA",
|
| 16 |
device_map='auto',
|
| 17 |
+
torch_dtype=torch.float32,
|
| 18 |
use_auth_token= st.secrets['hf_access_token'],
|
| 19 |
)
|
| 20 |
|
|
|
|
| 31 |
pipe = pipeline("text-generation",
|
| 32 |
model=model,
|
| 33 |
tokenizer= tokenizer,
|
| 34 |
+
torch_dtype=torch.bfloat16,
|
| 35 |
device_map="auto",
|
| 36 |
max_new_tokens = 512,
|
| 37 |
do_sample=True,
|