Spaces:
Sleeping
Sleeping
Update main.py
Browse files
main.py
CHANGED
|
@@ -6,12 +6,12 @@ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
|
| 6 |
|
| 7 |
model_dir_small = 'edithram23/Redaction'
|
| 8 |
tokenizer_small = AutoTokenizer.from_pretrained(model_dir)
|
| 9 |
-
model_small = AutoModelForSeq2SeqLM.from_pretrained(
|
| 10 |
|
| 11 |
|
| 12 |
model_dir_large = 'edithram23/Redaction_Personal_info_v1'
|
| 13 |
tokenizer_large = AutoTokenizer.from_pretrained(model_dir)
|
| 14 |
-
model_large = AutoModelForSeq2SeqLM.from_pretrained(
|
| 15 |
|
| 16 |
def mask_generation(text,model=model_small,tokenizer=tokenizer_small):
|
| 17 |
import re
|
|
|
|
| 6 |
|
| 7 |
model_dir_small = 'edithram23/Redaction'
|
| 8 |
tokenizer_small = AutoTokenizer.from_pretrained(model_dir_small)
|
| 9 |
+
model_small = AutoModelForSeq2SeqLM.from_pretrained(model_dir_small)
|
| 10 |
|
| 11 |
|
| 12 |
model_dir_large = 'edithram23/Redaction_Personal_info_v1'
|
| 13 |
tokenizer_large = AutoTokenizer.from_pretrained(model_dir_large)
|
| 14 |
+
model_large = AutoModelForSeq2SeqLM.from_pretrained(model_dir_large)
|
| 15 |
|
| 16 |
def mask_generation(text,model=model_small,tokenizer=tokenizer_small):
|
| 17 |
import re
|