Spaces:
Sleeping
Sleeping
Ranam Hamoud
committed on
Commit
·
1e6feef
1
Parent(s):
67597e5
Fix meta tensor error: use device_map in from_pretrained instead of .to(device)
Browse files- plagiarism_detection.py +7 -4
plagiarism_detection.py
CHANGED
|
@@ -78,12 +78,15 @@ def ai_plagiarism_detection(text, threshold=0.5, show_results=False):
|
|
| 78 |
|
| 79 |
# Model and Tokenizer Directory
|
| 80 |
model_directory = "desklib/ai-text-detector-v1.01"
|
| 81 |
-
# Load tokenizer and model
|
| 82 |
-
tokenizer = AutoTokenizer.from_pretrained(model_directory)
|
| 83 |
-
model = DesklibAIDetectionModel.from_pretrained(model_directory)
|
| 84 |
# Set up device
|
| 85 |
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
| 86 |
-
model
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
# Predict
|
| 88 |
probability, ai_detected = predict_single_text(text, model, tokenizer, device, threshold=threshold)
|
| 89 |
# to print results
|
|
|
|
| 78 |
|
| 79 |
# Model and Tokenizer Directory
|
| 80 |
model_directory = "desklib/ai-text-detector-v1.01"
|
|
|
|
|
|
|
|
|
|
| 81 |
# Set up device
|
| 82 |
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
| 83 |
+
# Load tokenizer and model directly to the target device to avoid meta tensor issues
|
| 84 |
+
tokenizer = AutoTokenizer.from_pretrained(model_directory)
|
| 85 |
+
model = DesklibAIDetectionModel.from_pretrained(
|
| 86 |
+
model_directory,
|
| 87 |
+
device_map=device,
|
| 88 |
+
low_cpu_mem_usage=False
|
| 89 |
+
)
|
| 90 |
# Predict
|
| 91 |
probability, ai_detected = predict_single_text(text, model, tokenizer, device, threshold=threshold)
|
| 92 |
# to print results
|