Spaces:
Runtime error
Runtime error
Commit ·
60dd668
1
Parent(s): 85db738
fixed error
Browse files
- app.py +1 -1
- scripts/predict.py +2 -2
app.py
CHANGED
|
@@ -20,7 +20,7 @@ print(f"[INFO] Using device: {device}")
|
|
| 20 |
|
| 21 |
# Load the CLIP model and processor
|
| 22 |
model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32", from_tf=True).to(device)
|
| 23 |
-
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
|
| 24 |
|
| 25 |
# Load the ensemble classifier model
|
| 26 |
ensemble_clf = joblib.load("model/random_forest_tuned_aug.pkl")
|
|
|
|
| 20 |
|
| 21 |
# Load the CLIP model and processor
|
| 22 |
model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32", from_tf=True).to(device)
|
| 23 |
+
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32", from_tf=True)
|
| 24 |
|
| 25 |
# Load the ensemble classifier model
|
| 26 |
ensemble_clf = joblib.load("model/random_forest_tuned_aug.pkl")
|
scripts/predict.py
CHANGED
|
@@ -8,8 +8,8 @@ from io import BytesIO
|
|
| 8 |
|
| 9 |
# Load models
|
| 10 |
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
| 11 |
-
model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32").to(device)
|
| 12 |
-
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
|
| 13 |
ensemble_clf = joblib.load("models/random_forest_aug.pkl")
|
| 14 |
|
| 15 |
label_map = {0: "real", 1: "deepfake", 2: "ai_gen"}
|
|
|
|
| 8 |
|
| 9 |
# Load models
|
| 10 |
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
| 11 |
+
model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32", from_tf=True).to(device)
|
| 12 |
+
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32", from_tf=True)
|
| 13 |
ensemble_clf = joblib.load("models/random_forest_aug.pkl")
|
| 14 |
|
| 15 |
label_map = {0: "real", 1: "deepfake", 2: "ai_gen"}
|