Commit ·
1ebd823
1
Parent(s): 9cdcfee
fix pickle file paths
Browse files
- speech/test.py +1 -1
- vision/test.py +1 -1
speech/test.py
CHANGED
|
@@ -11,7 +11,7 @@ warnings.filterwarnings("ignore", category=FutureWarning)
|
|
| 11 |
app = FastAPI()
|
| 12 |
|
| 13 |
# Load the best model
|
| 14 |
-
BEST_MODEL = torch.load("
|
| 15 |
BEST_MODEL.eval()
|
| 16 |
|
| 17 |
# Reverse label mapping
|
|
|
|
| 11 |
app = FastAPI()
|
| 12 |
|
| 13 |
# Load the best model
|
| 14 |
+
BEST_MODEL = torch.load("best_ser_whisper.pkl", map_location=torch.device('cpu'), weights_only=False)
|
| 15 |
BEST_MODEL.eval()
|
| 16 |
|
| 17 |
# Reverse label mapping
|
vision/test.py
CHANGED
|
@@ -34,7 +34,7 @@ app = FastAPI()
|
|
| 34 |
|
| 35 |
# Load the pre-trained model
|
| 36 |
# model = torch.load("best_fer_clip_vit.pkl", map_location=DEVICE)
|
| 37 |
-
model.load_state_dict(torch.load("
|
| 38 |
model.eval()
|
| 39 |
|
| 40 |
# Define the image transformation pipeline
|
|
|
|
| 34 |
|
| 35 |
# Load the pre-trained model
|
| 36 |
# model = torch.load("best_fer_clip_vit.pkl", map_location=DEVICE)
|
| 37 |
+
model.load_state_dict(torch.load("best_fer_clip_vit.pkl", map_location=DEVICE, weights_only=False))
|
| 38 |
model.eval()
|
| 39 |
|
| 40 |
# Define the image transformation pipeline
|