Spaces:
Runtime error
Runtime error
Commit
·
3e6c751
1
Parent(s):
0a94d84
made some changes for faster processing
Browse files
app.py
CHANGED
|
@@ -47,7 +47,7 @@ def process_text(text):
     return emotion_dict, predicted_emotion

 def preprocess_frame(frame):
-    frame = cv2.resize(frame, (  [rest of line truncated in page extraction]
+    frame = cv2.resize(frame, (112, 112))
     pixel_values = caption_processor(images=frame, return_tensors="pt").pixel_values
     return pixel_values

@@ -95,7 +95,7 @@ def analyze_video(video=None, video_url=None):
     emotion_dict_text, predicted_emotion_text = process_text(transcript)

     # Frame-wise emotion detection from the video
-    n_frame_interval =  [rest of line truncated in page extraction]
+    n_frame_interval = 120
     emotion_vectors_video = []

     video_capture = cv2.VideoCapture(video_path)