jsakshi committed on
Commit
291a517
·
verified ·
1 Parent(s): cfc2413

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -15
app.py CHANGED
@@ -3,15 +3,14 @@ import cv2
3
  import numpy as np
4
  from keras.models import load_model
5
 
6
- # Load face classifier
7
  face_classifier = cv2.CascadeClassifier(
8
  cv2.data.haarcascades + 'haarcascade_frontalface_default.xml'
9
  )
10
 
11
- # Load emotion classification model
12
  classifier = load_model("Custom_CNN_model.keras")
13
 
14
- # Emotion labels
15
  emotion_labels = ['Angry', 'Disgust', 'Fear', 'Happy', 'Neutral', 'Sad', 'Surprise']
16
 
17
 
@@ -22,7 +21,7 @@ def detect_emotion(image):
22
  img = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
23
  gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
24
 
25
- faces = face_classifier.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
26
 
27
  for (x, y, w, h) in faces:
28
  cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 255), 2)
@@ -43,8 +42,7 @@ def detect_emotion(image):
43
  return cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
44
 
45
 
46
- # Modern Gradio UI
47
- with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue")) as app:
48
  gr.Markdown(
49
  """
50
  <h1 style='text-align:center; color:#2D99FF;'>Emotion Detection App</h1>
@@ -53,18 +51,15 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue")) as app:
53
  )
54
 
55
  with gr.Row():
56
- with gr.Column(scale=1):
57
  input_img = gr.Image(type="numpy", label="Upload Image")
58
- webcam = gr.Image(type="numpy", sources=["webcam"], label="Webcam Input")
59
- run_btn = gr.Button("Detect Emotion", variant="primary")
60
 
61
- with gr.Column(scale=1):
62
- output_img = gr.Image(type="numpy", label="Detection Result")
63
 
64
  run_btn.click(fn=detect_emotion, inputs=input_img, outputs=output_img)
65
  webcam.change(fn=detect_emotion, inputs=webcam, outputs=output_img)
66
 
67
-
68
- app.queue()
69
- if __name__ == "__main__":
70
- app.launch()
 
3
  import numpy as np
4
  from keras.models import load_model
5
 
6
# Haar cascade bundled with OpenCV for frontal-face detection.
_cascade_path = cv2.data.haarcascades + 'haarcascade_frontalface_default.xml'
face_classifier = cv2.CascadeClassifier(_cascade_path)

# Pre-trained CNN emotion classifier, loaded from the Keras model file.
classifier = load_model("Custom_CNN_model.keras")

# Emotion class labels — presumably index-aligned with the model's
# output classes; verify against how the model was trained.
emotion_labels = ['Angry', 'Disgust', 'Fear', 'Happy', 'Neutral', 'Sad', 'Surprise']
15
 
16
 
 
21
  img = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
22
  gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
23
 
24
+ faces = face_classifier.detectMultiScale(gray, 1.1, 5)
25
 
26
  for (x, y, w, h) in faces:
27
  cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 255), 2)
 
42
  return cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
43
 
44
 
45
+ with gr.Blocks() as app:
 
46
  gr.Markdown(
47
  """
48
  <h1 style='text-align:center; color:#2D99FF;'>Emotion Detection App</h1>
 
51
  )
52
 
53
  with gr.Row():
54
+ with gr.Column():
55
  input_img = gr.Image(type="numpy", label="Upload Image")
56
+ webcam = gr.Image(type="numpy", sources=["webcam"], label="Webcam")
57
+ run_btn = gr.Button("Detect Emotion")
58
 
59
+ with gr.Column():
60
+ output_img = gr.Image(type="numpy", label="Result")
61
 
62
  run_btn.click(fn=detect_emotion, inputs=input_img, outputs=output_img)
63
  webcam.change(fn=detect_emotion, inputs=webcam, outputs=output_img)
64
 
65
+ app.launch()