datasciencedojo committed on
Commit
94c4b5d
·
1 Parent(s): 43dd17a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -8
app.py CHANGED
@@ -3,7 +3,6 @@ import time
3
  import os
4
  import mediapipe as mp
5
  import gradio as gr
6
- import pyttsx3
7
  from threading import Thread
8
  #from cvzone.HandTrackingModule import HandDetector
9
  example_flag = False
@@ -94,8 +93,7 @@ class handDetector():
94
  return lmList
95
 
96
 
97
- def speak(text):
98
- pyttsx3.speak(text)
99
 
100
  def set_example_image(example: list) -> dict:
101
  return gr.inputs.Image.update(value=example[0])
@@ -150,13 +148,8 @@ def count(im):
150
  cv2.rectangle(img, (20, 225), (170, 425), (0, 255, 0), cv2.FILLED)
151
  cv2.putText(img, str(totalFingers), (45, 375), cv2.FONT_HERSHEY_PLAIN,
152
  10, (255, 0, 0), 25)
153
- p1=Thread(target=speak,args=(text,))
154
- p1.start()
155
  return img[:,:,::-1]
156
  else:
157
- text = f"No Hand detected, please use your right hand for correct output!"
158
- p1=Thread(target=speak,args=(text,))
159
- p1.start()
160
  return cv2.flip(img[:,:,::-1],1)
161
 
162
  with gr.Blocks() as demo:
 
3
  import os
4
  import mediapipe as mp
5
  import gradio as gr
 
6
  from threading import Thread
7
  #from cvzone.HandTrackingModule import HandDetector
8
  example_flag = False
 
93
  return lmList
94
 
95
 
96
+
 
97
 
98
  def set_example_image(example: list) -> dict:
99
  return gr.inputs.Image.update(value=example[0])
 
148
  cv2.rectangle(img, (20, 225), (170, 425), (0, 255, 0), cv2.FILLED)
149
  cv2.putText(img, str(totalFingers), (45, 375), cv2.FONT_HERSHEY_PLAIN,
150
  10, (255, 0, 0), 25)
 
 
151
  return img[:,:,::-1]
152
  else:
 
 
 
153
  return cv2.flip(img[:,:,::-1],1)
154
 
155
  with gr.Blocks() as demo: