clementBE committed on
Commit
9a7d8ba
·
verified ·
1 Parent(s): bb115d5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +70 -57
app.py CHANGED
@@ -16,8 +16,6 @@ import cv2
16
  # ---------------------------
17
  # Device
18
  # ---------------------------
19
- if not torch.cuda.is_available():
20
- os.environ["CUDA_VISIBLE_DEVICES"] = ""
21
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
22
 
23
  # ---------------------------
@@ -76,50 +74,65 @@ def classify_zip_and_analyze_color(zip_file):
76
  zip_ref.extractall(tmpdir)
77
 
78
  for fname in sorted(os.listdir(tmpdir)):
79
- if fname.lower().endswith(('.png','.jpg','.jpeg')):
80
- img_path = os.path.join(tmpdir,fname)
81
- try:
82
- image = Image.open(img_path).convert("RGB")
83
- images_list.append((image.copy(), fname))
84
- except:
85
- continue
86
-
87
- # Classification
88
- input_tensor = transform(image).unsqueeze(0).to(device)
89
- with torch.no_grad():
90
- output = model(input_tensor)
91
- probs = F.softmax(output, dim=1)[0]
92
- top3_prob, top3_idx = torch.topk(probs,3)
93
- preds = [(imagenet_classes[idx], f"{prob.item()*100:.2f}%") for idx,prob in zip(top3_idx, top3_prob)]
94
-
95
- rgb, hex_color = get_dominant_color(image)
96
- basic_color = closest_basic_color(rgb)
97
-
98
- # Face analysis
99
- faces_data = []
100
- try:
101
- img_cv2 = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)
102
- detected_faces = DeepFace.analyze(img_cv2, actions=["age","gender","emotion"], enforce_detection=False)
103
- if isinstance(detected_faces, list):
104
- for f in detected_faces:
105
- faces_data.append({"age": f["age"], "gender": f["gender"], "emotion": f["dominant_emotion"]})
 
 
 
 
 
 
 
106
  else:
107
- faces_data.append({"age": detected_faces["age"], "gender": detected_faces["gender"], "emotion": detected_faces["dominant_emotion"]})
108
- except:
109
- faces_data=[]
110
-
111
- # Convert faces data to readable string
112
- faces_str = "; ".join([f"Age: {face['age']}, Gender: {'Homme' if face['gender']=='Man' else 'Femme'}, Emotion: {face['emotion']}" for face in faces_data])
113
-
114
- results.append((
115
- fname,
116
- ", ".join([p[0] for p in preds]),
117
- ", ".join([p[1] for p in preds]),
118
- hex_color,
119
- basic_color,
120
- faces_str
121
- ))
122
-
 
 
 
 
 
 
 
 
123
  df = pd.DataFrame(results, columns=["Filename","Top 3 Predictions","Confidence","Dominant Color","Basic Color","Face Info"])
124
  out_xlsx = os.path.join(tempfile.gettempdir(), f"{zip_name}_{date_str}_results.xlsx")
125
  df.to_excel(out_xlsx,index=False)
@@ -127,12 +140,14 @@ def classify_zip_and_analyze_color(zip_file):
127
  # ---------------------------
128
  # Plots
129
  # ---------------------------
 
130
  fig1, ax1 = plt.subplots()
131
  color_counts = df["Basic Color"].value_counts()
132
  ax1.bar(color_counts.index, color_counts.values, color="skyblue")
133
  ax1.set_title("Basic Color Frequency"); ax1.set_ylabel("Count")
134
  buf1 = io.BytesIO(); plt.savefig(buf1, format="png"); plt.close(fig1); buf1.seek(0); plot1_img = Image.open(buf1)
135
 
 
136
  fig2, ax2 = plt.subplots()
137
  preds_flat = []
138
  for p in df["Top 3 Predictions"]: preds_flat.extend(p.split(", "))
@@ -142,8 +157,8 @@ def classify_zip_and_analyze_color(zip_file):
142
  buf2 = io.BytesIO(); plt.savefig(buf2, format="png", bbox_inches="tight"); plt.close(fig2); buf2.seek(0); plot2_img = Image.open(buf2)
143
 
144
  # Gender and age
145
- ages_male, ages_female = [], []
146
- gender_confidence = {"Homme":0, "Femme":0}
147
  for face_list in df["Face Info"]:
148
  if face_list.strip()=="":
149
  continue
@@ -151,16 +166,18 @@ def classify_zip_and_analyze_color(zip_file):
151
  parts = face_str.split(", ")
152
  age = int(parts[0].split(": ")[1])
153
  gender = parts[1].split(": ")[1]
154
- conf = 0.9 # approximation for histogram
155
- gender_confidence[gender] += conf
156
  if gender=="Homme": ages_male.append(age)
157
- else: ages_female.append(age)
 
 
158
 
 
159
  fig3, ax3 = plt.subplots()
160
- ax3.bar(gender_confidence.keys(), gender_confidence.values(), color=["lightblue","pink"])
161
- ax3.set_title("Gender Distribution"); ax3.set_ylabel("Sum of Confidence")
162
  buf3 = io.BytesIO(); plt.savefig(buf3, format="png"); plt.close(fig3); buf3.seek(0); plot3_img = Image.open(buf3)
163
 
 
164
  fig4, ax4 = plt.subplots()
165
  bins = range(0,101,5)
166
  ax4.hist([ages_male, ages_female], bins=bins, color=["lightblue","pink"], label=["Homme","Femme"], edgecolor="black")
@@ -185,12 +202,8 @@ with gr.Blocks() as demo:
185
  plot3 = gr.Image(label="Gender Distribution")
186
  plot4 = gr.Image(label="Age Distribution by Gender")
187
 
188
- def run_analysis(zip_file):
189
- df, images_list, out_xlsx, p1, p2, p3, p4 = classify_zip_and_analyze_color(zip_file)
190
- return df, images_list, out_xlsx, p1, p2, p3, p4
191
-
192
  analyze_btn.click(
193
- run_analysis,
194
  inputs=uploaded_zip,
195
  outputs=[output_df, image_gallery, download_file, plot1, plot2, plot3, plot4]
196
  )
 
16
  # ---------------------------
17
  # Device
18
  # ---------------------------
 
 
19
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
20
 
21
  # ---------------------------
 
74
  zip_ref.extractall(tmpdir)
75
 
76
  for fname in sorted(os.listdir(tmpdir)):
77
+ if not fname.lower().endswith(('.png','.jpg','.jpeg')):
78
+ continue
79
+ img_path = os.path.join(tmpdir,fname)
80
+ try:
81
+ image = Image.open(img_path).convert("RGB")
82
+ images_list.append((image.copy(), fname))
83
+ except:
84
+ continue
85
+
86
+ # Image classification
87
+ input_tensor = transform(image).unsqueeze(0).to(device)
88
+ with torch.no_grad():
89
+ output = model(input_tensor)
90
+ probs = F.softmax(output, dim=1)[0]
91
+ top3_prob, top3_idx = torch.topk(probs,3)
92
+ preds = [(imagenet_classes[idx], f"{prob.item()*100:.2f}%") for idx,prob in zip(top3_idx, top3_prob)]
93
+
94
+ # Dominant color
95
+ rgb, hex_color = get_dominant_color(image)
96
+ basic_color = closest_basic_color(rgb)
97
+
98
+ # Face analysis
99
+ faces_data = []
100
+ try:
101
+ img_cv2 = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)
102
+ detected_faces = DeepFace.analyze(img_cv2, actions=["age","gender","emotion"], enforce_detection=False)
103
+ if not isinstance(detected_faces, list):
104
+ detected_faces = [detected_faces]
105
+ for f in detected_faces:
106
+ gender = f["gender"].lower()
107
+ if gender in ["man", "male"]:
108
+ gender_fr = "Homme"
109
+ elif gender in ["woman", "female"]:
110
+ gender_fr = "Femme"
111
  else:
112
+ gender_fr = "Inconnu"
113
+ faces_data.append({
114
+ "age": f["age"],
115
+ "gender": gender_fr,
116
+ "emotion": f["dominant_emotion"]
117
+ })
118
+ except:
119
+ faces_data=[]
120
+
121
+ faces_str = "; ".join([
122
+ f"Age: {face['age']}, Gender: {face['gender']}, Emotion: {face['emotion']}"
123
+ for face in faces_data
124
+ ])
125
+
126
+ results.append((
127
+ fname,
128
+ ", ".join([p[0] for p in preds]),
129
+ ", ".join([p[1] for p in preds]),
130
+ hex_color,
131
+ basic_color,
132
+ faces_str
133
+ ))
134
+
135
+ # Create DataFrame
136
  df = pd.DataFrame(results, columns=["Filename","Top 3 Predictions","Confidence","Dominant Color","Basic Color","Face Info"])
137
  out_xlsx = os.path.join(tempfile.gettempdir(), f"{zip_name}_{date_str}_results.xlsx")
138
  df.to_excel(out_xlsx,index=False)
 
140
  # ---------------------------
141
  # Plots
142
  # ---------------------------
143
+ # Color frequency
144
  fig1, ax1 = plt.subplots()
145
  color_counts = df["Basic Color"].value_counts()
146
  ax1.bar(color_counts.index, color_counts.values, color="skyblue")
147
  ax1.set_title("Basic Color Frequency"); ax1.set_ylabel("Count")
148
  buf1 = io.BytesIO(); plt.savefig(buf1, format="png"); plt.close(fig1); buf1.seek(0); plot1_img = Image.open(buf1)
149
 
150
+ # Top prediction distribution
151
  fig2, ax2 = plt.subplots()
152
  preds_flat = []
153
  for p in df["Top 3 Predictions"]: preds_flat.extend(p.split(", "))
 
157
  buf2 = io.BytesIO(); plt.savefig(buf2, format="png", bbox_inches="tight"); plt.close(fig2); buf2.seek(0); plot2_img = Image.open(buf2)
158
 
159
  # Gender and age
160
+ ages_male = []
161
+ ages_female = []
162
  for face_list in df["Face Info"]:
163
  if face_list.strip()=="":
164
  continue
 
166
  parts = face_str.split(", ")
167
  age = int(parts[0].split(": ")[1])
168
  gender = parts[1].split(": ")[1]
 
 
169
  if gender=="Homme": ages_male.append(age)
170
+ elif gender=="Femme": ages_female.append(age)
171
+
172
+ gender_counts = {"Homme": len(ages_male), "Femme": len(ages_female)}
173
 
174
+ # Gender distribution
175
  fig3, ax3 = plt.subplots()
176
+ ax3.bar(gender_counts.keys(), gender_counts.values(), color=["lightblue","pink"])
177
+ ax3.set_title("Gender Distribution"); ax3.set_ylabel("Count")
178
  buf3 = io.BytesIO(); plt.savefig(buf3, format="png"); plt.close(fig3); buf3.seek(0); plot3_img = Image.open(buf3)
179
 
180
+ # Age distribution
181
  fig4, ax4 = plt.subplots()
182
  bins = range(0,101,5)
183
  ax4.hist([ages_male, ages_female], bins=bins, color=["lightblue","pink"], label=["Homme","Femme"], edgecolor="black")
 
202
  plot3 = gr.Image(label="Gender Distribution")
203
  plot4 = gr.Image(label="Age Distribution by Gender")
204
 
 
 
 
 
205
  analyze_btn.click(
206
+ classify_zip_and_analyze_color,
207
  inputs=uploaded_zip,
208
  outputs=[output_df, image_gallery, download_file, plot1, plot2, plot3, plot4]
209
  )