ishworrsubedii committed on
Commit
4ea7bfb
·
1 Parent(s): 0fb09d5

update: necklace placement logic and gradio ui

Browse files
Files changed (1) hide show
  1. app.py +86 -78
app.py CHANGED
@@ -22,6 +22,9 @@ pipeline = StableDiffusionInpaintPipeline.from_pretrained(
22
  )
23
  pipeline = pipeline.to("cuda")
24
 
 
 
 
25
 
26
  def clear_func():
27
  torch.cuda.empty_cache()
@@ -30,29 +33,33 @@ def clear_func():
30
 
31
  @spaces.GPU
32
  def clothing_try_on_n_necklace_try_on(image, jewellery):
 
33
  image = np.array(image)
34
  copy_image = image.copy()
35
  jewellery = np.array(jewellery)
36
 
37
- detector = PoseDetector()
38
 
39
  image = detector.findPose(image)
40
- lmList, bBoxInfo = detector.findPosition(image, bboxWithHands=False, draw=False)
41
 
42
- pt12, pt11, pt10, pt9 = (
43
- lmList[12][:2],
44
- lmList[11][:2],
45
- lmList[10][:2],
46
- lmList[9][:2],
47
- )
48
 
49
- avg_x1 = int(pt12[0] + (pt10[0] - pt12[0]) / 2)
50
- avg_y1 = int(pt12[1] - (pt12[1] - pt10[1]) / 2)
 
51
 
52
- avg_x2 = int(pt11[0] - (pt11[0] - pt9[0]) / 2)
53
- avg_y2 = int(pt11[1] - (pt11[1] - pt9[1]) / 2)
54
 
55
- image_gray = cv2.cvtColor(jewellery, cv2.COLOR_BGRA2GRAY)
 
 
 
 
 
 
56
 
57
  if avg_y2 < avg_y1:
58
  angle = math.ceil(
@@ -72,100 +79,101 @@ def clothing_try_on_n_necklace_try_on(image, jewellery):
72
  origImgRatio = xdist / jewellery.shape[1]
73
  ydist = jewellery.shape[0] * origImgRatio
74
 
 
 
 
75
  for offset_orig in range(image_gray.shape[1]):
76
  pixel_value = image_gray[0, :][offset_orig]
77
  if (pixel_value != 255) & (pixel_value != 0):
78
  break
79
  else:
80
  continue
81
-
82
  offset = int(0.8 * xdist * (offset_orig / jewellery.shape[1]))
83
  jewellery = cv2.resize(
84
- jewellery, (int(xdist), int(ydist)), interpolation=cv2.INTER_CUBIC
85
  )
86
  jewellery = cvzone.rotateImage(jewellery, angle)
87
  y_coordinate = avg_y1 - offset
88
- available_space = copy_image.shape[0] - y_coordinate
89
- extra = jewellery.shape[0] - available_space
90
- if extra > 0:
91
- jewellery = jewellery[extra + 10:, :]
92
- return necklaceTryOnPipeline(
93
- Image.fromarray(copy_image), Image.fromarray(jewellery)
94
- )
95
- else:
96
- result = cvzone.overlayPNG(copy_image, jewellery, (avg_x1, y_coordinate))
97
- # masking
98
- blackedNecklace = np.zeros(shape=copy_image.shape)
99
- # overlay
100
- cvzone.overlayPNG(blackedNecklace, jewellery, (avg_x1, y_coordinate))
101
- blackedNecklace = cv2.cvtColor(blackedNecklace.astype(np.uint8), cv2.COLOR_BGR2GRAY)
102
- binaryMask = blackedNecklace * ((blackedNecklace > 5) * 255)
103
- binaryMask[binaryMask >= 255] = 255
104
- binaryMask[binaryMask < 255] = 0
105
- image = Image.fromarray(result.astype(np.uint8))
106
- mask = Image.fromarray(binaryMask.astype(np.uint8)).convert("RGB")
107
-
108
- jewellery_mask = Image.fromarray(
109
- np.bitwise_and(np.array(mask), np.array(image))
110
- )
111
- arr_orig = np.array(grayscale(mask))
112
 
113
- image = cv2.inpaint(np.array(image), arr_orig, 15, cv2.INPAINT_TELEA)
114
- image = Image.fromarray(image)
 
 
 
 
 
 
 
 
 
115
 
116
- arr = arr_orig.copy()
117
- mask_y = np.where(arr == arr[arr != 0][0])[0][0]
118
- arr[mask_y:, :] = 255
 
119
 
120
- new = Image.fromarray(arr)
 
 
 
121
 
122
- mask = new.copy()
 
123
 
124
- orig_size = image.size
 
 
125
 
126
- image = image.resize((512, 512))
127
- mask = mask.resize((512, 512))
128
 
129
- results = []
130
- prompt = f" South Indian Saree, properly worn, natural setting, elegant, natural look, neckline without jewellery, simple"
131
- negative_prompt = "necklaces, jewellery, jewelry, necklace, neckpiece, garland, chain, neck wear, jewelled neck, jeweled neck, necklace on neck, jewellery on neck, accessories, watermark, text, changed background, wider body, narrower body, bad proportions, extra limbs, mutated hands, changed sizes, altered proportions, unnatural body proportions, blury, ugly"
132
 
133
- output = pipeline(
134
- prompt=prompt,
135
- negative_prompt=negative_prompt,
136
- image=image,
137
- mask_image=mask,
138
- strength=0.95,
139
- guidance_score=9,
140
- # generator = torch.Generator("cuda").manual_seed(42)
141
- ).images[0]
142
 
143
- output = output.resize(orig_size)
144
- temp_generated = np.bitwise_and(
145
- np.array(output),
146
- np.bitwise_not(np.array(Image.fromarray(arr_orig).convert("RGB"))),
147
- )
148
- results.append(temp_generated)
149
 
150
- results = [
151
- Image.fromarray(np.bitwise_or(x, np.array(jewellery_mask))) for x in results
152
- ]
153
- clear_func()
154
- return results[0]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
155
 
156
 
157
  with gr.Blocks() as interface:
158
  with gr.Row():
159
  inputImage = gr.Image(label="Input Image", type="pil", image_mode="RGB", interactive=True)
160
  selectedNecklace = gr.Image(label="Selected Necklace", type="pil", image_mode="RGBA", visible=False)
 
 
161
  with gr.Row():
162
- gr.Examples(examples=choker_images, inputs=[selectedNecklace], label="Necklaces")
163
- gr.Examples(examples=person_images, inputs=[inputImage], label="Models")
164
- with gr.Row():
165
- outputOne = gr.Image(label="Output 1", interactive=False)
166
- with gr.Row():
167
- submit = gr.Button("Enter")
168
 
169
  submit.click(fn=clothing_try_on_n_necklace_try_on, inputs=[inputImage, selectedNecklace], outputs=[outputOne])
170
 
171
  interface.launch(debug=True)
 
 
22
  )
23
  pipeline = pipeline.to("cuda")
24
 
25
+ detector = PoseDetector()
26
+ meshDetector = FaceMeshDetector(staticMode=True, maxFaces=1)
27
+
28
 
29
  def clear_func():
30
  torch.cuda.empty_cache()
 
33
 
34
  @spaces.GPU
35
  def clothing_try_on_n_necklace_try_on(image, jewellery):
36
+ image, jewellery = image.convert("RGB").resize((3000, 3000)), jewellery.convert("RGBA")
37
  image = np.array(image)
38
  copy_image = image.copy()
39
  jewellery = np.array(jewellery)
40
 
41
+ logger.info(f"NECKLACE TRY ON :: detecting pose and landmarks :: {storename}")
42
 
43
  image = detector.findPose(image)
44
+ lmList, _ = detector.findPosition(image, bboxWithHands=False, draw=False)
45
 
46
+ img, faces = meshDetector.findFaceMesh(image, draw=False)
47
+ leftLandmarkIndex = 172
48
+ rightLandmarkIndex = 397
 
 
 
49
 
50
+ leftLandmark, rightLandmark = faces[0][leftLandmarkIndex], faces[0][rightLandmarkIndex]
51
+ landmarksDistance = int(
52
+ ((leftLandmark[0] - rightLandmark[0]) ** 2 + (leftLandmark[1] - rightLandmark[1]) ** 2) ** 0.5)
53
 
54
+ logger.info(f"NECKLACE TRY ON :: estimating neck points")
 
55
 
56
+ avg_x1 = int(leftLandmark[0] - landmarksDistance * 0.12)
57
+ avg_x2 = int(rightLandmark[0] + landmarksDistance * 0.12)
58
+
59
+ avg_y1 = int(leftLandmark[1] + landmarksDistance * 0.5)
60
+ avg_y2 = int(rightLandmark[1] + landmarksDistance * 0.5)
61
+
62
+ logger.info(f"NECKLACE TRY ON :: scaling the necklace image ")
63
 
64
  if avg_y2 < avg_y1:
65
  angle = math.ceil(
 
79
  origImgRatio = xdist / jewellery.shape[1]
80
  ydist = jewellery.shape[0] * origImgRatio
81
 
82
+ logger.info(f"NECKLACE TRY ON :: adding offset based on the necklace shape")
83
+
84
+ image_gray = cv2.cvtColor(jewellery, cv2.COLOR_BGRA2GRAY)
85
  for offset_orig in range(image_gray.shape[1]):
86
  pixel_value = image_gray[0, :][offset_orig]
87
  if (pixel_value != 255) & (pixel_value != 0):
88
  break
89
  else:
90
  continue
 
91
  offset = int(0.8 * xdist * (offset_orig / jewellery.shape[1]))
92
  jewellery = cv2.resize(
93
+ jewellery, (int(xdist), int(ydist)), interpolation=cv2.INTER_AREA
94
  )
95
  jewellery = cvzone.rotateImage(jewellery, angle)
96
  y_coordinate = avg_y1 - offset
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
97
 
98
+ logger.info(f"NECKLACE TRY ON :: generating output :: {storename}")
99
+
100
+ result = cvzone.overlayPNG(copy_image, jewellery, (avg_x1, y_coordinate))
101
+
102
+ blackedNecklace = np.zeros(shape=copy_image.shape)
103
+ # overlay
104
+ cvzone.overlayPNG(blackedNecklace, jewellery, (avg_x1, y_coordinate))
105
+ blackedNecklace = cv2.cvtColor(blackedNecklace.astype(np.uint8), cv2.COLOR_BGR2GRAY)
106
+ binaryMask = blackedNecklace * ((blackedNecklace > 5) * 255)
107
+ binaryMask[binaryMask >= 255] = 255
108
+ binaryMask[binaryMask < 255] = 0
109
 
110
+ gc.collect()
111
+
112
+ image = Image.fromarray(result.astype(np.uint8))
113
+ mask = Image.fromarray(binaryMask.astype(np.uint8)).convert("RGB")
114
 
115
+ jewellery_mask = Image.fromarray(
116
+ np.bitwise_and(np.array(mask), np.array(image))
117
+ )
118
+ arr_orig = np.array(grayscale(mask))
119
 
120
+ image = cv2.inpaint(np.array(image), arr_orig, 15, cv2.INPAINT_TELEA)
121
+ image = Image.fromarray(image)
122
 
123
+ arr = arr_orig.copy()
124
+ mask_y = np.where(arr == arr[arr != 0][0])[0][0]
125
+ arr[mask_y:, :] = 255
126
 
127
+ new = Image.fromarray(arr)
 
128
 
129
+ mask = new.copy()
 
 
130
 
131
+ orig_size = image.size
 
 
 
 
 
 
 
 
132
 
133
+ image = image.resize((512, 512))
134
+ mask = mask.resize((512, 512))
 
 
 
 
135
 
136
+ results = []
137
+ prompt = f" South Indian Saree, properly worn, natural setting, elegant, natural look, neckline without jewellery, simple"
138
+ negative_prompt = "necklaces, jewellery, jewelry, necklace, neckpiece, garland, chain, neck wear, jewelled neck, jeweled neck, necklace on neck, jewellery on neck, accessories, watermark, text, changed background, wider body, narrower body, bad proportions, extra limbs, mutated hands, changed sizes, altered proportions, unnatural body proportions, blury, ugly"
139
+
140
+ output = pipeline(
141
+ prompt=prompt,
142
+ negative_prompt=negative_prompt,
143
+ image=image,
144
+ mask_image=mask,
145
+ strength=0.95,
146
+ guidance_score=9,
147
+ # generator = torch.Generator("cuda").manual_seed(42)
148
+ ).images[0]
149
+
150
+ output = output.resize(orig_size)
151
+ temp_generated = np.bitwise_and(
152
+ np.array(output),
153
+ np.bitwise_not(np.array(Image.fromarray(arr_orig).convert("RGB"))),
154
+ )
155
+ results.append(temp_generated)
156
+
157
+ results = [
158
+ Image.fromarray(np.bitwise_or(x, np.array(jewellery_mask))) for x in results
159
+ ]
160
+ clear_func()
161
+ return results[0]
162
 
163
 
164
  with gr.Blocks() as interface:
165
  with gr.Row():
166
  inputImage = gr.Image(label="Input Image", type="pil", image_mode="RGB", interactive=True)
167
  selectedNecklace = gr.Image(label="Selected Necklace", type="pil", image_mode="RGBA", visible=False)
168
+ outputOne = gr.Image(label="Output", interactive=False)
169
+
170
  with gr.Row():
171
+ gr.Examples(examples=choker_images, inputs=[selectedNecklace], label="Select Necklace")
172
+ gr.Examples(examples=person_images, inputs=[inputImage], label="Select Model")
173
+
174
+ submit = gr.Button("Apply")
 
 
175
 
176
  submit.click(fn=clothing_try_on_n_necklace_try_on, inputs=[inputImage, selectedNecklace], outputs=[outputOne])
177
 
178
  interface.launch(debug=True)
179
+