feylur committed on
Commit
3a5360b
·
verified ·
1 Parent(s): b9f9bb9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -2
app.py CHANGED
@@ -65,11 +65,18 @@ def generate_tryon(person_img, cloth_img):
65
  raise gr.Error("Both images required!")
66
 
67
  try:
68
- # person_img and cloth_img are already PIL Images, no conversion needed
69
  print("Images received as PIL", file=sys.stderr)
70
 
71
  load_models()
72
 
 
 
 
 
 
 
 
 
73
  target_height = 1024
74
  target_width = 768
75
  person_img = resize_and_crop(person_img, (target_width, target_height))
@@ -77,6 +84,8 @@ def generate_tryon(person_img, cloth_img):
77
 
78
  mask = automasker(person_img, "upper")['mask']
79
  gc.collect()
 
 
80
 
81
  result = pipeline(
82
  image=person_img,
@@ -84,7 +93,7 @@ def generate_tryon(person_img, cloth_img):
84
  mask=mask,
85
  num_inference_steps=50,
86
  guidance_scale=2.5,
87
- seed=42,
88
  height=target_height,
89
  width=target_width
90
  )[0]
 
65
  raise gr.Error("Both images required!")
66
 
67
  try:
 
68
  print("Images received as PIL", file=sys.stderr)
69
 
70
  load_models()
71
 
72
+ # CRITICAL: Clear cache and create fresh copies
73
+ gc.collect()
74
+ if torch.cuda.is_available():
75
+ torch.cuda.empty_cache()
76
+
77
+ person_img = person_img.copy()
78
+ cloth_img = cloth_img.copy()
79
+
80
  target_height = 1024
81
  target_width = 768
82
  person_img = resize_and_crop(person_img, (target_width, target_height))
 
84
 
85
  mask = automasker(person_img, "upper")['mask']
86
  gc.collect()
87
+ if torch.cuda.is_available():
88
+ torch.cuda.empty_cache()
89
 
90
  result = pipeline(
91
  image=person_img,
 
93
  mask=mask,
94
  num_inference_steps=50,
95
  guidance_scale=2.5,
96
+ seed=None, # CHANGED: Random seed for different results
97
  height=target_height,
98
  width=target_width
99
  )[0]