concauu committed on
Commit
56bce7c
·
verified ·
1 Parent(s): fab0540

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -24
app.py CHANGED
@@ -55,9 +55,11 @@ pipe.flux_pipe_call_that_returns_an_iterable_of_images = flux_pipe_call_that_ret
55
 
56
  # History functions
57
  def append_to_history(image, prompt, seed, width, height, guidance_scale, steps, history):
 
58
  if image is None:
59
  return history
60
 
 
61
  buffered = BytesIO()
62
  image.save(buffered, format="PNG")
63
  img_bytes = buffered.getvalue()
@@ -93,21 +95,29 @@ def create_history_html(history):
93
 
94
 
95
  @spaces.GPU(duration=75)
96
- def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=3.5, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
97
  if randomize_seed:
98
  seed = random.randint(0, MAX_SEED)
99
  generator = torch.Generator().manual_seed(seed)
 
 
 
100
  for img in pipe.flux_pipe_call_that_returns_an_iterable_of_images(
101
- prompt=prompt,
102
- guidance_scale=guidance_scale,
103
- num_inference_steps=num_inference_steps,
104
- width=width,
105
- height=height,
106
- generator=generator,
107
- output_type="pil",
108
- good_vae=good_vae,
109
- ):
110
- yield img, seed
 
 
 
 
 
111
 
112
  def enhance_prompt(user_prompt):
113
  """Enhances the given prompt using Groq and returns the refined prompt."""
@@ -215,19 +225,20 @@ with gr.Blocks(css=css) as demo:
215
 
216
  # Event handling
217
  generation_event = run_button.click(
218
- fn=infer,
219
- inputs=[enhanced_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
220
- outputs=[result, seed] # Image is OUTPUT here
221
- ).then(
222
- fn=append_to_history,
223
- inputs=[result, enhanced_prompt, seed, width, height, guidance_scale, num_inference_steps, history_state], # Image becomes INPUT here
224
- outputs=history_state
225
- ).then(
226
- fn=create_history_html,
227
- inputs=history_state,
228
- outputs=history_display
229
- )
230
-
 
231
  enhanced_prompt.submit(
232
  fn=infer,
233
  inputs=[enhanced_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
 
55
 
56
  # History functions
57
  def append_to_history(image, prompt, seed, width, height, guidance_scale, steps, history):
58
+ """Store only the final generated image"""
59
  if image is None:
60
  return history
61
 
62
+ # Convert final image to bytes
63
  buffered = BytesIO()
64
  image.save(buffered, format="PNG")
65
  img_bytes = buffered.getvalue()
 
95
 
96
 
97
@spaces.GPU(duration=75)
def infer(prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
    """Run the FLUX pipeline and stream results to the UI.

    Yields (image, seed) once per intermediate denoising preview, then
    re-yields the final frame one extra time so the last value delivered
    to the `result` component is guaranteed to be the finished image
    (the `.then(append_to_history, ...)` chain reads it from there).

    Args:
        prompt: Text prompt for generation.
        seed: RNG seed; replaced by a random one when randomize_seed is set.
        randomize_seed: When truthy, draw a fresh seed in [0, MAX_SEED].
        width, height: Output dimensions in pixels.
        guidance_scale: Classifier-free guidance strength.
        num_inference_steps: Number of denoising steps.
        progress: Gradio progress tracker (mirrors tqdm output).
    """
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    rng = torch.Generator().manual_seed(seed)

    # The loop variable doubles as the "latest frame" tracker; it stays
    # None if the pipeline yields nothing at all.
    latest = None
    image_stream = pipe.flux_pipe_call_that_returns_an_iterable_of_images(
        prompt=prompt,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        generator=rng,
        output_type="pil",
        good_vae=good_vae,
    )
    for latest in image_stream:
        yield latest, seed  # live preview update

    # Emit the final image once more so downstream .then() handlers see it.
    yield latest, seed
121
 
122
  def enhance_prompt(user_prompt):
123
  """Enhances the given prompt using Groq and returns the refined prompt."""
 
225
 
226
  # Event handling
227
  generation_event = run_button.click(
228
+ fn=infer,
229
+ inputs=[enhanced_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
230
+ outputs=[result, seed]
231
+ )
232
+ # This will execute AFTER the generator completes
233
+ generation_event.then(
234
+ fn=append_to_history,
235
+ inputs=[result, enhanced_prompt, seed, width, height, guidance_scale, num_inference_steps, history_state],
236
+ outputs=history_state
237
+ ).then(
238
+ fn=create_history_html,
239
+ inputs=history_state,
240
+ outputs=history_display
241
+ )
242
  enhanced_prompt.submit(
243
  fn=infer,
244
  inputs=[enhanced_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],