Vector857 committed on
Commit
74d5b79
Β·
verified Β·
1 Parent(s): 75891ec

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -15
app.py CHANGED
@@ -276,15 +276,12 @@ def generate(
276
  oauth_token: gr.OAuthToken | None = None,
277
  progress=gr.Progress(track_tqdm=True),
278
  ):
279
- # --- INTHA VARIGALAI MATTUM PUTHUSA SERTHIDUNGA ---
280
  try:
281
- # Palaiya lora weight-ai remove panna
282
  pipe.unload_lora_weights()
283
- # GPU memory-ai free panna
284
  torch.cuda.empty_cache()
285
  except Exception as e:
286
  print(f"Unload error (ithu paravaillai): {e}")
287
- # ---------------------------------------------
288
  if oauth_token is None:
289
  raise gr.Error("Please sign in with your HuggingFace account to generate images.")
290
  if selected_index is None:
@@ -294,10 +291,8 @@ def generate(
294
  trigger = lora["trigger_word"]
295
  prompt_in = f"{trigger} {prompt}" if trigger else prompt
296
 
297
- # ZeroGPU: LoRA loading must happen inside GPU scope
298
  pipe.to("cuda")
299
- pipe.unload_lora_weights()
300
-
301
  if speed_mode == "Fast Β· 8 steps":
302
  pipe.load_lora_weights(LIGHTNING_REPO, weight_name=LIGHTNING_WEIGHT, adapter_name="lightning")
303
  pipe.load_lora_weights(lora["repo"], weight_name=lora.get("weights"), low_cpu_mem_usage=True, adapter_name="style")
@@ -309,21 +304,31 @@ def generate(
309
  if randomize_seed:
310
  seed = random.randint(0, MAX_SEED)
311
 
312
- w, h = aspect_to_wh(aspect)
313
-
314
  generator = torch.Generator(device="cuda").manual_seed(seed)
 
315
  image = pipe(
316
  prompt=prompt_in,
317
  negative_prompt=neg_prompt,
318
  num_inference_steps=steps,
319
- true_cfg_scale=cfg,
320
- width=w,
321
  height=h,
322
  generator=generator,
323
  ).images[0]
324
-
325
  pipe.to("cpu")
326
  torch.cuda.empty_cache()
 
 
 
 
 
 
 
 
 
 
 
327
 
328
  return image, seed, meta_html, history_html, history_state
329
 
@@ -522,14 +527,13 @@ with gr.Blocks(title="D-REX Studio") as app:
522
  selected_index = gr.State(value=None)
523
  history_state = gr.State(value=[])
524
 
525
- # ── HEADER ──
526
  # ── HEADER ──
527
  gr.HTML("""<div id="drex-header"><div class="drex-logo"><div class="drex-mark"><svg viewBox="0 0 16 16"><path d="M8 0L1 4v8l7 4 7-4V4L8 0zm0 2.4L13 5.5 8 8.6 3 5.5 8 2.4z M2.5 6.8l4.7 2.7v5.1l-4.7-2.7V6.8zm6.3 7.8V9.5l4.7-2.7v5.1l-4.7 2.7z"/></svg></div><div><h1>D-REX</h1><div class="drex-sub">LoRA Studio - Qwen-Image</div></div></div></div>""")
528
 
529
- # ── LOGIN ──
530
  with gr.Row():
531
  gr.LoginButton(scale=0)
532
-
533
  # ── THEME BAR ──
534
  with gr.Row():
535
  theme_selector = gr.HTML("""
 
276
  oauth_token: gr.OAuthToken | None = None,
277
  progress=gr.Progress(track_tqdm=True),
278
  ):
 
279
  try:
 
280
  pipe.unload_lora_weights()
 
281
  torch.cuda.empty_cache()
282
  except Exception as e:
283
  print(f"Unload error (ithu paravaillai): {e}")
284
+
285
  if oauth_token is None:
286
  raise gr.Error("Please sign in with your HuggingFace account to generate images.")
287
  if selected_index is None:
 
291
  trigger = lora["trigger_word"]
292
  prompt_in = f"{trigger} {prompt}" if trigger else prompt
293
 
 
294
  pipe.to("cuda")
295
+
 
296
  if speed_mode == "Fast Β· 8 steps":
297
  pipe.load_lora_weights(LIGHTNING_REPO, weight_name=LIGHTNING_WEIGHT, adapter_name="lightning")
298
  pipe.load_lora_weights(lora["repo"], weight_name=lora.get("weights"), low_cpu_mem_usage=True, adapter_name="style")
 
304
  if randomize_seed:
305
  seed = random.randint(0, MAX_SEED)
306
 
307
+ w, h = aspect_to_wh(aspect_ratio)
 
308
  generator = torch.Generator(device="cuda").manual_seed(seed)
309
+
310
  image = pipe(
311
  prompt=prompt_in,
312
  negative_prompt=neg_prompt,
313
  num_inference_steps=steps,
314
+ guidance_scale=cfg_scale,
 
315
  height=h,
316
  generator=generator,
317
  ).images[0]
318
+
319
  pipe.to("cpu")
320
  torch.cuda.empty_cache()
321
+
322
+ new_meta = build_metadata_html(lora["title"], seed, steps, cfg_scale, aspect_ratio)
323
+
324
+ import datetime
325
+ history_entry = {
326
+ "prompt": prompt,
327
+ "model": lora["title"],
328
+ "time": datetime.datetime.now().strftime("%H:%M")
329
+ }
330
+ history_state.append(history_entry)
331
+ new_history_html = format_history_html(history_state)
332
 
333
  return image, seed, meta_html, history_html, history_state
334
 
 
527
  selected_index = gr.State(value=None)
528
  history_state = gr.State(value=[])
529
 
 
530
  # ── HEADER ──
531
  gr.HTML("""<div id="drex-header"><div class="drex-logo"><div class="drex-mark"><svg viewBox="0 0 16 16"><path d="M8 0L1 4v8l7 4 7-4V4L8 0zm0 2.4L13 5.5 8 8.6 3 5.5 8 2.4z M2.5 6.8l4.7 2.7v5.1l-4.7-2.7V6.8zm6.3 7.8V9.5l4.7-2.7v5.1l-4.7 2.7z"/></svg></div><div><h1>D-REX</h1><div class="drex-sub">LoRA Studio - Qwen-Image</div></div></div></div>""")
532
 
533
+ # ── THEME BAR & LOGIN ──
534
  with gr.Row():
535
  gr.LoginButton(scale=0)
536
+
537
  # ── THEME BAR ──
538
  with gr.Row():
539
  theme_selector = gr.HTML("""