tomiconic committed on
Commit
e5a5926
·
verified ·
1 Parent(s): 47e5103

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -7
app.py CHANGED
@@ -24,10 +24,12 @@ pipe.scheduler = DPMSolverMultistepScheduler.from_config(
24
  use_karras_sigmas=True,
25
  )
26
  pipe.enable_attention_slicing()
 
 
27
  print("Pipeline ready.")
28
 
29
  # ── Generation ────────────────────────────────────────────────────────────────
30
- @spaces.GPU(duration=120)
31
  def generate(prompt, negative_prompt, width, height, steps, guidance, seed, randomize):
32
  if not prompt.strip():
33
  raise gr.Error("Please enter a prompt.")
@@ -35,7 +37,9 @@ def generate(prompt, negative_prompt, width, height, steps, guidance, seed, rand
35
  seed = random.randint(0, 2**32 - 1)
36
  seed = int(seed)
37
 
38
- # CPU generator works correctly with ZeroGPU — do not hardcode "cuda"
 
 
39
  generator = torch.Generator(device="cpu").manual_seed(seed)
40
 
41
  result = pipe(
@@ -48,6 +52,10 @@ def generate(prompt, negative_prompt, width, height, steps, guidance, seed, rand
48
  generator=generator,
49
  clip_skip=2,
50
  )
 
 
 
 
51
  return result.images[0], seed
52
 
53
  # ── CSS ───────────────────────────────────────────────────────────────────────
@@ -206,11 +214,6 @@ input[type=checkbox] {
206
  width: 18px !important;
207
  height: 18px !important;
208
  }
209
- .gradio-checkbox label span {
210
- color: #9966cc !important;
211
- font-size: 0.82em !important;
212
- font-weight: 600 !important;
213
- }
214
 
215
  label span {
216
  color: #8855bb !important;
 
24
  use_karras_sigmas=True,
25
  )
26
  pipe.enable_attention_slicing()
27
+
28
+ # Keep on CPU at startup — ZeroGPU allocates GPU per request
29
  print("Pipeline ready.")
30
 
31
  # ── Generation ────────────────────────────────────────────────────────────────
32
+ @spaces.GPU(duration=180)
33
  def generate(prompt, negative_prompt, width, height, steps, guidance, seed, randomize):
34
  if not prompt.strip():
35
  raise gr.Error("Please enter a prompt.")
 
37
  seed = random.randint(0, 2**32 - 1)
38
  seed = int(seed)
39
 
40
+ # Move to GPU for this request
41
+ pipe.to("cuda")
42
+
43
  generator = torch.Generator(device="cpu").manual_seed(seed)
44
 
45
  result = pipe(
 
52
  generator=generator,
53
  clip_skip=2,
54
  )
55
+
56
+ # Move back to CPU to free GPU for other users
57
+ pipe.to("cpu")
58
+
59
  return result.images[0], seed
60
 
61
  # ── CSS ───────────────────────────────────────────────────────────────────────
 
214
  width: 18px !important;
215
  height: 18px !important;
216
  }
 
 
 
 
 
217
 
218
  label span {
219
  color: #8855bb !important;