alperall committed on
Commit
44262b7
·
verified ·
1 Parent(s): 00f8fb5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -15
app.py CHANGED
@@ -9,27 +9,27 @@ from diffusers import DDIMScheduler, EulerAncestralDiscreteScheduler
9
  import cv2
10
  import torch
11
 
12
- device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
13
 
 
14
  pipe = StableDiffusionXLPipeline.from_pretrained(
15
  "votepurchase/animagine-xl-4.0",
16
- torch_dtype=torch.float16,
17
  )
18
 
19
  pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
20
- pipe.to(device)
21
 
22
  MAX_SEED = np.iinfo(np.int32).max
23
  MAX_IMAGE_SIZE = 1216
24
 
25
-
26
  @spaces.GPU
27
  def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
28
-
29
  if randomize_seed:
30
  seed = random.randint(0, MAX_SEED)
31
 
32
- generator = torch.Generator().manual_seed(seed)
33
 
34
  output_image = pipe(
35
  prompt=prompt,
@@ -43,7 +43,6 @@ def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance
43
 
44
  return output_image
45
 
46
-
47
  css = """
48
  #col-container {
49
  margin: 0 auto;
@@ -52,9 +51,7 @@ css = """
52
  """
53
 
54
  with gr.Blocks(css=css) as demo:
55
-
56
  with gr.Column(elem_id="col-container"):
57
-
58
  with gr.Row():
59
  prompt = gr.Text(
60
  label="Prompt",
@@ -63,13 +60,11 @@ with gr.Blocks(css=css) as demo:
63
  placeholder="Enter your prompt",
64
  container=False,
65
  )
66
-
67
  run_button = gr.Button("Run", scale=0)
68
 
69
  result = gr.Image(label="Result", show_label=False)
70
-
71
- with gr.Accordion("Advanced Settings", open=False):
72
 
 
73
  negative_prompt = gr.Text(
74
  label="Negative prompt",
75
  max_lines=1,
@@ -93,7 +88,7 @@ with gr.Blocks(css=css) as demo:
93
  minimum=256,
94
  maximum=MAX_IMAGE_SIZE,
95
  step=32,
96
- value=1024,#832,
97
  )
98
 
99
  height = gr.Slider(
@@ -101,7 +96,7 @@ with gr.Blocks(css=css) as demo:
101
  minimum=256,
102
  maximum=MAX_IMAGE_SIZE,
103
  step=32,
104
- value=1024,#1216,
105
  )
106
 
107
  with gr.Row():
@@ -121,7 +116,7 @@ with gr.Blocks(css=css) as demo:
121
  value=28,
122
  )
123
 
124
- run_button.click(#lambda x: None, inputs=None, outputs=result).then(
125
  fn=infer,
126
  inputs=[prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
127
  outputs=[result]
 
9
  import cv2
10
  import torch
11
 
12
+ # Force CPU usage
13
+ device = torch.device("cpu")
14
 
15
+ # Load the model with float32 for CPU compatibility
16
  pipe = StableDiffusionXLPipeline.from_pretrained(
17
  "votepurchase/animagine-xl-4.0",
18
+ torch_dtype=torch.float32, # Use float32 for CPU
19
  )
20
 
21
  pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
22
+ pipe.to(device) # Move the model to CPU
23
 
24
  MAX_SEED = np.iinfo(np.int32).max
25
  MAX_IMAGE_SIZE = 1216
26
 
 
27
  @spaces.GPU
28
  def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
 
29
  if randomize_seed:
30
  seed = random.randint(0, MAX_SEED)
31
 
32
+ generator = torch.Generator(device=device).manual_seed(seed) # Use CPU generator
33
 
34
  output_image = pipe(
35
  prompt=prompt,
 
43
 
44
  return output_image
45
 
 
46
  css = """
47
  #col-container {
48
  margin: 0 auto;
 
51
  """
52
 
53
  with gr.Blocks(css=css) as demo:
 
54
  with gr.Column(elem_id="col-container"):
 
55
  with gr.Row():
56
  prompt = gr.Text(
57
  label="Prompt",
 
60
  placeholder="Enter your prompt",
61
  container=False,
62
  )
 
63
  run_button = gr.Button("Run", scale=0)
64
 
65
  result = gr.Image(label="Result", show_label=False)
 
 
66
 
67
+ with gr.Accordion("Advanced Settings", open=False):
68
  negative_prompt = gr.Text(
69
  label="Negative prompt",
70
  max_lines=1,
 
88
  minimum=256,
89
  maximum=MAX_IMAGE_SIZE,
90
  step=32,
91
+ value=1024, # Default width
92
  )
93
 
94
  height = gr.Slider(
 
96
  minimum=256,
97
  maximum=MAX_IMAGE_SIZE,
98
  step=32,
99
+ value=1024, # Default height
100
  )
101
 
102
  with gr.Row():
 
116
  value=28,
117
  )
118
 
119
+ run_button.click(
120
  fn=infer,
121
  inputs=[prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
122
  outputs=[result]