bariscal committed on
Commit
ae2a1df
·
1 Parent(s): c60213b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -11
app.py CHANGED
@@ -4,22 +4,20 @@ from diffusers import StableDiffusionPipeline
4
 
5
 
6
  #gr.Interface.load("models/bariscal/cbst_style")
7
- pipe = StableDiffusionPipeline.from_pretrained("bariscal/cbst_style", safety_checker=None) #, torch_dtype=torch.float16
8
 
9
 
 
 
10
  def inference(prompt, negative_prompt, num_samples, height=512, width=512, num_inference_steps=50, guidance_scale=7.5):
11
- # Remove the torch.autocast("cuda") context manager
12
  with torch.inference_mode():
13
  return pipe(
14
- prompt, height=int(height), width=int(width),
15
- negative_prompt=negative_prompt,
16
- num_images_per_prompt=int(num_samples),
17
- num_inference_steps=int(num_inference_steps), guidance_scale=guidance_scale,
18
- # Use the CPU for inference
19
- generator="cpu"
20
- ).images
21
-
22
-
23
 
24
  with gr.Blocks() as demo:
25
  with gr.Row():
 
4
 
5
 
6
  #gr.Interface.load("models/bariscal/cbst_style")
7
+ #pipe = StableDiffusionPipeline.from_pretrained("bariscal/cbst_style", safety_checker=None) #, torch_dtype=torch.float16
8
 
9
 
10
+ pipe = StableDiffusionPipeline.from_pretrained("bariscal/cbst_style", safety_checker=None)
11
+
12
def inference(prompt, negative_prompt, num_samples, height=512, width=512, num_inference_steps=50, guidance_scale=7.5):
    """Generate images with the module-level Stable Diffusion pipeline.

    Args:
        prompt: Text prompt guiding generation.
        negative_prompt: Text describing content to steer away from.
        num_samples: Number of images to generate for the prompt.
        height: Output image height in pixels (default 512).
        width: Output image width in pixels (default 512).
        num_inference_steps: Number of denoising steps.
        guidance_scale: Classifier-free guidance strength.

    Returns:
        The list of generated images (``pipe(...).images``).
    """
    # inference_mode disables autograd bookkeeping for faster, lighter inference.
    with torch.inference_mode():
        return pipe(
            prompt, height=int(height), width=int(width),
            negative_prompt=negative_prompt,
            num_images_per_prompt=int(num_samples),
            num_inference_steps=int(num_inference_steps),
            guidance_scale=guidance_scale,
            # BUG FIX: `generator` must be a torch.Generator (or None), not the
            # string "cpu" — diffusers calls generator methods on this object,
            # so passing a str raises at call time. A CPU-device generator
            # preserves the original intent ("use the CPU for inference" RNG).
            generator=torch.Generator(device="cpu"),
        ).images
 
 
 
21
 
22
  with gr.Blocks() as demo:
23
  with gr.Row():