Valtry committed on
Commit
ca48ebc
·
verified ·
1 Parent(s): b2cfdeb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -9
app.py CHANGED
@@ -2,24 +2,27 @@ from fastapi import FastAPI
2
  from pydantic import BaseModel
3
  from diffusers import StableDiffusionPipeline
4
  import torch
 
 
5
  import base64
6
  from io import BytesIO
7
- from PIL import Image
8
-
9
- # Load the pipeline
10
- pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5", torch_dtype=torch.float32)
11
- pipe = pipe.to("cpu")
12
 
13
  app = FastAPI()
14
 
 
 
 
 
15
class PromptRequest(BaseModel):
    """JSON request body accepted by the image-generation endpoint."""

    # Free-form text prompt passed straight to the diffusion pipeline.
    prompt: str
18
@app.post("/generate")
def generate_image(req: PromptRequest):
    """Run the Stable Diffusion pipeline on the prompt and return the PNG as base64.

    Returns a JSON object of the form ``{"image_base64": <str>}``.
    """
    # pipe(...) returns a pipeline output whose first image is a PIL image.
    image = pipe(req.prompt).images[0]

    # Serialize the PIL image to PNG bytes, then base64-encode for JSON transport.
    png_buffer = BytesIO()
    image.save(png_buffer, format="PNG")
    encoded = base64.b64encode(png_buffer.getvalue()).decode("utf-8")
    return {"image_base64": encoded}
 
2
  from pydantic import BaseModel
3
  from diffusers import StableDiffusionPipeline
4
  import torch
5
+ import uuid
6
+ from PIL import Image
7
  import base64
8
  from io import BytesIO
 
 
 
 
 
9
 
10
app = FastAPI()

# Load the model once at import time so every request reuses the same pipeline.
# float32 weights are required for CPU inference (half precision is GPU-only).
pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float32
)
pipe = pipe.to("cpu")  # or "cuda" if using GPU
16
class PromptRequest(BaseModel):
    """Pydantic schema for POST bodies sent to the generation route."""

    # The text prompt the Stable Diffusion pipeline will render.
    prompt: str
 
19
@app.post("/generate-image")
def generate_image(request: PromptRequest):
    """Generate an image for ``request.prompt`` and return it base64-encoded.

    Response shape: ``{"image_base64": <base64 PNG string>}``.
    """
    generated = pipe(request.prompt).images[0]

    # Convert to base64
    buffer = BytesIO()
    generated.save(buffer, format="PNG")
    payload = base64.b64encode(buffer.getvalue()).decode("utf-8")

    return {"image_base64": payload}