LogicGoInfotechSpaces committed on
Commit
b0b7636
·
verified ·
1 Parent(s): f80c88a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -21
app.py CHANGED
@@ -4,7 +4,7 @@ import traceback
4
  from datetime import datetime
5
  from typing import Optional
6
 
7
- from fastapi import FastAPI, File, UploadFile, Form, HTTPException, Header, Depends
8
  from fastapi.responses import StreamingResponse, JSONResponse
9
  from fastapi.middleware.cors import CORSMiddleware
10
  from pydantic import BaseModel
@@ -47,19 +47,6 @@ app.add_middleware(
47
  allow_headers=["*"],
48
  )
49
 
50
- # ---------------------------------------------------------------------
51
- # Authentication
52
- # ---------------------------------------------------------------------
53
- BEARER_TOKEN = "logicgo@123"
54
-
55
- def verify_token(authorization: str = Header(None)):
56
- if not authorization or not authorization.startswith("Bearer "):
57
- raise HTTPException(status_code=401, detail="Missing or invalid Authorization header")
58
- token = authorization.split(" ")[1]
59
- if token != BEARER_TOKEN:
60
- raise HTTPException(status_code=403, detail="Invalid bearer token")
61
- return True
62
-
63
  # ---------------------------------------------------------------------
64
  # Models
65
  # ---------------------------------------------------------------------
@@ -79,12 +66,12 @@ def health():
79
  except Exception as e:
80
  raise HTTPException(status_code=500, detail=f"DB ping failed: {e}")
81
 
 
82
  @app.post("/generate")
83
  async def generate(
84
  prompt: str = Form(...),
85
  image1: UploadFile = File(...),
86
  image2: Optional[UploadFile] = File(None),
87
- authorized: bool = Depends(verify_token)
88
  ):
89
  """Upload 1 or 2 images + prompt and get edited image via HF Inference"""
90
  # Read images
@@ -119,18 +106,13 @@ async def generate(
119
  if pil_img2:
120
  images_to_pass.append(pil_img2)
121
 
122
- # The "Qwen/Qwen-Image-Edit" expects a list of PIL images and a prompt
123
  pil_output = hf_client.image_to_image(
124
  images=images_to_pass,
125
  prompt=prompt,
126
  model="Qwen/Qwen-Image-Edit"
127
  )
128
 
129
- # Handle list return
130
- if isinstance(pil_output, list):
131
- output_image = pil_output[0]
132
- else:
133
- output_image = pil_output
134
 
135
  out_buf = io.BytesIO()
136
  output_image.save(out_buf, format="PNG")
@@ -186,6 +168,7 @@ def get_image(image_id: str, download: Optional[bool] = False):
186
 
187
  return StreamingResponse(iterfile(), media_type=grid_out.content_type or "application/octet-stream", headers=headers)
188
 
 
189
  # ---------------------------------------------------------------------
190
  # Run locally
191
  # ---------------------------------------------------------------------
 
4
  from datetime import datetime
5
  from typing import Optional
6
 
7
+ from fastapi import FastAPI, File, UploadFile, Form, HTTPException
8
  from fastapi.responses import StreamingResponse, JSONResponse
9
  from fastapi.middleware.cors import CORSMiddleware
10
  from pydantic import BaseModel
 
47
  allow_headers=["*"],
48
  )
49
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
  # ---------------------------------------------------------------------
51
  # Models
52
  # ---------------------------------------------------------------------
 
66
  except Exception as e:
67
  raise HTTPException(status_code=500, detail=f"DB ping failed: {e}")
68
 
69
+
70
  @app.post("/generate")
71
  async def generate(
72
  prompt: str = Form(...),
73
  image1: UploadFile = File(...),
74
  image2: Optional[UploadFile] = File(None),
 
75
  ):
76
  """Upload 1 or 2 images + prompt and get edited image via HF Inference"""
77
  # Read images
 
106
  if pil_img2:
107
  images_to_pass.append(pil_img2)
108
 
 
109
  pil_output = hf_client.image_to_image(
110
  images=images_to_pass,
111
  prompt=prompt,
112
  model="Qwen/Qwen-Image-Edit"
113
  )
114
 
115
+ output_image = pil_output[0] if isinstance(pil_output, list) else pil_output
 
 
 
 
116
 
117
  out_buf = io.BytesIO()
118
  output_image.save(out_buf, format="PNG")
 
168
 
169
  return StreamingResponse(iterfile(), media_type=grid_out.content_type or "application/octet-stream", headers=headers)
170
 
171
+
172
  # ---------------------------------------------------------------------
173
  # Run locally
174
  # ---------------------------------------------------------------------