Texttra committed on
Commit
bf7ff83
·
verified ·
1 Parent(s): 4ea4b2c

Update handler.py

Browse files
Files changed (1) hide show
  1. handler.py +29 -6
handler.py CHANGED
@@ -11,7 +11,7 @@ class EndpointHandler:
11
  self.pipe = DiffusionPipeline.from_pretrained(
12
  "black-forest-labs/FLUX.1-dev",
13
  torch_dtype=torch.float16,
14
- use_auth_token=True # Required for gated base model
15
  )
16
 
17
  print("Loading LoRA weights from: Texttra/Cityscape_Studio")
@@ -33,9 +33,32 @@ class EndpointHandler:
33
  def __call__(self, data: Dict) -> Dict:
34
  print("Received data:", data)
35
 
36
- inputs = data.get("inputs", {})
37
- prompt = inputs.get("prompt", "")
38
- print("Extracted prompt:", prompt)
 
 
 
 
39
 
40
- if not prompt:
41
- return {"error": "No prompt provided"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  self.pipe = DiffusionPipeline.from_pretrained(
12
  "black-forest-labs/FLUX.1-dev",
13
  torch_dtype=torch.float16,
14
+ use_auth_token=True
15
  )
16
 
17
  print("Loading LoRA weights from: Texttra/Cityscape_Studio")
 
33
  def __call__(self, data: Dict) -> Dict:
34
  print("Received data:", data)
35
 
36
+ try:
37
+ inputs = data.get("inputs", {})
38
+ if isinstance(inputs, str):
39
+ # In case the input comes in raw string form (e.g., Postman tests)
40
+ prompt = inputs
41
+ else:
42
+ prompt = inputs.get("prompt", "")
43
 
44
+ print("Extracted prompt:", prompt)
45
+
46
+ if not prompt:
47
+ return {"error": "No prompt provided"}
48
+
49
+ conditioning = self.compel(prompt)
50
+ print("Conditioning complete.")
51
+
52
+ image = self.pipe(prompt_embeds=conditioning).images[0]
53
+ print("Image generated.")
54
+
55
+ buffer = BytesIO()
56
+ image.save(buffer, format="PNG")
57
+ base64_image = base64.b64encode(buffer.getvalue()).decode("utf-8")
58
+
59
+ print("Returning image.")
60
+ return {"image": base64_image}
61
+
62
+ except Exception as e:
63
+ print(f"Error occurred: {str(e)}")
64
+ return {"error": str(e)}