English
refoundd committed on
Commit
58abc3c
·
verified ·
1 Parent(s): 69206d5

Update handler.py

Browse files
Files changed (1) hide show
  1. handler.py +4 -5
handler.py CHANGED
@@ -15,7 +15,7 @@ torch.set_float32_matmul_precision("high")
15
 
16
  import torch._dynamo
17
  torch._dynamo.config.suppress_errors = False # for debugging
18
-
19
  class EndpointHandler:
20
  def __init__(self, path=""):
21
  self.pipe = FluxPipeline.from_pretrained(
@@ -47,7 +47,6 @@ class EndpointHandler:
47
  end_time = time.time()
48
  time_taken = end_time - start_time
49
  print(f"Time taken: {time_taken:.2f} seconds")
50
- self.record=0
51
 
52
  def __call__(self, data: Dict[str, Any]) -> Union[Image.Image, None]:
53
  try:
@@ -63,7 +62,7 @@ class EndpointHandler:
63
  " prompt to use for the image generation, and it needs to be a non-empty string."
64
  )
65
  if prompt=="get_queue":
66
- return self.record
67
  parameters = data.pop("parameters", {})
68
 
69
  num_inference_steps = parameters.get("num_inference_steps", 28)
@@ -75,7 +74,7 @@ class EndpointHandler:
75
  # seed generator (seed cannot be provided as is but via a generator)
76
  seed = parameters.get("seed", 0)
77
  generator = torch.manual_seed(seed)
78
- self.record+=1
79
  start_time = time.time()
80
  result = self.pipe( # type: ignore
81
  prompt,
@@ -88,7 +87,7 @@ class EndpointHandler:
88
  end_time = time.time()
89
  time_taken = end_time - start_time
90
  print(f"Time taken: {time_taken:.2f} seconds")
91
- self.record-=1
92
 
93
  return result
94
  except Exception as e:
 
15
 
16
  import torch._dynamo
17
  torch._dynamo.config.suppress_errors = False # for debugging
18
+ record=0
19
  class EndpointHandler:
20
  def __init__(self, path=""):
21
  self.pipe = FluxPipeline.from_pretrained(
 
47
  end_time = time.time()
48
  time_taken = end_time - start_time
49
  print(f"Time taken: {time_taken:.2f} seconds")
 
50
 
51
  def __call__(self, data: Dict[str, Any]) -> Union[Image.Image, None]:
52
  try:
 
62
  " prompt to use for the image generation, and it needs to be a non-empty string."
63
  )
64
  if prompt=="get_queue":
65
+ return record
66
  parameters = data.pop("parameters", {})
67
 
68
  num_inference_steps = parameters.get("num_inference_steps", 28)
 
74
  # seed generator (seed cannot be provided as is but via a generator)
75
  seed = parameters.get("seed", 0)
76
  generator = torch.manual_seed(seed)
77
+ record+=1
78
  start_time = time.time()
79
  result = self.pipe( # type: ignore
80
  prompt,
 
87
  end_time = time.time()
88
  time_taken = end_time - start_time
89
  print(f"Time taken: {time_taken:.2f} seconds")
90
+ record-=1
91
 
92
  return result
93
  except Exception as e: