John6666 committed on
Commit
cb87640
·
verified ·
1 Parent(s): 36c414c

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -28
app.py CHANGED
@@ -1887,37 +1887,32 @@ with gr.Blocks(theme=args.theme, elem_id="main", fill_width=True, fill_height=Fa
1887
  copy_prompt_btn_pony.click(gradio_copy_prompt, inputs=[output_text_pony], outputs=[prompt_gui], show_api=False)
1888
 
1889
  # Stable programmatic endpoint
1890
from typing import Any, Dict, Generator, List, Union

Payload = Union[List[Any], Dict[str, Any]]

def generate_minimal(payload: Payload) -> Generator[Any, None, None]:
    """One-call wrapper around the streaming generation pipeline.

    Accepts EITHER:
      - dict: {"args": [...full positional array...], "model_name": ...,
               "vae_model": ..., "task": ..., "controlnet_model": ...}
      - list: [...full positional array...] (legacy, forwarded as-is)

    When every loader field is present in the dict form, the model is
    preloaded before generation starts. Delegates to the existing
    streaming generator, preserving its SSE behavior.
    """
    if not isinstance(payload, dict):
        # Legacy shape: the payload already is the full positional array.
        yield from sd_gen_generate_pipeline(*payload)
        return
    call_args = payload.get("args") or payload.get("data") or []
    loader_fields = (
        payload.get("model_name"),
        payload.get("vae_model"),
        payload.get("task"),
        payload.get("controlnet_model"),
    )
    # Optional preload, only when all four loader fields were supplied.
    if all(field is not None for field in loader_fields):
        # Drain the loader generator so the model is ready before generating.
        # (Loader status could be streamed here as e.g. {"status": ...}.)
        for _ in sd_gen.load_new_model(*loader_fields):
            pass
    yield from sd_gen_generate_pipeline(*call_args)
1915
 
1916
# Expose generate_minimal as a queued programmatic endpoint.
gr.api(
    generate_minimal,
    queue=True,
    api_name="generate",
    api_description="Accepts a list (positional) or a dict with {'args': [...]} + optional loader fields.",
)

gr.LoginButton()
 
1887
  copy_prompt_btn_pony.click(gradio_copy_prompt, inputs=[output_text_pony], outputs=[prompt_gui], show_api=False)
1888
 
1889
  # Stable programmatic endpoint
1890
# --- minimal sync wrapper + stable API ---
from typing import Any, List, Optional, Tuple

def generate_minimal(
    args: List[Any],
    model_name: str,
    vae_model: str,
    task: str,
    controlnet_model: str
) -> Tuple[str, Optional[List[str]], Optional[str]]:
    """Synchronous one-shot endpoint: preload a model, run the streaming
    generator to completion, and return only the final triple.

    NOTE(review): assumes sd_gen_generate_pipeline yields 3-tuples of
    (status, images, info) — confirm against its definition elsewhere
    in this file.
    """
    # Exhaust the loader generator so the requested model is fully
    # loaded before generation begins.
    loader = sd_gen.load_new_model(model_name, vae_model, task, controlnet_model)
    for _ in loader:
        pass
    # Collapse the streaming pipeline down to its last yielded triple.
    status, images, info = "START", None, None
    for step in sd_gen_generate_pipeline(*args):
        status, images, info = step
    # Fall back to "COMPLETE" if the final status was empty/falsy.
    return status or "COMPLETE", images, info
 
 
 
 
 
 
 
1908
 
1909
# Register the stable programmatic endpoint on the app's API surface.
_api_options = dict(
    api_name="generate_image",  # => POST /gradio_api/call/generate_image
    api_description="Preload model then generate with full positional args. Returns final (status, images, info).",
    show_api=True,              # force listing on the API page
    queue=True,                 # reuse app queue
    concurrency_id="gpu",       # share GPU queue with other heavy events
)
gr.api(generate_minimal, **_api_options)

gr.LoginButton()