Spaces:
Running on Zero
Running on Zero
Upload app.py
Browse files
app.py
CHANGED
|
@@ -1887,37 +1887,32 @@ with gr.Blocks(theme=args.theme, elem_id="main", fill_width=True, fill_height=Fa
|
|
| 1887 |
copy_prompt_btn_pony.click(gradio_copy_prompt, inputs=[output_text_pony], outputs=[prompt_gui], show_api=False)
|
| 1888 |
|
| 1889 |
# Stable programmatic endpoint
|
| 1890 |
-
|
| 1891 |
-
|
| 1892 |
-
|
| 1893 |
-
def generate_minimal(
|
| 1894 |
-
|
| 1895 |
-
|
| 1896 |
-
|
| 1897 |
-
|
| 1898 |
-
|
| 1899 |
-
|
| 1900 |
-
|
| 1901 |
-
|
| 1902 |
-
|
| 1903 |
-
|
| 1904 |
-
|
| 1905 |
-
|
| 1906 |
-
|
| 1907 |
-
|
| 1908 |
-
# If you want to stream loader status, you could `yield {"status": _}`
|
| 1909 |
-
pass
|
| 1910 |
-
else:
|
| 1911 |
-
# Legacy: payload is the full positional array already
|
| 1912 |
-
args = payload
|
| 1913 |
-
# Delegate to the existing streaming generator; preserves SSE behavior
|
| 1914 |
-
yield from sd_gen_generate_pipeline(*args)
|
| 1915 |
|
| 1916 |
gr.api(
|
| 1917 |
generate_minimal,
|
| 1918 |
-
api_name="
|
| 1919 |
-
api_description="
|
| 1920 |
-
|
|
|
|
|
|
|
| 1921 |
)
|
| 1922 |
|
| 1923 |
gr.LoginButton()
|
|
|
|
| 1887 |
copy_prompt_btn_pony.click(gradio_copy_prompt, inputs=[output_text_pony], outputs=[prompt_gui], show_api=False)
|
| 1888 |
|
| 1889 |
# Stable programmatic endpoint
|
| 1890 |
+
# --- minimal sync wrapper + stable API ---
|
| 1891 |
+
from typing import Any, List, Tuple, Optional
|
| 1892 |
+
|
| 1893 |
+
def generate_minimal(
    args: List[Any],
    model_name: str,
    vae_model: str,
    task: str,
    controlnet_model: str
) -> Tuple[str, Optional[List[str]], Optional[str]]:
    """Synchronous API wrapper: preload a model, then run one full generation.

    Drains the model-loader generator so the requested model is ready,
    then consumes the streaming generation pipeline and returns only its
    final yield.

    Parameters
    ----------
    args : full positional argument list forwarded verbatim to
        ``sd_gen_generate_pipeline`` — TODO confirm expected arity matches
        the pipeline signature.
    model_name, vae_model, task, controlnet_model : forwarded to
        ``sd_gen.load_new_model`` for the preload step.

    Returns
    -------
    The last ``(status, images, info)`` triple yielded by the pipeline;
    an empty status is replaced with ``"COMPLETE"``.
    """
    # Exhaust the loader's progress yields; only completion matters here.
    for _status in sd_gen.load_new_model(model_name, vae_model, task, controlnet_model):
        pass

    # Collapse the streaming pipeline down to its final triple.
    final_status: str = "START"
    final_images: Optional[List[str]] = None
    final_info: Optional[str] = None
    for step_status, step_images, step_info in sd_gen_generate_pipeline(*args):
        final_status = step_status
        final_images = step_images
        final_info = step_info

    return final_status or "COMPLETE", final_images, final_info
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1908 |
|
| 1909 |
# Register the stable programmatic endpoint on the app's API surface.
# Endpoint: POST /gradio_api/call/generate_image
gr.api(
    generate_minimal,
    api_name="generate_image",  # => POST /gradio_api/call/generate_image
    api_description="Preload model then generate with full positional args. Returns final (status, images, info).",
    show_api=True,        # ensure the endpoint is listed on the API docs page
    queue=True,           # route calls through the app's shared queue
    concurrency_id="gpu",  # serialize with the other GPU-heavy events
)
|
| 1917 |
|
| 1918 |
gr.LoginButton()
|