# Hugging Face Spaces status banner (page-scrape residue): "Spaces: Sleeping"
import os

import gradio as gr

# Log the active Gradio build at startup so the Space logs show which
# version the defensive patches below were applied against.
print("Gradio version:", gr.__version__)
# Defensive patch for a known Gradio schema issue: some Gradio/gradio_client
# versions emit a bare boolean (`True`/`False` are valid JSON schemas) where
# a dict is expected, crashing gradio_client's schema-to-type conversion.
def patch_gradio_schema():
    """Monkey-patch ``gradio_client.utils`` so boolean JSON schemas don't crash.

    Wraps ``get_type`` and ``_json_schema_to_python_type`` to return the
    stringified boolean (e.g. ``"True"``) instead of raising when the schema
    is a bare ``bool``; all other schemas are delegated to the originals.

    Best-effort: any failure (missing module or attribute on older/newer
    gradio_client builds) is reported and swallowed so the app still starts.
    """
    try:
        import gradio_client.utils as client_utils

        original_get_type = client_utils.get_type
        original_json_schema_to_python_type = client_utils._json_schema_to_python_type

        def patched_get_type(schema):
            # Handle case where schema is a boolean instead of a dict.
            if isinstance(schema, bool):
                return str(schema)
            return original_get_type(schema)

        def patched_json_schema_to_python_type(schema, defs=None):
            # Handle case where schema is a boolean instead of a dict.
            if isinstance(schema, bool):
                return str(schema)
            try:
                return original_json_schema_to_python_type(schema, defs)
            except Exception as e:
                # Defensive fallback kept from the original patch. The bool
                # case is already returned above, so this branch should be
                # unreachable in practice; it is retained as belt-and-braces.
                if "Cannot parse schema" in str(e) and isinstance(schema, bool):
                    return str(schema)
                raise  # bare raise: re-raise with the original traceback

        client_utils.get_type = patched_get_type
        client_utils._json_schema_to_python_type = patched_json_schema_to_python_type
        print("✅ Applied comprehensive Gradio schema patch")
    except Exception as e:
        print(f"⚠️ Could not apply Gradio schema patch: {e}")


patch_gradio_schema()
# -----------------------------------------------------------------------------
# Authentication
# -----------------------------------------------------------------------------
# The backend Space is private; the HF_TOKEN secret (set in the Space's
# settings) authenticates this frontend against it. Fail fast at startup
# rather than on the first request.
TOKEN = os.environ.get("HF_TOKEN")
if TOKEN is None:
    raise RuntimeError(
        "HF_TOKEN secret is missing. Add it in your Space settings so the "
        "frontend can reach the private backend."
    )
# -----------------------------------------------------------------------------
# Backend loader
# -----------------------------------------------------------------------------
# Mounts the private backend Space as a callable Gradio app. Uses the
# `token=` kwarg (Gradio 5+) rather than the deprecated `hf_token=`.
backend = gr.load(
    src="spaces",
    name="devmandan/hmnqr-migrated-alpha",
    token=TOKEN,
    queue=False,  # queuing is configured on the wrapper `demo` instead
)
# -----------------------------------------------------------------------------
# Wrapper UI – minimal; customise freely.
# -----------------------------------------------------------------------------
# Renders the loaded backend's full interface inside this Space's own
# Blocks context, so this frontend controls the page title/queue/launch.
with gr.Blocks(title="HMNQR") as demo:
    backend.render()
# -----------------------------------------------------------------------------
# Work around Gradio ≤ 5.7 upload-size bug: some `Blocks` builds don't set
# `.max_file_size`, causing AttributeError in the `/upload` route. Give the
# frontend a sane default (e.g. 20 MB) if it's missing.
# -----------------------------------------------------------------------------
# NOTE: hasattr (not a None check) is deliberate — only patch when the
# attribute is entirely absent; an explicit None may be meaningful upstream.
if not hasattr(demo, "max_file_size"):
    demo.max_file_size = 20 * 1024 * 1024  # 20 MB
# ------------------------------------------------------------------
# Enable queuing for better throughput
# ------------------------------------------------------------------
demo.queue(
    max_size=50,                 # cap on waiting requests — presumably excess requests are rejected; confirm against Gradio docs
    default_concurrency_limit=1  # Process one request at a time
)
# `demo.app` is a FastAPI instance and **not callable**, so Uvicorn won't try
# to invoke it like a function (avoids the ValueError you saw). Exposing it
# as module-level `app` lets an ASGI server target `module:app` directly.
app = demo.app
# ------------------------------------------------------------------
# Local debug convenience
# ------------------------------------------------------------------
if __name__ == "__main__":
    # Do NOT call demo.queue() again here: the queue was already configured
    # above with max_size=50 / default_concurrency_limit=1, and a second
    # argument-less queue() call would reconfigure it with defaults,
    # silently discarding those settings.
    demo.launch(show_error=True)