# NOTE(review): the three lines below were Hugging Face Space status text
# ("Spaces: / Running / Running") captured by the page scrape — not Python.
# Kept as a comment; safe to delete.
"""
Auto-generated Daggr node.

Space: HorizonRobotics/EmbodiedGen-Image-to-3D
API: /image_to_3d
Endpoints available: /start_session, /lambda, /lambda_1, /preprocess_image_fn, /lambda_2...
"""
import gradio as gr

from daggr import GradioNode, Graph

# === WIRING GUIDE for embodied_gen_image_to_3_d ===
# Inputs (what this node expects):
#   - image: filepath
#     Wire: embodied_gen_image_to_3_d.inputs['image'] = upstream_node.image
#   - seed: float
#     Wire: embodied_gen_image_to_3_d.inputs['seed'] = upstream_node.seed
#   - ss_sampling_steps: float
#     Wire: embodied_gen_image_to_3_d.inputs['ss_sampling_steps'] = upstream_node.ss_sampling_steps
#   - slat_sampling_steps: float
#     Wire: embodied_gen_image_to_3_d.inputs['slat_sampling_steps'] = upstream_node.slat_sampling_steps
#   - raw_image_cache: filepath
#     Wire: embodied_gen_image_to_3_d.inputs['raw_image_cache'] = upstream_node.raw_image_cache
#   - ss_guidance_strength: float
#     Wire: embodied_gen_image_to_3_d.inputs['ss_guidance_strength'] = upstream_node.ss_guidance_strength
#   - slat_guidance_strength: float
#     Wire: embodied_gen_image_to_3_d.inputs['slat_guidance_strength'] = upstream_node.slat_guidance_strength
#   - sam_image: filepath
#     Wire: embodied_gen_image_to_3_d.inputs['sam_image'] = upstream_node.sam_image
#
# Outputs (what this node produces):
#   - generated_3d_asset: filepath
#     Access: embodied_gen_image_to_3_d.generated_3d_asset
#     Usage:  next_node.inputs['generated_3d_asset'] = embodied_gen_image_to_3_d.generated_3d_asset
# ===========================================
# Node wrapping the Space's /image_to_3d endpoint.
# Inputs map 1:1 onto the endpoint's parameters; outputs expose the returned
# 3D asset file so downstream nodes can consume it.
embodied_gen_image_to_3_d = GradioNode(
    space_or_url="HorizonRobotics/EmbodiedGen-Image-to-3D",  # Space ID
    api_name="/image_to_3d",  # API endpoint
    inputs={
        # Each entry is a UI input — connect to an upstream node or provide a value.
        "image": gr.File(label="Input Image"),
        "seed": gr.Number(label="Seed"),
        "ss_sampling_steps": gr.Number(label="Sampling Steps"),
        "slat_sampling_steps": gr.Number(label="Sampling Steps"),
        "raw_image_cache": gr.File(label="parameter_7"),
        "ss_guidance_strength": gr.Number(label="Guidance Strength"),
        "slat_guidance_strength": gr.Number(label="Guidance Strength"),
        "sam_image": gr.File(label="SAM Seg Image"),
    },
    outputs={
        "generated_3d_asset": gr.File(label="Generated 3D Asset"),  # Display in node card
        # Use None to hide outputs: "hidden_output": None
    },
    # Optional: Transform outputs before downstream flow
    # postprocess=lambda outputs, final: final,
)
# Example usage: build a single-node graph and launch the Daggr UI.
if __name__ == "__main__":
    graph = Graph(
        name="EmbodiedGen-Image-to-3D Workflow",
        nodes=[embodied_gen_image_to_3_d],
    )
    graph.launch()
# Or run with: daggr this_file.py