File size: 3,215 Bytes
1ba6b3f
 
 
 
 
 
21c2110
1ba6b3f
21c2110
1ba6b3f
21c2110
1ba6b3f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21c2110
1ba6b3f
 
 
21c2110
1ba6b3f
592efa4
 
 
 
 
 
 
 
1ba6b3f
21c2110
1ba6b3f
 
 
 
21c2110
1ba6b3f
 
21c2110
 
1ba6b3f
21c2110
1ba6b3f
 
 
 
 
 
592efa4
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
'''
Auto-generated Daggr Node
Space: HorizonRobotics/EmbodiedGen-Image-to-3D
API: /image_to_3d
Endpoints available: /start_session, /lambda, /lambda_1, /preprocess_image_fn, /lambda_2...
'''

from daggr import GradioNode
import gradio as gr
from daggr import Graph

# === WIRING GUIDE for embodied_gen_image_to_3_d ===
# Inputs (what this node expects):
#   - image: filepath
#     Wire: embodied_gen_image_to_3_d.inputs['image'] = upstream_node.image
#   - seed: float
#     Wire: embodied_gen_image_to_3_d.inputs['seed'] = upstream_node.seed
#   - ss_sampling_steps: float
#     Wire: embodied_gen_image_to_3_d.inputs['ss_sampling_steps'] = upstream_node.ss_sampling_steps
#   - slat_sampling_steps: float
#     Wire: embodied_gen_image_to_3_d.inputs['slat_sampling_steps'] = upstream_node.slat_sampling_steps
#   - raw_image_cache: filepath
#     Wire: embodied_gen_image_to_3_d.inputs['raw_image_cache'] = upstream_node.raw_image_cache
#   - ss_guidance_strength: float
#     Wire: embodied_gen_image_to_3_d.inputs['ss_guidance_strength'] = upstream_node.ss_guidance_strength
#   - slat_guidance_strength: float
#     Wire: embodied_gen_image_to_3_d.inputs['slat_guidance_strength'] = upstream_node.slat_guidance_strength
#   - sam_image: filepath
#     Wire: embodied_gen_image_to_3_d.inputs['sam_image'] = upstream_node.sam_image
#
# Outputs (what this node produces):
#   - generated_3d_asset: filepath
#     Access: embodied_gen_image_to_3_d.generated_3d_asset
#     Usage: next_node.inputs['generated_3d_asset'] = embodied_gen_image_to_3_d.generated_3d_asset
# ===========================================

# Node bound to the HorizonRobotics/EmbodiedGen-Image-to-3D Space via its
# "/image_to_3d" endpoint. The input/output widget maps are declared as
# private module-level dicts so the constructor call stays easy to scan.
_image_to_3d_inputs = {
    # Each entry is a UI input: wire it to an upstream node or provide a value.
    "image": gr.File(label="Input Image"),
    "seed": gr.Number(label="Seed"),
    "ss_sampling_steps": gr.Number(label="Sampling Steps"),
    "slat_sampling_steps": gr.Number(label="Sampling Steps"),
    "raw_image_cache": gr.File(label="parameter_7"),
    "ss_guidance_strength": gr.Number(label="Guidance Strength"),
    "slat_guidance_strength": gr.Number(label="Guidance Strength"),
    "sam_image": gr.File(label="SAM Seg Image"),
}

_image_to_3d_outputs = {
    # Displayed on the node card; map a key to None to hide that output.
    "generated_3d_asset": gr.File(label="Generated 3D Asset"),
}

embodied_gen_image_to_3_d = GradioNode(
    space_or_url="HorizonRobotics/EmbodiedGen-Image-to-3D",  # Space ID
    api_name="/image_to_3d",  # API endpoint
    inputs=_image_to_3d_inputs,
    outputs=_image_to_3d_outputs,
    # Optional: transform outputs before downstream flow, e.g.
    # postprocess=lambda outputs, final: final,
)

# Example usage: launch this single-node workflow as a standalone graph.
# Alternatively run with: daggr this_file.py
if __name__ == "__main__":
    workflow = Graph(
        name="EmbodiedGen-Image-to-3D Workflow",
        nodes=[embodied_gen_image_to_3_d],
    )
    workflow.launch()