import tempfile

import gradio as gr
from huggingface_hub import InferenceClient

# Hosted model queried through the HF Inference API.
# NOTE(review): stable-zero123 is documented as an image-to-3D model; confirm
# it actually accepts a text prompt payload like the one sent below.
MODEL_ID = "stabilityai/stable-zero123"

client = InferenceClient(MODEL_ID)


def generate_3d(prompt):
    """Generate a 3D model file (.glb) from a text prompt via the Inference API.

    Args:
        prompt: Free-text description of the desired 3D model.

    Returns:
        Path to a .glb file holding the raw bytes returned by the API.

    Raises:
        gr.Error: If the inference request or file write fails; the original
            exception is kept as the cause chain.
    """
    try:
        # NOTE(review): InferenceClient.post is a deprecated low-level API in
        # recent huggingface_hub releases — verify against the pinned version.
        # The call is assumed to return the raw response body (GLB bytes).
        response = client.post(json={"inputs": prompt})
        # Unique temp file per request so concurrent users don't clobber each
        # other's output (the original always wrote a fixed "output.glb").
        # delete=False: Gradio needs the file to persist after this returns.
        with tempfile.NamedTemporaryFile(suffix=".glb", delete=False) as f:
            f.write(response)
            return f.name
    except gr.Error:
        raise  # already a user-facing error; don't double-wrap
    except Exception as e:
        # Surface the failure in the Gradio UI, preserving the cause chain
        # for server-side debugging.
        raise gr.Error(str(e)) from e


demo = gr.Interface(
    fn=generate_3d,
    inputs=gr.Textbox(label="Prompt", placeholder="Describe the 3D model you want..."),
    outputs=gr.Model3D(label="3D Model"),
    title="text-to-3dmodel",
    description="Text to 3D generation powered by " + MODEL_ID,
    theme="soft",
)

if __name__ == "__main__":
    demo.launch()