# Initial CADFusion Space with Gradio (HF Space app.py, commit b498278)
import gradio as gr
import torch
from cadfusion.models import CADFusionModel
# Load CADFusion once at import time: weights from the HF Hub,
# model code from the installed cadfusion package.
print("Loading CADFusion model...")
device = "cuda" if torch.cuda.is_available() else "cpu"
# Move to GPU when available and freeze in eval mode for inference.
model = CADFusionModel.from_pretrained("microsoft/CADFusion").to(device)
model.eval()
def generate(prompt):
    """Run CADFusion inference on a user prompt.

    Args:
        prompt: Free-text CAD description entered by the user.

    Returns:
        Whatever ``model.generate`` produces for the prompt
        (sampled with the fixed decoding settings below).
    """
    decoding_args = {
        "max_new_tokens": 256,
        "temperature": 0.7,
        "top_p": 0.9,
    }
    # Inference only — no gradients needed.
    with torch.no_grad():
        return model.generate(prompt, **decoding_args)
# Gradio UI: single prompt box -> generate button -> text output.
with gr.Blocks() as demo:
    # NOTE: the original heading contained mojibake ("๐Ÿ—๏ธ"), the
    # UTF-8 bytes of the construction-crane emoji misdecoded; fixed here.
    gr.Markdown("## 🏗️ CADFusion Demo\nEnter a CAD prompt below:")
    prompt_box = gr.Textbox(label="Your CAD prompt")
    output_box = gr.Textbox(label="Model Output")
    generate_btn = gr.Button("Generate")
    # Wire the button to the inference function defined above.
    generate_btn.click(fn=generate, inputs=prompt_box, outputs=output_box)
# Launch the Gradio server only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()