# NOTE(review): the three lines below were Hugging Face Spaces page residue
# (author caption "stardust-coder's picture", commit message "debug",
# commit hash 455bdaf) — not Python source; kept here as a comment so the
# file parses.
import base64
import mimetypes
import os
import re
import shutil
import subprocess
import sys
import tempfile
import uuid
from pathlib import Path
# =========================
# Paths / environment
# =========================
# Resolve everything relative to the launch directory so the app works
# regardless of where it is deployed.
BASE_DIR = Path.cwd().resolve()
GRADIO_TEMP_DIR = BASE_DIR / "gradio_tmp"  # scratch space handed to Gradio
ARTIFACT_DIR = BASE_DIR / "outputs"        # persistent, downloadable results

for _required_dir in (GRADIO_TEMP_DIR, ARTIFACT_DIR):
    _required_dir.mkdir(parents=True, exist_ok=True)

# Must be set before gradio is imported so its temp machinery picks it up.
os.environ["GRADIO_TEMP_DIR"] = str(GRADIO_TEMP_DIR)
import gradio as gr
from anthropic import Anthropic
from openai import OpenAI
# =========================
# Clients
# =========================
ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# Fail fast at startup: without both keys the pipeline cannot run at all.
for _env_name, _env_value in (
    ("ANTHROPIC_API_KEY", ANTHROPIC_API_KEY),
    ("OPENAI_API_KEY", OPENAI_API_KEY),
):
    if not _env_value:
        raise RuntimeError(f"{_env_name} is not set")

anthropic_client = Anthropic(api_key=ANTHROPIC_API_KEY)
openai_client = OpenAI(api_key=OPENAI_API_KEY)
# =========================
# Model options
# =========================
# Claude models selectable for CAD-code generation (exposed via a hidden
# Radio in the UI; the default below is used unless it is made visible).
CLAUDE_MODELS = [
    "claude-opus-4-6",
    "claude-sonnet-4-6",
    "claude-sonnet-4-5",
    "claude-haiku-4-5-20251001"
]
# OpenAI models selectable for the STEP-file review step.
OPENAI_MODELS = [
    "gpt-5.4-pro-2026-03-05",
    "gpt-5.2-2025-12-11",
    "gpt-5-nano-2025-08-07",
]
# Defaults used by the UI widgets; must be members of the lists above.
DEFAULT_CLAUDE_MODEL = "claude-opus-4-6"
DEFAULT_OPENAI_MODEL = "gpt-5-nano-2025-08-07"
# =========================
# Helpers
# =========================
def strip_code_fences(text: str) -> str:
    """Extract the body of the first markdown code fence, if any.

    Returns the contents of the first ``` / ```python fenced block with
    surrounding whitespace removed; when no fence is present, returns the
    stripped input unchanged.
    """
    cleaned = text.strip()
    fenced = re.search(r"```(?:python)?\s*(.*?)```", cleaned, re.IGNORECASE | re.DOTALL)
    return fenced.group(1).strip() if fenced else cleaned
def detect_media_type(image_path: str) -> str:
    """Guess a MIME type from the file name, falling back to generic binary."""
    guessed = mimetypes.guess_type(image_path)[0]
    return guessed if guessed else "application/octet-stream"
def copy_to_artifact(src_path: Path, suffix: str) -> str:
    """Copy *src_path* into ARTIFACT_DIR under a fresh unique name.

    Results must outlive the per-run temp directory, so they are copied
    into the persistent outputs folder before the temp dir is deleted.

    Args:
        src_path: File to preserve.
        suffix: Extension (including the leading dot) for the artifact name.

    Returns:
        Absolute path of the copied artifact, as a string.
    """
    # uuid4 replaces tempfile._get_candidate_names(): that generator is a
    # private CPython detail with no stability guarantee across versions.
    dst = ARTIFACT_DIR / f"{uuid.uuid4().hex}{suffix}"
    shutil.copy2(src_path, dst)
    return str(dst.resolve())
def truncate_text(text: str, max_chars: int = 4000) -> str:
    """Return at most the last *max_chars* characters of *text*.

    The tail is kept (rather than the head) because the end of captured
    stdout/stderr usually carries the actual error. Falsy input
    (None, "") yields "".
    """
    return text[-max_chars:] if text else ""
# =========================
# LLM steps
# =========================
def generate_cad_code(text_prompt, image_path, claude_model):
    """Ask the selected Claude model to write a runnable CadQuery script.

    Builds a multimodal message from the optional text spec and optional
    image, then returns the model's reply with markdown fences stripped.

    Args:
        text_prompt: Free-text CAD specification (may be empty/None).
        image_path: Path to an optional 2D drawing (may be None).
        claude_model: Claude model identifier to use.

    Returns:
        Python source code for the CAD model, as a string.

    Raises:
        gr.Error: If neither input was provided or the reply was empty.
    """
    blocks = []
    spec = (text_prompt or "").strip()
    if spec:
        prompt = f"""Generate CadQuery Python code to build a 3D CAD model.
Requirements:
- Use CadQuery only
- Save files in the current working directory
- Export BOTH:
- STEP as ./output.step
- STL as ./output.stl
- The script must be directly executable with `python model.py`
- Do not use markdown fences
- Output ONLY valid Python code
- At the end, ensure the exports are actually executed
Specification:
{spec}
"""
        blocks.append({"type": "text", "text": prompt})

    if image_path:
        # Anthropic's API takes inline images as base64 with an explicit MIME type.
        encoded = base64.b64encode(Path(image_path).read_bytes()).decode("utf-8")
        blocks.append(
            {
                "type": "image",
                "source": {
                    "type": "base64",
                    "media_type": detect_media_type(image_path),
                    "data": encoded,
                },
            }
        )

    if not blocks:
        raise gr.Error("Text CAD specification or image is required.")

    reply = anthropic_client.messages.create(
        model=claude_model,
        max_tokens=2500,
        messages=[{"role": "user", "content": blocks}],
    )
    # Concatenate only the text blocks of the reply; ignore any other kinds.
    raw_reply = "".join(
        part.text for part in reply.content if getattr(part, "type", None) == "text"
    )
    code = strip_code_fences(raw_reply.strip())
    if not code:
        raise gr.Error("Claude returned empty code.")
    return code
def run_cadquery(code):
    """Execute a generated CadQuery script and collect its exports.

    Writes *code* to model.py inside a throwaway directory under
    GRADIO_TEMP_DIR, runs it with the current interpreter (180 s limit),
    and copies the expected ./output.step and ./output.stl exports into
    the persistent artifact directory before the temp dir is removed.

    Args:
        code: Python source of the CadQuery script.

    Returns:
        Tuple ``(step_path, stl_path)`` of absolute artifact paths.

    Raises:
        gr.Error: On non-zero exit, timeout, or missing export files.
    """
    with tempfile.TemporaryDirectory(dir=str(GRADIO_TEMP_DIR)) as tmpdir:
        tmpdir = Path(tmpdir)
        script_path = tmpdir / "model.py"
        with open(script_path, "w", encoding="utf-8") as f:
            f.write(code)
        try:
            result = subprocess.run(
                [sys.executable, str(script_path)],
                cwd=str(tmpdir),
                capture_output=True,
                text=True,
                timeout=180,
            )
        except subprocess.TimeoutExpired as exc:
            # Previously uncaught: a runaway script surfaced as a raw
            # traceback in the UI instead of a readable gr.Error.
            raise gr.Error(
                "CadQuery execution timed out after 180 seconds.\n\n"
                f"STDOUT:\n{truncate_text(exc.stdout)}\n\n"
                f"STDERR:\n{truncate_text(exc.stderr)}"
            ) from exc
        if result.returncode != 0:
            raise gr.Error(
                "CadQuery execution failed.\n\n"
                f"STDOUT:\n{truncate_text(result.stdout)}\n\n"
                f"STDERR:\n{truncate_text(result.stderr)}"
            )
        step_path = tmpdir / "output.step"
        stl_path = tmpdir / "output.stl"
        # Directory listing is included in errors to help debug wrong export paths.
        files = [p.name for p in tmpdir.iterdir()]
        if not step_path.exists() and not stl_path.exists():
            raise gr.Error(
                "CAD script finished but did not generate output.step or output.stl.\n\n"
                f"Files found in temp dir: {files}\n\n"
                "Make sure the generated CadQuery code exports exactly "
                "./output.step and ./output.stl"
            )
        if not step_path.exists():
            raise gr.Error(
                "output.step was not created.\n\n"
                f"Files found in temp dir: {files}"
            )
        if not stl_path.exists():
            raise gr.Error(
                "output.stl was not created.\n\n"
                f"Files found in temp dir: {files}"
            )
        # Copy out before the TemporaryDirectory context deletes everything.
        final_step = copy_to_artifact(step_path, ".step")
        final_stl = copy_to_artifact(stl_path, ".stl")
    return final_step, final_stl
def gpt_check(step_path, openai_model):
    """Ask an OpenAI model for a quick sanity review of the exported STEP file.

    Only the file's size is sent (never its contents), so the review is a
    coarse plausibility check rather than a geometric one. Returns a short
    explanatory string instead of raising when the file is missing.
    """
    if not (step_path and os.path.exists(step_path)):
        return "STEP file not found, so review was skipped."

    file_size = os.path.getsize(step_path)
    review_prompt = f"""A CAD model STEP file was generated.
File size: {file_size} bytes
Check if this seems reasonable for a CAD model and list possible issues.
Keep the answer concise.
"""
    reply = openai_client.responses.create(
        model=openai_model,
        input=review_prompt,
    )
    return reply.output_text.strip()
# =========================
# Pipeline
# =========================
def pipeline(text_prompt, image, claude_model, openai_model):
    """End-to-end handler: spec -> CadQuery code -> STEP/STL -> GPT review.

    Returns ``(stl_path, code, review, step_path)`` — the order the UI
    outputs expect: 3D viewer, code box, review box, STEP download.
    """
    cad_code = generate_cad_code(text_prompt, image, claude_model)
    step_file, stl_file = run_cadquery(cad_code)
    review_text = gpt_check(step_file, openai_model)
    return stl_file, cad_code, review_text, step_file
# =========================
# UI
# =========================
# Layout: inputs (hidden model pickers, spec text, optional drawing) in a
# narrow left column; results (3D viewer, code, review, STEP download) in a
# wide right column.
# delete_cache=(86400, 86400): purge cached files older than a day, daily.
with gr.Blocks(delete_cache=(86400, 86400)) as demo:
    gr.Markdown("## CAD Generator")
    with gr.Row():
        with gr.Column(scale=1):
            # Model selectors are hidden (visible=False): the defaults are
            # always used unless these are re-exposed.
            claude_model = gr.Radio(
                choices=CLAUDE_MODELS,
                value=DEFAULT_CLAUDE_MODEL,
                label="Claude model for CAD code generation",
                visible=False
            )
            openai_model = gr.Radio(
                choices=OPENAI_MODELS,
                value=DEFAULT_OPENAI_MODEL,
                label="OpenAI model for STEP review",
                visible=False
            )
            # Free-text CAD specification (bilingual English/Japanese label).
            text_prompt = gr.Textbox(
                label="Text CAD specification\nδ»•ζ§˜γ‚’θ¨˜θΌ‰γ—γ¦γγ γ•γ„\n\n δΎ‹οΌ‰A bolt with a hexagonal socket head and a cylindrical shaft",
                lines=10,
                placeholder="e.g. A 60mm x 40mm x 10mm enclosure with 4 corner holes..."
            )
            # Optional 2D drawing; passed as a file path so it can be
            # base64-encoded for Claude.
            image_input = gr.Image(
                label="2D drawing (optional)\n2D図青ををップロードすることも可能です",
                type="filepath"
            )
            run_btn = gr.Button("Generate CAD")
        with gr.Column(scale=3):
            viewer = gr.Model3D(label="CAD Viewer (STL)")
            code_box = gr.Code(label="Generated CAD Code", language="python")
            review_box = gr.Textbox(
                label="GPT Model Check",
                lines=8
            )
            step_file = gr.File(label="Download STEP")
    # Outputs match pipeline's return order: (stl, code, review, step).
    run_btn.click(
        fn=pipeline,
        inputs=[text_prompt, image_input, claude_model, openai_model],
        outputs=[viewer, code_box, review_box, step_file]
    )
# allowed_paths lets the frontend serve files living outside Gradio's own
# temp dir (our artifact and scratch directories).
demo.launch(
    allowed_paths=[str(ARTIFACT_DIR), str(GRADIO_TEMP_DIR)]
)