Wei Liu Claude Sonnet 4.6 committed on
Commit
7e09fa7
·
1 Parent(s): de43c55

Fix Genesis from_torch patch: avoid torch ops on genesis tensors inside ZeroGPU

Browse files

The previous patch used gs_tensor.clone() which triggers __torch_function__
on a genesis.grad.Tensor. Inside a ZeroGPU GPU slot, spaces/zero/torch/
patching.py intercepts __torch_function__ and then PyTorch's _convert()
calls ret.as_subclass(genesis.grad.Tensor), which hits the same "already
associated" error.

Fix: do all tensor ops (to, clone, detach) on plain torch.Tensor BEFORE
wrapping as a genesis.grad.Tensor via _make_subclass. No torch operations
are ever called on the genesis tensor inside from_torch, so ZeroGPU's
__torch_function__ interceptor is never triggered.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>

Files changed (1) hide show
  1. app.py +11 -9
app.py CHANGED
@@ -56,18 +56,20 @@ def _patch_genesis_from_torch():
56
  _gs.raise_exception(f"Unsupported dtype: {dtype}")
57
  if torch_tensor.requires_grad and (not detach) and (not requires_grad):
58
  requires_grad = True
59
- t = torch_tensor.to(device=_gs.device, dtype=dtype)
60
- # _make_subclass creates a SubClass view without triggering the PyTorch
61
- # "already associated" check that Tensor(existing_tensor) hits.
62
- gs_tensor = torch.Tensor._make_subclass(_Tensor, t)
 
 
 
 
 
 
 
63
  gs_tensor.scene = scene
64
  gs_tensor.uid = _gs.UID()
65
  gs_tensor.parents = []
66
- gs_tensor = gs_tensor.clone()
67
- if detach:
68
- gs_tensor = gs_tensor.detach(sceneless=False)
69
- if requires_grad:
70
- gs_tensor = gs_tensor.requires_grad_()
71
  return gs_tensor
72
 
73
  _gc_ops.from_torch = _patched_from_torch
 
56
  _gs.raise_exception(f"Unsupported dtype: {dtype}")
57
  if torch_tensor.requires_grad and (not detach) and (not requires_grad):
58
  requires_grad = True
59
+ # Perform ALL tensor operations on plain torch.Tensor objects BEFORE
60
+ # wrapping as genesis.grad.Tensor. This avoids __torch_function__
61
+ # interference from ZeroGPU (spaces/zero/torch/patching.py), which
62
+ # intercepts operations on tensor subclasses and then fails when
63
+ # PyTorch tries to restore the subclass type via as_subclass().
64
+ t = torch_tensor.to(device=_gs.device, dtype=dtype).clone()
65
+ if detach:
66
+ t = t.detach()
67
+ # _make_subclass uses MAYBE_UNINITIALIZED status, bypassing the
68
+ # "already associated" check that Tensor(existing_tensor) triggers.
69
+ gs_tensor = torch.Tensor._make_subclass(_Tensor, t, requires_grad)
70
  gs_tensor.scene = scene
71
  gs_tensor.uid = _gs.UID()
72
  gs_tensor.parents = []
 
 
 
 
 
73
  return gs_tensor
74
 
75
  _gc_ops.from_torch = _patched_from_torch