Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- msx_assets/object/004_sugar_box/textured.mtl +3 -0
- msx_assets/object/006_mustard_bottle/material_0.mtl +7 -0
- msx_assets/object/006_mustard_bottle/textured.mtl +3 -0
- msx_assets/object/015_peach/collision.obj +0 -0
- msx_assets/object/015_peach/material_0.mtl +7 -0
- msx_assets/object/015_peach/textured.mtl +3 -0
- msx_assets/object/018_plum/collision.obj +0 -0
- msx_assets/object/018_plum/textured.mtl +3 -0
- msx_assets/object/018_plum/textured.obj +0 -0
- msx_assets/object/021_bleach_cleanser/collision.obj +3090 -0
- msx_assets/object/028_skillet_lid/textured.mtl +3 -0
- msx_assets/object/054_softball/textured.obj +0 -0
- msx_assets/object/057_racquetball/collision.obj +0 -0
- msx_assets/object/065-e_cups/textured.mtl +3 -0
- msx_assets/object/065-e_cups/textured.obj +0 -0
- msx_assets/object/065-i_cups/material_0.mtl +7 -0
- msx_assets/object/065-i_cups/textured.obj +0 -0
- msx_assets/object/071_nine_hole_peg_test/material_0.mtl +7 -0
- msx_assets/object/073-d_lego_duplo/textured.mtl +3 -0
- msx_assets/object/073-d_lego_duplo/textured.obj +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/__init__.py +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/__pycache__/__init__.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/__init__.py +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/__pycache__/__init__.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/__init__.py +3 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/__pycache__/__init__.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/__pycache__/layers.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/__pycache__/layers_attention.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/layers.py +504 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/layers_attention.py +194 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/__init__.py +5 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/__pycache__/helpers.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/__pycache__/mlp_model.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/__pycache__/sample_functions.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/diffusion_model_base.py +689 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/flow_matching_base.py +129 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/gaussian_diffusion_loss.py +52 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/guide_managers.py +1636 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/helpers.py +109 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/mlp_model.py +56 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/sample_functions.py +286 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/temporal_unet.py +370 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/trainer/__init__.py +1 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/trainer/__pycache__/__init__.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/trainer/__pycache__/trainer.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/trainer/trainer.py +385 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/utils/__init__.py +1 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/utils/__pycache__/loaders.cpython-310.pyc +0 -0
- project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/utils/__pycache__/summary_trajectory.cpython-310.pyc +0 -0
msx_assets/object/004_sugar_box/textured.mtl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
newmtl material_0
|
| 2 |
+
# shader_type beckmann
|
| 3 |
+
map_Kd texture_map.png
|
msx_assets/object/006_mustard_bottle/material_0.mtl
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# https://github.com/mikedh/trimesh
|
| 2 |
+
newmtl material_0
|
| 3 |
+
Ka 0.40000000 0.40000000 0.40000000
|
| 4 |
+
Kd 0.40000000 0.40000000 0.40000000
|
| 5 |
+
Ks 0.40000000 0.40000000 0.40000000
|
| 6 |
+
Ns 1.00000000
|
| 7 |
+
map_Kd material_0.png
|
msx_assets/object/006_mustard_bottle/textured.mtl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
newmtl material_0
|
| 2 |
+
# shader_type beckmann
|
| 3 |
+
map_Kd texture_map.png
|
msx_assets/object/015_peach/collision.obj
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
msx_assets/object/015_peach/material_0.mtl
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# https://github.com/mikedh/trimesh
|
| 2 |
+
newmtl material_0
|
| 3 |
+
Ka 0.40000000 0.40000000 0.40000000
|
| 4 |
+
Kd 0.40000000 0.40000000 0.40000000
|
| 5 |
+
Ks 0.40000000 0.40000000 0.40000000
|
| 6 |
+
Ns 1.00000000
|
| 7 |
+
map_Kd material_0.png
|
msx_assets/object/015_peach/textured.mtl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
newmtl material_0
|
| 2 |
+
# shader_type beckmann
|
| 3 |
+
map_Kd texture_map.png
|
msx_assets/object/018_plum/collision.obj
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
msx_assets/object/018_plum/textured.mtl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
newmtl material_0
|
| 2 |
+
# shader_type beckmann
|
| 3 |
+
map_Kd texture_map.png
|
msx_assets/object/018_plum/textured.obj
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
msx_assets/object/021_bleach_cleanser/collision.obj
ADDED
|
@@ -0,0 +1,3090 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
o convex_0
|
| 2 |
+
v 0.00290953 0.0260712 0.105665
|
| 3 |
+
v 0.00287678 0.0260348 0.117146
|
| 4 |
+
v 0.0097389 0.0243356 0.105665
|
| 5 |
+
v -0.00279556 0.0260712 0.105665
|
| 6 |
+
v 0.00572933 0.0231822 0.125709
|
| 7 |
+
v 0.0066826 0.0241319 0.118099
|
| 8 |
+
v -0.00282831 0.0260348 0.117146
|
| 9 |
+
v 0.0123913 0.0212793 0.118099
|
| 10 |
+
v 0.0115072 0.0232477 0.105665
|
| 11 |
+
v -0.00288653 0.026053 0.105665
|
| 12 |
+
v 0.00477969 0.022229 0.127612
|
| 13 |
+
v 0.00715924 0.021756 0.127137
|
| 14 |
+
v 0.00953515 0.0212793 0.126662
|
| 15 |
+
v -0.00282831 0.0231822 0.125709
|
| 16 |
+
v -0.00602287 0.0253544 0.105665
|
| 17 |
+
v -0.00610655 0.0253216 0.105665
|
| 18 |
+
v -0.00996331 0.0224691 0.124283
|
| 19 |
+
v 0.0123913 0.0155706 0.126662
|
| 20 |
+
v 0.0181001 0.0155706 0.106685
|
| 21 |
+
v 0.0183038 0.0157744 0.105665
|
| 22 |
+
v 0.0172123 0.017539 0.105665
|
| 23 |
+
v 0.0143597 0.0203952 0.105665
|
| 24 |
+
v 0.00572933 0.0203261 0.128563
|
| 25 |
+
v -0.00377795 0.022229 0.127612
|
| 26 |
+
v 0.00810887 0.0198531 0.128087
|
| 27 |
+
v 0.0104884 0.0165239 0.127612
|
| 28 |
+
v -0.0118153 0.0224691 0.105665
|
| 29 |
+
v -0.00948668 0.022229 0.124759
|
| 30 |
+
v -0.0123429 0.0212793 0.117146
|
| 31 |
+
v -0.0151954 0.0184268 0.122856
|
| 32 |
+
v -0.0117462 0.0203261 0.124996
|
| 33 |
+
v 0.0109651 0.0141443 0.128087
|
| 34 |
+
v 0.013341 0.0108151 0.127612
|
| 35 |
+
v 0.0142942 0.0117648 0.125709
|
| 36 |
+
v 0.0186094 0.0145518 0.105665
|
| 37 |
+
v 0.00858551 0.0174735 0.128563
|
| 38 |
+
v -0.00282831 0.0203261 0.128563
|
| 39 |
+
v -0.0146023 0.0174735 0.124996
|
| 40 |
+
v -0.0142785 0.0203952 0.105665
|
| 41 |
+
v -0.0163852 0.0160472 0.124283
|
| 42 |
+
v 0.0114381 0.0117648 0.128563
|
| 43 |
+
v 0.013341 0.00511005 0.127612
|
| 44 |
+
v 0.0142942 0.00605969 0.125709
|
| 45 |
+
v 0.0171468 0.00891223 0.117146
|
| 46 |
+
v 0.0186094 0.000285467 0.105665
|
| 47 |
+
v -0.00889361 0.014621 0.12785
|
| 48 |
+
v -0.00853704 0.0117648 0.128563
|
| 49 |
+
v -0.0161487 0.014621 0.124759
|
| 50 |
+
v -0.0142458 0.014621 0.125709
|
| 51 |
+
v -0.0172002 0.0145518 0.105665
|
| 52 |
+
v -0.0170983 0.014621 0.122856
|
| 53 |
+
v 0.0114381 0.00605969 0.128563
|
| 54 |
+
v 0.0172123 -0.00253433 0.105665
|
| 55 |
+
v 0.0161971 0.00511005 0.119049
|
| 56 |
+
v 0.0104884 -0.000598677 0.127612
|
| 57 |
+
v 0.0171468 0.00605969 0.117146
|
| 58 |
+
v -0.0113896 0.00605969 0.128563
|
| 59 |
+
v -0.0161487 0.00320714 0.124759
|
| 60 |
+
v -0.0151954 0.00605969 0.126662
|
| 61 |
+
v -0.018561 0.0117648 0.105665
|
| 62 |
+
v -0.0170983 0.00320714 0.122856
|
| 63 |
+
v 0.00858551 0.000350959 0.128563
|
| 64 |
+
v 0.0123913 -0.00345486 0.117146
|
| 65 |
+
v 0.0143597 -0.00539052 0.105665
|
| 66 |
+
v 0.0100118 -0.00178845 0.127137
|
| 67 |
+
v 0.00858551 -0.00285815 0.12785
|
| 68 |
+
v -0.0113896 0.000350959 0.128563
|
| 69 |
+
v -0.0161487 0.00130423 0.123806
|
| 70 |
+
v -0.015672 0.00178087 0.124996
|
| 71 |
+
v -0.0151954 0.00320714 0.126662
|
| 72 |
+
v -0.018561 0.00320714 0.105665
|
| 73 |
+
v -0.0183427 0.00233391 0.105665
|
| 74 |
+
v 0.00572933 -0.00250159 0.128563
|
| 75 |
+
v 0.00715924 -0.004641 0.127137
|
| 76 |
+
v 0.00858551 -0.00642747 0.117146
|
| 77 |
+
v 0.0128643 -0.00607091 0.10716
|
| 78 |
+
v 0.00858551 -0.00932368 0.105665
|
| 79 |
+
v 0.00572933 -0.0044045 0.127612
|
| 80 |
+
v -0.0132925 0.000350959 0.127612
|
| 81 |
+
v -0.0128159 -0.00107531 0.12785
|
| 82 |
+
v -0.00853704 -0.00250159 0.128563
|
| 83 |
+
v -0.0151554 -0.0034112 0.105665
|
| 84 |
+
v -0.0132925 -0.0044045 0.118099
|
| 85 |
+
v -0.0123429 -0.00345486 0.124759
|
| 86 |
+
v -0.0132925 -0.00155195 0.126662
|
| 87 |
+
v -0.0149589 0.000350959 0.125353
|
| 88 |
+
v 0.00287678 -0.00535413 0.125709
|
| 89 |
+
v 0.00287678 -0.00821032 0.117146
|
| 90 |
+
v 0.00430305 -0.00749718 0.118574
|
| 91 |
+
v 0.00715924 -0.00749718 0.115723
|
| 92 |
+
v 0.00572933 -0.00821032 0.114295
|
| 93 |
+
v 2.4235e-05 -0.0110992 0.105665
|
| 94 |
+
v -0.00853704 -0.0044045 0.127612
|
| 95 |
+
v -0.00996331 -0.00392786 0.12785
|
| 96 |
+
v -0.0118153 -0.00749718 0.105665
|
| 97 |
+
v -0.0104399 -0.0044045 0.126662
|
| 98 |
+
v -0.00853704 -0.00535413 0.125709
|
| 99 |
+
v -0.00568086 -0.00821032 0.117146
|
| 100 |
+
v -7.40032e-05 -0.0110992 0.105665
|
| 101 |
+
v -0.00853704 -0.00880702 0.105665
|
| 102 |
+
v -0.00758377 -0.00725704 0.118099
|
| 103 |
+
v -0.00282831 -0.0106626 0.105665
|
| 104 |
+
f 1 2 3
|
| 105 |
+
f 1 3 9
|
| 106 |
+
f 1 9 22
|
| 107 |
+
f 1 22 21
|
| 108 |
+
f 1 21 20
|
| 109 |
+
f 1 20 35
|
| 110 |
+
f 1 35 45
|
| 111 |
+
f 1 45 53
|
| 112 |
+
f 1 53 64
|
| 113 |
+
f 1 64 77
|
| 114 |
+
f 1 77 92
|
| 115 |
+
f 1 92 99
|
| 116 |
+
f 1 99 102
|
| 117 |
+
f 1 102 100
|
| 118 |
+
f 1 100 95
|
| 119 |
+
f 1 95 82
|
| 120 |
+
f 1 82 72
|
| 121 |
+
f 1 72 71
|
| 122 |
+
f 1 71 60
|
| 123 |
+
f 1 60 50
|
| 124 |
+
f 1 50 39
|
| 125 |
+
f 1 39 27
|
| 126 |
+
f 1 27 16
|
| 127 |
+
f 1 16 15
|
| 128 |
+
f 1 15 10
|
| 129 |
+
f 1 10 4
|
| 130 |
+
f 1 4 7
|
| 131 |
+
f 1 7 2
|
| 132 |
+
f 2 5 13
|
| 133 |
+
f 2 13 6
|
| 134 |
+
f 2 6 3
|
| 135 |
+
f 2 7 14
|
| 136 |
+
f 2 14 5
|
| 137 |
+
f 3 6 8
|
| 138 |
+
f 3 8 9
|
| 139 |
+
f 4 10 7
|
| 140 |
+
f 5 11 12
|
| 141 |
+
f 5 12 13
|
| 142 |
+
f 5 14 24
|
| 143 |
+
f 5 24 11
|
| 144 |
+
f 6 13 8
|
| 145 |
+
f 7 10 15
|
| 146 |
+
f 7 15 16
|
| 147 |
+
f 7 16 17
|
| 148 |
+
f 7 17 14
|
| 149 |
+
f 8 18 19
|
| 150 |
+
f 8 19 20
|
| 151 |
+
f 8 20 21
|
| 152 |
+
f 8 21 22
|
| 153 |
+
f 8 22 9
|
| 154 |
+
f 8 13 18
|
| 155 |
+
f 11 23 25
|
| 156 |
+
f 11 25 12
|
| 157 |
+
f 11 24 37
|
| 158 |
+
f 11 37 23
|
| 159 |
+
f 12 25 13
|
| 160 |
+
f 13 25 26
|
| 161 |
+
f 13 26 18
|
| 162 |
+
f 14 17 24
|
| 163 |
+
f 16 27 17
|
| 164 |
+
f 17 28 24
|
| 165 |
+
f 17 27 29
|
| 166 |
+
f 17 29 30
|
| 167 |
+
f 17 30 31
|
| 168 |
+
f 17 31 28
|
| 169 |
+
f 18 32 33
|
| 170 |
+
f 18 33 34
|
| 171 |
+
f 18 34 19
|
| 172 |
+
f 18 26 32
|
| 173 |
+
f 19 34 35
|
| 174 |
+
f 19 35 20
|
| 175 |
+
f 23 36 25
|
| 176 |
+
f 23 37 47
|
| 177 |
+
f 23 47 57
|
| 178 |
+
f 23 57 67
|
| 179 |
+
f 23 67 81
|
| 180 |
+
f 23 81 73
|
| 181 |
+
f 23 73 62
|
| 182 |
+
f 23 62 52
|
| 183 |
+
f 23 52 41
|
| 184 |
+
f 23 41 36
|
| 185 |
+
f 24 28 31
|
| 186 |
+
f 24 31 38
|
| 187 |
+
f 24 38 37
|
| 188 |
+
f 25 36 32
|
| 189 |
+
f 25 32 26
|
| 190 |
+
f 27 39 29
|
| 191 |
+
f 29 39 30
|
| 192 |
+
f 30 38 31
|
| 193 |
+
f 30 39 40
|
| 194 |
+
f 30 40 38
|
| 195 |
+
f 32 41 33
|
| 196 |
+
f 32 36 41
|
| 197 |
+
f 33 41 52
|
| 198 |
+
f 33 52 42
|
| 199 |
+
f 33 42 43
|
| 200 |
+
f 33 43 34
|
| 201 |
+
f 34 43 56
|
| 202 |
+
f 34 56 44
|
| 203 |
+
f 34 44 35
|
| 204 |
+
f 35 44 56
|
| 205 |
+
f 35 56 45
|
| 206 |
+
f 37 38 46
|
| 207 |
+
f 37 46 47
|
| 208 |
+
f 38 40 48
|
| 209 |
+
f 38 48 49
|
| 210 |
+
f 38 49 46
|
| 211 |
+
f 39 50 40
|
| 212 |
+
f 40 50 51
|
| 213 |
+
f 40 51 48
|
| 214 |
+
f 42 53 45
|
| 215 |
+
f 42 45 54
|
| 216 |
+
f 42 54 43
|
| 217 |
+
f 42 52 62
|
| 218 |
+
f 42 62 55
|
| 219 |
+
f 42 55 53
|
| 220 |
+
f 43 54 56
|
| 221 |
+
f 45 56 54
|
| 222 |
+
f 46 49 57
|
| 223 |
+
f 46 57 47
|
| 224 |
+
f 48 51 61
|
| 225 |
+
f 48 61 58
|
| 226 |
+
f 48 58 70
|
| 227 |
+
f 48 70 59
|
| 228 |
+
f 48 59 57
|
| 229 |
+
f 48 57 49
|
| 230 |
+
f 50 60 51
|
| 231 |
+
f 51 60 71
|
| 232 |
+
f 51 71 61
|
| 233 |
+
f 53 63 64
|
| 234 |
+
f 53 55 65
|
| 235 |
+
f 53 65 63
|
| 236 |
+
f 55 62 66
|
| 237 |
+
f 55 66 65
|
| 238 |
+
f 57 59 70
|
| 239 |
+
f 57 70 79
|
| 240 |
+
f 57 79 67
|
| 241 |
+
f 58 68 69
|
| 242 |
+
f 58 69 70
|
| 243 |
+
f 58 61 68
|
| 244 |
+
f 61 71 72
|
| 245 |
+
f 61 72 68
|
| 246 |
+
f 62 73 66
|
| 247 |
+
f 63 65 74
|
| 248 |
+
f 63 74 64
|
| 249 |
+
f 64 74 75
|
| 250 |
+
f 64 75 76
|
| 251 |
+
f 64 76 77
|
| 252 |
+
f 65 66 74
|
| 253 |
+
f 66 73 78
|
| 254 |
+
f 66 78 74
|
| 255 |
+
f 67 79 80
|
| 256 |
+
f 67 80 94
|
| 257 |
+
f 67 94 81
|
| 258 |
+
f 68 72 82
|
| 259 |
+
f 68 82 83
|
| 260 |
+
f 68 83 84
|
| 261 |
+
f 68 84 85
|
| 262 |
+
f 68 85 86
|
| 263 |
+
f 68 86 69
|
| 264 |
+
f 69 86 70
|
| 265 |
+
f 70 86 80
|
| 266 |
+
f 70 80 79
|
| 267 |
+
f 73 81 93
|
| 268 |
+
f 73 93 78
|
| 269 |
+
f 74 78 87
|
| 270 |
+
f 74 87 88
|
| 271 |
+
f 74 88 89
|
| 272 |
+
f 74 89 90
|
| 273 |
+
f 74 90 77
|
| 274 |
+
f 74 77 75
|
| 275 |
+
f 75 77 76
|
| 276 |
+
f 77 90 91
|
| 277 |
+
f 77 91 92
|
| 278 |
+
f 78 93 97
|
| 279 |
+
f 78 97 87
|
| 280 |
+
f 80 86 85
|
| 281 |
+
f 80 85 96
|
| 282 |
+
f 80 96 94
|
| 283 |
+
f 81 94 93
|
| 284 |
+
f 82 95 83
|
| 285 |
+
f 83 95 96
|
| 286 |
+
f 83 96 84
|
| 287 |
+
f 84 96 85
|
| 288 |
+
f 87 97 98
|
| 289 |
+
f 87 98 88
|
| 290 |
+
f 88 91 90
|
| 291 |
+
f 88 90 89
|
| 292 |
+
f 88 98 99
|
| 293 |
+
f 88 99 92
|
| 294 |
+
f 88 92 91
|
| 295 |
+
f 93 94 96
|
| 296 |
+
f 93 96 97
|
| 297 |
+
f 95 100 98
|
| 298 |
+
f 95 98 101
|
| 299 |
+
f 95 101 96
|
| 300 |
+
f 96 101 98
|
| 301 |
+
f 96 98 97
|
| 302 |
+
f 98 100 102
|
| 303 |
+
f 98 102 99
|
| 304 |
+
o convex_1
|
| 305 |
+
v 0.0513991 0.00605665 -0.034114
|
| 306 |
+
v 0.0513991 0.00320025 -0.034114
|
| 307 |
+
v 0.0513991 0.00890634 -0.0455246
|
| 308 |
+
v 0.0485438 0.0146191 0.00297761
|
| 309 |
+
v 0.0485438 0.0117627 0.0143882
|
| 310 |
+
v 0.0485438 0.00890634 0.0200935
|
| 311 |
+
v 0.0513991 0.00035056 -0.036974
|
| 312 |
+
v 0.0485438 0.00320025 0.0200935
|
| 313 |
+
v 0.0485438 0.00035056 0.0172481
|
| 314 |
+
v 0.0485438 0.0203252 -0.0455246
|
| 315 |
+
v 0.0485438 0.0174688 -0.0112929
|
| 316 |
+
v 0.0513991 0.00890634 -0.102607
|
| 317 |
+
v 0.0456886 0.0174688 0.0315186
|
| 318 |
+
v 0.0456886 0.0146191 0.0372239
|
| 319 |
+
v 0.0456886 0.0117627 0.0429292
|
| 320 |
+
v 0.0456886 0.00890634 0.045702
|
| 321 |
+
v 0.0513991 -0.00249913 -0.0455246
|
| 322 |
+
v 0.0485438 -0.00249913 0.0115428
|
| 323 |
+
v 0.0485438 -0.00535553 0.00297761
|
| 324 |
+
v 0.0456886 0.00310638 0.045702
|
| 325 |
+
v 0.0456886 0.00035056 0.0429292
|
| 326 |
+
v 0.0456886 -0.00535553 0.0315186
|
| 327 |
+
v 0.0485438 0.0203252 -0.102607
|
| 328 |
+
v 0.0456788 0.0260514 -0.102607
|
| 329 |
+
v 0.0456886 0.0260313 -0.0883506
|
| 330 |
+
v 0.0466371 0.0241337 -0.044581
|
| 331 |
+
v 0.0466371 0.0212773 -0.0103493
|
| 332 |
+
v 0.0437818 0.0241337 0.0153463
|
| 333 |
+
v 0.0456886 0.0203252 0.0143882
|
| 334 |
+
v 0.0513991 -0.00820522 -0.102607
|
| 335 |
+
v 0.0437818 0.0212773 0.0324622
|
| 336 |
+
v 0.0409265 0.0241337 0.0438873
|
| 337 |
+
v 0.0428333 0.0203252 0.0429292
|
| 338 |
+
v 0.0421684 0.0202313 0.045702
|
| 339 |
+
v 0.0450139 0.0116689 0.045702
|
| 340 |
+
v 0.0452094 0.0112867 0.04531
|
| 341 |
+
v 0.0455908 0.00936899 0.045702
|
| 342 |
+
v 0.0513991 -0.00535553 -0.0569497
|
| 343 |
+
v 0.0456886 -0.00820522 0.0200935
|
| 344 |
+
v 0.0485438 -0.00820522 -0.0112929
|
| 345 |
+
v 0.0449943 8.59719e-06 0.045702
|
| 346 |
+
v 0.0421195 -0.00856729 0.0429292
|
| 347 |
+
v 0.0428333 -0.00820522 0.0400838
|
| 348 |
+
v 0.0418848 -0.0101095 0.0400838
|
| 349 |
+
v 0.0437818 -0.00915735 0.0315186
|
| 350 |
+
v 0.0437818 -0.0120137 0.0200935
|
| 351 |
+
v 0.0437818 0.0298398 -0.102607
|
| 352 |
+
v 0.0437818 0.0298398 -0.0988031
|
| 353 |
+
v 0.0437818 0.0269834 -0.0131946
|
| 354 |
+
v 0.0409265 0.0269834 0.0239115
|
| 355 |
+
v 0.04783 -0.0199661 -0.102607
|
| 356 |
+
v 0.0513991 -0.00820522 -0.0826308
|
| 357 |
+
v 0.0380712 0.0269834 0.045702
|
| 358 |
+
v 0.0405647 0.0237717 0.045702
|
| 359 |
+
v 0.0485438 -0.013918 -0.0512444
|
| 360 |
+
v 0.0485438 -0.0110616 -0.0284233
|
| 361 |
+
v 0.0466371 -0.0120137 -0.0112929
|
| 362 |
+
v 0.0428039 -0.0054561 0.045702
|
| 363 |
+
v 0.0409265 -0.0120137 0.0400838
|
| 364 |
+
v 0.0418848 -0.00730002 0.045702
|
| 365 |
+
v 0.0409558 -0.00914394 0.045702
|
| 366 |
+
v 0.0437818 -0.0148701 0.00297761
|
| 367 |
+
v 0.0418848 0.030792 -0.102607
|
| 368 |
+
v 0.0418848 0.030792 -0.100719
|
| 369 |
+
v 0.039978 0.0317441 -0.0997612
|
| 370 |
+
v 0.0409265 0.0298398 -0.0502863
|
| 371 |
+
v 0.0418848 0.0279356 -0.012251
|
| 372 |
+
v 0.0371227 0.0288877 0.0229534
|
| 373 |
+
v 0.0342675 0.0288877 0.045702
|
| 374 |
+
v 0.0370347 0.0275064 0.045702
|
| 375 |
+
v 0.039978 0.0288877 -0.0112929
|
| 376 |
+
v 0.0478202 -0.0199862 -0.102607
|
| 377 |
+
v 0.0485438 -0.0167677 -0.0769255
|
| 378 |
+
v 0.0466371 -0.0177198 -0.0512444
|
| 379 |
+
v 0.0466371 -0.0205762 -0.0769255
|
| 380 |
+
v 0.0466371 -0.0148701 -0.0284233
|
| 381 |
+
v 0.0408972 -0.0092244 0.045702
|
| 382 |
+
v 0.0381103 -0.0120003 0.045702
|
| 383 |
+
v 0.0385504 -0.0132006 0.0415065
|
| 384 |
+
v 0.0385504 -0.0160569 0.0215307
|
| 385 |
+
v 0.0409265 -0.0148701 0.0200935
|
| 386 |
+
v 0.0437818 -0.0177198 -0.0198581
|
| 387 |
+
v 0.0399976 0.0317374 -0.102607
|
| 388 |
+
v 0.039978 0.0317441 -0.102607
|
| 389 |
+
v 0.0314122 0.0345938 -0.102607
|
| 390 |
+
v 0.0371227 0.0317441 -0.0512444
|
| 391 |
+
v 0.0342675 0.0317441 -0.0284233
|
| 392 |
+
v 0.0142903 0.0317441 0.045702
|
| 393 |
+
v 0.0314122 0.0317441 -0.0112929
|
| 394 |
+
v 0.0285569 0.0317441 0.000117704
|
| 395 |
+
v 0.0447303 -0.0243848 -0.0940559
|
| 396 |
+
v 0.0463926 -0.0221251 -0.102607
|
| 397 |
+
v 0.0447303 -0.0215284 -0.0655149
|
| 398 |
+
v 0.0437818 -0.0234326 -0.0740801
|
| 399 |
+
v 0.0380419 -0.0120406 0.045702
|
| 400 |
+
v 0.0362036 -0.0129592 0.045702
|
| 401 |
+
v 0.0342675 -0.013918 0.045702
|
| 402 |
+
v 0.0341794 -0.0139515 0.045702
|
| 403 |
+
v 0.0371227 -0.013918 0.0400838
|
| 404 |
+
v 0.0356951 -0.0160569 0.0300959
|
| 405 |
+
v 0.0414056 -0.0189133 -0.0184354
|
| 406 |
+
v 0.0414056 -0.0160569 0.00726021
|
| 407 |
+
v 0.0371227 -0.0167677 0.0200935
|
| 408 |
+
v 0.0414056 -0.0246194 -0.0726429
|
| 409 |
+
v -0.0170982 0.0345938 -0.102607
|
| 410 |
+
v 0.0257114 0.0345938 -0.0797854
|
| 411 |
+
v -0.0228088 0.0317441 0.045702
|
| 412 |
+
v 0.0228561 0.0345938 -0.0683603
|
| 413 |
+
v 0.0200008 0.0345938 -0.0598096
|
| 414 |
+
v 0.00857973 0.0345938 -0.034114
|
| 415 |
+
v 0.00572445 0.0345938 -0.0312541
|
| 416 |
+
v 3.34544e-05 0.0345938 -0.0255634
|
| 417 |
+
v 0.0442609 -0.0246194 -0.0926187
|
| 418 |
+
v 0.0385504 -0.0274758 -0.0926187
|
| 419 |
+
v 0.0447303 -0.0243848 -0.102607
|
| 420 |
+
v 0.0257114 -0.0167677 0.045702
|
| 421 |
+
v 0.0299845 -0.0160569 0.0415065
|
| 422 |
+
v 0.0285569 -0.0167677 0.0400838
|
| 423 |
+
v 0.0342675 -0.0167677 0.0286587
|
| 424 |
+
v 0.0314122 -0.031043 -0.0997612
|
| 425 |
+
v 0.039978 -0.0253302 -0.0740801
|
| 426 |
+
v -0.0285095 0.0317441 -0.102607
|
| 427 |
+
v -0.0170982 0.0345938 -0.0883506
|
| 428 |
+
v -0.0313648 0.0288877 0.0286587
|
| 429 |
+
v -0.0332716 0.0279356 0.0286587
|
| 430 |
+
v -0.0266125 0.0298398 0.045702
|
| 431 |
+
v -0.0056771 0.0345938 -0.0284233
|
| 432 |
+
v -0.00853238 0.0345938 -0.034114
|
| 433 |
+
v -0.0113877 0.0345938 -0.0455246
|
| 434 |
+
v -0.0142429 0.0345938 -0.062655
|
| 435 |
+
v -0.0256543 0.0317441 0.00297761
|
| 436 |
+
v -0.0342201 0.0288877 0.005823
|
| 437 |
+
v 0.033319 -0.0300908 -0.0997612
|
| 438 |
+
v 0.033319 -0.0300908 -0.102607
|
| 439 |
+
v 0.0314122 -0.0196241 0.0086829
|
| 440 |
+
v 0.00572445 -0.0196241 0.045702
|
| 441 |
+
v 0.00857973 -0.0196241 0.0429292
|
| 442 |
+
v 0.0200008 -0.0224805 0.00297761
|
| 443 |
+
v 0.0171553 -0.033886 -0.102607
|
| 444 |
+
v 0.0314122 -0.031043 -0.102607
|
| 445 |
+
v -0.0370754 0.0288877 -0.102607
|
| 446 |
+
v -0.0285095 0.0317441 -0.0455246
|
| 447 |
+
v -0.0351686 0.0269834 0.0286587
|
| 448 |
+
v -0.0361269 0.0279356 0.005823
|
| 449 |
+
v -0.0323133 0.0269834 0.0400838
|
| 450 |
+
v -0.026681 0.0297862 0.045702
|
| 451 |
+
v -0.0370754 0.0288877 -0.0483845
|
| 452 |
+
v -0.0143309 -0.0210054 0.045702
|
| 453 |
+
v -0.0056771 -0.0224805 0.0286587
|
| 454 |
+
v 0.00572445 -0.0253302 -0.00558759
|
| 455 |
+
v 0.00857973 -0.0253302 -0.00844749
|
| 456 |
+
v 0.0171456 -0.0253302 -0.0198581
|
| 457 |
+
v 0.00572445 -0.0338927 -0.0940559
|
| 458 |
+
v 0.011435 -0.0338927 -0.102607
|
| 459 |
+
v 0.011435 -0.0281866 -0.0398193
|
| 460 |
+
v 0.00286918 -0.0281866 -0.0312541
|
| 461 |
+
v -0.0389626 0.0279423 -0.102607
|
| 462 |
+
v -0.0389822 0.0279356 -0.0997612
|
| 463 |
+
v -0.0380239 0.0241337 0.0200935
|
| 464 |
+
v -0.0363616 0.0246098 0.0300959
|
| 465 |
+
v -0.0352957 0.0231816 0.0400838
|
| 466 |
+
v -0.0380239 0.0269834 0.005823
|
| 467 |
+
v -0.0389822 0.0279356 -0.0483845
|
| 468 |
+
v -0.0335063 0.0246098 0.0415065
|
| 469 |
+
v -0.0320786 0.0246098 0.0443664
|
| 470 |
+
v -0.0295265 0.0269566 0.045702
|
| 471 |
+
v -0.0228968 -0.0196241 0.045702
|
| 472 |
+
v -0.0256543 -0.0196241 0.0429292
|
| 473 |
+
v -0.0256543 -0.0224805 0.0229534
|
| 474 |
+
v -0.0228088 -0.0224805 0.0258133
|
| 475 |
+
v -0.0199535 -0.0224805 0.0286587
|
| 476 |
+
v -0.0170982 -0.0253302 0.000117704
|
| 477 |
+
v -0.00282182 -0.0253302 0.000117704
|
| 478 |
+
v 3.34544e-05 -0.0338927 -0.091196
|
| 479 |
+
v -0.0170982 -0.0338927 -0.102607
|
| 480 |
+
v -0.0399307 0.0260313 -0.102607
|
| 481 |
+
v -0.0437344 0.0212773 -0.0940559
|
| 482 |
+
v -0.0408792 0.0269834 -0.091196
|
| 483 |
+
v -0.0392168 0.0217534 0.0215307
|
| 484 |
+
v -0.0392168 0.0246098 0.00726021
|
| 485 |
+
v -0.0363616 0.0217534 0.0386466
|
| 486 |
+
v -0.0342592 0.0174688 0.045702
|
| 487 |
+
v -0.0325089 0.0230877 0.045702
|
| 488 |
+
v -0.0324111 0.0232755 0.045702
|
| 489 |
+
v -0.0408792 0.0241337 -0.00844749
|
| 490 |
+
v -0.0408792 0.0269834 -0.0483845
|
| 491 |
+
v -0.0256543 -0.0190139 0.045702
|
| 492 |
+
v -0.0260356 -0.0188664 0.045702
|
| 493 |
+
v -0.027561 -0.018672 0.0438873
|
| 494 |
+
v -0.0285095 -0.0196241 0.0372239
|
| 495 |
+
v -0.0285095 -0.0224805 0.0172481
|
| 496 |
+
v -0.0285095 -0.0253302 -0.0141528
|
| 497 |
+
v -0.0228088 -0.0253302 -0.00558759
|
| 498 |
+
v -0.0199535 -0.0253302 -0.00272768
|
| 499 |
+
v -0.0113877 -0.0338927 -0.0940559
|
| 500 |
+
v -0.0056771 -0.0338927 -0.091196
|
| 501 |
+
v -0.0313648 -0.031043 -0.102607
|
| 502 |
+
v -0.0142429 -0.0338927 -0.0969013
|
| 503 |
+
v -0.0228088 -0.031043 -0.0740801
|
| 504 |
+
v -0.0285095 -0.031043 -0.0854907
|
| 505 |
+
v -0.0313648 -0.031043 -0.0940559
|
| 506 |
+
v -0.0456412 0.0146191 -0.102607
|
| 507 |
+
v -0.0456412 0.0174688 -0.0940559
|
| 508 |
+
v -0.0427859 0.0231816 -0.091196
|
| 509 |
+
v -0.0420721 0.0246098 -0.0412566
|
| 510 |
+
v -0.0399307 0.0203252 0.0200935
|
| 511 |
+
v -0.0399307 0.0174688 0.0258133
|
| 512 |
+
v -0.0399307 0.0146191 0.0315186
|
| 513 |
+
v -0.0370754 0.0203252 0.0372239
|
| 514 |
+
v -0.0420721 0.018897 0.00726021
|
| 515 |
+
v -0.0420721 0.0217534 -0.00701028
|
| 516 |
+
v -0.0392168 0.0131909 0.0358012
|
| 517 |
+
v -0.0363616 0.0160473 0.0415065
|
| 518 |
+
v -0.034269 0.0173347 0.045702
|
| 519 |
+
v -0.0270819 -0.0181959 0.045702
|
| 520 |
+
v -0.0299372 -0.0181959 0.0407951
|
| 521 |
+
v -0.0304163 -0.018672 0.038182
|
| 522 |
+
v -0.0304163 -0.0215284 0.0181917
|
| 523 |
+
v -0.0361269 -0.0215284 -0.00462945
|
| 524 |
+
v -0.0342201 -0.0224805 -0.00558759
|
| 525 |
+
v -0.0342201 -0.0253302 -0.034114
|
| 526 |
+
v -0.0313648 -0.0253302 -0.022718
|
| 527 |
+
v -0.0313648 -0.0281866 -0.0569497
|
| 528 |
+
v -0.0228088 -0.0281866 -0.0398193
|
| 529 |
+
v -0.0335258 -0.0301579 -0.102607
|
| 530 |
+
v -0.0332716 -0.0300908 -0.0930978
|
| 531 |
+
v -0.0390408 -0.0274959 -0.102607
|
| 532 |
+
v -0.0342201 -0.0281866 -0.0683603
|
| 533 |
+
v -0.0456412 -0.013918 -0.102607
|
| 534 |
+
v -0.0484965 0.00035056 -0.0712202
|
| 535 |
+
v -0.0456412 0.0174688 -0.0398193
|
| 536 |
+
v -0.0449274 0.018897 -0.0383966
|
| 537 |
+
v -0.0427859 0.0146191 0.0115428
|
| 538 |
+
v -0.0427859 0.0174688 0.005823
|
| 539 |
+
v -0.0399307 0.0117627 0.034364
|
| 540 |
+
v -0.0427859 0.00890634 0.0172481
|
| 541 |
+
v -0.0427859 0.00605665 0.0200935
|
| 542 |
+
v -0.0427859 0.0203252 -0.00844749
|
| 543 |
+
v -0.0389822 0.0117627 0.0362803
|
| 544 |
+
v -0.034269 -0.00249913 0.045702
|
| 545 |
+
v -0.0284704 -0.0168616 0.045702
|
| 546 |
+
v -0.0286073 -0.0167275 0.045702
|
| 547 |
+
v -0.0327925 -0.0153462 0.0407951
|
| 548 |
+
v -0.0323133 -0.0177198 0.0362803
|
| 549 |
+
v -0.0332716 -0.018672 0.0267569
|
| 550 |
+
v -0.0351686 -0.0177198 0.0248552
|
| 551 |
+
v -0.0380239 -0.0205762 -0.00654573
|
| 552 |
+
v -0.0380239 -0.0234326 -0.0350722
|
| 553 |
+
v -0.0361269 -0.0243848 -0.0331704
|
| 554 |
+
v -0.0361269 -0.0272344 -0.0674167
|
| 555 |
+
v -0.0420623 -0.021763 -0.102607
|
| 556 |
+
v -0.0380239 -0.0262823 -0.0693185
|
| 557 |
+
v -0.0408792 -0.0234326 -0.0607533
|
| 558 |
+
v -0.0447514 -0.0160771 -0.102607
|
| 559 |
+
v -0.0456412 -0.013918 -0.0598096
|
| 560 |
+
v -0.0484965 0.00035056 -0.0683603
|
| 561 |
+
v -0.0456412 0.0146191 -0.0198581
|
| 562 |
+
v -0.0456412 0.0117627 -0.0112929
|
| 563 |
+
v -0.0456412 0.00320025 -0.00272768
|
| 564 |
+
v -0.0399307 0.00035056 0.034364
|
| 565 |
+
v -0.0427859 0.00320025 0.0200935
|
| 566 |
+
v -0.0389822 0.00035056 0.0362803
|
| 567 |
+
v -0.0342397 -0.00535553 0.045702
|
| 568 |
+
v -0.0361269 -0.00249913 0.0419855
|
| 569 |
+
v -0.0299372 -0.0153462 0.045702
|
| 570 |
+
v -0.0306021 -0.0142935 0.045702
|
| 571 |
+
v -0.0308368 -0.013918 0.045702
|
| 572 |
+
v -0.0327925 -0.0124898 0.0436405
|
| 573 |
+
v -0.0356477 -0.0124898 0.0379352
|
| 574 |
+
v -0.0332716 -0.0158223 0.038182
|
| 575 |
+
v -0.0361269 -0.0158223 0.0267569
|
| 576 |
+
v -0.0389822 -0.0158223 0.00964104
|
| 577 |
+
v -0.0380239 -0.0177198 0.00772476
|
| 578 |
+
v -0.0389822 -0.0215284 -0.0246197
|
| 579 |
+
v -0.0389822 -0.018672 -0.00462945
|
| 580 |
+
v -0.0408792 -0.0177198 -0.0150964
|
| 581 |
+
v -0.0408792 -0.0205762 -0.0322123
|
| 582 |
+
v -0.0389822 -0.0243848 -0.0531462
|
| 583 |
+
v -0.0418277 -0.0215284 -0.0588515
|
| 584 |
+
v -0.0446829 -0.0158223 -0.0588515
|
| 585 |
+
v -0.0427859 -0.0167677 -0.0312541
|
| 586 |
+
v -0.0456412 -0.0110616 -0.034114
|
| 587 |
+
v -0.0456412 -0.00820522 -0.022718
|
| 588 |
+
v -0.0456412 -0.00535553 -0.0141528
|
| 589 |
+
v -0.0456412 -0.00249913 -0.00844749
|
| 590 |
+
v -0.0427859 -0.00535553 0.0115428
|
| 591 |
+
v -0.0427859 0.00035056 0.0172481
|
| 592 |
+
v -0.0399307 -0.00249913 0.0315186
|
| 593 |
+
v -0.0392168 -0.00249913 0.0347269
|
| 594 |
+
v -0.0370754 -0.00249913 0.0400838
|
| 595 |
+
v -0.0342201 -0.0054494 0.045702
|
| 596 |
+
v -0.0363616 -0.0110616 0.0375868
|
| 597 |
+
v -0.0335063 -0.00865446 0.045702
|
| 598 |
+
v -0.0361269 -0.0129659 0.0353221
|
| 599 |
+
v -0.0389822 -0.0129659 0.0181917
|
| 600 |
+
v -0.0427859 -0.0110616 -0.00272768
|
| 601 |
+
v -0.0418277 -0.0129659 -0.00178406
|
| 602 |
+
v -0.0418277 -0.0158223 -0.0131946
|
| 603 |
+
v -0.0418277 -0.018672 -0.0303105
|
| 604 |
+
v -0.0427859 -0.013918 -0.0141528
|
| 605 |
+
v -0.0427859 -0.00820522 0.005823
|
| 606 |
+
v -0.0399307 -0.0110616 0.0172481
|
| 607 |
+
v -0.0399307 -0.00535553 0.0286587
|
| 608 |
+
v -0.0392168 -0.00535553 0.031867
|
| 609 |
+
v -0.0370754 -0.0110616 0.034364
|
| 610 |
+
f 103 104 109
|
| 611 |
+
f 103 109 119
|
| 612 |
+
f 103 119 140
|
| 613 |
+
f 103 140 154
|
| 614 |
+
f 103 154 132
|
| 615 |
+
f 103 132 114
|
| 616 |
+
f 103 114 105
|
| 617 |
+
f 103 105 106
|
| 618 |
+
f 103 106 107
|
| 619 |
+
f 103 107 108
|
| 620 |
+
f 103 108 110
|
| 621 |
+
f 103 110 104
|
| 622 |
+
f 104 110 111
|
| 623 |
+
f 104 111 109
|
| 624 |
+
f 105 112 113
|
| 625 |
+
f 105 113 106
|
| 626 |
+
f 105 114 125
|
| 627 |
+
f 105 125 112
|
| 628 |
+
f 106 113 115
|
| 629 |
+
f 106 115 107
|
| 630 |
+
f 107 115 116
|
| 631 |
+
f 107 116 117
|
| 632 |
+
f 107 117 108
|
| 633 |
+
f 108 117 118
|
| 634 |
+
f 108 118 122
|
| 635 |
+
f 108 122 110
|
| 636 |
+
f 109 111 120
|
| 637 |
+
f 109 120 121
|
| 638 |
+
f 109 121 119
|
| 639 |
+
f 110 122 111
|
| 640 |
+
f 111 122 123
|
| 641 |
+
f 111 123 124
|
| 642 |
+
f 111 124 120
|
| 643 |
+
f 112 125 126
|
| 644 |
+
f 112 126 127
|
| 645 |
+
f 112 127 128
|
| 646 |
+
f 112 128 129
|
| 647 |
+
f 112 129 113
|
| 648 |
+
f 113 129 130
|
| 649 |
+
f 113 130 131
|
| 650 |
+
f 113 131 115
|
| 651 |
+
f 114 132 153
|
| 652 |
+
f 114 153 174
|
| 653 |
+
f 114 174 194
|
| 654 |
+
f 114 194 217
|
| 655 |
+
f 114 217 236
|
| 656 |
+
f 114 236 242
|
| 657 |
+
f 114 242 241
|
| 658 |
+
f 114 241 256
|
| 659 |
+
f 114 256 277
|
| 660 |
+
f 114 277 299
|
| 661 |
+
f 114 299 327
|
| 662 |
+
f 114 327 329
|
| 663 |
+
f 114 329 353
|
| 664 |
+
f 114 353 356
|
| 665 |
+
f 114 356 331
|
| 666 |
+
f 114 331 304
|
| 667 |
+
f 114 304 278
|
| 668 |
+
f 114 278 259
|
| 669 |
+
f 114 259 243
|
| 670 |
+
f 114 243 224
|
| 671 |
+
f 114 224 207
|
| 672 |
+
f 114 207 187
|
| 673 |
+
f 114 187 186
|
| 674 |
+
f 114 186 185
|
| 675 |
+
f 114 185 165
|
| 676 |
+
f 114 165 149
|
| 677 |
+
f 114 149 126
|
| 678 |
+
f 114 126 125
|
| 679 |
+
f 115 133 134
|
| 680 |
+
f 115 134 135
|
| 681 |
+
f 115 135 116
|
| 682 |
+
f 115 131 130
|
| 683 |
+
f 115 130 133
|
| 684 |
+
f 116 135 117
|
| 685 |
+
f 117 135 136
|
| 686 |
+
f 117 136 137
|
| 687 |
+
f 117 137 138
|
| 688 |
+
f 117 138 139
|
| 689 |
+
f 117 139 118
|
| 690 |
+
f 118 139 137
|
| 691 |
+
f 118 137 136
|
| 692 |
+
f 118 136 156
|
| 693 |
+
f 118 156 155
|
| 694 |
+
f 118 155 172
|
| 695 |
+
f 118 172 171
|
| 696 |
+
f 118 171 190
|
| 697 |
+
f 118 190 209
|
| 698 |
+
f 118 209 228
|
| 699 |
+
f 118 228 248
|
| 700 |
+
f 118 248 268
|
| 701 |
+
f 118 268 286
|
| 702 |
+
f 118 286 285
|
| 703 |
+
f 118 285 284
|
| 704 |
+
f 118 284 316
|
| 705 |
+
f 118 316 342
|
| 706 |
+
f 118 342 365
|
| 707 |
+
f 118 365 393
|
| 708 |
+
f 118 393 395
|
| 709 |
+
f 118 395 369
|
| 710 |
+
f 118 369 368
|
| 711 |
+
f 118 368 367
|
| 712 |
+
f 118 367 344
|
| 713 |
+
f 118 344 343
|
| 714 |
+
f 118 343 317
|
| 715 |
+
f 118 317 290
|
| 716 |
+
f 118 290 289
|
| 717 |
+
f 118 289 269
|
| 718 |
+
f 118 269 250
|
| 719 |
+
f 118 250 238
|
| 720 |
+
f 118 238 218
|
| 721 |
+
f 118 218 200
|
| 722 |
+
f 118 200 199
|
| 723 |
+
f 118 199 198
|
| 724 |
+
f 118 198 197
|
| 725 |
+
f 118 197 180
|
| 726 |
+
f 118 180 179
|
| 727 |
+
f 118 179 163
|
| 728 |
+
f 118 163 162
|
| 729 |
+
f 118 162 160
|
| 730 |
+
f 118 160 143
|
| 731 |
+
f 118 143 122
|
| 732 |
+
f 119 121 140
|
| 733 |
+
f 120 124 121
|
| 734 |
+
f 121 124 141
|
| 735 |
+
f 121 141 142
|
| 736 |
+
f 121 142 140
|
| 737 |
+
f 122 143 123
|
| 738 |
+
f 123 143 144
|
| 739 |
+
f 123 144 124
|
| 740 |
+
f 124 144 145
|
| 741 |
+
f 124 145 146
|
| 742 |
+
f 124 146 147
|
| 743 |
+
f 124 147 148
|
| 744 |
+
f 124 148 141
|
| 745 |
+
f 126 149 150
|
| 746 |
+
f 126 150 127
|
| 747 |
+
f 127 150 128
|
| 748 |
+
f 128 150 151
|
| 749 |
+
f 128 151 129
|
| 750 |
+
f 129 151 130
|
| 751 |
+
f 130 152 134
|
| 752 |
+
f 130 134 133
|
| 753 |
+
f 130 151 152
|
| 754 |
+
f 132 154 153
|
| 755 |
+
f 134 155 156
|
| 756 |
+
f 134 156 136
|
| 757 |
+
f 134 136 135
|
| 758 |
+
f 134 152 155
|
| 759 |
+
f 137 139 138
|
| 760 |
+
f 140 157 175
|
| 761 |
+
f 140 175 154
|
| 762 |
+
f 140 142 158
|
| 763 |
+
f 140 158 157
|
| 764 |
+
f 141 148 159
|
| 765 |
+
f 141 159 142
|
| 766 |
+
f 142 159 178
|
| 767 |
+
f 142 178 158
|
| 768 |
+
f 143 160 144
|
| 769 |
+
f 144 161 146
|
| 770 |
+
f 144 146 145
|
| 771 |
+
f 144 160 162
|
| 772 |
+
f 144 162 163
|
| 773 |
+
f 144 163 161
|
| 774 |
+
f 146 161 147
|
| 775 |
+
f 147 161 148
|
| 776 |
+
f 148 161 164
|
| 777 |
+
f 148 164 159
|
| 778 |
+
f 149 165 166
|
| 779 |
+
f 149 166 150
|
| 780 |
+
f 150 166 167
|
| 781 |
+
f 150 167 188
|
| 782 |
+
f 150 188 168
|
| 783 |
+
f 150 168 151
|
| 784 |
+
f 151 169 152
|
| 785 |
+
f 151 168 169
|
| 786 |
+
f 152 170 171
|
| 787 |
+
f 152 171 172
|
| 788 |
+
f 152 172 155
|
| 789 |
+
f 152 169 173
|
| 790 |
+
f 152 173 170
|
| 791 |
+
f 153 154 175
|
| 792 |
+
f 153 175 174
|
| 793 |
+
f 157 176 177
|
| 794 |
+
f 157 177 175
|
| 795 |
+
f 157 158 178
|
| 796 |
+
f 157 178 176
|
| 797 |
+
f 159 164 178
|
| 798 |
+
f 161 163 179
|
| 799 |
+
f 161 179 180
|
| 800 |
+
f 161 180 181
|
| 801 |
+
f 161 181 182
|
| 802 |
+
f 161 182 183
|
| 803 |
+
f 161 183 164
|
| 804 |
+
f 164 184 176
|
| 805 |
+
f 164 176 178
|
| 806 |
+
f 164 183 184
|
| 807 |
+
f 165 185 166
|
| 808 |
+
f 166 185 167
|
| 809 |
+
f 167 185 186
|
| 810 |
+
f 167 186 187
|
| 811 |
+
f 167 187 188
|
| 812 |
+
f 168 188 173
|
| 813 |
+
f 168 173 169
|
| 814 |
+
f 170 173 188
|
| 815 |
+
f 170 188 189
|
| 816 |
+
f 170 189 171
|
| 817 |
+
f 171 189 191
|
| 818 |
+
f 171 191 192
|
| 819 |
+
f 171 192 190
|
| 820 |
+
f 174 193 194
|
| 821 |
+
f 174 175 177
|
| 822 |
+
f 174 177 193
|
| 823 |
+
f 176 184 177
|
| 824 |
+
f 177 195 196
|
| 825 |
+
f 177 196 193
|
| 826 |
+
f 177 184 195
|
| 827 |
+
f 180 197 181
|
| 828 |
+
f 181 197 198
|
| 829 |
+
f 181 198 199
|
| 830 |
+
f 181 199 200
|
| 831 |
+
f 181 200 201
|
| 832 |
+
f 181 201 202
|
| 833 |
+
f 181 202 182
|
| 834 |
+
f 182 203 204
|
| 835 |
+
f 182 204 183
|
| 836 |
+
f 182 202 221
|
| 837 |
+
f 182 221 205
|
| 838 |
+
f 182 205 223
|
| 839 |
+
f 182 223 206
|
| 840 |
+
f 182 206 203
|
| 841 |
+
f 183 204 184
|
| 842 |
+
f 184 204 203
|
| 843 |
+
f 184 203 196
|
| 844 |
+
f 184 196 195
|
| 845 |
+
f 187 207 225
|
| 846 |
+
f 187 225 232
|
| 847 |
+
f 187 232 231
|
| 848 |
+
f 187 231 230
|
| 849 |
+
f 187 230 229
|
| 850 |
+
f 187 229 214
|
| 851 |
+
f 187 214 213
|
| 852 |
+
f 187 213 212
|
| 853 |
+
f 187 212 211
|
| 854 |
+
f 187 211 210
|
| 855 |
+
f 187 210 208
|
| 856 |
+
f 187 208 191
|
| 857 |
+
f 187 191 189
|
| 858 |
+
f 187 189 188
|
| 859 |
+
f 190 192 210
|
| 860 |
+
f 190 210 211
|
| 861 |
+
f 190 211 212
|
| 862 |
+
f 190 212 213
|
| 863 |
+
f 190 213 214
|
| 864 |
+
f 190 214 209
|
| 865 |
+
f 191 208 210
|
| 866 |
+
f 191 210 192
|
| 867 |
+
f 193 215 216
|
| 868 |
+
f 193 216 217
|
| 869 |
+
f 193 217 194
|
| 870 |
+
f 193 196 215
|
| 871 |
+
f 196 206 215
|
| 872 |
+
f 196 203 206
|
| 873 |
+
f 200 218 219
|
| 874 |
+
f 200 219 201
|
| 875 |
+
f 201 219 202
|
| 876 |
+
f 202 219 220
|
| 877 |
+
f 202 220 221
|
| 878 |
+
f 205 221 222
|
| 879 |
+
f 205 222 223
|
| 880 |
+
f 206 223 222
|
| 881 |
+
f 206 222 216
|
| 882 |
+
f 206 216 215
|
| 883 |
+
f 207 224 244
|
| 884 |
+
f 207 244 225
|
| 885 |
+
f 209 226 227
|
| 886 |
+
f 209 227 228
|
| 887 |
+
f 209 214 229
|
| 888 |
+
f 209 229 230
|
| 889 |
+
f 209 230 231
|
| 890 |
+
f 209 231 232
|
| 891 |
+
f 209 232 225
|
| 892 |
+
f 209 225 233
|
| 893 |
+
f 209 233 234
|
| 894 |
+
f 209 234 226
|
| 895 |
+
f 216 222 235
|
| 896 |
+
f 216 235 236
|
| 897 |
+
f 216 236 217
|
| 898 |
+
f 218 237 220
|
| 899 |
+
f 218 220 219
|
| 900 |
+
f 218 238 239
|
| 901 |
+
f 218 239 240
|
| 902 |
+
f 218 240 237
|
| 903 |
+
f 220 237 221
|
| 904 |
+
f 221 237 222
|
| 905 |
+
f 222 241 242
|
| 906 |
+
f 222 242 236
|
| 907 |
+
f 222 236 235
|
| 908 |
+
f 222 237 240
|
| 909 |
+
f 222 240 241
|
| 910 |
+
f 224 243 249
|
| 911 |
+
f 224 249 244
|
| 912 |
+
f 225 244 233
|
| 913 |
+
f 226 234 246
|
| 914 |
+
f 226 246 227
|
| 915 |
+
f 227 245 228
|
| 916 |
+
f 227 246 264
|
| 917 |
+
f 227 264 245
|
| 918 |
+
f 228 247 248
|
| 919 |
+
f 228 245 247
|
| 920 |
+
f 233 244 234
|
| 921 |
+
f 234 244 249
|
| 922 |
+
f 234 249 265
|
| 923 |
+
f 234 265 246
|
| 924 |
+
f 238 250 251
|
| 925 |
+
f 238 251 252
|
| 926 |
+
f 238 252 239
|
| 927 |
+
f 239 252 253
|
| 928 |
+
f 239 253 240
|
| 929 |
+
f 240 253 254
|
| 930 |
+
f 240 254 241
|
| 931 |
+
f 241 255 256
|
| 932 |
+
f 241 254 257
|
| 933 |
+
f 241 257 258
|
| 934 |
+
f 241 258 255
|
| 935 |
+
f 243 259 260
|
| 936 |
+
f 243 260 265
|
| 937 |
+
f 243 265 249
|
| 938 |
+
f 245 261 262
|
| 939 |
+
f 245 262 263
|
| 940 |
+
f 245 263 247
|
| 941 |
+
f 245 264 261
|
| 942 |
+
f 246 265 288
|
| 943 |
+
f 246 288 264
|
| 944 |
+
f 247 263 266
|
| 945 |
+
f 247 266 267
|
| 946 |
+
f 247 267 268
|
| 947 |
+
f 247 268 248
|
| 948 |
+
f 250 269 270
|
| 949 |
+
f 250 270 271
|
| 950 |
+
f 250 271 272
|
| 951 |
+
f 250 272 273
|
| 952 |
+
f 250 273 274
|
| 953 |
+
f 250 274 275
|
| 954 |
+
f 250 275 251
|
| 955 |
+
f 251 275 252
|
| 956 |
+
f 252 275 258
|
| 957 |
+
f 252 258 257
|
| 958 |
+
f 252 257 253
|
| 959 |
+
f 253 257 254
|
| 960 |
+
f 255 276 298
|
| 961 |
+
f 255 298 297
|
| 962 |
+
f 255 297 300
|
| 963 |
+
f 255 300 277
|
| 964 |
+
f 255 277 256
|
| 965 |
+
f 255 258 276
|
| 966 |
+
f 258 275 276
|
| 967 |
+
f 259 278 279
|
| 968 |
+
f 259 279 280
|
| 969 |
+
f 259 280 260
|
| 970 |
+
f 260 280 288
|
| 971 |
+
f 260 288 265
|
| 972 |
+
f 261 281 262
|
| 973 |
+
f 261 264 282
|
| 974 |
+
f 261 282 281
|
| 975 |
+
f 262 283 263
|
| 976 |
+
f 262 281 283
|
| 977 |
+
f 263 283 284
|
| 978 |
+
f 263 284 285
|
| 979 |
+
f 263 285 286
|
| 980 |
+
f 263 286 267
|
| 981 |
+
f 263 267 266
|
| 982 |
+
f 264 287 282
|
| 983 |
+
f 264 288 287
|
| 984 |
+
f 267 286 268
|
| 985 |
+
f 269 289 270
|
| 986 |
+
f 270 289 290
|
| 987 |
+
f 270 290 291
|
| 988 |
+
f 270 291 319
|
| 989 |
+
f 270 319 292
|
| 990 |
+
f 270 292 293
|
| 991 |
+
f 270 293 271
|
| 992 |
+
f 271 293 294
|
| 993 |
+
f 271 294 295
|
| 994 |
+
f 271 295 272
|
| 995 |
+
f 272 295 296
|
| 996 |
+
f 272 296 274
|
| 997 |
+
f 272 274 273
|
| 998 |
+
f 274 296 300
|
| 999 |
+
f 274 300 297
|
| 1000 |
+
f 274 297 298
|
| 1001 |
+
f 274 298 276
|
| 1002 |
+
f 274 276 275
|
| 1003 |
+
f 277 300 301
|
| 1004 |
+
f 277 301 302
|
| 1005 |
+
f 277 302 303
|
| 1006 |
+
f 277 303 299
|
| 1007 |
+
f 278 304 305
|
| 1008 |
+
f 278 305 279
|
| 1009 |
+
f 279 305 306
|
| 1010 |
+
f 279 306 280
|
| 1011 |
+
f 280 306 307
|
| 1012 |
+
f 280 307 288
|
| 1013 |
+
f 281 308 309
|
| 1014 |
+
f 281 309 310
|
| 1015 |
+
f 281 310 311
|
| 1016 |
+
f 281 311 283
|
| 1017 |
+
f 281 282 313
|
| 1018 |
+
f 281 313 312
|
| 1019 |
+
f 281 312 336
|
| 1020 |
+
f 281 336 308
|
| 1021 |
+
f 282 287 313
|
| 1022 |
+
f 283 311 314
|
| 1023 |
+
f 283 314 315
|
| 1024 |
+
f 283 315 284
|
| 1025 |
+
f 284 315 316
|
| 1026 |
+
f 287 288 307
|
| 1027 |
+
f 287 307 313
|
| 1028 |
+
f 290 317 291
|
| 1029 |
+
f 291 317 318
|
| 1030 |
+
f 291 318 319
|
| 1031 |
+
f 292 319 293
|
| 1032 |
+
f 293 319 320
|
| 1033 |
+
f 293 320 321
|
| 1034 |
+
f 293 321 322
|
| 1035 |
+
f 293 322 323
|
| 1036 |
+
f 293 323 324
|
| 1037 |
+
f 293 324 294
|
| 1038 |
+
f 294 324 325
|
| 1039 |
+
f 294 325 302
|
| 1040 |
+
f 294 302 301
|
| 1041 |
+
f 294 301 326
|
| 1042 |
+
f 294 326 295
|
| 1043 |
+
f 295 326 300
|
| 1044 |
+
f 295 300 296
|
| 1045 |
+
f 299 303 327
|
| 1046 |
+
f 300 326 301
|
| 1047 |
+
f 302 325 303
|
| 1048 |
+
f 303 328 329
|
| 1049 |
+
f 303 329 327
|
| 1050 |
+
f 303 325 330
|
| 1051 |
+
f 303 330 328
|
| 1052 |
+
f 304 331 332
|
| 1053 |
+
f 304 332 305
|
| 1054 |
+
f 305 332 358
|
| 1055 |
+
f 305 358 333
|
| 1056 |
+
f 305 333 334
|
| 1057 |
+
f 305 334 307
|
| 1058 |
+
f 305 307 306
|
| 1059 |
+
f 307 334 313
|
| 1060 |
+
f 308 335 309
|
| 1061 |
+
f 308 336 335
|
| 1062 |
+
f 309 335 310
|
| 1063 |
+
f 310 337 314
|
| 1064 |
+
f 310 314 311
|
| 1065 |
+
f 310 335 338
|
| 1066 |
+
f 310 338 339
|
| 1067 |
+
f 310 339 337
|
| 1068 |
+
f 312 313 340
|
| 1069 |
+
f 312 340 336
|
| 1070 |
+
f 313 334 333
|
| 1071 |
+
f 313 333 340
|
| 1072 |
+
f 314 337 341
|
| 1073 |
+
f 314 341 316
|
| 1074 |
+
f 314 316 315
|
| 1075 |
+
f 316 341 364
|
| 1076 |
+
f 316 364 342
|
| 1077 |
+
f 317 343 318
|
| 1078 |
+
f 318 343 344
|
| 1079 |
+
f 318 344 345
|
| 1080 |
+
f 318 345 346
|
| 1081 |
+
f 318 346 319
|
| 1082 |
+
f 319 346 348
|
| 1083 |
+
f 319 348 347
|
| 1084 |
+
f 319 347 321
|
| 1085 |
+
f 319 321 320
|
| 1086 |
+
f 321 347 348
|
| 1087 |
+
f 321 348 349
|
| 1088 |
+
f 321 349 350
|
| 1089 |
+
f 321 350 351
|
| 1090 |
+
f 321 351 322
|
| 1091 |
+
f 322 351 323
|
| 1092 |
+
f 323 351 330
|
| 1093 |
+
f 323 330 325
|
| 1094 |
+
f 323 325 324
|
| 1095 |
+
f 328 330 352
|
| 1096 |
+
f 328 352 329
|
| 1097 |
+
f 329 352 354
|
| 1098 |
+
f 329 354 355
|
| 1099 |
+
f 329 355 353
|
| 1100 |
+
f 330 351 352
|
| 1101 |
+
f 331 356 357
|
| 1102 |
+
f 331 357 358
|
| 1103 |
+
f 331 358 332
|
| 1104 |
+
f 333 358 359
|
| 1105 |
+
f 333 359 340
|
| 1106 |
+
f 335 336 360
|
| 1107 |
+
f 335 360 361
|
| 1108 |
+
f 335 361 338
|
| 1109 |
+
f 336 340 359
|
| 1110 |
+
f 336 359 360
|
| 1111 |
+
f 337 339 363
|
| 1112 |
+
f 337 363 362
|
| 1113 |
+
f 337 362 364
|
| 1114 |
+
f 337 364 341
|
| 1115 |
+
f 338 361 339
|
| 1116 |
+
f 339 361 363
|
| 1117 |
+
f 342 364 366
|
| 1118 |
+
f 342 366 365
|
| 1119 |
+
f 344 367 345
|
| 1120 |
+
f 345 367 368
|
| 1121 |
+
f 345 368 369
|
| 1122 |
+
f 345 369 370
|
| 1123 |
+
f 345 370 371
|
| 1124 |
+
f 345 371 396
|
| 1125 |
+
f 345 396 372
|
| 1126 |
+
f 345 372 346
|
| 1127 |
+
f 346 372 373
|
| 1128 |
+
f 346 373 348
|
| 1129 |
+
f 348 373 374
|
| 1130 |
+
f 348 374 375
|
| 1131 |
+
f 348 375 349
|
| 1132 |
+
f 349 376 380
|
| 1133 |
+
f 349 380 350
|
| 1134 |
+
f 349 375 374
|
| 1135 |
+
f 349 374 377
|
| 1136 |
+
f 349 377 378
|
| 1137 |
+
f 349 378 379
|
| 1138 |
+
f 349 379 376
|
| 1139 |
+
f 350 380 351
|
| 1140 |
+
f 351 380 352
|
| 1141 |
+
f 352 380 354
|
| 1142 |
+
f 353 355 381
|
| 1143 |
+
f 353 381 382
|
| 1144 |
+
f 353 382 357
|
| 1145 |
+
f 353 357 356
|
| 1146 |
+
f 354 380 355
|
| 1147 |
+
f 355 380 379
|
| 1148 |
+
f 355 379 401
|
| 1149 |
+
f 355 401 381
|
| 1150 |
+
f 357 382 383
|
| 1151 |
+
f 357 383 384
|
| 1152 |
+
f 357 384 358
|
| 1153 |
+
f 358 361 360
|
| 1154 |
+
f 358 360 359
|
| 1155 |
+
f 358 384 385
|
| 1156 |
+
f 358 385 386
|
| 1157 |
+
f 358 386 387
|
| 1158 |
+
f 358 387 361
|
| 1159 |
+
f 361 387 388
|
| 1160 |
+
f 361 388 389
|
| 1161 |
+
f 361 389 363
|
| 1162 |
+
f 362 363 389
|
| 1163 |
+
f 362 389 390
|
| 1164 |
+
f 362 390 391
|
| 1165 |
+
f 362 391 392
|
| 1166 |
+
f 362 392 364
|
| 1167 |
+
f 364 392 366
|
| 1168 |
+
f 365 366 392
|
| 1169 |
+
f 365 392 394
|
| 1170 |
+
f 365 394 393
|
| 1171 |
+
f 369 395 370
|
| 1172 |
+
f 370 395 371
|
| 1173 |
+
f 371 394 396
|
| 1174 |
+
f 371 395 394
|
| 1175 |
+
f 372 396 373
|
| 1176 |
+
f 373 396 374
|
| 1177 |
+
f 374 396 397
|
| 1178 |
+
f 374 397 403
|
| 1179 |
+
f 374 403 398
|
| 1180 |
+
f 374 398 399
|
| 1181 |
+
f 374 399 400
|
| 1182 |
+
f 374 400 377
|
| 1183 |
+
f 376 379 380
|
| 1184 |
+
f 377 400 378
|
| 1185 |
+
f 378 400 379
|
| 1186 |
+
f 379 400 401
|
| 1187 |
+
f 381 401 382
|
| 1188 |
+
f 382 401 383
|
| 1189 |
+
f 383 401 400
|
| 1190 |
+
f 383 400 402
|
| 1191 |
+
f 383 402 384
|
| 1192 |
+
f 384 402 398
|
| 1193 |
+
f 384 398 385
|
| 1194 |
+
f 385 398 386
|
| 1195 |
+
f 386 398 403
|
| 1196 |
+
f 386 403 388
|
| 1197 |
+
f 386 388 387
|
| 1198 |
+
f 388 404 407
|
| 1199 |
+
f 388 407 405
|
| 1200 |
+
f 388 405 390
|
| 1201 |
+
f 388 390 389
|
| 1202 |
+
f 388 403 404
|
| 1203 |
+
f 390 405 406
|
| 1204 |
+
f 390 406 391
|
| 1205 |
+
f 391 394 392
|
| 1206 |
+
f 391 406 394
|
| 1207 |
+
f 393 394 395
|
| 1208 |
+
f 394 406 405
|
| 1209 |
+
f 394 405 407
|
| 1210 |
+
f 394 407 396
|
| 1211 |
+
f 396 407 404
|
| 1212 |
+
f 396 404 397
|
| 1213 |
+
f 397 404 403
|
| 1214 |
+
f 398 402 400
|
| 1215 |
+
f 398 400 399
|
| 1216 |
+
o convex_2
|
| 1217 |
+
v 0.0314144 0.0345938 -0.102607
|
| 1218 |
+
v 0.0399735 0.0317441 -0.102607
|
| 1219 |
+
v 0.0314144 0.0345938 -0.116892
|
| 1220 |
+
v -0.017094 0.0345938 -0.102607
|
| 1221 |
+
v 0.0399937 0.0317374 -0.102607
|
| 1222 |
+
v 0.0399735 0.0317441 -0.116892
|
| 1223 |
+
v 0.0314144 0.033883 -0.120103
|
| 1224 |
+
v 0.028558 0.0345938 -0.119745
|
| 1225 |
+
v -0.0256632 0.0345938 -0.111186
|
| 1226 |
+
v -0.0288716 0.033883 -0.111186
|
| 1227 |
+
v -0.0285095 0.0317441 -0.102607
|
| 1228 |
+
v 0.04285 0.0303092 -0.102607
|
| 1229 |
+
v 0.0418845 0.030792 -0.117843
|
| 1230 |
+
v 0.0399735 0.0310266 -0.120103
|
| 1231 |
+
v 0.028558 0.0326962 -0.123552
|
| 1232 |
+
v 0.028558 0.0336484 -0.12165
|
| 1233 |
+
v 0.0371272 0.0298398 -0.123552
|
| 1234 |
+
v 0.0142861 0.0345938 -0.122601
|
| 1235 |
+
v -0.0285095 0.0345938 -0.114039
|
| 1236 |
+
v -0.0370787 0.0317441 -0.114039
|
| 1237 |
+
v -0.0374307 0.0310266 -0.111186
|
| 1238 |
+
v -0.0370787 0.0288877 -0.102607
|
| 1239 |
+
v 0.0437854 0.0298398 -0.102607
|
| 1240 |
+
v 0.0437854 0.0298398 -0.115941
|
| 1241 |
+
v 0.0414017 0.0303159 -0.12046
|
| 1242 |
+
v 0.0299862 0.0303159 -0.124739
|
| 1243 |
+
v 0.0157143 0.0331723 -0.124739
|
| 1244 |
+
v 0.0142861 0.0336484 -0.124503
|
| 1245 |
+
v 0.0414017 0.0274595 -0.123313
|
| 1246 |
+
v 0.0385554 0.0274595 -0.124739
|
| 1247 |
+
v -0.0228068 0.0345938 -0.122601
|
| 1248 |
+
v -0.0285095 0.0345938 -0.119745
|
| 1249 |
+
v -0.0370787 0.0317441 -0.122601
|
| 1250 |
+
v -0.0442097 0.0281769 -0.121172
|
| 1251 |
+
v -0.0446824 0.0279356 -0.114039
|
| 1252 |
+
v -0.0431435 0.0281769 -0.111186
|
| 1253 |
+
v -0.0389696 0.0279423 -0.102607
|
| 1254 |
+
v 0.0466418 0.0241337 -0.102607
|
| 1255 |
+
v 0.0466418 0.0241337 -0.115941
|
| 1256 |
+
v 0.0447308 0.0279356 -0.117843
|
| 1257 |
+
v 0.0442581 0.0274595 -0.12046
|
| 1258 |
+
v 0.028558 0.0288877 -0.125454
|
| 1259 |
+
v 0.0142861 0.0317441 -0.125454
|
| 1260 |
+
v -0.00996309 0.0274595 -0.127595
|
| 1261 |
+
v -0.0228068 0.0336484 -0.124503
|
| 1262 |
+
v 0.0414017 0.0246098 -0.124739
|
| 1263 |
+
v 0.0437854 0.0231816 -0.123552
|
| 1264 |
+
v 0.0371272 0.0260313 -0.125454
|
| 1265 |
+
v -0.024235 0.033883 -0.124027
|
| 1266 |
+
v -0.0299377 0.033883 -0.121172
|
| 1267 |
+
v -0.0385069 0.0310266 -0.124027
|
| 1268 |
+
v -0.0442097 0.0267487 -0.124027
|
| 1269 |
+
v -0.0465933 0.0241337 -0.122601
|
| 1270 |
+
v -0.0446824 0.0279356 -0.119745
|
| 1271 |
+
v -0.0449238 0.0274595 -0.112613
|
| 1272 |
+
v -0.0434956 0.0274595 -0.109759
|
| 1273 |
+
v -0.046704 0.0231816 -0.111186
|
| 1274 |
+
v -0.0408806 0.0241337 -0.102607
|
| 1275 |
+
v 0.0485427 0.0203252 -0.102607
|
| 1276 |
+
v 0.0475872 0.0222295 -0.117843
|
| 1277 |
+
v 0.0485427 0.0203252 -0.116892
|
| 1278 |
+
v 0.0471145 0.0217534 -0.12046
|
| 1279 |
+
v -0.0113913 0.0260313 -0.128307
|
| 1280 |
+
v -0.0370787 0.0260313 -0.128307
|
| 1281 |
+
v -0.0370787 0.0298398 -0.126405
|
| 1282 |
+
v 0.0418845 0.0231816 -0.124503
|
| 1283 |
+
v 0.0399735 0.0231816 -0.125454
|
| 1284 |
+
v 0.0471145 0.018897 -0.121886
|
| 1285 |
+
v -0.0413533 0.0267487 -0.126881
|
| 1286 |
+
v -0.0442097 0.0238924 -0.126881
|
| 1287 |
+
v -0.0456379 0.0231816 -0.125454
|
| 1288 |
+
v -0.0470661 0.0224642 -0.124027
|
| 1289 |
+
v -0.0475388 0.0212773 -0.124503
|
| 1290 |
+
v -0.0484943 0.0203252 -0.122601
|
| 1291 |
+
v -0.0465933 0.0241337 -0.114039
|
| 1292 |
+
v -0.0477802 0.0217534 -0.112613
|
| 1293 |
+
v -0.0456379 0.0146191 -0.102607
|
| 1294 |
+
v 0.0513991 0.00890634 -0.102607
|
| 1295 |
+
v 0.0478286 0.0203252 -0.120103
|
| 1296 |
+
v 0.0485427 0.0174688 -0.119745
|
| 1297 |
+
v 0.0513991 0.00890634 -0.111186
|
| 1298 |
+
v 0.0513991 0.00605665 -0.116892
|
| 1299 |
+
v -0.0399251 -0.0167677 -0.128307
|
| 1300 |
+
v 0.0399735 0.0117627 -0.125454
|
| 1301 |
+
v -0.0427815 0.0203252 -0.128307
|
| 1302 |
+
v -0.0427815 0.023537 -0.127595
|
| 1303 |
+
v -0.0399251 0.0263934 -0.127595
|
| 1304 |
+
v 0.0418845 0.0117627 -0.124503
|
| 1305 |
+
v 0.0475872 0.0174688 -0.12165
|
| 1306 |
+
v -0.0446824 0.0212773 -0.127356
|
| 1307 |
+
v -0.0465933 0.0193731 -0.126405
|
| 1308 |
+
v -0.0475388 0.0127149 -0.127356
|
| 1309 |
+
v -0.0503952 0.00700878 -0.124503
|
| 1310 |
+
v -0.0513506 0.00605665 -0.122601
|
| 1311 |
+
v -0.0513506 0.0117627 -0.116892
|
| 1312 |
+
v -0.0484943 0.0203252 -0.114039
|
| 1313 |
+
v -0.0506365 0.0131909 -0.115466
|
| 1314 |
+
v -0.0506365 0.0103345 -0.112613
|
| 1315 |
+
v -0.0513506 0.00890634 -0.114039
|
| 1316 |
+
v -0.0456379 -0.013918 -0.102607
|
| 1317 |
+
v 0.0513991 -0.00820522 -0.102607
|
| 1318 |
+
v 0.0513991 -0.00820522 -0.116892
|
| 1319 |
+
v 0.050685 -0.00963341 -0.118319
|
| 1320 |
+
v 0.0494881 0.00605665 -0.120696
|
| 1321 |
+
v -0.0142376 -0.031043 -0.125454
|
| 1322 |
+
v -0.0394524 -0.019148 -0.127832
|
| 1323 |
+
v -0.0427815 -0.013918 -0.128307
|
| 1324 |
+
v 0.0142861 -0.0253302 -0.125454
|
| 1325 |
+
v 0.00572697 -0.0281866 -0.125454
|
| 1326 |
+
v 0.0456863 -0.013918 -0.122601
|
| 1327 |
+
v 0.0428299 -0.0196241 -0.122601
|
| 1328 |
+
v -0.0456379 0.0117627 -0.128307
|
| 1329 |
+
v 0.0475872 -0.013918 -0.12165
|
| 1330 |
+
v -0.0494397 0.00510451 -0.126405
|
| 1331 |
+
v -0.0513506 0.00035056 -0.122601
|
| 1332 |
+
v -0.0513506 -0.00820522 -0.114039
|
| 1333 |
+
v -0.0470661 -0.0210523 -0.110471
|
| 1334 |
+
v -0.0447528 -0.0160771 -0.102607
|
| 1335 |
+
v -0.0477802 -0.0196241 -0.110829
|
| 1336 |
+
v 0.0478286 -0.0199661 -0.102607
|
| 1337 |
+
v 0.0478286 -0.0199795 -0.102624
|
| 1338 |
+
v 0.0485427 -0.0196241 -0.105477
|
| 1339 |
+
v 0.0485427 -0.0196241 -0.116892
|
| 1340 |
+
v 0.0496088 -0.0110616 -0.119745
|
| 1341 |
+
v 0.0494881 -0.00820522 -0.120696
|
| 1342 |
+
v 0.0478286 -0.0210523 -0.118319
|
| 1343 |
+
v 0.0478286 -0.0153462 -0.121172
|
| 1344 |
+
v -0.0132922 -0.0329472 -0.124503
|
| 1345 |
+
v -0.0313659 -0.031043 -0.125454
|
| 1346 |
+
v 0.00668245 -0.0300908 -0.124503
|
| 1347 |
+
v -0.0370787 -0.0281866 -0.125454
|
| 1348 |
+
v -0.0413533 -0.0210523 -0.126881
|
| 1349 |
+
v -0.0423087 -0.0162983 -0.127832
|
| 1350 |
+
v -0.0442097 -0.0181959 -0.126881
|
| 1351 |
+
v -0.0465933 -0.0148701 -0.126405
|
| 1352 |
+
v -0.0456379 0.00035056 -0.128307
|
| 1353 |
+
v 0.0399735 -0.0224805 -0.122601
|
| 1354 |
+
v 0.0342708 -0.0253302 -0.122601
|
| 1355 |
+
v 0.0460383 -0.0167677 -0.121886
|
| 1356 |
+
v 0.043192 -0.0224805 -0.121886
|
| 1357 |
+
v -0.0494397 -0.000594867 -0.126405
|
| 1358 |
+
v -0.0513506 -0.00820522 -0.119745
|
| 1359 |
+
v -0.0494397 -0.00915735 -0.123552
|
| 1360 |
+
v -0.0484943 -0.0196241 -0.114039
|
| 1361 |
+
v -0.0475388 -0.0215284 -0.113088
|
| 1362 |
+
v -0.0442097 -0.0267584 -0.110471
|
| 1363 |
+
v -0.0442097 -0.0239087 -0.107618
|
| 1364 |
+
v -0.0420674 -0.021763 -0.102607
|
| 1365 |
+
v 0.0478186 -0.0199862 -0.102607
|
| 1366 |
+
v 0.0466418 -0.0234326 -0.105477
|
| 1367 |
+
v 0.0466418 -0.0234326 -0.116892
|
| 1368 |
+
v 0.043544 -0.0239087 -0.121172
|
| 1369 |
+
v 0.0466418 -0.0196241 -0.120696
|
| 1370 |
+
v -0.0142376 -0.0338927 -0.122601
|
| 1371 |
+
v -0.0304105 -0.0329472 -0.124503
|
| 1372 |
+
v 0.00477148 -0.0319951 -0.123552
|
| 1373 |
+
v 0.0123852 -0.0329472 -0.12165
|
| 1374 |
+
v -0.0356505 -0.0324712 -0.124027
|
| 1375 |
+
v 0.0257117 -0.0281866 -0.122601
|
| 1376 |
+
v 0.0266571 -0.0300908 -0.12165
|
| 1377 |
+
v -0.0413533 -0.0296148 -0.124027
|
| 1378 |
+
v -0.0442097 -0.0267584 -0.124027
|
| 1379 |
+
v -0.0470661 -0.0210523 -0.124027
|
| 1380 |
+
v 0.040929 -0.0243848 -0.12165
|
| 1381 |
+
v 0.0352162 -0.0272344 -0.12165
|
| 1382 |
+
v -0.0484943 -0.0196241 -0.119745
|
| 1383 |
+
v -0.0465933 -0.0234326 -0.114993
|
| 1384 |
+
v -0.0446824 -0.0272344 -0.113088
|
| 1385 |
+
v -0.041836 -0.0300908 -0.110235
|
| 1386 |
+
v -0.03904 -0.0274959 -0.102607
|
| 1387 |
+
v 0.0464004 -0.0221251 -0.102607
|
| 1388 |
+
v 0.0447308 -0.0243848 -0.102607
|
| 1389 |
+
v 0.0437854 -0.0262823 -0.108333
|
| 1390 |
+
v 0.0437854 -0.0262823 -0.116892
|
| 1391 |
+
v 0.0421158 -0.0267584 -0.118319
|
| 1392 |
+
v 0.040929 -0.0272344 -0.118794
|
| 1393 |
+
v 0.0378413 -0.0267584 -0.121172
|
| 1394 |
+
v -0.0313659 -0.0338927 -0.122601
|
| 1395 |
+
v 0.0114398 -0.0338927 -0.119745
|
| 1396 |
+
v 0.0257117 -0.031043 -0.119745
|
| 1397 |
+
v 0.018098 -0.0329472 -0.118794
|
| 1398 |
+
v -0.0370787 -0.0338927 -0.119745
|
| 1399 |
+
v -0.0380242 -0.0319951 -0.123552
|
| 1400 |
+
v 0.0292721 -0.0296148 -0.121172
|
| 1401 |
+
v 0.0323699 -0.0300908 -0.118794
|
| 1402 |
+
v -0.0408806 -0.0319951 -0.120696
|
| 1403 |
+
v -0.0437369 -0.0291387 -0.120696
|
| 1404 |
+
v -0.0465933 -0.0234326 -0.120696
|
| 1405 |
+
v -0.0437369 -0.0291387 -0.114993
|
| 1406 |
+
v -0.0408806 -0.0319951 -0.114993
|
| 1407 |
+
v -0.0389796 -0.0329472 -0.113088
|
| 1408 |
+
v -0.0385069 -0.0324712 -0.110471
|
| 1409 |
+
v -0.0399251 -0.0303255 -0.107976
|
| 1410 |
+
v -0.0335283 -0.0301579 -0.102607
|
| 1411 |
+
v 0.0314144 -0.031043 -0.102607
|
| 1412 |
+
v 0.0428299 -0.026403 -0.105477
|
| 1413 |
+
v 0.0414017 -0.0274758 -0.106904
|
| 1414 |
+
v 0.0418845 -0.0272344 -0.108333
|
| 1415 |
+
v 0.0418845 -0.0272344 -0.116892
|
| 1416 |
+
v 0.0399735 -0.0281866 -0.116892
|
| 1417 |
+
v 0.0171425 -0.0338927 -0.116892
|
| 1418 |
+
v 0.0314144 -0.031043 -0.116892
|
| 1419 |
+
v -0.0370787 -0.0338927 -0.114039
|
| 1420 |
+
v -0.0389796 -0.0329472 -0.118794
|
| 1421 |
+
v -0.0370787 -0.0331819 -0.110829
|
| 1422 |
+
v -0.0313659 -0.031043 -0.102607
|
| 1423 |
+
v 0.0399735 -0.0281866 -0.108333
|
| 1424 |
+
v 0.0171626 -0.033886 -0.102607
|
| 1425 |
+
v 0.0199989 -0.0338927 -0.105477
|
| 1426 |
+
v 0.0199989 -0.0338927 -0.114039
|
| 1427 |
+
v -0.0342223 -0.0338927 -0.111186
|
| 1428 |
+
v -0.017094 -0.0338927 -0.102607
|
| 1429 |
+
v 0.0114398 -0.0338927 -0.102607
|
| 1430 |
+
v 0.0171425 -0.0338927 -0.102624
|
| 1431 |
+
f 408 409 413
|
| 1432 |
+
f 408 413 410
|
| 1433 |
+
f 408 410 415
|
| 1434 |
+
f 408 415 425
|
| 1435 |
+
f 408 425 438
|
| 1436 |
+
f 408 438 439
|
| 1437 |
+
f 408 439 426
|
| 1438 |
+
f 408 426 416
|
| 1439 |
+
f 408 416 411
|
| 1440 |
+
f 408 411 418
|
| 1441 |
+
f 408 418 429
|
| 1442 |
+
f 408 429 444
|
| 1443 |
+
f 408 444 465
|
| 1444 |
+
f 408 465 484
|
| 1445 |
+
f 408 484 507
|
| 1446 |
+
f 408 507 525
|
| 1447 |
+
f 408 525 555
|
| 1448 |
+
f 408 555 577
|
| 1449 |
+
f 408 577 601
|
| 1450 |
+
f 408 601 613
|
| 1451 |
+
f 408 613 619
|
| 1452 |
+
f 408 619 620
|
| 1453 |
+
f 408 620 615
|
| 1454 |
+
f 408 615 602
|
| 1455 |
+
f 408 602 579
|
| 1456 |
+
f 408 579 578
|
| 1457 |
+
f 408 578 556
|
| 1458 |
+
f 408 556 527
|
| 1459 |
+
f 408 527 508
|
| 1460 |
+
f 408 508 485
|
| 1461 |
+
f 408 485 466
|
| 1462 |
+
f 408 466 445
|
| 1463 |
+
f 408 445 430
|
| 1464 |
+
f 408 430 419
|
| 1465 |
+
f 408 419 412
|
| 1466 |
+
f 408 412 409
|
| 1467 |
+
f 409 412 413
|
| 1468 |
+
f 410 414 415
|
| 1469 |
+
f 410 413 414
|
| 1470 |
+
f 411 416 417
|
| 1471 |
+
f 411 417 418
|
| 1472 |
+
f 412 419 420
|
| 1473 |
+
f 412 420 413
|
| 1474 |
+
f 413 420 421
|
| 1475 |
+
f 413 421 414
|
| 1476 |
+
f 414 422 423
|
| 1477 |
+
f 414 423 415
|
| 1478 |
+
f 414 421 424
|
| 1479 |
+
f 414 424 422
|
| 1480 |
+
f 415 423 425
|
| 1481 |
+
f 416 426 417
|
| 1482 |
+
f 417 426 427
|
| 1483 |
+
f 417 427 428
|
| 1484 |
+
f 417 428 429
|
| 1485 |
+
f 417 429 418
|
| 1486 |
+
f 419 430 431
|
| 1487 |
+
f 419 431 420
|
| 1488 |
+
f 420 432 421
|
| 1489 |
+
f 420 431 432
|
| 1490 |
+
f 421 432 424
|
| 1491 |
+
f 422 424 437
|
| 1492 |
+
f 422 437 433
|
| 1493 |
+
f 422 433 434
|
| 1494 |
+
f 422 434 435
|
| 1495 |
+
f 422 435 423
|
| 1496 |
+
f 423 435 425
|
| 1497 |
+
f 424 432 436
|
| 1498 |
+
f 424 436 437
|
| 1499 |
+
f 425 435 452
|
| 1500 |
+
f 425 452 438
|
| 1501 |
+
f 426 439 440
|
| 1502 |
+
f 426 440 427
|
| 1503 |
+
f 427 440 441
|
| 1504 |
+
f 427 441 442
|
| 1505 |
+
f 427 442 443
|
| 1506 |
+
f 427 443 428
|
| 1507 |
+
f 428 443 429
|
| 1508 |
+
f 429 443 444
|
| 1509 |
+
f 430 445 446
|
| 1510 |
+
f 430 446 431
|
| 1511 |
+
f 431 446 467
|
| 1512 |
+
f 431 467 447
|
| 1513 |
+
f 431 447 448
|
| 1514 |
+
f 431 448 432
|
| 1515 |
+
f 432 448 436
|
| 1516 |
+
f 433 449 450
|
| 1517 |
+
f 433 450 434
|
| 1518 |
+
f 433 437 455
|
| 1519 |
+
f 433 455 449
|
| 1520 |
+
f 434 450 435
|
| 1521 |
+
f 435 450 451
|
| 1522 |
+
f 435 451 452
|
| 1523 |
+
f 436 453 437
|
| 1524 |
+
f 436 448 454
|
| 1525 |
+
f 436 454 453
|
| 1526 |
+
f 437 453 474
|
| 1527 |
+
f 437 474 455
|
| 1528 |
+
f 438 452 456
|
| 1529 |
+
f 438 456 457
|
| 1530 |
+
f 438 457 439
|
| 1531 |
+
f 439 457 440
|
| 1532 |
+
f 440 458 441
|
| 1533 |
+
f 440 457 458
|
| 1534 |
+
f 441 459 460
|
| 1535 |
+
f 441 460 461
|
| 1536 |
+
f 441 461 442
|
| 1537 |
+
f 441 458 459
|
| 1538 |
+
f 442 461 462
|
| 1539 |
+
f 442 462 443
|
| 1540 |
+
f 443 462 463
|
| 1541 |
+
f 443 463 444
|
| 1542 |
+
f 444 463 464
|
| 1543 |
+
f 444 464 465
|
| 1544 |
+
f 445 466 468
|
| 1545 |
+
f 445 468 446
|
| 1546 |
+
f 446 468 467
|
| 1547 |
+
f 447 467 469
|
| 1548 |
+
f 447 469 448
|
| 1549 |
+
f 448 469 454
|
| 1550 |
+
f 449 455 470
|
| 1551 |
+
f 449 470 450
|
| 1552 |
+
f 450 470 451
|
| 1553 |
+
f 451 470 471
|
| 1554 |
+
f 451 471 472
|
| 1555 |
+
f 451 472 452
|
| 1556 |
+
f 452 472 456
|
| 1557 |
+
f 453 473 474
|
| 1558 |
+
f 453 454 473
|
| 1559 |
+
f 454 469 475
|
| 1560 |
+
f 454 475 496
|
| 1561 |
+
f 454 496 520
|
| 1562 |
+
f 454 520 517
|
| 1563 |
+
f 454 517 495
|
| 1564 |
+
f 454 495 473
|
| 1565 |
+
f 455 474 470
|
| 1566 |
+
f 456 472 458
|
| 1567 |
+
f 456 458 457
|
| 1568 |
+
f 458 472 476
|
| 1569 |
+
f 458 476 459
|
| 1570 |
+
f 459 476 477
|
| 1571 |
+
f 459 477 478
|
| 1572 |
+
f 459 478 479
|
| 1573 |
+
f 459 479 460
|
| 1574 |
+
f 460 479 480
|
| 1575 |
+
f 460 480 481
|
| 1576 |
+
f 460 481 503
|
| 1577 |
+
f 460 503 482
|
| 1578 |
+
f 460 482 462
|
| 1579 |
+
f 460 462 461
|
| 1580 |
+
f 462 482 483
|
| 1581 |
+
f 462 483 464
|
| 1582 |
+
f 462 464 463
|
| 1583 |
+
f 464 483 484
|
| 1584 |
+
f 464 484 465
|
| 1585 |
+
f 466 485 488
|
| 1586 |
+
f 466 488 468
|
| 1587 |
+
f 467 486 469
|
| 1588 |
+
f 467 468 486
|
| 1589 |
+
f 468 487 486
|
| 1590 |
+
f 468 488 489
|
| 1591 |
+
f 468 489 487
|
| 1592 |
+
f 469 486 475
|
| 1593 |
+
f 470 490 514
|
| 1594 |
+
f 470 514 543
|
| 1595 |
+
f 470 543 519
|
| 1596 |
+
f 470 519 492
|
| 1597 |
+
f 470 492 471
|
| 1598 |
+
f 470 474 491
|
| 1599 |
+
f 470 491 490
|
| 1600 |
+
f 471 492 493
|
| 1601 |
+
f 471 493 494
|
| 1602 |
+
f 471 494 472
|
| 1603 |
+
f 472 494 476
|
| 1604 |
+
f 473 495 491
|
| 1605 |
+
f 473 491 474
|
| 1606 |
+
f 475 486 496
|
| 1607 |
+
f 476 494 493
|
| 1608 |
+
f 476 493 477
|
| 1609 |
+
f 477 493 497
|
| 1610 |
+
f 477 497 498
|
| 1611 |
+
f 477 498 480
|
| 1612 |
+
f 477 480 478
|
| 1613 |
+
f 478 480 479
|
| 1614 |
+
f 480 498 499
|
| 1615 |
+
f 480 499 500
|
| 1616 |
+
f 480 500 501
|
| 1617 |
+
f 480 501 481
|
| 1618 |
+
f 481 501 502
|
| 1619 |
+
f 481 502 503
|
| 1620 |
+
f 482 503 483
|
| 1621 |
+
f 483 503 502
|
| 1622 |
+
f 483 502 504
|
| 1623 |
+
f 483 504 505
|
| 1624 |
+
f 483 505 484
|
| 1625 |
+
f 484 505 506
|
| 1626 |
+
f 484 506 523
|
| 1627 |
+
f 484 523 507
|
| 1628 |
+
f 485 508 509
|
| 1629 |
+
f 485 509 489
|
| 1630 |
+
f 485 489 488
|
| 1631 |
+
f 486 487 496
|
| 1632 |
+
f 487 489 496
|
| 1633 |
+
f 489 509 510
|
| 1634 |
+
f 489 510 511
|
| 1635 |
+
f 489 511 496
|
| 1636 |
+
f 490 512 513
|
| 1637 |
+
f 490 513 540
|
| 1638 |
+
f 490 540 514
|
| 1639 |
+
f 490 491 515
|
| 1640 |
+
f 490 515 516
|
| 1641 |
+
f 490 516 512
|
| 1642 |
+
f 491 495 517
|
| 1643 |
+
f 491 517 518
|
| 1644 |
+
f 491 518 515
|
| 1645 |
+
f 492 497 493
|
| 1646 |
+
f 492 519 499
|
| 1647 |
+
f 492 499 497
|
| 1648 |
+
f 496 511 532
|
| 1649 |
+
f 496 532 520
|
| 1650 |
+
f 497 499 498
|
| 1651 |
+
f 499 519 543
|
| 1652 |
+
f 499 543 548
|
| 1653 |
+
f 499 548 521
|
| 1654 |
+
f 499 521 500
|
| 1655 |
+
f 500 521 548
|
| 1656 |
+
f 500 548 522
|
| 1657 |
+
f 500 522 501
|
| 1658 |
+
f 501 522 549
|
| 1659 |
+
f 501 549 523
|
| 1660 |
+
f 501 523 506
|
| 1661 |
+
f 501 506 502
|
| 1662 |
+
f 502 506 505
|
| 1663 |
+
f 502 505 504
|
| 1664 |
+
f 507 524 525
|
| 1665 |
+
f 507 523 526
|
| 1666 |
+
f 507 526 524
|
| 1667 |
+
f 508 527 528
|
| 1668 |
+
f 508 528 529
|
| 1669 |
+
f 508 529 530
|
| 1670 |
+
f 508 530 509
|
| 1671 |
+
f 509 530 533
|
| 1672 |
+
f 509 533 510
|
| 1673 |
+
f 510 531 532
|
| 1674 |
+
f 510 532 511
|
| 1675 |
+
f 510 533 534
|
| 1676 |
+
f 510 534 531
|
| 1677 |
+
f 512 535 562
|
| 1678 |
+
f 512 562 536
|
| 1679 |
+
f 512 536 513
|
| 1680 |
+
f 512 516 537
|
| 1681 |
+
f 512 537 535
|
| 1682 |
+
f 513 538 539
|
| 1683 |
+
f 513 539 540
|
| 1684 |
+
f 513 536 538
|
| 1685 |
+
f 514 540 541
|
| 1686 |
+
f 514 541 542
|
| 1687 |
+
f 514 542 543
|
| 1688 |
+
f 515 518 544
|
| 1689 |
+
f 515 544 545
|
| 1690 |
+
f 515 545 566
|
| 1691 |
+
f 515 566 537
|
| 1692 |
+
f 515 537 516
|
| 1693 |
+
f 517 520 546
|
| 1694 |
+
f 517 546 547
|
| 1695 |
+
f 517 547 518
|
| 1696 |
+
f 518 547 544
|
| 1697 |
+
f 520 532 534
|
| 1698 |
+
f 520 534 546
|
| 1699 |
+
f 522 548 550
|
| 1700 |
+
f 522 550 549
|
| 1701 |
+
f 523 549 573
|
| 1702 |
+
f 523 573 551
|
| 1703 |
+
f 523 551 526
|
| 1704 |
+
f 524 526 552
|
| 1705 |
+
f 524 552 575
|
| 1706 |
+
f 524 575 553
|
| 1707 |
+
f 524 553 554
|
| 1708 |
+
f 524 554 525
|
| 1709 |
+
f 525 554 555
|
| 1710 |
+
f 526 551 552
|
| 1711 |
+
f 527 556 528
|
| 1712 |
+
f 528 556 557
|
| 1713 |
+
f 528 557 529
|
| 1714 |
+
f 529 557 558
|
| 1715 |
+
f 529 558 530
|
| 1716 |
+
f 530 558 533
|
| 1717 |
+
f 531 534 532
|
| 1718 |
+
f 533 558 559
|
| 1719 |
+
f 533 559 560
|
| 1720 |
+
f 533 560 534
|
| 1721 |
+
f 534 560 546
|
| 1722 |
+
f 535 561 585
|
| 1723 |
+
f 535 585 562
|
| 1724 |
+
f 535 537 563
|
| 1725 |
+
f 535 563 564
|
| 1726 |
+
f 535 564 561
|
| 1727 |
+
f 536 562 565
|
| 1728 |
+
f 536 565 538
|
| 1729 |
+
f 537 566 567
|
| 1730 |
+
f 537 567 563
|
| 1731 |
+
f 538 565 568
|
| 1732 |
+
f 538 568 539
|
| 1733 |
+
f 539 568 569
|
| 1734 |
+
f 539 569 541
|
| 1735 |
+
f 539 541 540
|
| 1736 |
+
f 541 569 542
|
| 1737 |
+
f 542 570 548
|
| 1738 |
+
f 542 548 543
|
| 1739 |
+
f 542 569 570
|
| 1740 |
+
f 544 547 571
|
| 1741 |
+
f 544 571 545
|
| 1742 |
+
f 545 571 572
|
| 1743 |
+
f 545 572 567
|
| 1744 |
+
f 545 567 566
|
| 1745 |
+
f 546 560 547
|
| 1746 |
+
f 547 560 559
|
| 1747 |
+
f 547 559 571
|
| 1748 |
+
f 548 570 550
|
| 1749 |
+
f 549 550 570
|
| 1750 |
+
f 549 570 573
|
| 1751 |
+
f 551 573 595
|
| 1752 |
+
f 551 595 574
|
| 1753 |
+
f 551 574 552
|
| 1754 |
+
f 552 574 596
|
| 1755 |
+
f 552 596 575
|
| 1756 |
+
f 553 576 577
|
| 1757 |
+
f 553 577 554
|
| 1758 |
+
f 553 575 576
|
| 1759 |
+
f 554 577 555
|
| 1760 |
+
f 556 578 557
|
| 1761 |
+
f 557 578 579
|
| 1762 |
+
f 557 579 580
|
| 1763 |
+
f 557 580 581
|
| 1764 |
+
f 557 581 558
|
| 1765 |
+
f 558 581 559
|
| 1766 |
+
f 559 581 582
|
| 1767 |
+
f 559 582 583
|
| 1768 |
+
f 559 583 584
|
| 1769 |
+
f 559 584 571
|
| 1770 |
+
f 561 564 586
|
| 1771 |
+
f 561 586 608
|
| 1772 |
+
f 561 608 617
|
| 1773 |
+
f 561 617 616
|
| 1774 |
+
f 561 616 621
|
| 1775 |
+
f 561 621 620
|
| 1776 |
+
f 561 620 619
|
| 1777 |
+
f 561 619 618
|
| 1778 |
+
f 561 618 610
|
| 1779 |
+
f 561 610 589
|
| 1780 |
+
f 561 589 585
|
| 1781 |
+
f 562 585 565
|
| 1782 |
+
f 563 567 564
|
| 1783 |
+
f 564 567 587
|
| 1784 |
+
f 564 587 588
|
| 1785 |
+
f 564 588 586
|
| 1786 |
+
f 565 589 590
|
| 1787 |
+
f 565 590 568
|
| 1788 |
+
f 565 585 589
|
| 1789 |
+
f 567 572 584
|
| 1790 |
+
f 567 584 591
|
| 1791 |
+
f 567 591 592
|
| 1792 |
+
f 567 592 587
|
| 1793 |
+
f 568 593 594
|
| 1794 |
+
f 568 594 569
|
| 1795 |
+
f 568 590 593
|
| 1796 |
+
f 569 594 595
|
| 1797 |
+
f 569 595 570
|
| 1798 |
+
f 570 595 573
|
| 1799 |
+
f 571 584 572
|
| 1800 |
+
f 574 595 594
|
| 1801 |
+
f 574 594 596
|
| 1802 |
+
f 575 596 576
|
| 1803 |
+
f 576 596 597
|
| 1804 |
+
f 576 597 598
|
| 1805 |
+
f 576 598 599
|
| 1806 |
+
f 576 599 600
|
| 1807 |
+
f 576 600 577
|
| 1808 |
+
f 577 600 601
|
| 1809 |
+
f 579 602 603
|
| 1810 |
+
f 579 603 580
|
| 1811 |
+
f 580 603 604
|
| 1812 |
+
f 580 604 605
|
| 1813 |
+
f 580 605 606
|
| 1814 |
+
f 580 606 581
|
| 1815 |
+
f 581 606 583
|
| 1816 |
+
f 581 583 582
|
| 1817 |
+
f 583 606 607
|
| 1818 |
+
f 583 607 609
|
| 1819 |
+
f 583 609 592
|
| 1820 |
+
f 583 592 584
|
| 1821 |
+
f 584 592 591
|
| 1822 |
+
f 586 588 608
|
| 1823 |
+
f 587 592 609
|
| 1824 |
+
f 587 609 588
|
| 1825 |
+
f 588 609 608
|
| 1826 |
+
f 589 610 598
|
| 1827 |
+
f 589 598 611
|
| 1828 |
+
f 589 611 593
|
| 1829 |
+
f 589 593 590
|
| 1830 |
+
f 593 597 596
|
| 1831 |
+
f 593 596 594
|
| 1832 |
+
f 593 611 598
|
| 1833 |
+
f 593 598 597
|
| 1834 |
+
f 598 610 612
|
| 1835 |
+
f 598 612 599
|
| 1836 |
+
f 599 612 613
|
| 1837 |
+
f 599 613 601
|
| 1838 |
+
f 599 601 600
|
| 1839 |
+
f 602 609 607
|
| 1840 |
+
f 602 607 614
|
| 1841 |
+
f 602 614 604
|
| 1842 |
+
f 602 604 603
|
| 1843 |
+
f 602 615 616
|
| 1844 |
+
f 602 616 617
|
| 1845 |
+
f 602 617 609
|
| 1846 |
+
f 604 614 607
|
| 1847 |
+
f 604 607 606
|
| 1848 |
+
f 604 606 605
|
| 1849 |
+
f 608 609 617
|
| 1850 |
+
f 610 618 612
|
| 1851 |
+
f 612 618 619
|
| 1852 |
+
f 612 619 613
|
| 1853 |
+
f 615 620 621
|
| 1854 |
+
f 615 621 616
|
| 1855 |
+
o convex_3
|
| 1856 |
+
v 0.0228511 0.0346005 0.0686284
|
| 1857 |
+
v 0.025387 0.0342755 0.0673881
|
| 1858 |
+
v 0.0228511 0.0346005 0.0673881
|
| 1859 |
+
v 0.0171472 0.0346005 0.0771906
|
| 1860 |
+
v 0.00573144 0.0346005 0.0943113
|
| 1861 |
+
v 0.0199992 0.0317414 0.0886044
|
| 1862 |
+
v 0.0271371 0.0331682 0.070056
|
| 1863 |
+
v 0.031415 0.0317414 0.0686284
|
| 1864 |
+
v 0.0301754 0.0323639 0.0673881
|
| 1865 |
+
v -0.00853629 0.0346005 0.0673881
|
| 1866 |
+
v 0.00667938 0.0326944 0.0981184
|
| 1867 |
+
v 0.0157213 0.0303146 0.0985943
|
| 1868 |
+
v 0.0214251 0.0303146 0.0928874
|
| 1869 |
+
v -0.00282434 0.0346005 0.0943113
|
| 1870 |
+
v 0.028563 0.0317414 0.074339
|
| 1871 |
+
v 0.0328409 0.0303146 0.0729114
|
| 1872 |
+
v 0.0352229 0.0298409 0.0695801
|
| 1873 |
+
v 0.033319 0.0307939 0.0676766
|
| 1874 |
+
v 0.033319 0.0307939 0.0673881
|
| 1875 |
+
v -0.0161441 0.0307939 0.0673881
|
| 1876 |
+
v -0.0185261 0.0310308 0.0729114
|
| 1877 |
+
v -0.0171002 0.0317414 0.074339
|
| 1878 |
+
v -0.00853629 0.0346005 0.0829013
|
| 1879 |
+
v 0.0095313 0.0298409 0.100974
|
| 1880 |
+
v -0.0018764 0.0326944 0.0981184
|
| 1881 |
+
v 0.00291192 0.0260674 0.105665
|
| 1882 |
+
v 0.0095313 0.0269873 0.103825
|
| 1883 |
+
v 0.0238071 0.0269873 0.0981184
|
| 1884 |
+
v 0.029989 0.0274611 0.0871768
|
| 1885 |
+
v 0.029989 0.0303146 0.0786183
|
| 1886 |
+
v -0.00567626 0.0346005 0.0914597
|
| 1887 |
+
v -0.00604086 0.0338843 0.0943113
|
| 1888 |
+
v -0.00378038 0.0336475 0.0962149
|
| 1889 |
+
v 0.032371 0.0298409 0.0752908
|
| 1890 |
+
v 0.0352229 0.0298409 0.0673881
|
| 1891 |
+
v 0.0339023 0.0305019 0.0673881
|
| 1892 |
+
v 0.0347449 0.0293671 0.0719596
|
| 1893 |
+
v 0.0380749 0.0269873 0.0724355
|
| 1894 |
+
v -0.0180481 0.0298409 0.0673881
|
| 1895 |
+
v -0.0171002 0.0317414 0.0886044
|
| 1896 |
+
v -0.0209 0.0298409 0.074339
|
| 1897 |
+
v -0.026612 0.0269873 0.0857528
|
| 1898 |
+
v -0.024708 0.0279404 0.0857528
|
| 1899 |
+
v -0.024708 0.0279404 0.0886044
|
| 1900 |
+
v -0.00601655 0.0253567 0.105665
|
| 1901 |
+
v -0.00288105 0.0260509 0.105665
|
| 1902 |
+
v -0.00279193 0.0260674 0.105665
|
| 1903 |
+
v 0.00973385 0.0243376 0.105665
|
| 1904 |
+
v 0.0128613 0.0246076 0.104301
|
| 1905 |
+
v 0.0271371 0.0246076 0.0985943
|
| 1906 |
+
v 0.032371 0.0269873 0.083853
|
| 1907 |
+
v 0.031893 0.0293671 0.0776665
|
| 1908 |
+
v -0.0174567 0.0310308 0.0914597
|
| 1909 |
+
v -0.00639735 0.0331682 0.095739
|
| 1910 |
+
v -0.0145966 0.0310308 0.0943113
|
| 1911 |
+
v 0.0380749 0.0269873 0.0673881
|
| 1912 |
+
v 0.0385529 0.0246076 0.0757629
|
| 1913 |
+
v 0.0404569 0.0236545 0.0719596
|
| 1914 |
+
v 0.0409268 0.0241338 0.0695801
|
| 1915 |
+
v 0.032371 0.0241338 0.0895562
|
| 1916 |
+
v -0.0190285 0.0291137 0.0673881
|
| 1917 |
+
v -0.02709 0.0253237 0.0786183
|
| 1918 |
+
v -0.0294639 0.0241338 0.0829013
|
| 1919 |
+
v -0.026612 0.0269873 0.0886044
|
| 1920 |
+
v -0.0231605 0.0281772 0.0914597
|
| 1921 |
+
v -0.00610567 0.0253237 0.105665
|
| 1922 |
+
v -0.0149531 0.0303146 0.095739
|
| 1923 |
+
v 0.0115001 0.0232469 0.105665
|
| 1924 |
+
v 0.0271371 0.0189005 0.10145
|
| 1925 |
+
v 0.0290411 0.0179474 0.100498
|
| 1926 |
+
v 0.031893 0.0179474 0.0976425
|
| 1927 |
+
v 0.0328409 0.0189005 0.095739
|
| 1928 |
+
v 0.0328409 0.021754 0.0928874
|
| 1929 |
+
v 0.0242771 0.021754 0.10145
|
| 1930 |
+
v -0.0178131 0.0303146 0.0928874
|
| 1931 |
+
v 0.0418828 0.0222278 0.0676766
|
| 1932 |
+
v 0.041656 0.0224096 0.0673881
|
| 1933 |
+
v 0.035701 0.0189005 0.0900321
|
| 1934 |
+
v 0.0414048 0.021754 0.0729114
|
| 1935 |
+
v 0.0428308 0.0203272 0.0686284
|
| 1936 |
+
v -0.0206975 0.0274005 0.0673881
|
| 1937 |
+
v -0.026612 0.0212803 0.0673881
|
| 1938 |
+
v -0.0323158 0.0184267 0.0800459
|
| 1939 |
+
v -0.0335068 0.0189005 0.0871768
|
| 1940 |
+
v -0.0324374 0.0203272 0.0886044
|
| 1941 |
+
v -0.0294639 0.0241338 0.0886044
|
| 1942 |
+
v -0.023517 0.0274611 0.0928874
|
| 1943 |
+
v -0.0118095 0.0224701 0.105665
|
| 1944 |
+
v -0.0149531 0.021754 0.104301
|
| 1945 |
+
v 0.0143601 0.0203933 0.105665
|
| 1946 |
+
v 0.029511 0.0155676 0.100974
|
| 1947 |
+
v 0.0172121 0.0175398 0.105665
|
| 1948 |
+
v 0.0182977 0.0157715 0.105665
|
| 1949 |
+
v 0.0257111 0.0146201 0.102877
|
| 1950 |
+
v 0.0328409 0.0131934 0.0985943
|
| 1951 |
+
v 0.032371 0.0155676 0.0981184
|
| 1952 |
+
v 0.0419152 0.0219854 0.0673881
|
| 1953 |
+
v 0.0371269 0.0146201 0.0886044
|
| 1954 |
+
v 0.0414048 0.0160469 0.0786183
|
| 1955 |
+
v 0.0423609 0.0198535 0.0710078
|
| 1956 |
+
v 0.0456908 0.00890753 0.0686284
|
| 1957 |
+
v 0.0425553 0.0203272 0.0673881
|
| 1958 |
+
v -0.028516 0.0174737 0.0673881
|
| 1959 |
+
v -0.0332719 0.0155676 0.0781424
|
| 1960 |
+
v -0.0327939 0.0167575 0.0786183
|
| 1961 |
+
v -0.0332719 0.0165207 0.0800459
|
| 1962 |
+
v -0.0342198 0.0146201 0.0800459
|
| 1963 |
+
v -0.0370798 0.00890753 0.0857528
|
| 1964 |
+
v -0.0363588 0.0103398 0.0871768
|
| 1965 |
+
v -0.0335068 0.0160469 0.0900321
|
| 1966 |
+
v -0.0306549 0.021754 0.0900321
|
| 1967 |
+
v -0.0288725 0.0231808 0.090744
|
| 1968 |
+
v -0.0142725 0.0203933 0.105665
|
| 1969 |
+
v 0.033319 0.00796002 0.0990702
|
| 1970 |
+
v 0.0186056 0.014554 0.105665
|
| 1971 |
+
v 0.0257111 0.0003524 0.102877
|
| 1972 |
+
v 0.0342669 0.00890753 0.0971667
|
| 1973 |
+
v 0.0428308 0.0117666 0.0771906
|
| 1974 |
+
v 0.0423609 0.0141409 0.0767147
|
| 1975 |
+
v 0.0456908 0.0060595 0.0686284
|
| 1976 |
+
v 0.0450669 0.00890753 0.0673881
|
| 1977 |
+
v 0.0428308 0.0190822 0.0673881
|
| 1978 |
+
v -0.028516 -0.00250115 0.0673881
|
| 1979 |
+
v -0.0370798 0.0060595 0.0857528
|
| 1980 |
+
v -0.0351759 0.00890753 0.0895562
|
| 1981 |
+
v -0.0352893 0.0117666 0.0886044
|
| 1982 |
+
v -0.0323158 0.0117666 0.0924115
|
| 1983 |
+
v -0.0185585 0.0117666 0.105665
|
| 1984 |
+
v -0.0171974 0.014554 0.105665
|
| 1985 |
+
v -0.0171326 0.0146862 0.105665
|
| 1986 |
+
v -0.0143455 0.0202611 0.105665
|
| 1987 |
+
v 0.029511 0.00130542 0.100974
|
| 1988 |
+
v 0.030467 -0.000600619 0.0990702
|
| 1989 |
+
v 0.0328409 0.00534886 0.0985943
|
| 1990 |
+
v 0.0328409 -0.000358233 0.095739
|
| 1991 |
+
v 0.033319 0.00225844 0.0962149
|
| 1992 |
+
v 0.0390309 0.00225844 0.0848011
|
| 1993 |
+
v 0.0418828 0.00225844 0.0790942
|
| 1994 |
+
v 0.0186056 0.000286294 0.105665
|
| 1995 |
+
v 0.0172121 -0.0025342 0.105665
|
| 1996 |
+
v 0.027607 -0.000600619 0.101926
|
| 1997 |
+
v 0.0428308 0.00320595 0.0771906
|
| 1998 |
+
v 0.0428308 -0.0053547 0.0686284
|
| 1999 |
+
v 0.042215 -0.00659968 0.0673881
|
| 2000 |
+
v 0.0450669 0.00482002 0.0673881
|
| 2001 |
+
v 0.0421178 -5.67076e-06 0.0771906
|
| 2002 |
+
v 0.0421178 -0.00285922 0.074339
|
| 2003 |
+
v 0.0421178 -0.00571277 0.0714837
|
| 2004 |
+
v -0.0281595 -0.00392241 0.0673881
|
| 2005 |
+
v -0.0327939 -0.00392792 0.0786183
|
| 2006 |
+
v -0.0342198 0.0003524 0.0800459
|
| 2007 |
+
v -0.0337499 -0.00202739 0.0805218
|
| 2008 |
+
v -0.0342198 -0.00250115 0.0829013
|
| 2009 |
+
v -0.0342198 -0.00250115 0.0886044
|
| 2010 |
+
v -0.0332719 -0.00250115 0.090508
|
| 2011 |
+
v -0.0351759 0.0060595 0.0895562
|
| 2012 |
+
v -0.026612 0.0060595 0.0981184
|
| 2013 |
+
v -0.0185585 0.00320595 0.105665
|
| 2014 |
+
v 0.0271371 -0.00321178 0.10145
|
| 2015 |
+
v 0.029989 -0.00321178 0.0985943
|
| 2016 |
+
v 0.035701 -0.000358233 0.0900321
|
| 2017 |
+
v 0.0385529 -0.000358233 0.0843289
|
| 2018 |
+
v 0.0414048 -0.000358233 0.0786183
|
| 2019 |
+
v 0.0257111 -0.00285922 0.102162
|
| 2020 |
+
v 0.0143601 -0.00538775 0.105665
|
| 2021 |
+
v 0.0418828 -0.00725522 0.0686284
|
| 2022 |
+
v 0.0409268 -0.00916126 0.0686284
|
| 2023 |
+
v 0.0420854 -0.00684207 0.0673881
|
| 2024 |
+
v 0.0414048 -0.00607084 0.0729114
|
| 2025 |
+
v -0.0277139 -0.00500764 0.0673881
|
| 2026 |
+
v -0.0323158 -0.00630771 0.0819495
|
| 2027 |
+
v -0.0259314 -0.00820824 0.0673881
|
| 2028 |
+
v -0.0237601 -0.0120148 0.0673881
|
| 2029 |
+
v -0.0332719 -0.00440168 0.083853
|
| 2030 |
+
v -0.0332719 -0.00440168 0.0895562
|
| 2031 |
+
v -0.0327939 -0.00392792 0.090744
|
| 2032 |
+
v -0.0323158 0.0003524 0.0924115
|
| 2033 |
+
v -0.0183398 0.00233556 0.105665
|
| 2034 |
+
v 0.0271371 -0.00749761 0.0985943
|
| 2035 |
+
v 0.027607 -0.00630771 0.0990702
|
| 2036 |
+
v 0.0214251 -0.00892439 0.10145
|
| 2037 |
+
v 0.029989 -0.00607084 0.095739
|
| 2038 |
+
v 0.035701 -0.00607084 0.0843289
|
| 2039 |
+
v 0.0199992 -0.00856631 0.102162
|
| 2040 |
+
v 0.00858336 -0.00932102 0.105665
|
| 2041 |
+
v 0.0409835 -0.0087481 0.0673881
|
| 2042 |
+
v 0.0380749 -0.0120148 0.0714837
|
| 2043 |
+
v 0.0380749 -0.0120148 0.0673881
|
| 2044 |
+
v 0.035701 -0.00892439 0.0814736
|
| 2045 |
+
v 0.0385529 -0.00892439 0.0757629
|
| 2046 |
+
v -0.0294639 -0.00916126 0.0790942
|
| 2047 |
+
v -0.0323158 -0.00630771 0.0876526
|
| 2048 |
+
v -0.026612 -0.0120148 0.0790942
|
| 2049 |
+
v -0.0209 -0.0148684 0.0673881
|
| 2050 |
+
v -0.02709 -0.00963502 0.0921717
|
| 2051 |
+
v -0.0313679 -0.00250115 0.0925277
|
| 2052 |
+
v -0.0156661 -0.00678147 0.103589
|
| 2053 |
+
v -0.0156661 -0.00392792 0.105017
|
| 2054 |
+
v -0.0151557 -0.0034101 0.105665
|
| 2055 |
+
v 0.031415 -0.012136 0.0829013
|
| 2056 |
+
v 0.0328409 -0.0103512 0.0843289
|
| 2057 |
+
v 0.0328409 -0.00892439 0.0871768
|
| 2058 |
+
v 0.0214251 -0.0117779 0.0985943
|
| 2059 |
+
v 0.0257111 -0.012136 0.0914597
|
| 2060 |
+
v 0.0171472 -0.0120148 0.100974
|
| 2061 |
+
v 0.00858336 -0.0148684 0.100974
|
| 2062 |
+
v 2.75871e-05 -0.0110948 0.105665
|
| 2063 |
+
v 0.0352229 -0.0120148 0.0771906
|
| 2064 |
+
v 0.0342669 -0.0149896 0.0714837
|
| 2065 |
+
v 0.0352229 -0.0148684 0.0686284
|
| 2066 |
+
v 0.0352229 -0.0148684 0.0673881
|
| 2067 |
+
v -0.026612 -0.0120148 0.0876526
|
| 2068 |
+
v -0.0165492 -0.0181681 0.0673881
|
| 2069 |
+
v -0.02423 -0.0124886 0.0921717
|
| 2070 |
+
v -0.0213781 -0.0124886 0.095027
|
| 2071 |
+
v -0.0118095 -0.00749761 0.105665
|
| 2072 |
+
v 0.0328409 -0.0132047 0.0786183
|
| 2073 |
+
v 0.0157213 -0.0146315 0.0985943
|
| 2074 |
+
v 0.0171472 -0.0149896 0.0971667
|
| 2075 |
+
v 0.0214251 -0.0132047 0.095739
|
| 2076 |
+
v 0.0271371 -0.0132047 0.0871768
|
| 2077 |
+
v 0.0128613 -0.0160583 0.0985943
|
| 2078 |
+
v 2.75871e-05 -0.0177219 0.0981184
|
| 2079 |
+
v -6.96376e-05 -0.0110948 0.105665
|
| 2080 |
+
v 0.028563 -0.0149896 0.0800459
|
| 2081 |
+
v 0.0328409 -0.0160583 0.070056
|
| 2082 |
+
v 0.031415 -0.0167744 0.0686284
|
| 2083 |
+
v 0.031415 -0.0167744 0.0673881
|
| 2084 |
+
v -0.024708 -0.0129678 0.0895562
|
| 2085 |
+
v -0.0155932 -0.0187465 0.0673881
|
| 2086 |
+
v -0.018996 -0.0158214 0.0924115
|
| 2087 |
+
v -0.0185261 -0.0153421 0.0935994
|
| 2088 |
+
v -0.0171002 -0.0148684 0.0952631
|
| 2089 |
+
v -0.0113882 -0.0148684 0.0981184
|
| 2090 |
+
v -0.00853629 -0.0088087 0.105665
|
| 2091 |
+
v 0.0157213 -0.0160583 0.095739
|
| 2092 |
+
v 0.0114353 -0.0167744 0.0971667
|
| 2093 |
+
v 0.00145355 -0.0189118 0.095739
|
| 2094 |
+
v -0.00282434 -0.0177219 0.0981184
|
| 2095 |
+
v 2.75871e-05 -0.0196279 0.0943113
|
| 2096 |
+
v -0.00282434 -0.0106597 0.105665
|
| 2097 |
+
v 0.0271371 -0.0160583 0.0786183
|
| 2098 |
+
v 0.0273153 -0.0187741 0.0673881
|
| 2099 |
+
v 0.0305886 -0.0171876 0.0673881
|
| 2100 |
+
v -0.0142401 -0.0192699 0.0673881
|
| 2101 |
+
v -0.0113882 -0.0196279 0.0829013
|
| 2102 |
+
v -0.0171002 -0.0167744 0.0914597
|
| 2103 |
+
v -0.0104403 -0.0186749 0.0924115
|
| 2104 |
+
v -0.00996225 -0.0182012 0.0935994
|
| 2105 |
+
v -0.00853629 -0.0177219 0.0952631
|
| 2106 |
+
v -0.00711033 -0.0153421 0.0993063
|
| 2107 |
+
v -0.00655129 -0.00945323 0.105665
|
| 2108 |
+
v 0.0142872 -0.0167744 0.0943113
|
| 2109 |
+
v 0.0114353 -0.0196279 0.0829013
|
| 2110 |
+
v 0.00287951 -0.0196279 0.0914597
|
| 2111 |
+
v -0.00567626 -0.0189118 0.0946711
|
| 2112 |
+
v -0.00282434 -0.0196279 0.0943113
|
| 2113 |
+
v 0.00660646 -0.0216772 0.0673881
|
| 2114 |
+
v 0.00733565 -0.0216276 0.0673881
|
| 2115 |
+
v 0.0142872 -0.0196279 0.0800459
|
| 2116 |
+
v 0.0269507 -0.0188677 0.0673881
|
| 2117 |
+
v -0.0126278 -0.0196279 0.0673881
|
| 2118 |
+
v -0.00336718 -0.0215725 0.0673881
|
| 2119 |
+
v -0.00282434 -0.0216772 0.0673881
|
| 2120 |
+
v -0.00853629 -0.0196279 0.0914597
|
| 2121 |
+
v 0.0228511 -0.0196279 0.0673881
|
| 2122 |
+
v 0.0228511 -0.0196279 0.0686284
|
| 2123 |
+
f 622 623 624
|
| 2124 |
+
f 622 624 631
|
| 2125 |
+
f 622 631 644
|
| 2126 |
+
f 622 644 652
|
| 2127 |
+
f 622 652 635
|
| 2128 |
+
f 622 635 626
|
| 2129 |
+
f 622 626 625
|
| 2130 |
+
f 622 625 623
|
| 2131 |
+
f 623 625 626
|
| 2132 |
+
f 623 626 627
|
| 2133 |
+
f 623 627 628
|
| 2134 |
+
f 623 628 629
|
| 2135 |
+
f 623 629 630
|
| 2136 |
+
f 623 630 640
|
| 2137 |
+
f 623 640 657
|
| 2138 |
+
f 623 657 656
|
| 2139 |
+
f 623 656 677
|
| 2140 |
+
f 623 677 698
|
| 2141 |
+
f 623 698 718
|
| 2142 |
+
f 623 718 723
|
| 2143 |
+
f 623 723 743
|
| 2144 |
+
f 623 743 742
|
| 2145 |
+
f 623 742 766
|
| 2146 |
+
f 623 766 765
|
| 2147 |
+
f 623 765 789
|
| 2148 |
+
f 623 789 807
|
| 2149 |
+
f 623 807 809
|
| 2150 |
+
f 623 809 832
|
| 2151 |
+
f 623 832 849
|
| 2152 |
+
f 623 849 865
|
| 2153 |
+
f 623 865 864
|
| 2154 |
+
f 623 864 882
|
| 2155 |
+
f 623 882 887
|
| 2156 |
+
f 623 887 880
|
| 2157 |
+
f 623 880 879
|
| 2158 |
+
f 623 879 885
|
| 2159 |
+
f 623 885 884
|
| 2160 |
+
f 623 884 883
|
| 2161 |
+
f 623 883 866
|
| 2162 |
+
f 623 866 851
|
| 2163 |
+
f 623 851 834
|
| 2164 |
+
f 623 834 815
|
| 2165 |
+
f 623 815 794
|
| 2166 |
+
f 623 794 793
|
| 2167 |
+
f 623 793 791
|
| 2168 |
+
f 623 791 770
|
| 2169 |
+
f 623 770 744
|
| 2170 |
+
f 623 744 724
|
| 2171 |
+
f 623 724 703
|
| 2172 |
+
f 623 703 702
|
| 2173 |
+
f 623 702 682
|
| 2174 |
+
f 623 682 660
|
| 2175 |
+
f 623 660 641
|
| 2176 |
+
f 623 641 631
|
| 2177 |
+
f 623 631 624
|
| 2178 |
+
f 626 632 633
|
| 2179 |
+
f 626 633 634
|
| 2180 |
+
f 626 634 627
|
| 2181 |
+
f 626 635 654
|
| 2182 |
+
f 626 654 646
|
| 2183 |
+
f 626 646 632
|
| 2184 |
+
f 627 634 636
|
| 2185 |
+
f 627 636 628
|
| 2186 |
+
f 628 636 637
|
| 2187 |
+
f 628 637 629
|
| 2188 |
+
f 629 637 638
|
| 2189 |
+
f 629 638 630
|
| 2190 |
+
f 630 638 639
|
| 2191 |
+
f 630 639 640
|
| 2192 |
+
f 631 641 642
|
| 2193 |
+
f 631 642 643
|
| 2194 |
+
f 631 643 661
|
| 2195 |
+
f 631 661 644
|
| 2196 |
+
f 632 645 633
|
| 2197 |
+
f 632 646 668
|
| 2198 |
+
f 632 668 647
|
| 2199 |
+
f 632 647 645
|
| 2200 |
+
f 633 645 648
|
| 2201 |
+
f 633 648 649
|
| 2202 |
+
f 633 649 634
|
| 2203 |
+
f 634 649 650
|
| 2204 |
+
f 634 650 651
|
| 2205 |
+
f 634 651 636
|
| 2206 |
+
f 635 652 653
|
| 2207 |
+
f 635 653 654
|
| 2208 |
+
f 636 651 637
|
| 2209 |
+
f 637 651 655
|
| 2210 |
+
f 637 655 638
|
| 2211 |
+
f 638 656 657
|
| 2212 |
+
f 638 657 639
|
| 2213 |
+
f 638 655 658
|
| 2214 |
+
f 638 658 659
|
| 2215 |
+
f 638 659 680
|
| 2216 |
+
f 638 680 677
|
| 2217 |
+
f 638 677 656
|
| 2218 |
+
f 639 657 640
|
| 2219 |
+
f 641 660 642
|
| 2220 |
+
f 642 661 643
|
| 2221 |
+
f 642 660 662
|
| 2222 |
+
f 642 662 663
|
| 2223 |
+
f 642 663 664
|
| 2224 |
+
f 642 664 665
|
| 2225 |
+
f 642 665 661
|
| 2226 |
+
f 644 661 652
|
| 2227 |
+
f 645 647 648
|
| 2228 |
+
f 646 654 666
|
| 2229 |
+
f 646 666 667
|
| 2230 |
+
f 646 667 668
|
| 2231 |
+
f 647 668 667
|
| 2232 |
+
f 647 667 666
|
| 2233 |
+
f 647 666 687
|
| 2234 |
+
f 647 687 709
|
| 2235 |
+
f 647 709 734
|
| 2236 |
+
f 647 734 752
|
| 2237 |
+
f 647 752 751
|
| 2238 |
+
f 647 751 750
|
| 2239 |
+
f 647 750 749
|
| 2240 |
+
f 647 749 779
|
| 2241 |
+
f 647 779 799
|
| 2242 |
+
f 647 799 820
|
| 2243 |
+
f 647 820 837
|
| 2244 |
+
f 647 837 856
|
| 2245 |
+
f 647 856 873
|
| 2246 |
+
f 647 873 862
|
| 2247 |
+
f 647 862 845
|
| 2248 |
+
f 647 845 828
|
| 2249 |
+
f 647 828 806
|
| 2250 |
+
f 647 806 786
|
| 2251 |
+
f 647 786 761
|
| 2252 |
+
f 647 761 760
|
| 2253 |
+
f 647 760 736
|
| 2254 |
+
f 647 736 714
|
| 2255 |
+
f 647 714 713
|
| 2256 |
+
f 647 713 711
|
| 2257 |
+
f 647 711 689
|
| 2258 |
+
f 647 689 669
|
| 2259 |
+
f 647 669 648
|
| 2260 |
+
f 648 669 670
|
| 2261 |
+
f 648 670 671
|
| 2262 |
+
f 648 671 649
|
| 2263 |
+
f 649 671 650
|
| 2264 |
+
f 650 671 672
|
| 2265 |
+
f 650 672 651
|
| 2266 |
+
f 651 672 673
|
| 2267 |
+
f 651 673 655
|
| 2268 |
+
f 652 661 674
|
| 2269 |
+
f 652 674 653
|
| 2270 |
+
f 653 675 654
|
| 2271 |
+
f 653 674 676
|
| 2272 |
+
f 653 676 688
|
| 2273 |
+
f 653 688 675
|
| 2274 |
+
f 654 675 666
|
| 2275 |
+
f 655 673 672
|
| 2276 |
+
f 655 672 659
|
| 2277 |
+
f 655 659 658
|
| 2278 |
+
f 659 678 679
|
| 2279 |
+
f 659 679 680
|
| 2280 |
+
f 659 672 681
|
| 2281 |
+
f 659 681 678
|
| 2282 |
+
f 660 682 662
|
| 2283 |
+
f 661 665 674
|
| 2284 |
+
f 662 682 683
|
| 2285 |
+
f 662 683 684
|
| 2286 |
+
f 662 684 663
|
| 2287 |
+
f 663 685 665
|
| 2288 |
+
f 663 665 664
|
| 2289 |
+
f 663 684 707
|
| 2290 |
+
f 663 707 685
|
| 2291 |
+
f 665 685 686
|
| 2292 |
+
f 665 686 674
|
| 2293 |
+
f 666 675 688
|
| 2294 |
+
f 666 688 687
|
| 2295 |
+
f 669 689 670
|
| 2296 |
+
f 670 689 671
|
| 2297 |
+
f 671 690 691
|
| 2298 |
+
f 671 691 692
|
| 2299 |
+
f 671 692 693
|
| 2300 |
+
f 671 693 694
|
| 2301 |
+
f 671 694 681
|
| 2302 |
+
f 671 681 672
|
| 2303 |
+
f 671 689 695
|
| 2304 |
+
f 671 695 690
|
| 2305 |
+
f 674 696 688
|
| 2306 |
+
f 674 688 676
|
| 2307 |
+
f 674 686 708
|
| 2308 |
+
f 674 708 696
|
| 2309 |
+
f 677 680 697
|
| 2310 |
+
f 677 697 698
|
| 2311 |
+
f 678 699 700
|
| 2312 |
+
f 678 700 679
|
| 2313 |
+
f 678 681 694
|
| 2314 |
+
f 678 694 699
|
| 2315 |
+
f 679 700 680
|
| 2316 |
+
f 680 700 701
|
| 2317 |
+
f 680 701 697
|
| 2318 |
+
f 682 702 683
|
| 2319 |
+
f 683 702 703
|
| 2320 |
+
f 683 703 684
|
| 2321 |
+
f 684 704 705
|
| 2322 |
+
f 684 705 706
|
| 2323 |
+
f 684 706 707
|
| 2324 |
+
f 684 703 704
|
| 2325 |
+
f 685 708 686
|
| 2326 |
+
f 685 707 708
|
| 2327 |
+
f 687 688 709
|
| 2328 |
+
f 688 696 708
|
| 2329 |
+
f 688 708 710
|
| 2330 |
+
f 688 710 709
|
| 2331 |
+
f 689 711 690
|
| 2332 |
+
f 689 690 695
|
| 2333 |
+
f 690 712 691
|
| 2334 |
+
f 690 711 713
|
| 2335 |
+
f 690 713 714
|
| 2336 |
+
f 690 714 715
|
| 2337 |
+
f 690 715 712
|
| 2338 |
+
f 691 712 716
|
| 2339 |
+
f 691 716 717
|
| 2340 |
+
f 691 717 692
|
| 2341 |
+
f 692 717 693
|
| 2342 |
+
f 693 717 716
|
| 2343 |
+
f 693 716 699
|
| 2344 |
+
f 693 699 694
|
| 2345 |
+
f 697 701 718
|
| 2346 |
+
f 697 718 698
|
| 2347 |
+
f 699 719 739
|
| 2348 |
+
f 699 739 720
|
| 2349 |
+
f 699 720 700
|
| 2350 |
+
f 699 716 719
|
| 2351 |
+
f 700 720 721
|
| 2352 |
+
f 700 721 701
|
| 2353 |
+
f 701 722 723
|
| 2354 |
+
f 701 723 718
|
| 2355 |
+
f 701 721 722
|
| 2356 |
+
f 703 724 725
|
| 2357 |
+
f 703 725 726
|
| 2358 |
+
f 703 726 704
|
| 2359 |
+
f 704 726 725
|
| 2360 |
+
f 704 725 727
|
| 2361 |
+
f 704 727 705
|
| 2362 |
+
f 705 727 728
|
| 2363 |
+
f 705 728 729
|
| 2364 |
+
f 705 729 730
|
| 2365 |
+
f 705 730 731
|
| 2366 |
+
f 705 731 706
|
| 2367 |
+
f 706 731 732
|
| 2368 |
+
f 706 732 707
|
| 2369 |
+
f 707 732 733
|
| 2370 |
+
f 707 733 708
|
| 2371 |
+
f 708 733 732
|
| 2372 |
+
f 708 732 710
|
| 2373 |
+
f 709 710 734
|
| 2374 |
+
f 710 732 734
|
| 2375 |
+
f 712 735 716
|
| 2376 |
+
f 712 715 735
|
| 2377 |
+
f 714 736 715
|
| 2378 |
+
f 715 736 760
|
| 2379 |
+
f 715 760 737
|
| 2380 |
+
f 715 737 735
|
| 2381 |
+
f 716 735 738
|
| 2382 |
+
f 716 738 719
|
| 2383 |
+
f 719 738 739
|
| 2384 |
+
f 720 739 740
|
| 2385 |
+
f 720 740 721
|
| 2386 |
+
f 721 740 722
|
| 2387 |
+
f 722 739 763
|
| 2388 |
+
f 722 763 741
|
| 2389 |
+
f 722 741 766
|
| 2390 |
+
f 722 766 742
|
| 2391 |
+
f 722 742 743
|
| 2392 |
+
f 722 743 723
|
| 2393 |
+
f 722 740 739
|
| 2394 |
+
f 724 744 745
|
| 2395 |
+
f 724 745 729
|
| 2396 |
+
f 724 729 728
|
| 2397 |
+
f 724 728 725
|
| 2398 |
+
f 725 728 727
|
| 2399 |
+
f 729 745 777
|
| 2400 |
+
f 729 777 746
|
| 2401 |
+
f 729 746 730
|
| 2402 |
+
f 730 746 747
|
| 2403 |
+
f 730 747 731
|
| 2404 |
+
f 731 747 746
|
| 2405 |
+
f 731 746 748
|
| 2406 |
+
f 731 748 749
|
| 2407 |
+
f 731 749 750
|
| 2408 |
+
f 731 750 751
|
| 2409 |
+
f 731 751 732
|
| 2410 |
+
f 732 751 752
|
| 2411 |
+
f 732 752 734
|
| 2412 |
+
f 735 737 753
|
| 2413 |
+
f 735 753 754
|
| 2414 |
+
f 735 754 781
|
| 2415 |
+
f 735 781 755
|
| 2416 |
+
f 735 755 756
|
| 2417 |
+
f 735 756 757
|
| 2418 |
+
f 735 757 758
|
| 2419 |
+
f 735 758 759
|
| 2420 |
+
f 735 759 763
|
| 2421 |
+
f 735 763 738
|
| 2422 |
+
f 737 760 761
|
| 2423 |
+
f 737 761 762
|
| 2424 |
+
f 737 762 753
|
| 2425 |
+
f 738 763 739
|
| 2426 |
+
f 741 764 765
|
| 2427 |
+
f 741 765 766
|
| 2428 |
+
f 741 763 767
|
| 2429 |
+
f 741 767 768
|
| 2430 |
+
f 741 768 769
|
| 2431 |
+
f 741 769 764
|
| 2432 |
+
f 744 770 771
|
| 2433 |
+
f 744 771 772
|
| 2434 |
+
f 744 772 745
|
| 2435 |
+
f 745 772 773
|
| 2436 |
+
f 745 773 774
|
| 2437 |
+
f 745 774 775
|
| 2438 |
+
f 745 775 776
|
| 2439 |
+
f 745 776 777
|
| 2440 |
+
f 746 777 778
|
| 2441 |
+
f 746 778 748
|
| 2442 |
+
f 748 778 749
|
| 2443 |
+
f 749 778 779
|
| 2444 |
+
f 753 762 780
|
| 2445 |
+
f 753 780 781
|
| 2446 |
+
f 753 781 754
|
| 2447 |
+
f 755 781 756
|
| 2448 |
+
f 756 781 782
|
| 2449 |
+
f 756 782 757
|
| 2450 |
+
f 757 782 758
|
| 2451 |
+
f 758 782 783
|
| 2452 |
+
f 758 783 759
|
| 2453 |
+
f 759 783 784
|
| 2454 |
+
f 759 784 767
|
| 2455 |
+
f 759 767 763
|
| 2456 |
+
f 761 785 762
|
| 2457 |
+
f 761 786 785
|
| 2458 |
+
f 762 785 780
|
| 2459 |
+
f 764 769 787
|
| 2460 |
+
f 764 787 765
|
| 2461 |
+
f 765 787 788
|
| 2462 |
+
f 765 788 789
|
| 2463 |
+
f 767 784 790
|
| 2464 |
+
f 767 790 768
|
| 2465 |
+
f 768 790 769
|
| 2466 |
+
f 769 790 788
|
| 2467 |
+
f 769 788 787
|
| 2468 |
+
f 770 791 771
|
| 2469 |
+
f 771 792 774
|
| 2470 |
+
f 771 774 773
|
| 2471 |
+
f 771 773 772
|
| 2472 |
+
f 771 791 793
|
| 2473 |
+
f 771 793 794
|
| 2474 |
+
f 771 794 792
|
| 2475 |
+
f 774 792 795
|
| 2476 |
+
f 774 795 796
|
| 2477 |
+
f 774 796 775
|
| 2478 |
+
f 775 796 776
|
| 2479 |
+
f 776 796 797
|
| 2480 |
+
f 776 797 798
|
| 2481 |
+
f 776 798 777
|
| 2482 |
+
f 777 798 778
|
| 2483 |
+
f 778 798 779
|
| 2484 |
+
f 779 798 799
|
| 2485 |
+
f 780 800 801
|
| 2486 |
+
f 780 801 781
|
| 2487 |
+
f 780 785 802
|
| 2488 |
+
f 780 802 800
|
| 2489 |
+
f 781 801 800
|
| 2490 |
+
f 781 800 803
|
| 2491 |
+
f 781 803 804
|
| 2492 |
+
f 781 804 782
|
| 2493 |
+
f 782 804 783
|
| 2494 |
+
f 783 804 790
|
| 2495 |
+
f 783 790 784
|
| 2496 |
+
f 785 786 805
|
| 2497 |
+
f 785 805 802
|
| 2498 |
+
f 786 806 805
|
| 2499 |
+
f 788 807 789
|
| 2500 |
+
f 788 790 808
|
| 2501 |
+
f 788 808 831
|
| 2502 |
+
f 788 831 832
|
| 2503 |
+
f 788 832 809
|
| 2504 |
+
f 788 809 807
|
| 2505 |
+
f 790 804 810
|
| 2506 |
+
f 790 810 811
|
| 2507 |
+
f 790 811 808
|
| 2508 |
+
f 792 812 814
|
| 2509 |
+
f 792 814 833
|
| 2510 |
+
f 792 833 813
|
| 2511 |
+
f 792 813 796
|
| 2512 |
+
f 792 796 795
|
| 2513 |
+
f 792 794 812
|
| 2514 |
+
f 794 814 812
|
| 2515 |
+
f 794 815 814
|
| 2516 |
+
f 796 813 816
|
| 2517 |
+
f 796 816 797
|
| 2518 |
+
f 797 817 798
|
| 2519 |
+
f 797 816 818
|
| 2520 |
+
f 797 818 819
|
| 2521 |
+
f 797 819 799
|
| 2522 |
+
f 797 799 817
|
| 2523 |
+
f 798 817 799
|
| 2524 |
+
f 799 819 820
|
| 2525 |
+
f 800 821 822
|
| 2526 |
+
f 800 822 823
|
| 2527 |
+
f 800 823 803
|
| 2528 |
+
f 800 802 824
|
| 2529 |
+
f 800 824 825
|
| 2530 |
+
f 800 825 821
|
| 2531 |
+
f 802 805 806
|
| 2532 |
+
f 802 806 826
|
| 2533 |
+
f 802 826 824
|
| 2534 |
+
f 803 823 810
|
| 2535 |
+
f 803 810 804
|
| 2536 |
+
f 806 827 826
|
| 2537 |
+
f 806 828 827
|
| 2538 |
+
f 808 811 810
|
| 2539 |
+
f 808 810 829
|
| 2540 |
+
f 808 829 830
|
| 2541 |
+
f 808 830 831
|
| 2542 |
+
f 810 823 829
|
| 2543 |
+
f 813 833 816
|
| 2544 |
+
f 814 815 834
|
| 2545 |
+
f 814 834 833
|
| 2546 |
+
f 816 833 835
|
| 2547 |
+
f 816 835 836
|
| 2548 |
+
f 816 836 818
|
| 2549 |
+
f 818 837 820
|
| 2550 |
+
f 818 820 819
|
| 2551 |
+
f 818 836 837
|
| 2552 |
+
f 821 838 822
|
| 2553 |
+
f 821 825 842
|
| 2554 |
+
f 821 842 838
|
| 2555 |
+
f 822 838 829
|
| 2556 |
+
f 822 829 823
|
| 2557 |
+
f 824 826 839
|
| 2558 |
+
f 824 839 840
|
| 2559 |
+
f 824 840 841
|
| 2560 |
+
f 824 841 825
|
| 2561 |
+
f 825 841 842
|
| 2562 |
+
f 826 827 843
|
| 2563 |
+
f 826 843 839
|
| 2564 |
+
f 827 828 844
|
| 2565 |
+
f 827 844 843
|
| 2566 |
+
f 828 845 860
|
| 2567 |
+
f 828 860 844
|
| 2568 |
+
f 829 838 830
|
| 2569 |
+
f 830 838 846
|
| 2570 |
+
f 830 846 863
|
| 2571 |
+
f 830 863 847
|
| 2572 |
+
f 830 847 831
|
| 2573 |
+
f 831 847 848
|
| 2574 |
+
f 831 848 849
|
| 2575 |
+
f 831 849 832
|
| 2576 |
+
f 833 850 835
|
| 2577 |
+
f 833 834 851
|
| 2578 |
+
f 833 851 850
|
| 2579 |
+
f 835 850 852
|
| 2580 |
+
f 835 852 836
|
| 2581 |
+
f 836 852 853
|
| 2582 |
+
f 836 853 854
|
| 2583 |
+
f 836 854 837
|
| 2584 |
+
f 837 854 855
|
| 2585 |
+
f 837 855 856
|
| 2586 |
+
f 838 842 846
|
| 2587 |
+
f 839 843 840
|
| 2588 |
+
f 840 843 857
|
| 2589 |
+
f 840 857 863
|
| 2590 |
+
f 840 863 846
|
| 2591 |
+
f 840 846 842
|
| 2592 |
+
f 840 842 841
|
| 2593 |
+
f 843 858 857
|
| 2594 |
+
f 843 844 859
|
| 2595 |
+
f 843 859 861
|
| 2596 |
+
f 843 861 858
|
| 2597 |
+
f 844 860 878
|
| 2598 |
+
f 844 878 861
|
| 2599 |
+
f 844 861 859
|
| 2600 |
+
f 845 862 860
|
| 2601 |
+
f 847 863 864
|
| 2602 |
+
f 847 864 848
|
| 2603 |
+
f 848 864 865
|
| 2604 |
+
f 848 865 849
|
| 2605 |
+
f 850 851 852
|
| 2606 |
+
f 851 866 867
|
| 2607 |
+
f 851 867 868
|
| 2608 |
+
f 851 868 852
|
| 2609 |
+
f 852 868 869
|
| 2610 |
+
f 852 869 870
|
| 2611 |
+
f 852 870 853
|
| 2612 |
+
f 853 870 854
|
| 2613 |
+
f 854 870 871
|
| 2614 |
+
f 854 871 860
|
| 2615 |
+
f 854 860 855
|
| 2616 |
+
f 855 860 872
|
| 2617 |
+
f 855 872 856
|
| 2618 |
+
f 856 872 873
|
| 2619 |
+
f 857 858 874
|
| 2620 |
+
f 857 874 864
|
| 2621 |
+
f 857 864 863
|
| 2622 |
+
f 858 875 881
|
| 2623 |
+
f 858 881 874
|
| 2624 |
+
f 858 861 876
|
| 2625 |
+
f 858 876 875
|
| 2626 |
+
f 860 862 872
|
| 2627 |
+
f 860 871 877
|
| 2628 |
+
f 860 877 878
|
| 2629 |
+
f 861 878 885
|
| 2630 |
+
f 861 885 879
|
| 2631 |
+
f 861 879 880
|
| 2632 |
+
f 861 880 876
|
| 2633 |
+
f 862 873 872
|
| 2634 |
+
f 864 874 881
|
| 2635 |
+
f 864 881 882
|
| 2636 |
+
f 866 883 867
|
| 2637 |
+
f 867 883 884
|
| 2638 |
+
f 867 884 885
|
| 2639 |
+
f 867 885 886
|
| 2640 |
+
f 867 886 868
|
| 2641 |
+
f 868 886 869
|
| 2642 |
+
f 869 886 877
|
| 2643 |
+
f 869 877 870
|
| 2644 |
+
f 870 877 871
|
| 2645 |
+
f 875 880 881
|
| 2646 |
+
f 875 876 880
|
| 2647 |
+
f 877 886 878
|
| 2648 |
+
f 878 886 885
|
| 2649 |
+
f 880 887 888
|
| 2650 |
+
f 880 888 881
|
| 2651 |
+
f 881 888 882
|
| 2652 |
+
f 882 888 887
|
| 2653 |
+
o convex_4
|
| 2654 |
+
v 0.025708 0.0346005 0.0657713
|
| 2655 |
+
v 0.0228511 0.0346005 0.0629188
|
| 2656 |
+
v 0.0228511 0.0346005 0.0673843
|
| 2657 |
+
v 0.0253871 0.0342764 0.0673843
|
| 2658 |
+
v 0.0301695 0.0323654 0.0673843
|
| 2659 |
+
v 0.031414 0.0317452 0.0673843
|
| 2660 |
+
v 0.0330264 0.0309406 0.0673843
|
| 2661 |
+
v 0.031414 0.0317452 0.0657713
|
| 2662 |
+
v 0.02761 0.033645 0.0648205
|
| 2663 |
+
v 0.0342709 0.0288899 0.045702
|
| 2664 |
+
v 0.0142881 0.0317452 0.045702
|
| 2665 |
+
v 0.011439 0.0346005 0.0572096
|
| 2666 |
+
v -0.00853598 0.0346005 0.0673843
|
| 2667 |
+
v 0.035218 0.0298398 0.0673843
|
| 2668 |
+
v 0.035218 0.0298398 0.0667242
|
| 2669 |
+
v 0.033316 0.0307897 0.0648205
|
| 2670 |
+
v 0.0370339 0.0275098 0.045702
|
| 2671 |
+
v -0.022805 0.0317452 0.045702
|
| 2672 |
+
v -0.00567906 0.0346005 0.0515046
|
| 2673 |
+
v 0.00288389 0.0346005 0.0543571
|
| 2674 |
+
v -0.0113851 0.0346005 0.0600642
|
| 2675 |
+
v -0.019948 0.0317452 0.0543571
|
| 2676 |
+
v -0.0203081 0.03103 0.0572096
|
| 2677 |
+
v -0.0150717 0.0313317 0.0673843
|
| 2678 |
+
v 0.0380749 0.0269845 0.0673843
|
| 2679 |
+
v 0.0380749 0.0269845 0.045702
|
| 2680 |
+
v -0.0247696 0.0307618 0.045702
|
| 2681 |
+
v -0.022805 0.0317452 0.0457954
|
| 2682 |
+
v -0.0113851 0.0346005 0.0515046
|
| 2683 |
+
v -0.0237599 0.0298398 0.0543571
|
| 2684 |
+
v -0.018046 0.0298398 0.0673843
|
| 2685 |
+
v 0.0416598 0.0224083 0.0673843
|
| 2686 |
+
v 0.040564 0.0237717 0.045702
|
| 2687 |
+
v -0.026609 0.0298398 0.045702
|
| 2688 |
+
v -0.026609 0.0298398 0.0457954
|
| 2689 |
+
v -0.0277987 0.0274595 0.0500762
|
| 2690 |
+
v -0.0220927 0.0274595 0.064345
|
| 2691 |
+
v -0.0190322 0.0291134 0.0673843
|
| 2692 |
+
v 0.0419103 0.0219836 0.0673843
|
| 2693 |
+
v 0.0421686 0.0202347 0.045702
|
| 2694 |
+
v -0.0266872 0.0297895 0.045702
|
| 2695 |
+
v -0.026609 0.0212796 0.0673843
|
| 2696 |
+
v -0.0295285 0.0269566 0.045702
|
| 2697 |
+
v -0.0295833 0.0260346 0.04865
|
| 2698 |
+
v -0.0206916 0.0274036 0.0673843
|
| 2699 |
+
v 0.0425599 0.0203241 0.0673843
|
| 2700 |
+
v 0.0450177 0.0116744 0.045702
|
| 2701 |
+
v -0.0324167 0.0232744 0.045702
|
| 2702 |
+
v -0.0324402 0.0231794 0.0457954
|
| 2703 |
+
v -0.0279787 0.0185472 0.0673843
|
| 2704 |
+
v 0.0428339 0.0190836 0.0673843
|
| 2705 |
+
v 0.0455969 0.00937234 0.045702
|
| 2706 |
+
v -0.0325107 0.02309 0.045702
|
| 2707 |
+
v -0.028511 0.0174744 0.0673843
|
| 2708 |
+
v 0.0450646 0.00890857 0.0673843
|
| 2709 |
+
v 0.0456908 0.00890857 0.0457954
|
| 2710 |
+
v 0.0456908 0.00890857 0.045702
|
| 2711 |
+
v -0.0342561 0.0174744 0.045702
|
| 2712 |
+
v -0.0335047 0.0188993 0.0472216
|
| 2713 |
+
v -0.028511 -0.00250136 0.0673843
|
| 2714 |
+
v -0.0342248 0.0117638 0.04865
|
| 2715 |
+
v -0.0313679 0.00605888 0.0600642
|
| 2716 |
+
v 0.0450646 0.00481843 0.0673843
|
| 2717 |
+
v 0.0456908 0.00320919 0.0457954
|
| 2718 |
+
v 0.0456908 0.0031142 0.045702
|
| 2719 |
+
v -0.0342718 0.0173347 0.045702
|
| 2720 |
+
v -0.0281588 -0.00392062 0.0673843
|
| 2721 |
+
v -0.0342248 0.000353913 0.04865
|
| 2722 |
+
v -0.0342248 -0.00535664 0.0457954
|
| 2723 |
+
v -0.0335047 -0.00821192 0.0461519
|
| 2724 |
+
v 0.0449942 7.47988e-06 0.045702
|
| 2725 |
+
v 0.0422077 -0.0065971 0.0673843
|
| 2726 |
+
v -0.0342718 -0.00250136 0.045702
|
| 2727 |
+
v -0.0277126 -0.00501021 0.0673843
|
| 2728 |
+
v -0.0335047 -0.00865893 0.045702
|
| 2729 |
+
v -0.0342405 -0.00535664 0.045702
|
| 2730 |
+
v -0.0342248 -0.00544604 0.045702
|
| 2731 |
+
v -0.0336065 -0.00821192 0.045702
|
| 2732 |
+
v 0.0428339 -0.00535664 0.04865
|
| 2733 |
+
v 0.0428104 -0.00545722 0.045702
|
| 2734 |
+
v 0.0420903 -0.00684295 0.0673843
|
| 2735 |
+
v 0.0421216 -0.00678149 0.0671997
|
| 2736 |
+
v -0.0270864 -0.00678149 0.0664865
|
| 2737 |
+
v -0.0259358 -0.00821192 0.0673843
|
| 2738 |
+
v -0.0327924 -0.00963677 0.0465085
|
| 2739 |
+
v -0.0299434 -0.0153417 0.0465085
|
| 2740 |
+
v -0.0308357 -0.0139169 0.045702
|
| 2741 |
+
v 0.0409632 -0.00914505 0.045702
|
| 2742 |
+
v 0.0409788 -0.00874833 0.0673843
|
| 2743 |
+
v 0.0409319 -0.00916182 0.04865
|
| 2744 |
+
v -0.0237599 -0.0120171 0.0673843
|
| 2745 |
+
v -0.0286049 -0.0167275 0.045702
|
| 2746 |
+
v -0.0270864 -0.018197 0.0465085
|
| 2747 |
+
v -0.0306087 -0.0142968 0.045702
|
| 2748 |
+
v -0.0299434 -0.0153417 0.045702
|
| 2749 |
+
v 0.0409006 -0.00922328 0.045702
|
| 2750 |
+
v 0.0380749 -0.0120171 0.0673843
|
| 2751 |
+
v -0.0209029 -0.0148668 0.0673843
|
| 2752 |
+
v -0.0270864 -0.018197 0.045702
|
| 2753 |
+
v -0.0260376 -0.0188675 0.045702
|
| 2754 |
+
v -0.024707 -0.018672 0.0496008
|
| 2755 |
+
v -0.0213804 -0.018197 0.0579227
|
| 2756 |
+
v 0.035218 -0.0148668 0.0673843
|
| 2757 |
+
v 0.0381062 -0.0120003 0.045702
|
| 2758 |
+
v -0.016551 -0.0181691 0.0673843
|
| 2759 |
+
v -0.0256619 -0.0190128 0.045702
|
| 2760 |
+
v -0.022805 -0.0196274 0.04865
|
| 2761 |
+
v -0.016144 -0.018672 0.0667242
|
| 2762 |
+
v 0.0343962 -0.0152859 0.0673843
|
| 2763 |
+
v 0.035218 -0.0148668 0.0657713
|
| 2764 |
+
v 0.0380436 -0.0120394 0.045702
|
| 2765 |
+
v -0.0155883 -0.0187446 0.0673843
|
| 2766 |
+
v -0.0228989 -0.0196274 0.045702
|
| 2767 |
+
v -0.014242 -0.0224827 0.04865
|
| 2768 |
+
v -0.014242 -0.0196274 0.0657713
|
| 2769 |
+
v 0.033316 -0.0158223 0.0657713
|
| 2770 |
+
v 0.031414 -0.0167722 0.0673843
|
| 2771 |
+
v 0.0271325 -0.0189122 0.0671997
|
| 2772 |
+
v 0.025708 -0.0196274 0.0657713
|
| 2773 |
+
v 0.0362042 -0.0129614 0.045702
|
| 2774 |
+
v -0.014242 -0.0192698 0.0673843
|
| 2775 |
+
v -0.0143359 -0.0210076 0.045702
|
| 2776 |
+
v -0.00282213 -0.0224827 0.0543571
|
| 2777 |
+
v -0.0113851 -0.0224827 0.0543571
|
| 2778 |
+
v -0.0132949 -0.0215272 0.0467462
|
| 2779 |
+
v 0.00572516 -0.0196274 0.045702
|
| 2780 |
+
v -0.00567906 -0.0224827 0.0629188
|
| 2781 |
+
v 0.0305843 -0.0171857 0.0673843
|
| 2782 |
+
v 0.0273204 -0.0187725 0.0673843
|
| 2783 |
+
v 0.0269525 -0.0188675 0.0673843
|
| 2784 |
+
v 0.0228511 -0.0196274 0.0673843
|
| 2785 |
+
v 0.011439 -0.0224827 0.0629188
|
| 2786 |
+
v 0.0157205 -0.0217675 0.0614904
|
| 2787 |
+
v 0.025708 -0.0167722 0.045702
|
| 2788 |
+
v 0.034177 -0.0139504 0.045702
|
| 2789 |
+
v 0.0342709 -0.0139169 0.045702
|
| 2790 |
+
v -0.00567906 -0.0217675 0.06613
|
| 2791 |
+
v -0.0126296 -0.0196274 0.0673843
|
| 2792 |
+
v 0.00288389 -0.0224827 0.0572096
|
| 2793 |
+
v 0.00858208 -0.0224827 0.0600642
|
| 2794 |
+
v -0.00282213 -0.0224827 0.0657713
|
| 2795 |
+
v 0.00733756 -0.0216278 0.0673843
|
| 2796 |
+
v 0.00715753 -0.0217675 0.0671997
|
| 2797 |
+
v 0.00572516 -0.0224827 0.0657713
|
| 2798 |
+
v -0.00336221 -0.0215719 0.0673843
|
| 2799 |
+
v -0.00282213 -0.0216725 0.0673843
|
| 2800 |
+
v 0.0066018 -0.0216725 0.0673843
|
| 2801 |
+
f 889 890 900
|
| 2802 |
+
f 889 900 908
|
| 2803 |
+
f 889 908 907
|
| 2804 |
+
f 889 907 917
|
| 2805 |
+
f 889 917 909
|
| 2806 |
+
f 889 909 901
|
| 2807 |
+
f 889 901 891
|
| 2808 |
+
f 889 891 892
|
| 2809 |
+
f 889 892 893
|
| 2810 |
+
f 889 893 894
|
| 2811 |
+
f 889 894 895
|
| 2812 |
+
f 889 895 896
|
| 2813 |
+
f 889 896 904
|
| 2814 |
+
f 889 904 897
|
| 2815 |
+
f 889 897 898
|
| 2816 |
+
f 889 898 890
|
| 2817 |
+
f 890 899 900
|
| 2818 |
+
f 890 898 899
|
| 2819 |
+
f 891 901 912
|
| 2820 |
+
f 891 912 919
|
| 2821 |
+
f 891 919 926
|
| 2822 |
+
f 891 926 933
|
| 2823 |
+
f 891 933 930
|
| 2824 |
+
f 891 930 938
|
| 2825 |
+
f 891 938 942
|
| 2826 |
+
f 891 942 948
|
| 2827 |
+
f 891 948 955
|
| 2828 |
+
f 891 955 962
|
| 2829 |
+
f 891 962 972
|
| 2830 |
+
f 891 972 979
|
| 2831 |
+
f 891 979 986
|
| 2832 |
+
f 891 986 993
|
| 2833 |
+
f 891 993 1000
|
| 2834 |
+
f 891 1000 1009
|
| 2835 |
+
f 891 1009 1026
|
| 2836 |
+
f 891 1026 1033
|
| 2837 |
+
f 891 1033 1034
|
| 2838 |
+
f 891 1034 1035
|
| 2839 |
+
f 891 1035 1030
|
| 2840 |
+
f 891 1030 1019
|
| 2841 |
+
f 891 1019 1018
|
| 2842 |
+
f 891 1018 1017
|
| 2843 |
+
f 891 1017 1016
|
| 2844 |
+
f 891 1016 1005
|
| 2845 |
+
f 891 1005 997
|
| 2846 |
+
f 891 997 991
|
| 2847 |
+
f 891 991 985
|
| 2848 |
+
f 891 985 977
|
| 2849 |
+
f 891 977 969
|
| 2850 |
+
f 891 969 960
|
| 2851 |
+
f 891 960 951
|
| 2852 |
+
f 891 951 943
|
| 2853 |
+
f 891 943 939
|
| 2854 |
+
f 891 939 934
|
| 2855 |
+
f 891 934 927
|
| 2856 |
+
f 891 927 920
|
| 2857 |
+
f 891 920 913
|
| 2858 |
+
f 891 913 902
|
| 2859 |
+
f 891 902 895
|
| 2860 |
+
f 891 895 894
|
| 2861 |
+
f 891 894 893
|
| 2862 |
+
f 891 893 892
|
| 2863 |
+
f 895 902 903
|
| 2864 |
+
f 895 903 904
|
| 2865 |
+
f 895 904 896
|
| 2866 |
+
f 897 904 905
|
| 2867 |
+
f 897 905 898
|
| 2868 |
+
f 898 905 914
|
| 2869 |
+
f 898 914 921
|
| 2870 |
+
f 898 921 928
|
| 2871 |
+
f 898 928 935
|
| 2872 |
+
f 898 935 940
|
| 2873 |
+
f 898 940 945
|
| 2874 |
+
f 898 945 953
|
| 2875 |
+
f 898 953 959
|
| 2876 |
+
f 898 959 968
|
| 2877 |
+
f 898 968 976
|
| 2878 |
+
f 898 976 984
|
| 2879 |
+
f 898 984 992
|
| 2880 |
+
f 898 992 999
|
| 2881 |
+
f 898 999 1008
|
| 2882 |
+
f 898 1008 1024
|
| 2883 |
+
f 898 1024 1023
|
| 2884 |
+
f 898 1023 1022
|
| 2885 |
+
f 898 1022 1014
|
| 2886 |
+
f 898 1014 1010
|
| 2887 |
+
f 898 1010 1001
|
| 2888 |
+
f 898 1001 994
|
| 2889 |
+
f 898 994 988
|
| 2890 |
+
f 898 988 987
|
| 2891 |
+
f 898 987 980
|
| 2892 |
+
f 898 980 983
|
| 2893 |
+
f 898 983 982
|
| 2894 |
+
f 898 982 975
|
| 2895 |
+
f 898 975 963
|
| 2896 |
+
f 898 963 966
|
| 2897 |
+
f 898 966 965
|
| 2898 |
+
f 898 965 964
|
| 2899 |
+
f 898 964 961
|
| 2900 |
+
f 898 961 954
|
| 2901 |
+
f 898 954 946
|
| 2902 |
+
f 898 946 941
|
| 2903 |
+
f 898 941 936
|
| 2904 |
+
f 898 936 931
|
| 2905 |
+
f 898 931 929
|
| 2906 |
+
f 898 929 922
|
| 2907 |
+
f 898 922 915
|
| 2908 |
+
f 898 915 906
|
| 2909 |
+
f 898 906 899
|
| 2910 |
+
f 899 906 917
|
| 2911 |
+
f 899 917 907
|
| 2912 |
+
f 899 907 908
|
| 2913 |
+
f 899 908 900
|
| 2914 |
+
f 901 909 910
|
| 2915 |
+
f 901 910 911
|
| 2916 |
+
f 901 911 912
|
| 2917 |
+
f 902 913 914
|
| 2918 |
+
f 902 914 903
|
| 2919 |
+
f 903 914 904
|
| 2920 |
+
f 904 914 905
|
| 2921 |
+
f 906 915 916
|
| 2922 |
+
f 906 916 909
|
| 2923 |
+
f 906 909 917
|
| 2924 |
+
f 909 916 910
|
| 2925 |
+
f 910 916 918
|
| 2926 |
+
f 910 918 911
|
| 2927 |
+
f 911 919 912
|
| 2928 |
+
f 911 918 919
|
| 2929 |
+
f 913 920 914
|
| 2930 |
+
f 914 920 921
|
| 2931 |
+
f 915 922 923
|
| 2932 |
+
f 915 923 916
|
| 2933 |
+
f 916 923 918
|
| 2934 |
+
f 918 923 924
|
| 2935 |
+
f 918 924 925
|
| 2936 |
+
f 918 925 926
|
| 2937 |
+
f 918 926 919
|
| 2938 |
+
f 920 927 921
|
| 2939 |
+
f 921 927 928
|
| 2940 |
+
f 922 929 923
|
| 2941 |
+
f 923 929 924
|
| 2942 |
+
f 924 930 925
|
| 2943 |
+
f 924 929 931
|
| 2944 |
+
f 924 931 932
|
| 2945 |
+
f 924 932 930
|
| 2946 |
+
f 925 933 926
|
| 2947 |
+
f 925 930 933
|
| 2948 |
+
f 927 934 928
|
| 2949 |
+
f 928 934 935
|
| 2950 |
+
f 930 932 936
|
| 2951 |
+
f 930 936 937
|
| 2952 |
+
f 930 937 938
|
| 2953 |
+
f 931 936 932
|
| 2954 |
+
f 934 939 940
|
| 2955 |
+
f 934 940 935
|
| 2956 |
+
f 936 941 937
|
| 2957 |
+
f 937 941 942
|
| 2958 |
+
f 937 942 938
|
| 2959 |
+
f 939 943 940
|
| 2960 |
+
f 940 943 944
|
| 2961 |
+
f 940 944 945
|
| 2962 |
+
f 941 946 947
|
| 2963 |
+
f 941 947 942
|
| 2964 |
+
f 942 947 946
|
| 2965 |
+
f 942 946 949
|
| 2966 |
+
f 942 949 950
|
| 2967 |
+
f 942 950 948
|
| 2968 |
+
f 943 951 952
|
| 2969 |
+
f 943 952 944
|
| 2970 |
+
f 944 952 953
|
| 2971 |
+
f 944 953 945
|
| 2972 |
+
f 946 954 949
|
| 2973 |
+
f 948 950 956
|
| 2974 |
+
f 948 956 957
|
| 2975 |
+
f 948 957 958
|
| 2976 |
+
f 948 958 955
|
| 2977 |
+
f 949 954 961
|
| 2978 |
+
f 949 961 956
|
| 2979 |
+
f 949 956 950
|
| 2980 |
+
f 951 959 953
|
| 2981 |
+
f 951 953 952
|
| 2982 |
+
f 951 960 959
|
| 2983 |
+
f 955 958 963
|
| 2984 |
+
f 955 963 962
|
| 2985 |
+
f 956 961 964
|
| 2986 |
+
f 956 964 957
|
| 2987 |
+
f 957 964 965
|
| 2988 |
+
f 957 965 966
|
| 2989 |
+
f 957 966 958
|
| 2990 |
+
f 958 966 963
|
| 2991 |
+
f 959 960 967
|
| 2992 |
+
f 959 967 968
|
| 2993 |
+
f 960 969 970
|
| 2994 |
+
f 960 970 968
|
| 2995 |
+
f 960 968 967
|
| 2996 |
+
f 962 971 972
|
| 2997 |
+
f 962 963 973
|
| 2998 |
+
f 962 973 974
|
| 2999 |
+
f 962 974 971
|
| 3000 |
+
f 963 975 974
|
| 3001 |
+
f 963 974 973
|
| 3002 |
+
f 968 970 976
|
| 3003 |
+
f 969 977 978
|
| 3004 |
+
f 969 978 976
|
| 3005 |
+
f 969 976 970
|
| 3006 |
+
f 971 974 972
|
| 3007 |
+
f 972 974 979
|
| 3008 |
+
f 974 980 981
|
| 3009 |
+
f 974 981 979
|
| 3010 |
+
f 974 975 982
|
| 3011 |
+
f 974 982 983
|
| 3012 |
+
f 974 983 980
|
| 3013 |
+
f 976 978 984
|
| 3014 |
+
f 977 985 978
|
| 3015 |
+
f 978 985 984
|
| 3016 |
+
f 979 981 986
|
| 3017 |
+
f 980 987 981
|
| 3018 |
+
f 981 987 988
|
| 3019 |
+
f 981 988 989
|
| 3020 |
+
f 981 989 990
|
| 3021 |
+
f 981 990 986
|
| 3022 |
+
f 984 985 991
|
| 3023 |
+
f 984 991 992
|
| 3024 |
+
f 986 990 993
|
| 3025 |
+
f 988 994 995
|
| 3026 |
+
f 988 995 989
|
| 3027 |
+
f 989 995 996
|
| 3028 |
+
f 989 996 990
|
| 3029 |
+
f 990 996 993
|
| 3030 |
+
f 991 997 998
|
| 3031 |
+
f 991 998 992
|
| 3032 |
+
f 992 998 999
|
| 3033 |
+
f 993 996 1000
|
| 3034 |
+
f 994 1001 1002
|
| 3035 |
+
f 994 1002 995
|
| 3036 |
+
f 995 1002 1003
|
| 3037 |
+
f 995 1003 996
|
| 3038 |
+
f 996 1003 1000
|
| 3039 |
+
f 997 1004 998
|
| 3040 |
+
f 997 1005 1006
|
| 3041 |
+
f 997 1006 1007
|
| 3042 |
+
f 997 1007 1004
|
| 3043 |
+
f 998 1004 999
|
| 3044 |
+
f 999 1004 1008
|
| 3045 |
+
f 1000 1003 1009
|
| 3046 |
+
f 1001 1010 1002
|
| 3047 |
+
f 1002 1011 1027
|
| 3048 |
+
f 1002 1027 1028
|
| 3049 |
+
f 1002 1028 1020
|
| 3050 |
+
f 1002 1020 1032
|
| 3051 |
+
f 1002 1032 1029
|
| 3052 |
+
f 1002 1029 1015
|
| 3053 |
+
f 1002 1015 1012
|
| 3054 |
+
f 1002 1012 1003
|
| 3055 |
+
f 1002 1010 1013
|
| 3056 |
+
f 1002 1013 1014
|
| 3057 |
+
f 1002 1014 1011
|
| 3058 |
+
f 1003 1012 1015
|
| 3059 |
+
f 1003 1015 1009
|
| 3060 |
+
f 1004 1007 1008
|
| 3061 |
+
f 1005 1016 1006
|
| 3062 |
+
f 1006 1016 1017
|
| 3063 |
+
f 1006 1017 1018
|
| 3064 |
+
f 1006 1018 1019
|
| 3065 |
+
f 1006 1019 1007
|
| 3066 |
+
f 1007 1019 1020
|
| 3067 |
+
f 1007 1020 1021
|
| 3068 |
+
f 1007 1021 1022
|
| 3069 |
+
f 1007 1022 1023
|
| 3070 |
+
f 1007 1023 1024
|
| 3071 |
+
f 1007 1024 1008
|
| 3072 |
+
f 1009 1015 1025
|
| 3073 |
+
f 1009 1025 1026
|
| 3074 |
+
f 1010 1014 1013
|
| 3075 |
+
f 1011 1014 1027
|
| 3076 |
+
f 1014 1022 1028
|
| 3077 |
+
f 1014 1028 1027
|
| 3078 |
+
f 1015 1029 1025
|
| 3079 |
+
f 1019 1030 1031
|
| 3080 |
+
f 1019 1031 1032
|
| 3081 |
+
f 1019 1032 1020
|
| 3082 |
+
f 1020 1028 1021
|
| 3083 |
+
f 1021 1028 1022
|
| 3084 |
+
f 1025 1029 1033
|
| 3085 |
+
f 1025 1033 1026
|
| 3086 |
+
f 1029 1032 1031
|
| 3087 |
+
f 1029 1031 1034
|
| 3088 |
+
f 1029 1034 1033
|
| 3089 |
+
f 1030 1035 1031
|
| 3090 |
+
f 1031 1035 1034
|
msx_assets/object/028_skillet_lid/textured.mtl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
newmtl material_0
|
| 2 |
+
# shader_type beckmann
|
| 3 |
+
map_Kd texture_map.png
|
msx_assets/object/054_softball/textured.obj
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
msx_assets/object/057_racquetball/collision.obj
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
msx_assets/object/065-e_cups/textured.mtl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
newmtl material_0
|
| 2 |
+
# shader_type beckmann
|
| 3 |
+
map_Kd texture_map.png
|
msx_assets/object/065-e_cups/textured.obj
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
msx_assets/object/065-i_cups/material_0.mtl
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# https://github.com/mikedh/trimesh
|
| 2 |
+
newmtl material_0
|
| 3 |
+
Ka 0.40000000 0.40000000 0.40000000
|
| 4 |
+
Kd 0.40000000 0.40000000 0.40000000
|
| 5 |
+
Ks 0.40000000 0.40000000 0.40000000
|
| 6 |
+
Ns 1.00000000
|
| 7 |
+
map_Kd material_0.png
|
msx_assets/object/065-i_cups/textured.obj
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
msx_assets/object/071_nine_hole_peg_test/material_0.mtl
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# https://github.com/mikedh/trimesh
|
| 2 |
+
newmtl material_0
|
| 3 |
+
Ka 0.40000000 0.40000000 0.40000000
|
| 4 |
+
Kd 0.40000000 0.40000000 0.40000000
|
| 5 |
+
Ks 0.40000000 0.40000000 0.40000000
|
| 6 |
+
Ns 1.00000000
|
| 7 |
+
map_Kd material_0.png
|
msx_assets/object/073-d_lego_duplo/textured.mtl
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
newmtl material_0
|
| 2 |
+
# shader_type beckmann
|
| 3 |
+
map_Kd texture_map.png
|
msx_assets/object/073-d_lego_duplo/textured.obj
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/__init__.py
ADDED
|
File without changes
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (205 Bytes). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/__init__.py
ADDED
|
File without changes
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (200 Bytes). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# from .equiv_layers import *
|
| 2 |
+
from .layers import *
|
| 3 |
+
from .layers_attention import *
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (258 Bytes). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/__pycache__/layers.cpython-310.pyc
ADDED
|
Binary file (15.6 kB). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/__pycache__/layers_attention.cpython-310.pyc
ADDED
|
Binary file (6.68 kB). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/layers.py
ADDED
|
@@ -0,0 +1,504 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
|
| 3 |
+
import einops
|
| 4 |
+
import numpy as np
|
| 5 |
+
import torch
|
| 6 |
+
import torch.nn as nn
|
| 7 |
+
import torch.nn.functional as F
|
| 8 |
+
from einops.layers.torch import Rearrange
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
## Fully connected Neural Network block - Multi Layer Perceptron
class MLP(nn.Module):
    """Multi-layer perceptron with a configurable activation.

    Layout: Linear(in, hidden) -> act, then `n_layers` repetitions of
    [Linear(hidden, hidden) -> BatchNorm1d-or-Identity -> act], then a final
    Linear(hidden, out). Unknown `act` names raise KeyError.
    """

    def __init__(self, in_dim, out_dim, hidden_dim=16, n_layers=1, act='relu', batch_norm=True):
        super().__init__()
        # Name -> activation-class lookup table.
        act_by_name = {'relu': nn.ReLU, 'sigmoid': nn.Sigmoid, 'tanh': nn.Tanh,
                       'leaky_relu': nn.LeakyReLU, 'elu': nn.ELU, 'prelu': nn.PReLU,
                       'softplus': nn.Softplus, 'mish': nn.Mish,
                       'identity': nn.Identity}
        act_cls = act_by_name[act]

        modules = [nn.Linear(in_dim, hidden_dim), act_cls()]
        for _ in range(n_layers):
            modules.append(nn.Linear(hidden_dim, hidden_dim))
            modules.append(nn.BatchNorm1d(hidden_dim) if batch_norm else nn.Identity())
            modules.append(act_cls())
        modules.append(nn.Linear(hidden_dim, out_dim))

        self._network = nn.Sequential(*modules)

    def forward(self, x):
        return self._network(x)
|
| 36 |
+
|
| 37 |
+
# Identity layer
class Identity(nn.Module):
    """No-op module: forwards its input unchanged (mirrors nn.Identity)."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        return x
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
# Resnet Blocks
class ResnetBlockFC(nn.Module):
    '''
    Fully connected ResNet Block class.

    Computes x_s + fc_1(relu(fc_0(relu(x)))), where x_s is either x itself
    (same in/out size) or a bias-free linear projection. fc_1's weight is
    zero-initialized so the block starts close to the identity/shortcut.

    Args:
        size_in (int): input dimension
        size_out (int): output dimension
        size_h (int): hidden dimension
    '''

    def __init__(self, size_in, size_out=None, size_h=None):
        super().__init__()
        # Fill in defaults: same output size, hidden = min of the two.
        size_out = size_in if size_out is None else size_out
        size_h = min(size_in, size_out) if size_h is None else size_h

        self.size_in = size_in
        self.size_h = size_h
        self.size_out = size_out

        # Submodules
        self.fc_0 = nn.Linear(size_in, size_h)
        self.fc_1 = nn.Linear(size_h, size_out)
        self.actvn = nn.ReLU()

        # Projection shortcut only when the sizes differ.
        self.shortcut = None if size_in == size_out \
            else nn.Linear(size_in, size_out, bias=False)

        # Zero-init the second layer's weight (residual starts at its bias).
        nn.init.zeros_(self.fc_1.weight)

    def forward(self, x):
        dx = self.fc_1(self.actvn(self.fc_0(self.actvn(x))))
        x_s = x if self.shortcut is None else self.shortcut(x)
        return x_s + dx
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class GaussianFourierProjection(nn.Module):
    """Gaussian random Fourier features for encoding scalar time steps.

    Maps x (shape (B,)) to [sin(2*pi*x*W), cos(2*pi*x*W)] of shape
    (B, 2*(embed_dim//2)) using fixed random frequencies W.
    """

    def __init__(self, embed_dim, scale=30.):
        super().__init__()
        # Frequencies are sampled once at init and frozen (not trainable),
        # but registered as a Parameter so they travel with state_dict/device.
        frequencies = torch.randn(embed_dim // 2) * scale
        self.W = nn.Parameter(frequencies, requires_grad=False)

    def forward(self, x):
        angles = x[:, None] * self.W[None, :] * 2 * np.pi
        return torch.cat([torch.sin(angles), torch.cos(angles)], dim=-1)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
# https://gist.github.com/kevinzakka/dd9fa5177cda13593524f4d71eb38ad5
class SpatialSoftArgmax(nn.Module):
    """Spatial softmax as defined in [1].
    Concretely, the spatial softmax of each feature
    map is used to compute a weighted mean of the pixel
    locations, effectively performing a soft arg-max
    over the feature dimension.
    References:
        [1]: End-to-End Training of Deep Visuomotor Policies,
        https://arxiv.org/abs/1504.00702
    """

    def __init__(self, normalize=False):
        """Constructor.
        Args:
            normalize (bool): Whether to use normalized
            image coordinates, i.e. coordinates in
            the range `[-1, 1]`.
        """
        super().__init__()

        self.normalize = normalize

        # Learned softmax temperature (scalar). Name keeps the original
        # spelling ("temperatur") so existing checkpoints still load.
        self.temperatur = nn.Parameter(torch.ones(1), requires_grad=True)

    def _coord_grid(self, h, w, device):
        # Returns a stacked pair of coordinate grids.
        # NOTE(review): torch.meshgrid is called without indexing=, which
        # relies on the legacy 'ij' default — confirm on newer PyTorch.
        if self.normalize:
            return torch.stack(
                torch.meshgrid(
                    torch.linspace(-1, 1, w, device=device),
                    torch.linspace(-1, 1, h, device=device),
                )
            )
        return torch.stack(
            torch.meshgrid(
                torch.arange(0, w, device=device),
                torch.arange(0, h, device=device),
            )
        )

    def forward(self, x):
        assert x.ndim == 4, "Expecting a tensor of shape (B, C, H, W)."

        # compute a spatial softmax over the input:
        # given an input of shape (B, C, H, W),
        # reshape it to (B*C, H*W) then apply
        # the softmax operator over the last dimension
        b, c, h, w = x.shape

        # Sharpen logits by h*w and divide by the learned temperature.
        # x = x * h * w
        x = x * (h * w / self.temperatur)
        # print(self.temperatur)
        softmax = F.softmax(x.view(-1, h * w), dim=-1)

        # create a meshgrid of pixel coordinates
        # both in the x and y axes
        xc, yc = self._coord_grid(h, w, x.device)

        # element-wise multiply the x and y coordinates
        # with the softmax, then sum over the h*w dimension
        # this effectively computes the weighted mean of x
        # and y locations
        # NOTE(review): y_mean is computed from xc and x_mean from yc — the
        # xc/yc names look swapped relative to x_mean/y_mean. The output
        # ordering matches the original gist; confirm the axis convention
        # before relying on which column is "x".
        y_mean = (softmax * xc.flatten()).sum(dim=1, keepdims=True)
        x_mean = (softmax * yc.flatten()).sum(dim=1, keepdims=True)

        # concatenate and reshape the result
        # to (B, C*2) where for every feature
        # we have the expected x and y pixel
        # locations
        return torch.cat([x_mean, y_mean], dim=1).view(-1, c * 2)
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
########################################################################################################################
|
| 180 |
+
# Modules Temporal Unet
|
| 181 |
+
########################################################################################################################
|
| 182 |
+
class Residual(nn.Module):
    """Wraps a callable with a skip connection: forward(x) = fn(x, ...) + x."""

    def __init__(self, fn):
        super().__init__()
        self.fn = fn

    def forward(self, x, *args, **kwargs):
        out = self.fn(x, *args, **kwargs)
        return out + x
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
class PreNorm(nn.Module):
    """Pre-normalization wrapper: applies this file's LayerNorm, then `fn`."""

    def __init__(self, dim, fn):
        super().__init__()
        self.fn = fn
        self.norm = LayerNorm(dim)

    def forward(self, x):
        return self.fn(self.norm(x))
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
class LayerNorm(nn.Module):
    """Channel-wise layer norm for (batch, dim, length) tensors.

    Normalizes over dim=1 (biased variance) with learnable per-channel
    scale `g` and shift `b`.
    """

    def __init__(self, dim, eps=1e-5):
        super().__init__()
        self.eps = eps
        self.g = nn.Parameter(torch.ones(1, dim, 1))
        self.b = nn.Parameter(torch.zeros(1, dim, 1))

    def forward(self, x):
        mu = x.mean(dim=1, keepdim=True)
        sigma2 = x.var(dim=1, unbiased=False, keepdim=True)
        normed = (x - mu) / torch.sqrt(sigma2 + self.eps)
        return normed * self.g + self.b
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
class LinearAttention(nn.Module):
    """Linear (kernelized) multi-head attention over 1-D feature maps.

    Input/output shape is (batch, dim, length). Q/K/V are produced with a
    single pointwise Conv1d; keys are first aggregated into a per-head
    context matrix, so cost is linear in sequence length.
    """

    def __init__(self, dim, heads=4, dim_head=32):
        super().__init__()
        self.scale = dim_head ** -0.5  # 1/sqrt(d_head) applied to queries
        self.heads = heads
        hidden_dim = dim_head * heads
        # One projection producing q, k, v stacked along the channel axis.
        self.to_qkv = nn.Conv1d(dim, hidden_dim * 3, 1, bias=False)
        self.to_out = nn.Conv1d(hidden_dim, dim, 1)

    def forward(self, x):
        qkv = self.to_qkv(x).chunk(3, dim = 1)
        # Split heads: (b, h*c, d) -> (b, h, c, d); d is the sequence axis.
        q, k, v = map(lambda t: einops.rearrange(t, 'b (h c) d -> b h c d', h=self.heads), qkv)
        q = q * self.scale

        # Keys are softmax-normalized over sequence positions (last axis).
        k = k.softmax(dim = -1)
        # Aggregate values by key weights: context is (b, h, c, c).
        context = torch.einsum('b h d n, b h e n -> b h d e', k, v)

        # Query the aggregated context, then merge heads back into channels.
        out = torch.einsum('b h d e, b h d n -> b h e n', context, q)
        out = einops.rearrange(out, 'b h c d -> b (h c) d')
        return self.to_out(out)
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
class TimeEncoder(nn.Module):
    """Encodes scalar timesteps: sinusoidal features followed by a small MLP.

    Maps a (B,) tensor of timesteps to a (B, dim_out) embedding.
    """

    def __init__(self, dim, dim_out):
        super().__init__()
        self.encoder = nn.Sequential(
            SinusoidalPosEmb(dim),      # fixed sinusoidal features of width `dim`
            nn.Linear(dim, dim * 4),    # expand
            nn.Mish(),
            nn.Linear(dim * 4, dim_out),  # project to the requested width
        )

    def forward(self, x):
        return self.encoder(x)
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class SinusoidalPosEmb(nn.Module):
    """Transformer-style sinusoidal embedding for scalar positions.

    Maps a (B,) tensor to (B, dim): first dim//2 columns are sines, the
    rest cosines, with geometrically spaced frequencies down to 1/10000.
    """

    def __init__(self, dim):
        super().__init__()
        self.dim = dim

    def forward(self, x):
        half = self.dim // 2
        step = math.log(10000) / (half - 1)
        freqs = torch.exp(torch.arange(half, device=x.device) * -step)
        angles = x[:, None] * freqs[None, :]
        return torch.cat((angles.sin(), angles.cos()), dim=-1)
|
| 264 |
+
|
| 265 |
+
|
| 266 |
+
class Downsample1d(nn.Module):
    """Halves the temporal length with a stride-2 Conv1d; channels unchanged."""

    def __init__(self, dim):
        super().__init__()
        # k=3, s=2, p=1 maps length L to ceil(L/2) for even L exactly L/2.
        self.conv = nn.Conv1d(dim, dim, kernel_size=3, stride=2, padding=1)

    def forward(self, x):
        return self.conv(x)
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
class Upsample1d(nn.Module):
    """Doubles the temporal length with a stride-2 ConvTranspose1d."""

    def __init__(self, dim):
        super().__init__()
        # k=4, s=2, p=1 maps length L to exactly 2L.
        self.conv = nn.ConvTranspose1d(dim, dim, kernel_size=4, stride=2, padding=1)

    def forward(self, x):
        return self.conv(x)
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
class Conv1dBlock(nn.Module):
    '''
    Conv1d --> GroupNorm --> Mish

    Basic block for temporal U-Nets. Input/output shape is
    (batch, channels, n_support_points); the default padding keeps the
    temporal length unchanged for odd kernel sizes ("same" padding).
    '''

    def __init__(self, inp_channels, out_channels, kernel_size, padding=None, n_groups=8):
        super().__init__()
        self.block = nn.Sequential(
            nn.Conv1d(inp_channels, out_channels, kernel_size, stride=1,
                      padding=padding if padding is not None else kernel_size // 2),
            # GroupNorm is applied through a temporary 4-D view
            # (batch, channels, 1, n_support_points) and reshaped back.
            # NOTE(review): GroupNorm also accepts 3-D input directly —
            # presumably the reshape is historical; verify before removing,
            # since it is part of the Sequential (state_dict indices).
            Rearrange('batch channels n_support_points -> batch channels 1 n_support_points'),
            nn.GroupNorm(n_groups, out_channels),
            Rearrange('batch channels 1 n_support_points -> batch channels n_support_points'),
            nn.Mish(),
        )

    def forward(self, x):
        # x: (batch, inp_channels, T) -> (batch, out_channels, T)
        return self.block(x)
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
class ResidualBlock(nn.Module):
    ################
    # Janner code
    """Standard 2-D residual block: two conv+BN stages joined to a skip path.

    `downsample`, when given, is applied to the input to match the main
    path's shape (needed when stride != 1 or channel counts differ).
    """

    def __init__(self, in_channels, out_channels, stride=1, downsample=None):
        super().__init__()
        self.conv1 = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
        )
        self.conv2 = nn.Sequential(
            nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(out_channels),
        )
        self.downsample = downsample
        self.relu = nn.ReLU()
        self.out_channels = out_channels

    def forward(self, x):
        skip = self.downsample(x) if self.downsample else x
        out = self.conv2(self.conv1(x))
        out = out + skip
        return self.relu(out)
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
class ResidualTemporalBlock(nn.Module):
    """Residual 1-D conv block with additive conditioning.

    Two Conv1dBlock stages; the conditioning vector (e.g. a time embedding)
    is projected to the feature width and added after the first stage.
    A 1x1 conv (or identity) aligns channels on the residual path.

    NOTE(review): `n_support_points` is accepted but never used in this
    class — presumably kept for signature compatibility; confirm with callers.
    """

    def __init__(self, inp_channels, out_channels, cond_embed_dim, n_support_points, kernel_size=5):
        super().__init__()

        self.blocks = nn.ModuleList([
            Conv1dBlock(inp_channels, out_channels, kernel_size, n_groups=group_norm_n_groups(out_channels)),
            Conv1dBlock(out_channels, out_channels, kernel_size, n_groups=group_norm_n_groups(out_channels)),
        ])

        # Without context conditioning, cond_mlp handles only time embeddings
        self.cond_mlp = nn.Sequential(
            nn.Mish(),
            nn.Linear(cond_embed_dim, out_channels),
            # Append a trailing axis so the projection broadcasts over time.
            Rearrange('batch t -> batch t 1'),
        )

        # 1x1 conv only when channel counts differ; identity otherwise.
        self.residual_conv = nn.Conv1d(inp_channels, out_channels, kernel_size=1, stride=1, padding=0) \
            if inp_channels != out_channels else nn.Identity()

    def forward(self, x, c):
        '''
        x : [ batch_size x inp_channels x n_support_points ]
        c : [ batch_size x embed_dim ]
        returns:
        out : [ batch_size x out_channels x n_support_points ]
        '''
        h = self.blocks[0](x) + self.cond_mlp(c)
        h = self.blocks[1](h)
        res = self.residual_conv(x)
        out = h + res

        return out
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
class TemporalBlockMLP(nn.Module):
    """MLP variant of the temporal block: feature MLP plus additive conditioning.

    NOTE(review): despite the docstring shapes below (copied from the conv
    variant), the layers here are Linear, so inputs are presumably flat
    [batch, features] — confirm against callers.
    """

    def __init__(self, inp_channels, out_channels, cond_embed_dim):
        super().__init__()

        # Single MLP (no hidden stack: n_layers=0) mapping input features
        # to the output width.
        self.blocks = nn.ModuleList([
            MLP(inp_channels, out_channels, hidden_dim=out_channels, n_layers=0, act='mish')
        ])

        # Without context conditioning, cond_mlp handles only time embeddings
        self.cond_mlp = nn.Sequential(
            nn.Mish(),
            nn.Linear(cond_embed_dim, out_channels),
        )

        self.last_act = nn.Mish()

    def forward(self, x, c):
        '''
        x : [ batch_size x inp_channels x n_support_points ]
        c : [ batch_size x embed_dim ]
        returns:
        out : [ batch_size x out_channels x n_support_points ]
        '''
        h = self.blocks[0](x) + self.cond_mlp(c)
        return self.last_act(h)
|
| 394 |
+
|
| 395 |
+
|
| 396 |
+
class FiLMResidualTemporalBlock(ResidualTemporalBlock):
    """ResidualTemporalBlock variant with FiLM conditioning.

    Instead of adding a projected conditioning vector, the first stage's
    features are modulated as h * scale(c) + bias(c) (feature-wise linear
    modulation), using separate MLPs for scale and bias.
    """

    def __init__(self, inp_channels, out_channels, cond_embed_dim, n_support_points, kernel_size=5):
        super().__init__(inp_channels, out_channels, cond_embed_dim, n_support_points, kernel_size)

        # Replace the original cond_mlp with separate scale and bias MLPs
        delattr(self, 'cond_mlp')  # Remove the original conditioning MLP

        self.scale_mlp = nn.Sequential(
            nn.Mish(),
            nn.Linear(cond_embed_dim, out_channels),
            # Trailing axis so scale/bias broadcast over the temporal axis.
            Rearrange('batch t -> batch t 1'),
        )
        self.bias_mlp = nn.Sequential(
            nn.Mish(),
            nn.Linear(cond_embed_dim, out_channels),
            Rearrange('batch t -> batch t 1'),
        )

    def forward(self, x, c):
        '''
        x : [ batch_size x inp_channels x n_support_points ]
        c : [ batch_size x embed_dim ]
        returns:
        out : [ batch_size x out_channels x n_support_points ]
        '''
        # FiLM conditioning
        scale = self.scale_mlp(c)
        bias = self.bias_mlp(c)
        h = self.blocks[0](x) * scale + bias

        h = self.blocks[1](h)
        res = self.residual_conv(x)
        out = h + res
        return out
|
| 430 |
+
|
| 431 |
+
|
| 432 |
+
def group_norm_n_groups(n_channels, target_n_groups=8):
    """Pick a GroupNorm group count near `target_n_groups` dividing `n_channels`.

    Scans target_n_groups .. target_n_groups+9 for the first divisor of
    n_channels; returns 1 when n_channels is below the target or when no
    divisor is found in that window.
    """
    if n_channels < target_n_groups:
        return 1
    candidates = range(target_n_groups, target_n_groups + 10)
    return next((g for g in candidates if n_channels % g == 0), 1)
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
def compute_padding_conv1d(L, KSZ, S, D, deconv=False):
    '''
    https://gist.github.com/AhmadMoussa/d32c41c11366440bc5eaf4efb48a2e73
    Compute the padding that makes a Conv1d halve the input length, or a
    ConvTranspose1d double it.
    :param L: Input length (or width)
    :param KSZ: Kernel size (or width)
    :param S: Stride
    :param D: Dilation Factor
    :param deconv: True if ConvTranspose1d
    :return: Returns padding such that output width is exactly half of input width
    '''
    # NOTE: leftover debug print() calls were removed — they wrote to stdout
    # on every call from library code; the returned values are unchanged.
    if not deconv:
        # Invert L_out = (L + 2P - D*(KSZ-1) - 1)/S + 1 for L_out == L/2.
        return math.ceil((S * (L / 2) - L + D * (KSZ - 1) - 1) / 2)
    # ConvTranspose1d: L_out = (L-1)*S - 2P + D*(KSZ-1) + 1; target L_out == 2L.
    return math.ceil(((L - 1) * S + D * (KSZ - 1) + 1 - L * 2) / 2)
|
| 461 |
+
|
| 462 |
+
|
| 463 |
+
def compute_output_length_maxpool1d(L, KSZ, S, D, P):
    '''
    https://pytorch.org/docs/stable/generated/torch.nn.MaxPool1d.html
    Output length of MaxPool1d for the given geometry.
    :param L: Input length (or width)
    :param KSZ: Kernel size (or width)
    :param S: Stride
    :param D: Dilation Factor
    :param P: Padding
    '''
    effective = L + 2 * P - D * (KSZ - 1) - 1
    return math.floor(effective / S + 1)
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
if __name__ == "__main__":
    # Ad-hoc smoke test for SpatialSoftArgmax: put one large spike per channel
    # and check the soft arg-max recovers its coordinates.
    b, c, h, w = 1, 64, 12, 12
    x = torch.full((b, c, h, w), 0.00)

    i_max = 4
    true_max = torch.randint(0, 10, size=(b, c, 2))
    for i in range(b):
        for j in range(c):
            x[i, j, true_max[i, j, 0], true_max[i, j, 1]] = 1000
            # x[i, j, i_max, true_max] = 1
    # x[0,0,0,0] = 1000
    soft_max = SpatialSoftArgmax(normalize=True)(x)
    soft_max2 = SpatialSoftArgmax(normalize=False)(x)
    # NOTE(review): `diff` is computed but never checked — presumably a
    # leftover probe comparing normalized vs pixel coordinates.
    diff = soft_max - (soft_max2 / 12) * 2 - 1
    resh = soft_max.reshape(b, c, 2)
    # NOTE(review): this compares integer pixel indices against *normalized*
    # ([-1, 1]) coordinates — confirm the assertion actually holds.
    assert torch.allclose(true_max.float(), resh)

    exit()
    # NOTE(review): everything below is dead code — the exit() above always
    # fires before the temperature/scale sweep runs.
    test_scales = [1, 5, 10, 30, 50, 70, 100]
    for scale in test_scales[::-1]:
        i_max = 4
        true_max = torch.randint(0, 10, size=(b, c, 2))
        for i in range(b):
            for j in range(c):
                x[i, j, true_max[i, j, 0], true_max[i, j, 1]] = scale
                # x[i, j, i_max, true_max] = 1
        # x[0,0,0,0] = 1000
        soft_max = SpatialSoftArgmax(normalize=False)(x)
        resh = soft_max.reshape(b, c, 2)
        assert torch.allclose(true_max.float(), resh), scale
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/layers/layers_attention.py
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
import torch
|
| 3 |
+
import torch.nn as nn
|
| 4 |
+
import torch.nn.functional as F
|
| 5 |
+
from einops import rearrange, repeat
|
| 6 |
+
from inspect import isfunction
|
| 7 |
+
from torch import einsum
|
| 8 |
+
|
| 9 |
+
from cfdp.diffusion_policy.layers.layers import group_norm_n_groups
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def exists(val):
    """True when `val` is not None."""
    return val is not None


def uniq(arr):
    """Order-preserving de-duplication, returned as a dict keys view."""
    return dict.fromkeys(arr).keys()


def default(val, d):
    """Return `val` when it is set; otherwise `d` (calling it if it is a function)."""
    if exists(val):
        return val
    return d() if isfunction(d) else d


def max_neg_value(t):
    """Most negative finite value representable in `t`'s dtype."""
    return -torch.finfo(t.dtype).max


def init_(tensor):
    """In-place uniform init in [-1/sqrt(dim), 1/sqrt(dim)] over the last dim."""
    bound = 1 / math.sqrt(tensor.shape[-1])
    tensor.uniform_(-bound, bound)
    return tensor
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# feedforward
class GEGLU(nn.Module):
    """GELU-gated linear unit: project to 2*dim_out, gate half with GELU of the other."""

    def __init__(self, dim_in, dim_out):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out * 2)

    def forward(self, x):
        value, gate = torch.chunk(self.proj(x), 2, dim=-1)
        return value * F.gelu(gate)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class FeedForward(nn.Module):
    """Position-wise transformer feed-forward block, optionally GEGLU-gated.

    dim -> dim*mult (GELU or GEGLU) -> dropout -> dim_out (defaults to dim).
    """

    def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.):
        super().__init__()
        inner_dim = int(dim * mult)
        dim_out = dim if dim_out is None else dim_out
        if glu:
            project_in = GEGLU(dim, inner_dim)
        else:
            project_in = nn.Sequential(nn.Linear(dim, inner_dim), nn.GELU())

        self.net = nn.Sequential(
            project_in,
            nn.Dropout(dropout),
            nn.Linear(inner_dim, dim_out),
        )

    def forward(self, x):
        return self.net(x)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def zero_module(module):
    """
    Zero out all parameters of `module` in place and return it.

    Used so that residual branches start as the identity.
    """
    with torch.no_grad():
        for param in module.parameters():
            param.zero_()
    return module
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def Normalize(in_channels):
    """GroupNorm whose group count is chosen by group_norm_n_groups to divide in_channels."""
    # return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True)
    return torch.nn.GroupNorm(num_groups=group_norm_n_groups(in_channels), num_channels=in_channels, eps=1e-6,
                              affine=True)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class CrossAttention(nn.Module):
    """Multi-head (cross-)attention: queries from `x`, keys/values from `context`.

    Falls back to self-attention when no context is given.
    Shapes: x (B, N, query_dim), context (B, M, context_dim) -> (B, N, query_dim).
    """

    def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.):
        super().__init__()
        inner_dim = dim_head * heads
        # Default to self-attention dimensions when no context_dim is given.
        context_dim = default(context_dim, query_dim)

        self.scale = dim_head ** -0.5  # 1/sqrt(d_head)
        self.heads = heads

        self.to_q = nn.Linear(query_dim, inner_dim, bias=False)
        self.to_k = nn.Linear(context_dim, inner_dim, bias=False)
        self.to_v = nn.Linear(context_dim, inner_dim, bias=False)

        self.to_out = nn.Sequential(
            nn.Linear(inner_dim, query_dim),
            nn.Dropout(dropout)
        )

    def forward(self, x, context=None, mask=None):
        h = self.heads

        q = self.to_q(x)
        # Self-attention when no context is supplied.
        context = default(context, x)
        k = self.to_k(context)
        v = self.to_v(context)

        # Fold heads into the batch dimension: (B, N, h*d) -> (B*h, N, d).
        q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v))

        sim = einsum('b i d, b j d -> b i j', q, k) * self.scale

        if exists(mask):
            # mask convention: True = keep; masked positions are filled with
            # the most negative finite value so softmax sends them to ~0.
            mask = rearrange(mask, 'b ... -> b (...)')
            max_neg_value = -torch.finfo(sim.dtype).max
            mask = repeat(mask, 'b j -> (b h) () j', h=h)
            sim.masked_fill_(~mask, max_neg_value)

        # attention, what we cannot get enough of
        attn = sim.softmax(dim=-1)

        out = einsum('b i j, b j d -> b i d', attn, v)
        # Unfold heads back into the feature dimension.
        out = rearrange(out, '(b h) n d -> b n (h d)', h=h)
        return self.to_out(out)
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class BasicTransformerBlock(nn.Module):
    """Pre-norm transformer block: self-attention, cross-attention, feed-forward.

    NOTE(review): `checkpoint` is stored but never used in forward —
    gradient checkpointing is not actually applied here.
    """

    def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True):
        super().__init__()
        self.attn1 = CrossAttention(query_dim=dim, heads=n_heads, dim_head=d_head,
                                    dropout=dropout)  # is a self-attention
        self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff)
        self.attn2 = CrossAttention(query_dim=dim, context_dim=context_dim,
                                    heads=n_heads, dim_head=d_head, dropout=dropout)  # is self-attn if context is none
        self.norm1 = nn.LayerNorm(dim)
        self.norm2 = nn.LayerNorm(dim)
        self.norm3 = nn.LayerNorm(dim)
        self.checkpoint = checkpoint

    def forward(self, x, context=None):
        # Residual connections and layer normalization
        x = self.attn1(self.norm1(x)) + x  # self attention
        x = self.attn2(self.norm2(x), context=context) + x  # attention to context
        x = self.ff(self.norm3(x)) + x
        return x
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
class SpatialTransformer(nn.Module):
    """
    Transformer block for trajectory-like data.
    First, project the input (aka embedding)
    and reshape to b, t, d.
    Then apply standard transformer action.
    Finally, reshape to trajectory

    Input/output shape: (batch, in_channels, length); the output projection
    is zero-initialized so the block starts as the identity (x + 0).
    """

    def __init__(self, in_channels, n_heads, d_head,
                 depth=1, dropout=0., context_dim=None):
        super().__init__()
        self.in_channels = in_channels
        inner_dim = n_heads * d_head
        self.norm = Normalize(in_channels)

        # The commented-out Conv2d variants are the original image-latent
        # version, kept for reference; this class operates on 1-D sequences.
        # self.proj_in = nn.Conv2d(in_channels,
        #                          inner_dim,
        #                          kernel_size=1,
        #                          stride=1,
        #                          padding=0)
        self.proj_in = nn.Conv1d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0)

        self.transformer_blocks = nn.ModuleList(
            [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim)
             for d in range(depth)]
        )

        # zero_module makes proj_out output zeros at init -> identity block.
        # self.proj_out = zero_module(nn.Conv2d(inner_dim,
        #                                       in_channels,
        #                                       kernel_size=1,
        #                                       stride=1,
        #                                       padding=0))
        self.proj_out = zero_module(nn.Conv1d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0))

    def forward(self, x, context=None):
        # note: if no context is given, cross-attention defaults to self-attention
        b, c, h = x.shape  # h is the temporal length here
        x_in = x
        x = self.norm(x)
        x = self.proj_in(x)
        # Move channels last so positions become transformer tokens.
        x = rearrange(x, 'b c h -> b h c')
        for block in self.transformer_blocks:
            x = block(x, context=context)
        x = rearrange(x, 'b h c -> b c h', h=h)
        x = self.proj_out(x)
        # Residual around the whole transformer stack.
        return x + x_in
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/__init__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .diffusion_model_base import GaussianDiffusionModel, build_condition
|
| 2 |
+
from .flow_matching_base import FlowMatchingModel
|
| 3 |
+
from .mlp_model import MLPModel
|
| 4 |
+
from .gaussian_diffusion_loss import FlowMatchingLoss, GaussianDiffusionLoss
|
| 5 |
+
from .temporal_unet import TemporalUnet, ContextModel, PointUnet, UNET_DIM_MULTS
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/__pycache__/helpers.cpython-310.pyc
ADDED
|
Binary file (3.54 kB). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/__pycache__/mlp_model.cpython-310.pyc
ADDED
|
Binary file (2.01 kB). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/__pycache__/sample_functions.cpython-310.pyc
ADDED
|
Binary file (6 kB). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/diffusion_model_base.py
ADDED
|
@@ -0,0 +1,689 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Adapted from https://github.com/jannerm/diffuser
|
| 3 |
+
"""
|
| 4 |
+
import abc
|
| 5 |
+
import time
|
| 6 |
+
from collections import namedtuple
|
| 7 |
+
from copy import copy
|
| 8 |
+
|
| 9 |
+
import einops
|
| 10 |
+
import matplotlib.pyplot as plt
|
| 11 |
+
import numpy as np
|
| 12 |
+
import torch
|
| 13 |
+
import torch.nn as nn
|
| 14 |
+
from abc import ABC
|
| 15 |
+
import time
|
| 16 |
+
from enum import Enum
|
| 17 |
+
|
| 18 |
+
from torch.nn import DataParallel
|
| 19 |
+
|
| 20 |
+
from cfdp.diffusion_policy.models.helpers import cosine_beta_schedule, Losses, exponential_beta_schedule
|
| 21 |
+
from cfdp.diffusion_policy.models.sample_functions import extract, apply_hard_conditioning, guide_gradient_steps, \
|
| 22 |
+
ddpm_sample_fn, ddpm_sample_fn_stomp
|
| 23 |
+
from cfdp.diffusion_policy.utils.utils import to_numpy
|
| 24 |
+
from cfdp.utils.data_utils import interpolate_points, pad_tensor
|
| 25 |
+
|
| 26 |
+
def make_timesteps(batch_size, i, device):
    """Return a 1-D ``long`` tensor of length *batch_size*, filled with timestep *i*."""
    return torch.full(size=(batch_size,), fill_value=i, dtype=torch.long, device=device)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def build_condition(model, dataset, input_dict):
    """Assemble the conditioning dict for *model* from the (already normalized)
    *input_dict*.

    Returns None when the model has no context model; otherwise a dict with a
    'tasks' entry holding the normalized task features, concatenated with the
    normalized observation history when the dataset provides one.
    """
    if model.context_model is None:
        return None

    task_normalized = input_dict[f'{dataset.data_key_task}_normalized']

    # TODO: make the obs-history detection less ad hoc (hasattr sniffing).
    if not hasattr(dataset, 'data_key_obs_history'):
        return {'tasks': task_normalized}

    obs_history = input_dict[f'{dataset.data_key_obs_history}_normalized']
    return {'tasks': torch.cat((task_normalized, obs_history), dim=-1)}
|
| 52 |
+
|
| 53 |
+
class PredictionMode(Enum):
    # What quantity the denoising network outputs; consumed by
    # predict_start_from_noise / predict_noise_from_start to convert the raw
    # model output into x0 or epsilon.
    EPSILON = "epsilon"  # network outputs the noise epsilon
    V = "v"              # network outputs v (velocity parameterization)
    X = "x"              # network outputs x0 (the clean sample)
|
| 57 |
+
|
| 58 |
+
class GaussianDiffusionModel(nn.Module, ABC):
|
| 59 |
+
|
| 60 |
+
def __init__(self,
             model=None,
             variance_schedule='exponential',
             n_diffusion_steps=100,
             clip_denoised=True,
             prediction_mode="epsilon",  # Accept string: "epsilon", "v", "x"
             use_snr_weight=False,  # SNR weighting for the training loss
             loss_type='l2',
             context_model=None,
             device='cuda',
             **kwargs):
    """Set up the diffusion schedule and precompute sampling coefficients.

    Args:
        model: denoising network; must expose ``state_dim``.
        variance_schedule: 'cosine' or 'exponential' beta schedule.
        n_diffusion_steps: number of diffusion timesteps T.
        clip_denoised: clamp the predicted x0 to [-1, 1] during sampling.
        prediction_mode: what the network predicts ("epsilon", "v" or "x"),
            as a string or a PredictionMode member.
        use_snr_weight: enable SNR weighting of the training loss.
        loss_type: key into the Losses registry (e.g. 'l2').
        context_model: optional module embedding the conditioning dict.
        device: device tag used by the inference helpers.
    """
    super().__init__()

    self.model_name = 'DiffusionModel'
    self.model = model
    self.context_model = context_model
    self.n_diffusion_steps = n_diffusion_steps
    self.state_dim = self.model.state_dim
    self.device = device
    # debug information display
    self.verbose = False
    self.use_snr_weight = use_snr_weight

    if variance_schedule == 'cosine':
        betas = cosine_beta_schedule(n_diffusion_steps, s=0.008, a_min=0, a_max=0.999)
    elif variance_schedule == 'exponential':
        betas = exponential_beta_schedule(n_diffusion_steps, beta_start=1e-4, beta_end=1.0)
    else:
        raise NotImplementedError

    alphas = 1. - betas
    alphas_cumprod = torch.cumprod(alphas, dim=0)
    # alpha_bar_{t-1}, with alpha_bar at t=-1 defined as 1
    alphas_cumprod_prev = torch.cat([torch.ones(1), alphas_cumprod[:-1]])

    self.clip_denoised = clip_denoised
    if isinstance(prediction_mode, str):
        self.prediction_mode = PredictionMode(prediction_mode.lower())
    elif isinstance(prediction_mode, PredictionMode):
        self.prediction_mode = prediction_mode
    else:
        raise ValueError(f"Invalid prediction_mode: {prediction_mode}")

    self.register_buffer('betas', betas)
    self.register_buffer('alphas_cumprod', alphas_cumprod)
    self.register_buffer('alphas_cumprod_prev', alphas_cumprod_prev)

    # calculations for diffusion q(x_t | x_{t-1}) and others
    self.register_buffer('sqrt_alphas_cumprod', torch.sqrt(alphas_cumprod))
    self.register_buffer('sqrt_one_minus_alphas_cumprod', torch.sqrt(1. - alphas_cumprod))
    self.register_buffer('log_one_minus_alphas_cumprod', torch.log(1. - alphas_cumprod))
    self.register_buffer('sqrt_recip_alphas_cumprod', torch.sqrt(1. / alphas_cumprod))
    self.register_buffer('sqrt_recipm1_alphas_cumprod', torch.sqrt(1. / alphas_cumprod - 1))

    # calculations for posterior q(x_{t-1} | x_t, x_0)
    posterior_variance = betas * (1. - alphas_cumprod_prev) / (1. - alphas_cumprod)
    self.register_buffer('posterior_variance', posterior_variance)

    ## log calculation clipped because the posterior variance
    ## is 0 at the beginning of the diffusion chain
    self.register_buffer('posterior_log_variance_clipped',
                         torch.log(torch.clamp(posterior_variance, min=1e-20)))
    # FIX: use torch.sqrt (not np.sqrt) on torch tensors — np.sqrt fails on
    # CUDA tensors and needlessly round-trips through numpy on CPU.
    self.register_buffer('posterior_mean_coef1',
                         betas * torch.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod))
    self.register_buffer('posterior_mean_coef2',
                         (1. - alphas_cumprod_prev) * torch.sqrt(alphas) / (1. - alphas_cumprod))

    ## get loss coefficients and initialize objective
    self.loss_fn = Losses[loss_type]()

    # history buffers for visualizing the diffusion process
    self.diffusion_history = []
    self.grad_scaled_history = []
|
| 145 |
+
# ------------------------------------------ sampling ------------------------------------------#
|
| 146 |
+
def predict_noise_from_start(self, x_t, t, x0):
    """
    Convert the raw network output into an epsilon (noise) prediction.

    Model output can be 'epsilon', 'v', or 'x'.
    NOTE(review): despite its name, the ``x0`` argument is the *raw model
    output*; its meaning depends on ``self.prediction_mode``.
    """
    if self.prediction_mode == PredictionMode.EPSILON:
        # network already outputs epsilon — pass it through
        return x0
    elif self.prediction_mode == PredictionMode.X:
        # network outputs x0; invert x_t = sqrt(ab)*x0 + sqrt(1-ab)*eps
        return (
            extract(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - x0
        ) / extract(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape)
    elif self.prediction_mode == PredictionMode.V:
        # model predicts v; convert to epsilon:
        # eps = sqrt(alpha_bar) * v + sqrt(1 - alpha_bar) * x_t
        sqrt_ab = extract(self.sqrt_alphas_cumprod, t, x_t.shape)
        sqrt_1mab = extract(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape)
        v = x0
        return sqrt_ab * v + sqrt_1mab * x_t
    else:
        raise ValueError(f"Unknown prediction_mode: {self.prediction_mode}")
|
| 165 |
+
|
| 166 |
+
def predict_start_from_noise(self, x_t, t, noise):
    '''
    Convert the raw network output into an x0 (clean sample) prediction.

    Model output can be 'epsilon', 'v', or 'x'.
    NOTE(review): the ``noise`` argument is the *raw model output*; its
    meaning depends on ``self.prediction_mode``.
    '''
    if self.prediction_mode == PredictionMode.EPSILON:
        # x0 = sqrt(1/alpha_bar)*x_t - sqrt(1/alpha_bar - 1)*eps
        return (
            extract(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t -
            extract(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise
        )
    elif self.prediction_mode == PredictionMode.X:
        # network outputs x0 directly
        return noise
    elif self.prediction_mode == PredictionMode.V:
        # model predicts v; convert to x0:
        # x0 = sqrt(alpha_bar)*x_t - sqrt(1-alpha_bar)*v
        sqrt_ab = extract(self.sqrt_alphas_cumprod, t, x_t.shape)
        sqrt_1mab = extract(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape)
        v = noise
        return sqrt_ab * x_t - sqrt_1mab * v
    else:
        raise ValueError(f"Unknown prediction_mode: {self.prediction_mode}")
|
| 186 |
+
|
| 187 |
+
def q_posterior(self, x_start, x_t, t):
    """Gaussian posterior q(x_{t-1} | x_t, x_0).

    Returns the posterior mean, variance, and clipped log-variance at
    timestep ``t`` (coefficients precomputed in __init__).
    """
    posterior_mean = (
        extract(self.posterior_mean_coef1, t, x_t.shape) * x_start +
        extract(self.posterior_mean_coef2, t, x_t.shape) * x_t
    )
    posterior_variance = extract(self.posterior_variance, t, x_t.shape)
    posterior_log_variance_clipped = extract(self.posterior_log_variance_clipped, t, x_t.shape)
    return posterior_mean, posterior_variance, posterior_log_variance_clipped
|
| 195 |
+
|
| 196 |
+
def p_mean_variance(self, x, hard_conds, context, t):
    """Compute the reverse-process mean/variance p(x_{t-1} | x_t) at timestep t.

    Raises:
        RuntimeError: if ``self.clip_denoised`` is False — this sampler
            assumes the predicted x0 is clamped to [-1, 1].
    """
    if context is not None:
        context = self.context_model(context)

    x_recon = self.predict_start_from_noise(x, t=t, noise=self.model(x, t, context['condition']))
    # todo: hack here maybe, loop all x from -1 to 1
    if self.clip_denoised:
        x_recon.clamp_(-1., 1.)
    else:
        # BUG FIX: the original `assert RuntimeError()` always passed (it
        # asserts a truthy exception instance) and silently skipped clamping.
        raise RuntimeError("p_mean_variance requires clip_denoised=True")

    model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t)
    return model_mean, posterior_variance, posterior_log_variance
|
| 210 |
+
|
| 211 |
+
@torch.no_grad()
def p_sample_loop(self, shape,
                  hard_conds,
                  context = None,
                  return_chain = False,
                  sample_fn = ddpm_sample_fn,
                  n_diffusion_steps_without_noise = 0,
                  prior_trajectory = None,
                  timestep = None,
                  **sample_kwargs):
    """Ancestral sampling loop over the reverse diffusion process.

    Args:
        shape: (batch, horizon, state_dim) of the trajectories to sample.
        hard_conds: hard constraints re-applied after every denoising step.
        context: optional conditioning, passed through to ``sample_fn``.
        return_chain: also return the full denoising chain (stacked on dim 1).
        sample_fn: per-step sampler (defaults to ``ddpm_sample_fn``).
        n_diffusion_steps_without_noise: skip this many final steps.
        prior_trajectory: warm-start trajectory for replanning; when given,
            ``timestep`` determines how many denoising steps are run.
        timestep: starting timestep used together with ``prior_trajectory``.
    """
    device = self.betas.device

    batch_size = shape[0]
    n_diffusion_steps = self.n_diffusion_steps  # Number of diffusion timesteps.
    # If given prior trajectory to replan, then use the timestep provided.
    if prior_trajectory is not None:
        # Expand prior trajectory to match batch size if needed
        x = prior_trajectory.repeat(batch_size, 1, 1)
        n_diffusion_steps = timestep
    else:
        x = torch.randn(shape, device=device)

    x = apply_hard_conditioning(x, hard_conds)
    chain = [x] if return_chain else None
    for i in reversed(range(n_diffusion_steps_without_noise, n_diffusion_steps)):
        t = make_timesteps(batch_size, i, device)
        x, values = sample_fn(self, x, hard_conds, context, t, **sample_kwargs)
        x = apply_hard_conditioning(x, hard_conds)
        if return_chain:
            chain.append(x)

    if return_chain:
        chain = torch.stack(chain, dim=1)
        return x, chain
    # BUG FIX: these debug prints previously ran unconditionally on every
    # no-chain call; gate them behind the instance-level verbose flag.
    if self.verbose:
        print("x min:", x.min().item(), "x max:", x.max().item())
        print("hard_conds: ", hard_conds)
    return x
|
| 250 |
+
|
| 251 |
+
def eta_schedule(self, t_norm: torch.Tensor):
    """DDIM stochasticity schedule. ``t_norm`` in [0, 1]; 1 = early, 0 = late.

    A cosine ramp raised to a high power: eta stays near 1 only at the very
    start of sampling and collapses toward 0 (near-deterministic steps) late.
    """
    cosine_ramp = (1 - torch.cos(torch.pi * t_norm)) / 2  # in [0, 1]
    return cosine_ramp ** 20.0
|
| 260 |
+
|
| 261 |
+
@torch.no_grad()
def _ddim_step_core(self, x_t, t, t_next, context, eta_t,
                    x0_override_fn=None):
    """One DDIM update from timestep ``t`` to ``t_next``.

    Args:
        x_t: current noisy sample.
        t, t_next: current / next timestep tensors (t_next < 0 on the last step).
        context: pre-embedded conditioning (already passed through context_model).
        eta_t: stochasticity coefficient for this step (0 = deterministic DDIM).
        x0_override_fn: optional hook that may replace/blend the predicted x0
            (used for guidance); returning None keeps the prediction.

    Returns:
        (x_next, predicted x0, raw model output).
    """
    # model forward
    model_out = self.model(x_t, t, context['condition'])
    # x0 from model_out (handles epsilon / x / v parameterizations)
    x0 = self.predict_start_from_noise(x_t, t=t, noise=model_out)
    x0.clamp_(-1., 1.)
    # optional late-stage overwrite on x0
    if x0_override_fn is not None:
        x0_new = x0_override_fn(x0)  # may return x0 or blended x0
        if x0_new is not None:
            x0 = x0_new
    # compute epsilon from the raw model output
    # NOTE(review): epsilon is derived from model_out, not from the possibly
    # overridden x0 — confirm this is intentional for guided sampling.
    pred_noise = self.predict_noise_from_start(x_t, t=t, x0=model_out)

    if self.verbose:
        print("noise diff:", (model_out - pred_noise).abs().mean().item())
    # last step: return x0 directly; otherwise apply the DDIM update rule
    if (t_next < 0).all():
        x_next = x0
    else:
        alpha = extract(self.alphas_cumprod, t, x_t.shape)
        alpha_next = extract(self.alphas_cumprod, t_next, x_t.shape)
        sigma = (
            eta_t * ((1 - alpha / alpha_next) * (1 - alpha_next) / (1 - alpha)).sqrt())
        c = (1 - alpha_next - sigma**2).sqrt()
        x_next = x0 * alpha_next.sqrt() + c * pred_noise
        x_next = x_next + sigma * torch.randn_like(x_next)
    return x_next, x0, model_out
|
| 294 |
+
|
| 295 |
+
@torch.no_grad()
def ddim_sample(
        self,
        shape,
        hard_conds,
        full_time_series=None,
        start_timestep = None,
        prior_trajectory = None,
        context = None,
        return_chain = False,
        **sample_kwargs,
):
    """DDIM sampling, optionally restarted from an intermediate timestep.

    Adapted from https://github.com/ezhang7423/language-control-diffusion/blob/63cdafb63d166221549968c662562753f6ac5394/src/lcd/models/diffusion.py#L226

    Args:
        shape: (batch, horizon, state_dim) of the trajectories to sample.
        hard_conds: hard constraints re-applied after every step.
        full_time_series: optional precomputed timestep schedule (first entry
            must be -1); when None, a (T // 4)-step linear schedule is built.
        start_timestep: when set, denoise only from this intermediate timestep.
        prior_trajectory: warm-start sample used together with start_timestep.
        context: conditioning dict, embedded via ``self.context_model``.
        return_chain: also return the full denoising chain (stacked on dim 1).
    """
    device = self.betas.device
    batch_size = shape[0]

    # BUG FIX: total_timesteps was previously defined only inside the
    # `full_time_series is None` branch, so passing a schedule raised a
    # NameError at the eta-schedule computation below.
    total_timesteps = self.n_diffusion_steps
    if full_time_series is None:
        sampling_timesteps = self.n_diffusion_steps // 4
        full_time_series = torch.linspace(0, total_timesteps - 1, steps=sampling_timesteps + 1, device=device)
        full_time_series = torch.cat((torch.tensor([-1], device=device), full_time_series))

    if start_timestep is not None:  # denoising from an intermediate timestep
        times = full_time_series[full_time_series <= start_timestep].int().tolist()
        times = list(reversed(times))
        time_pairs = list(zip(times[:-1], times[1:]))
    else:
        times = list(reversed(full_time_series.int().tolist()))
        time_pairs = list(zip(times[:-1], times[1:]))  # [(T-1, T-2), ..., (1, 0), (0, -1)]

    if self.verbose:
        print("full time series: ", full_time_series)
        print("time pairs: ", time_pairs)
        print("whole time steps for DDIM sampling: ", len(time_pairs))

    if start_timestep is not None and prior_trajectory is not None:
        x = prior_trajectory
    else:
        x = torch.randn(shape, device=device)
    x = apply_hard_conditioning(x, hard_conds)

    chain = [x] if return_chain else None

    if context is not None:
        context = self.context_model(context)

    for time, time_next in time_pairs:
        t = make_timesteps(batch_size, time, device)
        t_next = make_timesteps(batch_size, time_next, device)

        # per-step stochasticity: ~1 at the start of sampling, ~0 at the end
        t_norm = (time + 1e-8) / (total_timesteps - 1 + 1e-8)
        eta_t = self.eta_schedule(torch.tensor(t_norm, device=device))
        x, x0, _ = self._ddim_step_core(
            x, t, t_next, context, eta_t,
            x0_override_fn=None
        )

        x = apply_hard_conditioning(x, hard_conds)

        if return_chain: chain.append(x)
        if (time_next < 0): break

    if return_chain:
        chain = torch.stack(chain, dim=1)
        return x, chain
    return x
|
| 368 |
+
|
| 369 |
+
@torch.no_grad()
def ddim_sample_with_guidance(
        self,
        shape,
        hard_conds,
        return_chain=False,
        guide=None,
        rank_fn=None,
        start_timestep = None,
        prior_trajectory = None,
        context = None,
        sampling_timesteps=None,
        **sample_kwargs,
):
    """DDIM sampling with optional gradient guidance and trajectory ranking.

    Args:
        shape: (batch, horizon, state_dim) of the trajectories to sample.
        hard_conds: hard constraints re-applied after every step.
        return_chain: also return the full denoising chain (stacked on dim 1).
        guide: guidance object used by ``guide_gradient_steps`` on predicted x0.
        rank_fn: optional ranking/inpainting helper applied to x0 (rank at the
            final step, inpaint earlier).
        start_timestep / prior_trajectory: restart denoising from an
            intermediate timestep with a warm-start trajectory.
        context: conditioning dict, embedded via ``self.context_model``.
        sampling_timesteps: number of DDIM steps (defaults to T // 4).
        **sample_kwargs: forwarded to guidance; may carry ``t_start_guide``.
    """
    device = self.betas.device
    batch_size = shape[0]
    # FIX: removed unused local `eta = 0` (eta_t from the schedule is used).
    total_timesteps = self.n_diffusion_steps
    sampling_timesteps = sampling_timesteps or total_timesteps // 4

    # guidance only kicks in at or below this timestep
    if "t_start_guide" in sample_kwargs:
        t_start_guide = sample_kwargs["t_start_guide"]
        print("t_start_guide =", t_start_guide)
    else:
        print("No t_start_guide provided, t_start_guide = 3")
        t_start_guide = 3

    # ======== Construct Time Schedule ========
    full_time_series = torch.linspace(0, total_timesteps - 1,
                                      steps=sampling_timesteps + 1,
                                      device=device)
    full_time_series = torch.cat((torch.tensor([-1], device=device), full_time_series))
    if start_timestep is not None:  # denoising from an intermediate timestep
        times = full_time_series[full_time_series <= start_timestep].int().tolist()
        times = list(reversed(times))
        time_pairs = list(zip(times[:-1], times[1:]))
    else:
        times = list(reversed(full_time_series.int().tolist()))
        time_pairs = list(zip(times[:-1], times[1:]))  # [(T-1, T-2), ..., (1, 0), (0, -1)]

    # use guidance to overwrite x, or not apply guidance
    def apply_rank_or_inpaint(x, t):
        if rank_fn is None:
            return x
        if t < 1:
            if self.verbose:
                print("Applying rank function", type(rank_fn))
            return rank_fn.rank_trajectory(x, traj_type="ee")
        else:
            if self.verbose:
                print("Applying inpaint function", type(rank_fn))
            return rank_fn.inpaint_trajectory(x, traj_type="ee", num_windows=4, top_k=3)

    def make_override_fn(step_or_t):
        def _fn(x0):
            # Add guidance gradients
            x_over, _ = guide_gradient_steps(
                x0, hard_conds=hard_conds, guide=guide, **sample_kwargs)
            x_over = apply_rank_or_inpaint(x_over, step_or_t)
            x_over = apply_hard_conditioning(x_over, hard_conds)
            return x_over
        return _fn

    # ======== Initialize Trajectory ========
    if start_timestep is not None and prior_trajectory is not None:
        x = prior_trajectory
    else:
        x = torch.randn(shape, device=device)
    x = apply_hard_conditioning(x, hard_conds)

    # ======== Iterative Sampling + Guidance ========
    if rank_fn is not None: rank_fn.reset()
    chain = [x] if return_chain else None

    if context is not None:
        context = self.context_model(context)

    for time, time_next in time_pairs:
        t = make_timesteps(batch_size, time, device)
        t_next = make_timesteps(batch_size, time_next, device)

        override_fn = make_override_fn(time) if (guide is not None and time <= t_start_guide) else None
        # per-step stochasticity: ~1 at the start of sampling, ~0 at the end
        t_norm = (time + 1e-8) / (total_timesteps - 1 + 1e-8)
        eta_t = self.eta_schedule(torch.tensor(t_norm, device=device))
        x, x0, _ = self._ddim_step_core(
            x, t, t_next, context, eta_t,
            x0_override_fn=override_fn)

        x = apply_hard_conditioning(x, hard_conds)

        if return_chain: chain.append(x)
        if (time_next < 0): break

    if return_chain:
        chain = torch.stack(chain, dim=1)
        return x, chain
    return x
|
| 475 |
+
|
| 476 |
+
@torch.no_grad()
def conditional_sample(self,
                       hard_conds,
                       horizon = None,
                       batch_size = 1,
                       ddim = False,
                       prior_trajectory = None,
                       timestep = None,
                       **sample_kwargs):
    '''
    hard conditions : hard_conds : { (time, state), ... }

    Dispatches either to DDIM-with-guidance sampling or to the DDPM loop.
    NOTE(review): ``self.horizon`` is not assigned in the visible __init__ —
    calling with horizon=None may raise AttributeError unless a subclass
    sets it; confirm.
    NOTE(review): the DDIM branch does not forward prior_trajectory /
    timestep, so replanning warm-starts only apply on the p_sample_loop
    path — confirm this is intended.
    '''
    horizon = horizon or self.horizon
    shape = (batch_size, horizon, self.state_dim)


    if self.verbose:
        print("-------use DDIM------", ddim)
    if ddim:
        return self.ddim_sample_with_guidance(shape, hard_conds, sampling_timesteps=self.n_diffusion_steps, **sample_kwargs)
    return self.p_sample_loop(shape,
                              hard_conds,
                              prior_trajectory = prior_trajectory,
                              timestep = timestep,
                              **sample_kwargs)
|
| 502 |
+
|
| 503 |
+
def forward(self, cond, *args, **kwargs):
    """Direct nn.Module forward is intentionally unsupported; use
    ``conditional_sample`` / ``run_inference`` instead.

    Raises:
        NotImplementedError: always.
    """
    # FIX: removed the unreachable `return self.conditional_sample(...)`
    # that followed the raise (dead code).
    raise NotImplementedError
|
| 506 |
+
|
| 507 |
+
@torch.no_grad()
def warmup(self, horizon=64, device='cuda'):
    """Run one throwaway forward pass through the denoising model so any lazy
    initialization happens before timing-sensitive inference."""
    dummy_batch = torch.randn((2, horizon, self.state_dim), device=device)
    dummy_t = make_timesteps(2, 1, device)
    self.model(dummy_batch, dummy_t, context=None)
|
| 513 |
+
|
| 514 |
+
@torch.no_grad()
def run_inference(self, context=None, hard_conds=None, n_samples=1, return_chain=False, **diffusion_kwargs):
    """Sample ``n_samples`` trajectories from the diffusion model.

    ``context`` and ``hard_conds`` must already be normalized; both are
    shallow-copied and tiled along a new batch dimension of size n_samples.
    Returns the full denoising chain when ``return_chain`` is True, otherwise
    only the final denoised batch.
    """
    # clear diffusion history record
    self.diffusion_history = []
    self.grad_scaled_history = []

    # context and hard_conds must be normalized
    hard_conds = copy(hard_conds)
    context = copy(context)

    # repeat hard conditions and contexts for n_samples
    for k, v in hard_conds.items():
        new_state = einops.repeat(v.to(self.device), 'd -> b d', b=n_samples)
        hard_conds[k] = new_state

    if context is not None:
        for k, v in context.items():
            context[k] = einops.repeat(v.to(self.device), 'd -> b d', b=n_samples)

    # Sample from diffusion model
    samples, chain = self.conditional_sample(
        hard_conds, context=context, batch_size=n_samples, return_chain=True, **diffusion_kwargs
    )

    # chain: [ n_samples x (n_diffusion_steps + 1) x horizon x (state_dim)]
    # extract normalized trajectories
    trajs_chain_normalized = chain

    # trajs: [ (n_diffusion_steps + 1) x n_samples x horizon x state_dim ]
    trajs_chain_normalized = einops.rearrange(trajs_chain_normalized, 'b diffsteps h d -> diffsteps b h d')

    if return_chain:
        return trajs_chain_normalized

    # return the last denoising step
    return trajs_chain_normalized[-1]
|
| 550 |
+
|
| 551 |
+
@torch.no_grad()
|
| 552 |
+
def run_inference_with_replanning(self,
|
| 553 |
+
context = None,
|
| 554 |
+
hard_conds = None,
|
| 555 |
+
n_samples = 1,
|
| 556 |
+
return_chain = False,
|
| 557 |
+
trajectory_prior = None,
|
| 558 |
+
choice: str = None,
|
| 559 |
+
trajectory_length: int = None,
|
| 560 |
+
timestep: int = None,
|
| 561 |
+
**diffusion_kwargs):
|
| 562 |
+
# clear diffusion history record
|
| 563 |
+
self.diffusion_history = []
|
| 564 |
+
self.grad_scaled_history = []
|
| 565 |
+
# Extract the trajectory to replan from the prior trajectory.
|
| 566 |
+
if len(trajectory_prior.shape) == 2:
|
| 567 |
+
trajectory_prior = trajectory_prior.unsqueeze(0)
|
| 568 |
+
trajectory_prior = trajectory_prior.to(self.device)
|
| 569 |
+
trajectory = trajectory_prior
|
| 570 |
+
if choice == 'pad':
|
| 571 |
+
# Pad the trajectory to the original length
|
| 572 |
+
trajectory = pad_tensor(trajectory.squeeze(0), trajectory_length, pad_front = False).unsqueeze(0)
|
| 573 |
+
elif choice == 'interpolate':
|
| 574 |
+
# Interpolate the trajectory to the original length
|
| 575 |
+
trajectory = interpolate_points(trajectory, trajectory_length, resample=True)
|
| 576 |
+
else:
|
| 577 |
+
raise ValueError(f"Invalid choice: {choice}")
|
| 578 |
+
# print("After Interpolation/Padding Trajectory Shape: ", trajectory.shape)
|
| 579 |
+
# breakpoint()
|
| 580 |
+
|
| 581 |
+
# Add noise for replanning:
|
| 582 |
+
timesteps = torch.full((trajectory.shape[0],), timestep, device=trajectory.device) # Convert timestep to tensor and expand to match batch dimension
|
| 583 |
+
traj_noisy = self.q_sample(x_start=trajectory, t=timesteps, noise=torch.randn_like(trajectory))
|
| 584 |
+
|
| 585 |
+
# context and hard_conds must be normalized
|
| 586 |
+
hard_conds = copy(hard_conds)
|
| 587 |
+
context = copy(context)
|
| 588 |
+
|
| 589 |
+
# repeat hard conditions and contexts for n_samples
|
| 590 |
+
for k, v in hard_conds.items():
|
| 591 |
+
new_state = einops.repeat(v.to(self.device), 'd -> b d', b=n_samples)
|
| 592 |
+
hard_conds[k] = new_state
|
| 593 |
+
|
| 594 |
+
if context is not None:
|
| 595 |
+
for k, v in context.items():
|
| 596 |
+
context[k] = einops.repeat(v.to(self.device), 'd -> b d', b=n_samples)
|
| 597 |
+
|
| 598 |
+
# Sample from diffusion model
|
| 599 |
+
samples, chain = self.conditional_sample(
|
| 600 |
+
hard_conds,
|
| 601 |
+
context = context,
|
| 602 |
+
batch_size = n_samples,
|
| 603 |
+
return_chain =True,
|
| 604 |
+
prior_trajectory=traj_noisy,
|
| 605 |
+
timestep=timestep,
|
| 606 |
+
**diffusion_kwargs
|
| 607 |
+
)
|
| 608 |
+
|
| 609 |
+
# chain: [ n_samples x (n_diffusion_steps + 1) x horizon x (state_dim)]
|
| 610 |
+
# extract normalized trajectories
|
| 611 |
+
trajs_chain_normalized = chain
|
| 612 |
+
|
| 613 |
+
# trajs: [ (n_diffusion_steps + 1) x n_samples x horizon x state_dim ]
|
| 614 |
+
trajs_chain_normalized = einops.rearrange(trajs_chain_normalized, 'b diffsteps h d -> diffsteps b h d')
|
| 615 |
+
|
| 616 |
+
if return_chain:
|
| 617 |
+
return trajs_chain_normalized
|
| 618 |
+
|
| 619 |
+
# return the last denoising step
|
| 620 |
+
return trajs_chain_normalized[-1]
|
| 621 |
+
# ------------------------------------------ training ------------------------------------------#
|
| 622 |
+
|
| 623 |
+
def q_sample(self, x_start, t, noise=None):
|
| 624 |
+
if noise is None:
|
| 625 |
+
noise = torch.randn_like(x_start)
|
| 626 |
+
|
| 627 |
+
sample = (
|
| 628 |
+
extract(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start +
|
| 629 |
+
extract(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise
|
| 630 |
+
)
|
| 631 |
+
return sample
|
| 632 |
+
|
| 633 |
+
def p_losses(self, x_start, context, t, hard_conds):
|
| 634 |
+
noise = torch.randn_like(x_start)
|
| 635 |
+
|
| 636 |
+
x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
|
| 637 |
+
x_noisy = apply_hard_conditioning(x_noisy, hard_conds)
|
| 638 |
+
|
| 639 |
+
# context model
|
| 640 |
+
if context is not None:
|
| 641 |
+
context = self.context_model(context)
|
| 642 |
+
|
| 643 |
+
# diffusion model
|
| 644 |
+
x_recon = self.model(x_noisy, t, context['condition'])
|
| 645 |
+
x_recon = apply_hard_conditioning(x_recon, hard_conds)
|
| 646 |
+
|
| 647 |
+
assert noise.shape == x_recon.shape
|
| 648 |
+
|
| 649 |
+
if self.prediction_mode == PredictionMode.EPSILON:
|
| 650 |
+
loss, info = self.loss_fn(x_recon, noise)
|
| 651 |
+
elif self.prediction_mode == PredictionMode.X:
|
| 652 |
+
loss, info = self.loss_fn(x_recon, x_start)
|
| 653 |
+
elif self.prediction_mode == PredictionMode.V:
|
| 654 |
+
# model outputs v-hat; build v_target = sqrt(ab)*eps - sqrt(1-ab)*x0
|
| 655 |
+
sqrt_ab = extract(self.sqrt_alphas_cumprod, t, x_start.shape) # sqrt(ᾱ_t)
|
| 656 |
+
sqrt_1mab = extract(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) # sqrt(1-ᾱ_t)
|
| 657 |
+
v_target = sqrt_ab * noise - sqrt_1mab * x_start # v = sqrt(ab)*eps - sqrt(1-ab)*x0
|
| 658 |
+
|
| 659 |
+
# Option A: plain loss via your loss_fn (assumed MSE)
|
| 660 |
+
if not self.use_snr_weight:
|
| 661 |
+
loss, info = self.loss_fn(x_recon, v_target)
|
| 662 |
+
# Option B: SNR-weighted MSE (recommended for v-pred)
|
| 663 |
+
# refer to paper: PROGRESSIVE DISTILLATION FOR FAST SAMPLING OF DIFFUSION MODELS
|
| 664 |
+
# reduce weight of high noise area
|
| 665 |
+
else:
|
| 666 |
+
# SNR_t = ᾱ_t / (1-ᾱ_t)
|
| 667 |
+
ab = sqrt_ab ** 2
|
| 668 |
+
snr = ab / (1.0 - ab).clamp(min=1e-8)
|
| 669 |
+
w = snr / (snr + 1.0) # stable reweighting
|
| 670 |
+
# match dims for broadcasting (B -> Bx1x...):
|
| 671 |
+
while w.ndim < v_target.ndim:
|
| 672 |
+
w = w.unsqueeze(-1)
|
| 673 |
+
mse = (x_recon - v_target).pow(2)
|
| 674 |
+
loss = (w * mse).mean()
|
| 675 |
+
info = {
|
| 676 |
+
"snr_weighted": True,
|
| 677 |
+
"snr_mean": snr.mean().item(),
|
| 678 |
+
"mse_mean": mse.mean().item(),
|
| 679 |
+
}
|
| 680 |
+
else:
|
| 681 |
+
raise ValueError(f"Unknown prediction_mode: {self.prediction_mode}")
|
| 682 |
+
|
| 683 |
+
return loss, info
|
| 684 |
+
|
| 685 |
+
def loss(self, x, context, *args):
|
| 686 |
+
batch_size = x.shape[0]
|
| 687 |
+
t = torch.randint(0, self.n_diffusion_steps, (batch_size,), device=x.device).long()
|
| 688 |
+
return self.p_losses(x, context, t, *args)
|
| 689 |
+
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/flow_matching_base.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from copy import copy
|
| 2 |
+
|
| 3 |
+
import einops
|
| 4 |
+
import torch
|
| 5 |
+
import torch.nn as nn
|
| 6 |
+
from abc import ABC
|
| 7 |
+
|
| 8 |
+
from cfdp.diffusion_policy.models.sample_functions import apply_hard_conditioning
|
| 9 |
+
|
| 10 |
+
class FlowMatchingModel(nn.Module, ABC):
    """Conditional flow-matching model over trajectories.

    Learns a velocity field v_theta(z_t, t, cond) along the linear
    interpolation z_t = (1 - t) * x + t * z1 between data x (t=0) and
    Gaussian noise z1 (t=1); sampling integrates the ODE from t=1 to t=0.
    """

    def __init__(self,
                 model=None,
                 nonuniform_time=False,  # sigmoid-of-normal time sampling instead of uniform
                 context_model=None,
                 device='cuda',
                 **kwargs):
        super().__init__()
        self.model_name = 'FlowMatchingModel'
        self.model = model  # velocity network (e.g. a UNet)
        self.nonuniform_time = nonuniform_time
        self.context_model = context_model
        self.state_dim = self.model.state_dim
        self.device = device

    def forward(self, x, hard_conds, context):
        """Training loss.

        x: action trajectory batch; hard_conds: start/goal hard constraints;
        context: task-specific conditioning (environment observation) or None.
        Returns (mean flow-matching MSE, list of per-sample (t, loss) pairs).
        """
        b = x.size(0)
        if self.nonuniform_time:
            # Sigmoid-transformed normal: concentrates samples at mid-range t.
            t = torch.sigmoid(torch.randn((b,)).to(x.device))
        else:
            # Uniform t in [0, 1].
            t = torch.rand((b,)).to(x.device)
        texp = t.view([b, *([1] * len(x.shape[1:]))])

        z1 = torch.randn_like(x)
        z1 = apply_hard_conditioning(z1, hard_conds)
        zt = (1 - texp) * x + texp * z1

        if context is not None:
            context = self.context_model(context)
        # BUGFIX: previously `context['condition']` was indexed unconditionally,
        # crashing with TypeError for unconditional (context=None) training.
        cond = context['condition'] if context is not None else None
        vtheta = self.model(zt, t, cond)

        # Regression target for the velocity along the linear path is (z1 - x).
        batchwise_mse = ((z1 - x - vtheta) ** 2).mean(dim=list(range(1, len(x.shape))))
        tlist = batchwise_mse.detach().cpu().reshape(-1).tolist()
        ttloss = [(tv, tloss) for tv, tloss in zip(t, tlist)]
        return batchwise_mse.mean(), ttloss

    @torch.no_grad()
    def warmup(self, horizon=64, device='cuda'):
        """Dummy forward pass to trigger lazy CUDA/kernel initialization."""
        x = torch.randn((2, horizon, self.state_dim), device=device)
        t = torch.full((2,), 1, device=device, dtype=torch.long)
        self.model(x, t, context=None)

    @torch.no_grad()
    def run_inference(self, context=None, hard_conds=None, n_samples=1, return_chain=False, **inference_kwargs):
        """Sample `n_samples` normalized trajectories.

        `hard_conds` and `context` must already be normalized.
        """
        # Shallow-copy so the caller's dicts are not mutated.
        hard_conds = copy(hard_conds)
        context = copy(context)
        # Tile conditions across the sample batch: d -> (n_samples, d).
        for k, v in hard_conds.items():
            hard_conds[k] = einops.repeat(v.to(self.device), 'd -> b d', b=n_samples)
        if context is not None:
            for k, v in context.items():
                context[k] = einops.repeat(v.to(self.device), 'd -> b d', b=n_samples)

        # trajs_chain: list of (n_samples, horizon, state_dim) tensors, one per step.
        trajs_chain = self.sample(hard_conds=hard_conds, context=context,
                                  batch_size=n_samples, **inference_kwargs)

        if return_chain:
            # Stack into (sample_steps + 1, n_samples, horizon, state_dim).
            return torch.stack(trajs_chain, dim=0)
        return trajs_chain[-1]

    @torch.no_grad()
    def sample(self, hard_conds, horizon=None, context=None,
               batch_size=1, null_cond=None, sample_steps=5, cfg=2.0, guide=None, **inference_kwargs):
        """Euler-integrate the learned ODE from noise (t=1) to data (t=0).

        Returns the full chain [z_1, ..., z_0] as a list of tensors.
        `null_cond`, `cfg` and `guide` are accepted for interface
        compatibility but guidance is currently disabled.
        """
        z = torch.randn(batch_size, horizon, self.state_dim).to(self.device)
        b = batch_size
        dt = torch.tensor([1.0 / sample_steps] * b).to(z.device).view([b, *([1] * len(z.shape[1:]))])
        trajs_chain = [z]
        print("guide function is: ", guide)
        # TODO: add guidance will totally ruin the flow

        for i in range(sample_steps, 0, -1):
            t = torch.tensor([i / sample_steps] * b).to(z.device)

            # BUGFIX: the encoded condition used to be assigned only inside
            # `if context is not None` (and under a misspelled name,
            # `conidtion`), raising NameError for unconditional sampling.
            cond = self.context_model(context)['condition'] if context is not None else None
            vc = self.model(z, t, cond)

            # Euler step backwards along the flow, then re-pin the hard constraints.
            z = z - dt * vc
            z = apply_hard_conditioning(z, hard_conds)
            trajs_chain.append(z)

        return trajs_chain
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/gaussian_diffusion_loss.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from cfdp.diffusion_policy.models import build_condition
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class GaussianDiffusionLoss:
    """Training-loss wrapper for diffusion-based generative models."""

    def __init__(self):
        pass

    @staticmethod
    def loss_fn(diffusion_model, input_dict, dataset, step=None):
        """Compute the diffusion training loss for one batch.

        Returns (loss_dict, info) where loss_dict maps 'diffusion_loss'
        to the scalar loss tensor.
        """
        traj_normalized = input_dict[f'{dataset.data_key_traj}_normalized']
        condition = build_condition(diffusion_model, dataset, input_dict)
        hard_conds = input_dict.get('hard_conds', {})

        loss, info = diffusion_model.loss(traj_normalized, condition, hard_conds)
        return {'diffusion_loss': loss}, info
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class FlowMatchingLoss:
    """Training-loss wrapper for flow-matching generative models."""

    def __init__(self):
        pass

    @staticmethod
    def loss_fn(model, input_dict, dataset, step=None):
        """Compute the flow-matching training loss for one batch.

        Returns (loss_dict, aux) where loss_dict maps 'diffusion_loss'
        (key kept for drop-in compatibility with the trainer) to the loss.
        """
        traj_normalized = input_dict[f'{dataset.data_key_traj}_normalized']
        condition = build_condition(model, dataset, input_dict)
        hard_conds = input_dict.get('hard_conds', {})

        loss, aux = model(traj_normalized, hard_conds, condition)
        return {'diffusion_loss': loss}, aux
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/guide_managers.py
ADDED
|
@@ -0,0 +1,1636 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import time
|
| 3 |
+
from dataclasses import dataclass, field
|
| 4 |
+
|
| 5 |
+
from typing import Dict, List
|
| 6 |
+
import einops
|
| 7 |
+
import numpy as np
|
| 8 |
+
import sapien
|
| 9 |
+
import torch
|
| 10 |
+
from enum import Enum
|
| 11 |
+
import math
|
| 12 |
+
from torch import nn
|
| 13 |
+
import torch.nn.functional as F
|
| 14 |
+
|
| 15 |
+
from cfdp.utils.pointcloud_sdf import PointCloud_CSDF
|
| 16 |
+
from cfdp.utils.data_utils import transform_ortho6d_to_quat
|
| 17 |
+
from cfdp.utils.advanced_kinematics_solver import AdvancedKinematicsSolver
|
| 18 |
+
|
| 19 |
+
@dataclass
class DebugEntry:
    """Debug record for sphere-based guidance.

    All fields default to None; `any` (the builtin) is kept as the
    catch-all annotation for compatibility with the original definition.
    """
    sphere_poses: any = None            # poses of the collision spheres
    sphere_radii: any = None            # radii of the collision spheres
    sdf_costs: any = None               # SDF cost per sphere
    spheres_after_guidance: any = None  # sphere poses after applying guidance
    sampled_spheres: any = None         # spheres sampled along the trajectory
|
| 26 |
+
|
| 27 |
+
class SampleHelper:
    """Trajectory sampling utilities (extensible to Gaussian / spline sampling)."""

    def __init__(self):
        pass

    def catmull_rom_spline(self, p0, p1, p2, p3, t):
        """Evaluate a uniform Catmull-Rom segment between p1 and p2.

        p0..p3: control points, shape (..., 7); t: parameter values in
        [0, 1], shape [num_samples, 1]. Returns points of shape
        [num_samples, 7]; t=0 gives p1, t=1 gives p2.
        """
        # Cubic basis coefficients, evaluated Horner-style.
        c0 = 2 * p1
        c1 = p2 - p0
        c2 = 2 * p0 - 5 * p1 + 4 * p2 - p3
        c3 = 3 * p1 - p0 - 3 * p2 + p3
        return 0.5 * (c0 + t * (c1 + t * (c2 + t * c3)))
|
| 43 |
+
|
| 44 |
+
class GuideManager(nn.Module, abc.ABC):
    """Base class for trajectory-guidance modules.

    The base implementation applies no guidance (zero gradient); subclasses
    override `forward` / `update_observation`. Gradient clipping is optional
    and controlled by `clip_grad` + `clip_grad_rule` ('norm' or 'value').
    """

    def __init__(self,
                 dataset,
                 tensor_args,
                 clip_grad=False, clip_grad_rule='norm',
                 max_grad_norm=1.0, max_grad_value=1.0,
                 guidance_weight=0.1,
                 ):
        super().__init__()
        self.dataset = dataset
        self.clip_grad = clip_grad
        self.clip_grad_rule = clip_grad_rule
        self.max_grad_norm = max_grad_norm
        self.max_grad_value = max_grad_value
        self.device = tensor_args['device']
        self.dtype = tensor_args['dtype']
        self.guidance_weight = guidance_weight

    def forward(self, x_normalized):
        # Base class: no guidance, zero gradient everywhere.
        return torch.zeros_like(x_normalized)

    def update_observation(self, obs):
        """Hook: subclasses ingest fresh observations here before forward()."""
        pass

    def clip_gradient(self, grad):
        """Dispatch to the configured clipping rule (no-op when disabled)."""
        if not self.clip_grad:
            return grad
        if self.clip_grad_rule == 'norm':
            return self.clip_grad_by_norm(grad)
        if self.clip_grad_rule == 'value':
            return self.clip_grad_by_value(grad)
        raise NotImplementedError

    def clip_grad_by_norm(self, grad):
        # Rescale so the last-dim norm never exceeds max_grad_norm.
        # NOTE(review): the 1e-6 is added to the values before the norm,
        # not to the norm itself — presumably to avoid division by zero.
        if self.clip_grad:
            norms = torch.linalg.norm(grad + 1e-6, dim=-1, keepdims=True)
            grad = grad * (torch.clip(norms, 0., self.max_grad_norm) / norms)
        return grad

    def clip_grad_by_value(self, grad):
        # Clamp each component into [-max_grad_value, max_grad_value].
        if self.clip_grad:
            grad = torch.clip(grad, -self.max_grad_value, self.max_grad_value)
        return grad

    def reset(self):
        """Hook: clear per-episode state."""
        pass
|
| 96 |
+
|
| 97 |
+
def running_avg(tensor, window_size):
    """Causal running average along the sequence axis.

    Each output step is the mean of the current and previous
    `window_size - 1` steps, with reflect padding at the left edge so the
    output has the same length as the input.

    PERF: the original looped over every (batch, dim) pair in Python,
    issuing one conv1d per scalar channel; here the batch and feature
    dims are folded into the conv batch so a single C-level conv1d call
    does all the work.

    :param tensor: Input tensor of shape [batch_size, seq_len, dim]
    :param window_size: Size of the sliding window
    :return: Tensor with same shape as input, containing running averages
    """
    batch_size, seq_len, dim = tensor.shape
    kernel = torch.ones(1, 1, window_size, device=tensor.device, dtype=tensor.dtype) / window_size
    # [b, seq, dim] -> [b*dim, 1, seq]: one conv "sample" per scalar channel.
    flat = tensor.permute(0, 2, 1).reshape(batch_size * dim, 1, seq_len)
    padded = F.pad(flat, (window_size - 1, 0), mode='reflect')
    smoothed = F.conv1d(padded, kernel)
    return smoothed.reshape(batch_size, dim, seq_len).permute(0, 2, 1).contiguous()
|
| 122 |
+
|
| 123 |
+
class GuideManagerPath(GuideManager):
    """SDF-based collision guidance computed from an observed point cloud.

    `update_observation` must be called (with a fresh 'point_cloud') before
    each `forward`.
    """

    def __init__(self,
                 dataset,
                 tensor_args,
                 clip_grad=False, clip_grad_rule='norm',
                 max_grad_norm=1.0, max_grad_value=1.0,
                 guidance_weight=0.15,
                 ):
        super().__init__(dataset,
                         tensor_args,
                         clip_grad, clip_grad_rule,
                         max_grad_norm, max_grad_value,
                         guidance_weight)
        self._point_cloud = None
        self._sdf_model = PointCloud_CSDF(sphere_radius=0.10, max_distance=0.04, device=self.device, pcd=None)

    def forward(self, x_normalized):
        """Return the guidance gradient for a normalized trajectory batch."""
        x_norm = x_normalized.clone()
        with torch.enable_grad():
            x_norm.requires_grad_(True)
            x = self.dataset.unnormalize_trajectory(x_norm)
            # SDF guidance from x and the observed environment.
            sdf_guidance = self._calculate_sdf_guidance(x)
            # PERF FIX: the smoothness guidance used to be computed here
            # unconditionally (a full extra backward pass) and then thrown
            # away. Re-enable the original combination with:
            #   guidance = 0.1 * sdf_guidance + 0.01 * self._calculate_smooth_guidance(x)
            guidance = 0.1 * sdf_guidance
            # guidance = self.clip_gradient(guidance)
        return self.guidance_weight * guidance

    def update_observation(self, obs):
        """Ingest the latest observation; required before forward()."""
        self._point_cloud = obs['point_cloud']
        self._sdf_model.update_pcd(self._point_cloud)

    def _calculate_sdf_guidance(self, x_unnormalized):
        """Gradient of the summed SDF cost w.r.t. the xyz waypoints.

        Only the first 3 state dims (positions) receive guidance; the start
        and end waypoints are pinned (zero gradient), and the gradient is
        smoothed along the trajectory with a window-3 running average.
        """
        # Detach positions so gradients are handled manually via backward().
        positions = x_unnormalized[..., :3].detach().requires_grad_(True)
        sdf_values = self._sdf_model(positions)  # [batch, length]
        sdf_values.sum().backward()
        sdf_gradient = positions.grad
        # Keep the trajectory endpoints fixed.
        sdf_gradient[:, 0] = 0.0
        sdf_gradient[:, -1] = 0.0
        running_avg_sdf_gradient = running_avg(sdf_gradient, 3)
        # Guidance is zero on all non-position dims.
        guidance = torch.zeros_like(x_unnormalized)
        guidance[..., :3] = running_avg_sdf_gradient
        return guidance

    def _calculate_smooth_guidance(self, x_unnormalized):
        """Negative gradient of the sum of squared accelerations (smoothness).

        Returns zeros when the trajectory is too short (< 3 waypoints) to
        define an acceleration.
        """
        positions = x_unnormalized[..., :3].detach().clone().requires_grad_(True)

        if positions.shape[1] >= 3:
            velocities = positions[:, 1:] - positions[:, :-1]
            accelerations = velocities[:, 1:] - velocities[:, :-1]
            smoothness_objective = torch.sum(torch.norm(accelerations, dim=-1) ** 2)
            smoothness_objective.backward()
            smooth_grad = positions.grad
            guidance = torch.zeros_like(x_unnormalized)
            # Negative to descend (minimize) the smoothness objective.
            guidance[..., :3] = -smooth_grad
            return guidance
        else:
            return torch.zeros_like(x_unnormalized)
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
class GuideManagerSTOMP(GuideManager):
|
| 204 |
+
class LocalDirectionMode(Enum):
    """Strategy for computing the local correction direction."""
    WAYPOINT = 0           # presumably handled by _calculate_local_direction_from_waypoint -- verify dispatch
    RESAMPLE_WAYPOINT = 1  # perturb waypoints and resample (see _calculate_local_direction_from_resample_waypoint)
    RESAMPLE_SPLINE = 2    # perturb spline control point and resample (see _calculate_local_direction_from_resample_spline)
def __init__(self,
             dataset,
             robot_model,
             tensor_args,
             clip_grad=False, clip_grad_rule='norm',
             max_grad_norm=1.0, max_grad_value=1.0,
             guidance_weight=0.15
             ):
    """STOMP-style guide manager using collision-sphere SDF costs.

    Args:
        dataset: provides (un)normalization of trajectories.
        robot_model: robot description handed to the kinematics solver.
        tensor_args: device/dtype config forwarded to the base class.
        clip_grad, clip_grad_rule, max_grad_norm, max_grad_value:
            gradient-clipping config forwarded to the base class.
        guidance_weight: scale applied to the computed gradient.
    """
    super().__init__(dataset,
                     tensor_args,
                     clip_grad, clip_grad_rule,
                     max_grad_norm, max_grad_value,
                     guidance_weight)
    # Hard-coded to mode 1 (RESAMPLE_WAYPOINT) for now.
    mode_int = 1
    self._local_direction_mode = self.LocalDirectionMode(mode_int)

    # Scene point cloud; populated via update_observation().
    self._point_cloud = None
    self._max_distance = 0.20
    self._sdf_model = PointCloud_CSDF(sphere_radius=0.05, max_distance=None, device=self.device, pcd=None)
    self._advanced_kinematics_solver = AdvancedKinematicsSolver(robot_model)
    # Only these links' collision spheres contribute to the SDF cost
    # (distal arm + gripper; proximal links are ignored).
    self._selected_links = ["panda_link5", "panda_link6", "panda_link7", "panda_link8", "panda_hand", "panda_leftfinger", "panda_rightfinger"]
    # Debug entries appended by forward(); cleared by reset_debug_state().
    self.debug_state = []
def reset_debug_state(self):
    """Drop all accumulated per-call debug entries."""
    self.debug_state = list()
def forward(self, x_normalized, get_cost_rank=False, get_debug_info=True):
    """Compute a normalized guidance delta for a batch of trajectories.

    Pipeline: unnormalize -> FK collision spheres -> SDF cost ->
    STOMP-style mixed gradient -> renormalize the guided trajectory and
    return the delta in normalized space.

    Args:
        x_normalized: normalized joint-space trajectories
            [batch, length, joint_dim] (joint_dim = 9 per the comment
            below -- TODO confirm).
        get_cost_rank: if True, also store per-sample cost ranking in
            self.sdf_cost_sorted_indices.
        get_debug_info: if True, append a DebugEntry to self.debug_state.

    Returns:
        guidance_norm: delta in normalized trajectory space.
    """
    # IMPORTANT: x should be joint states, not end-effector poses.
    x_norm = x_normalized.clone()
    x = self.dataset.unnormalize_trajectory(x_norm)  # x: [batch, length, joint_dim], joint_dim = 9
    restructured_data = self._get_cartesian_positions(x)
    sdf_cost = self._calculate_sdf_cost(restructured_data['all_sphere_poses'], restructured_data['all_sphere_radii'])  # [batch, seq_len, sphere_num]
    # Filter out the cost of spheres that are not in the selected links.
    sdf_cost = self._filter_cost_by_mask(sdf_cost, restructured_data['all_sphere_mask'])  # [batch, seq_len, sphere_num]
    sdf_gradient = self._calculate_mixed_gradient(sdf_cost, x, alpha_global=3.0, sliding_window = 3, max_window_nums=5)
    # Delta values are not normalized here: deltas and absolute values
    # do not share the same normalizer.
    guidance = self.guidance_weight*sdf_gradient
    x_after_guidance = x + guidance
    # Convert the delta to normalized space by normalizing the guided
    # trajectory and subtracting the original normalized input.
    guidance_norm = self.dataset.normalizer.normalize(x_after_guidance, self.dataset.data_key_traj) - x_norm

    # ------ Debug info ------
    if get_cost_rank:
        # Sort batch indices by total cost (sum over timesteps and spheres).
        cost_total = sdf_cost.sum(dim=(1, 2))  # [batch]
        self.sdf_cost_sorted_indices = torch.argsort(cost_total, dim=0)  # ascending order

    if get_debug_info:
        # Rebind x_after_guidance to a numpy copy so x is not modified.
        x_after_guidance = x.detach().cpu().numpy().copy() + guidance.detach().cpu().numpy().copy()
        temp_data =self._get_cartesian_positions(x_after_guidance)
        # NOTE(review): debug_local_samples_spheres is set inside the
        # resample-waypoint path; other modes may leave it undefined.
        debug_entry = DebugEntry(
            sphere_poses=restructured_data['all_sphere_poses'],
            sphere_radii=restructured_data['all_sphere_radii'],
            sdf_costs=sdf_cost,
            spheres_after_guidance=temp_data['all_sphere_poses'],
            sampled_spheres=self.debug_local_samples_spheres,
        )

        self.debug_state.append(debug_entry)
    # ---- end debug info ------
    return guidance_norm
def update_observation(self, obs):
    """Cache the latest point cloud and push it into the SDF model.

    Must be called before forward() so the SDF reflects the current scene.

    Args:
        obs: observation dict; only the 'point_cloud' entry is read.
    """
    self._point_cloud = obs['point_cloud']
    self._sdf_model.update_pcd(self._point_cloud)
def _get_cartesian_positions(self, joint_states):
|
| 288 |
+
# what is the best way to get end-effector poses? shall I wrap the restructure function in this?
|
| 289 |
+
if isinstance(joint_states, torch.Tensor): # this is not efficient, but let's keep it for now for MVP
|
| 290 |
+
joint_states = joint_states.detach().cpu().numpy()
|
| 291 |
+
sphere_poses_list = []
|
| 292 |
+
sphere_radii_list = []
|
| 293 |
+
for b in range(joint_states.shape[0]):
|
| 294 |
+
batch_sphere_poses = []
|
| 295 |
+
batch_sphere_radii = []
|
| 296 |
+
for t in range(joint_states.shape[1]):
|
| 297 |
+
sphere_poses, sphere_radii = self._advanced_kinematics_solver.calculate_all_sphere_positions_and_radii(joint_states[b, t, :])
|
| 298 |
+
batch_sphere_poses.append(sphere_poses)
|
| 299 |
+
batch_sphere_radii.append(sphere_radii)
|
| 300 |
+
sphere_poses_list.append(batch_sphere_poses)
|
| 301 |
+
sphere_radii_list.append(batch_sphere_radii)
|
| 302 |
+
|
| 303 |
+
restructured_data = self._restructure_sphere_data(sphere_poses_list, sphere_radii_list)
|
| 304 |
+
return restructured_data
|
| 305 |
+
|
| 306 |
+
def _filter_cost_by_mask(self, cost, sphere_mask):
|
| 307 |
+
filtered_cost = cost * sphere_mask
|
| 308 |
+
return filtered_cost
|
| 309 |
+
|
| 310 |
+
def _calculate_local_direction_from_resample_spline(self,
                                                    joint_states: torch.Tensor,
                                                    window_indices: torch.Tensor,
                                                    alpha_local: float = 3.0):
    """
    Calculate the local direction for each timestep using a spline.

    Args:
        joint_states: [batch, seq_len, joint_dim]
        window_indices: [num_windows, window_size] indices into seq_len
        alpha_local: softmin temperature. NOTE(review): currently unused --
            the compute_sampled_gradient call below hard-codes alpha=1.0.

    Returns:
        grad_full: [batch, seq_len, joint_dim] local update, scatter-added
        back onto the original waypoint indices (overlapping windows sum).
    """
    def batch_window_perturb_sample_spline(joint_states, window_indices, num_samples=8, perturb_std=0.1, num_interp=10):
        """
        For each [batch, window], perturb control point p2 and sample points
        on the Catmull-Rom spline through the window's 4 control points.

        Args:
            joint_states: [batch, seq_len, joint_dim]
            window_indices: [num_windows, window_size]
            num_samples: number of p2 perturbations per window
            perturb_std: std of the Gaussian perturbation
            num_interp: number of sampled points per spline

        Returns:
            sampled_points: [batch, num_windows, num_samples, num_interp, joint_dim]
            controls: [batch, num_windows, num_samples, window_size, joint_dim]
            window_points: [batch, num_windows, window_size, joint_dim]
        """
        batch_size, seq_len, joint_dim = joint_states.shape
        num_windows, window_size = window_indices.shape

        # Gather window control points: [batch, num_windows, window_size, joint_dim]
        idx = einops.repeat(window_indices, 'w s -> b w s d', b=batch_size, d=joint_dim)
        joint_states_exp = einops.repeat(joint_states, 'b t d -> b w t d', w=num_windows)
        window_points = torch.gather(joint_states_exp, 2, idx)  # [batch, num_windows, window_size, joint_dim]

        # Expand for batched p2 perturbation: [batch, num_windows, num_samples, window_size, joint_dim]
        controls = einops.repeat(window_points, 'b w t d -> b w n t d', n=num_samples)
        noise = torch.randn(batch_size, num_windows, num_samples, joint_dim, device=joint_states.device) * perturb_std
        # repeat() may return a view -- clone before the in-place add below.
        controls = controls.clone()
        controls[:, :, :, 2, :] += noise  # only perturb p2

        # Interpolation parameter t in [0, 1], broadcast over all dims.
        t = torch.linspace(0, 1, num_interp, device=joint_states.device)
        t = einops.rearrange(t, 'i -> 1 1 1 i 1')  # [1, 1, 1, num_interp, 1]

        p0 = einops.rearrange(controls[:, :, :, 0, :], 'b w n d -> b w n 1 d')
        p1 = einops.rearrange(controls[:, :, :, 1, :], 'b w n d -> b w n 1 d')
        p2 = einops.rearrange(controls[:, :, :, 2, :], 'b w n d -> b w n 1 d')
        p3 = einops.rearrange(controls[:, :, :, 3, :], 'b w n d -> b w n 1 d')

        # Uniform Catmull-Rom basis: 0.5*(2p1 + (p2-p0)t + ...t^2 + ...t^3)
        a = 2 * p1
        b = -p0 + p2
        c = 2*p0 - 5*p1 + 4*p2 - p3
        d = -p0 + 3*p1 - 3*p2 + p3

        sampled_points = 0.5 * (a + b * t + c * t**2 + d * t**3)  # [batch, num_windows, num_samples, num_interp, joint_dim]

        return sampled_points, controls, window_points

    def compute_sampled_gradient(sampled_points, cost, window_joint_states, alpha=4.0):
        """
        Softmin-weighted update direction from perturbed spline samples.

        Args:
            sampled_points: [batch, num_windows, num_samples, num_interp, joint_dim]
            cost: [batch, w, n, t]
            window_joint_states: [batch, num_windows, window_size, joint_dim]
            alpha: softmin temperature

        Returns:
            grad: [batch, w, t, joint_dim]
        """
        batch, joint_dim = sampled_points.shape[0], sampled_points.shape[-1]
        n = sampled_points.shape[2]
        num_windows = window_joint_states.shape[1]
        t = sampled_points.shape[3]
        # Only interior slots 1 and 2 of each window receive an update:
        # compare the spline's first/last sample against control points 1/2.
        delta = torch.zeros(batch, num_windows, n, t, joint_dim, device=sampled_points.device)
        delta[..., 1, :] = sampled_points[..., 0, :] - window_joint_states[..., 1, :].unsqueeze(2).expand(-1, -1, n, -1)
        delta[..., 2, :] = sampled_points[..., -1, :] - window_joint_states[..., 2, :].unsqueeze(2).expand(-1, -1, n, -1)

        # Softmin over the n perturbations: low-cost samples dominate.
        cost_norm = cost - cost.amin(dim=2, keepdim=True)  # min-norm over n
        weights = torch.softmax(-alpha * cost_norm, dim=2)  # [batch, w, n, t]
        weights = weights.unsqueeze(-1)  # [batch, w, n, t, 1]
        grad = (weights * delta).sum(dim=2)  # [batch, w, t, joint_dim]
        return grad

    pertube_sample_num = 5
    num_interp = window_indices.shape[1]  # one interpolated point per window slot

    sampled_points, controls, window_points = batch_window_perturb_sample_spline(
        joint_states,
        window_indices,
        num_samples=pertube_sample_num,
        num_interp=num_interp,
        perturb_std=0.2
    )
    # Flatten all candidates so FK + SDF run once over the whole batch.
    sampled_points = einops.rearrange(sampled_points, 'b w n t d -> b (w n t) d')
    restructured_data = self._get_cartesian_positions(sampled_points)
    sdf_cost = self._calculate_sdf_cost(restructured_data['all_sphere_poses'], restructured_data['all_sphere_radii'])
    # Filter out spheres not in the selected links, then sum per waypoint.
    sdf_cost = self._filter_cost_by_mask(sdf_cost, restructured_data['all_sphere_mask']).sum(dim=-1) #[batch, seq_len]
    cost = einops.rearrange(sdf_cost, 'b (w n t) -> b w n t', w=window_indices.shape[0], n = pertube_sample_num, t = num_interp)
    sampled_points = einops.rearrange(sampled_points, 'b (w n t) d -> b w n t d', w=window_indices.shape[0], n = pertube_sample_num, t = num_interp)

    # Softmin-weighted gradient over the cost.
    grad = compute_sampled_gradient(sampled_points, cost, window_points, alpha=1.0)

    # Transform gradient back to original trajectory indices.
    # grad: [batch, num_windows, num_interp, joint_dim]
    # window_indices: [num_windows, num_interp]
    batch, num_windows, num_interp, joint_dim = grad.shape
    seq_len = joint_states.shape[1]

    # Expand window_indices over batch and joint_dim for scatter_add.
    grad_flat = einops.rearrange(grad, 'b w t d -> b (w t) d')
    idx_flat = einops.rearrange(window_indices, 'w t -> (w t)')
    idx_flat = einops.repeat(idx_flat, 'n -> b n d', b=batch, d=joint_dim)

    # Overlapping windows accumulate their contributions.
    grad_full = torch.zeros(batch, seq_len, joint_dim, device=grad.device)
    grad_full = grad_full.scatter_add(dim=1, index=idx_flat, src=grad_flat)

    return grad_full
def _calculate_local_direction_from_resample_waypoint(self,
                                                      joint_states: torch.Tensor,
                                                      window_indices: torch.Tensor,
                                                      alpha_local: float = 3.0,
                                                      perturb_std: float = 0.2):
    """Calculate the local direction for each timestep using a waypoint.

    Pipeline: gather window control points -> perturb them in Cartesian
    space (cube corners around the hand pose, mapped back via IK) ->
    resample each perturbed window by linear interpolation -> score all
    candidates with the SDF cost -> softmin-weighted update direction ->
    scatter the per-window gradient back to trajectory indices.

    Args:
        joint_states: [batch, seq_len, joint_dim]
        window_indices: [num_windows, window_size]
        alpha_local: NOTE(review): unused -- the gradient call below
            hard-codes alpha=8.0.
        perturb_std: Cartesian cube edge length used for perturbation.

    Returns:
        grad_full: [batch, seq_len, joint_dim]
    """
    def batch_perturb_and_resample(joint_states, window_indices, perturb_plan, perturb_std=0.2, num_interp=10):
        """Perturb each window's waypoints and resample to num_interp points.

        Samples from a fixed polygon of directions, not a Gaussian.
        Returns (resampled, perturbed_windows, window_points).
        """
        batch_size, seq_len, joint_dim = joint_states.shape
        num_windows, window_size = window_indices.shape
        num_samples = len(perturb_plan)

        # Gather window control points: [batch, num_windows, window_size, joint_dim]
        idx = einops.repeat(window_indices, 'w s -> b w s d', b=batch_size, d=joint_dim)
        joint_states_exp = einops.repeat(joint_states, 'b t d -> b w t d', w=num_windows)
        window_points = torch.gather(joint_states_exp, 2, idx)

        # Cartesian-space sampling (joint-space variant kept below, unused).
        perturbed_windows = trust_region_cartesian_perturbations(window_points, perturb_std, perturb_plan)

        # Resample each perturbed window to num_interp points via linear
        # interpolation; flatten first for a single F.interpolate call.
        flat = perturbed_windows.reshape(-1, window_size, joint_dim)  # [BWN, window_size, joint_dim]
        flat_up = F.interpolate(flat.transpose(1, 2), size=num_interp, mode='linear', align_corners=True)
        flat_up = flat_up.transpose(1, 2)  # [BWN, num_interp, joint_dim]
        resampled = flat_up.view(batch_size, num_windows, num_samples, num_interp, joint_dim)
        return resampled, perturbed_windows, window_points

    def trust_region_perturbations(window_points, std, perturb_plan):
        # Joint-space alternative to the Cartesian perturbation; currently
        # not called by batch_perturb_and_resample.
        num_samples = len(perturb_plan)
        batch, num_windows, window_size, joint_dim = window_points.shape
        perturbed = window_points.unsqueeze(2).expand(-1, -1, num_samples, -1, -1).clone()

        for sample_idx, plan in enumerate(perturb_plan):
            for idx in range(1, window_size-1):  # skip first and last
                for joint_id, sign in plan:
                    perturbed[:, :, sample_idx, idx, joint_id] += sign * std
        return perturbed

    def trust_region_cartesian_perturbations(window_points, std, cartesian_perturb_dirs):
        """
        window_points: [batch, num_windows, window_size, joint_dim]
        std: float (Cartesian cube edge length)
        return: [batch, num_windows, num_samples, window_size, joint_dim]

        NOTE(review): the cartesian_perturb_dirs argument is ignored --
        a fixed set of cube-corner directions is redefined here.
        """
        dirs = np.array([
            [1, 1, 1], [1, 1, -1], [1, -1, 1], [1, -1, -1],
            [-1, 1, 1], [-1, 1, -1], [-1, -1, 1], [-1, -1, -1]
        ], dtype=np.float32)

        num_dirs = dirs.shape[0]
        b, w, t, d = window_points.shape

        # Move to CPU/numpy for IK and FK.
        window_points_np = window_points.detach().cpu().numpy().reshape(-1, d)  # (N, d)
        perturbed_qpos_list = []

        for qpos in window_points_np:
            self._advanced_kinematics_solver.calculate_forward_kinematics(qpos)
            cartesian_pose = self._advanced_kinematics_solver.get_all_link_pose()["panda_hand"]
            # Perturb in Cartesian space around the hand pose.
            perturbed_cartesian_poses = perturb_cartesian_pose(cartesian_pose, std, dirs=dirs)  # (num_dirs, 7)
            # IK for each perturbed pose; fall back to the original qpos
            # when IK fails so shapes stay consistent.
            perturbed_qpos = []
            for pose in perturbed_cartesian_poses:
                qpos_ik = self._advanced_kinematics_solver.compute_ik(pose, initial_qpos=qpos)
                if qpos_ik is None:
                    qpos_ik = qpos  # fallback
                perturbed_qpos.append(qpos_ik)
            perturbed_qpos = np.stack(perturbed_qpos, axis=0)  # (num_dirs, d)
            perturbed_qpos_list.append(perturbed_qpos)

        # Stack and reshape back to [b, w, num_dirs, t, d].
        perturbed_windows = np.stack(perturbed_qpos_list, axis=0)  # (b * w * t, num_dirs, d)
        perturbed_windows = np.transpose(perturbed_windows.reshape(b, w, t, num_dirs, d), (0, 1, 3, 2, 4))
        perturbed_windows = torch.tensor(perturbed_windows, device=window_points.device, dtype=window_points.dtype)
        return perturbed_windows

    def perturb_cartesian_pose(cartesian_pose, edge_len, dirs = None):
        """
        Generate perturbed poses forming a cube around the input pose.

        Args:
            cartesian_pose: object with .p (position [3]) and .q (quaternion [4])
            edge_len: float, total edge length of the cube
            dirs: optional (num_dirs, 3) corner directions; cube corners
                by default.

        Returns:
            perturbed_poses: [8, 7]  # (x, y, z) + quaternion
        """
        if dirs is None:
            dirs = np.array([
                [1, 1, 1], [1, 1, -1], [1, -1, 1], [1, -1, -1],
                [-1, 1, 1], [-1, 1, -1], [-1, -1, 1], [-1, -1, -1]
            ], dtype=np.float32)

        dirs = dirs / np.linalg.norm(dirs, axis=1, keepdims=True)  # normalize
        half_diag = edge_len / 2.0
        displacements = dirs * half_diag  # length from center to each corner

        pos = cartesian_pose.p  # [3]
        quat = cartesian_pose.q  # [4]

        pos_perturbed = pos[None, :] + displacements  # [8, 3]
        # Add small Gaussian noise to quaternions, then renormalize so
        # they stay valid rotations.
        quat_noise = np.random.normal(0, 0.15, (8, 4))
        quat_perturbed = quat[None, :] + quat_noise  # [8, 4]
        quat_norms = np.linalg.norm(quat_perturbed, axis=1, keepdims=True)
        quat_perturbed = quat_perturbed / quat_norms

        perturbed_poses = np.concatenate([pos_perturbed, quat_perturbed], axis=-1)  # [8, 7]
        return perturbed_poses

    def compute_sampled_gradient(sampled_points, cost, window_joint_states, alpha=4.0):
        # cost: [batch, num_windows, num_samples, t] -> total per sample.
        cost_per_sample = cost.sum(dim=-1)  # [batch, num_windows, num_samples]
        # Standardize over samples, then softmin so low-cost samples dominate.
        cost_norm = (cost_per_sample - cost_per_sample.mean(dim=2, keepdim=True)) / (cost_per_sample.std(dim=2, keepdim=True) + 1e-8)
        weights = torch.softmax(-alpha * cost_norm, dim=2)  # [batch, num_windows, num_samples]

        # delta: each sample's offset from the unperturbed window.
        delta = sampled_points - window_joint_states.unsqueeze(2)  # [batch, num_windows, num_samples, window_length, joint_dim]
        weights_expand = weights.unsqueeze(-1).unsqueeze(-1)  # [batch, num_windows, num_samples, 1, 1]
        grad_window = (weights_expand * delta).sum(dim=2)  # [batch, num_windows, window_length, joint_dim]
        return grad_window

    def reinterpolate_sampled_points(sampled_points, window_indices, num_interp=10):
        # Resample the interpolated candidates back to the window's
        # original length so deltas line up with window_points.
        # sampled_points: [b, w, n, num_interp, d]
        b, w, n, num_interp, d = sampled_points.shape
        target_t = window_indices.shape[1]
        # Reshape for interpolation: [b*w*n, d, num_interp]
        sampled_points_reshaped = sampled_points.permute(0, 1, 2, 4, 3).reshape(-1, d, num_interp)
        sampled_points_interp = F.interpolate(
            sampled_points_reshaped,
            size=target_t,
            mode='linear',
            align_corners=True
        )
        # Reshape back: [b, w, n, target_t, d]
        sampled_points_interp = sampled_points_interp.reshape(b, w, n, d, target_t).permute(0, 1, 2, 4, 3)
        return sampled_points_interp

    num_interp = 4  # keep the candidate set small

    # Cube-corner directions for the Cartesian perturbation.
    perturb_plan = np.array([
        [1, 1, 1], [1, 1, -1], [1, -1, 1], [1, -1, -1],
        [-1, 1, 1], [-1, 1, -1], [-1, -1, 1], [-1, -1, -1]
    ], dtype=np.float32)

    sampled_points, perturbed_windows, window_points = batch_perturb_and_resample(
        joint_states,
        window_indices,
        perturb_plan,
        num_interp=num_interp,
        perturb_std=perturb_std
    )
    num_samples = len(perturb_plan)
    # Flatten all candidates so FK + SDF run once over the whole batch.
    sampled_points = einops.rearrange(sampled_points, 'b w n t d -> b (w n t) d')
    restructured_data = self._get_cartesian_positions(sampled_points)
    sdf_cost = self._calculate_sdf_cost(restructured_data['all_sphere_poses'], restructured_data['all_sphere_radii'])
    # Filter out spheres not in the selected links, then sum per waypoint.
    sdf_cost = self._filter_cost_by_mask(sdf_cost, restructured_data['all_sphere_mask']).sum(dim=-1) #[batch, seq_len]

    cost = einops.rearrange(sdf_cost, 'b (w n t) -> b w n t', w=window_indices.shape[0], n = num_samples, t = num_interp)
    sampled_points = einops.rearrange(sampled_points, 'b (w n t) d -> b w n t d', w=window_indices.shape[0], n = num_samples, t = num_interp)
    resampled_sampled_points = reinterpolate_sampled_points(sampled_points, window_indices, num_interp=num_interp)
    grad = compute_sampled_gradient(resampled_sampled_points, cost, window_points, alpha=8.0)

    # Keep the candidate spheres for the debug entry built in forward().
    self.debug_local_samples_spheres = restructured_data['all_sphere_poses']

    # Transform gradient back to original trajectory indices.
    # grad: [batch, num_windows, num_interp, joint_dim]
    # window_indices: [num_windows, num_interp]
    batch, num_windows, num_interp, joint_dim = grad.shape
    seq_len = joint_states.shape[1]

    # Expand window_indices over batch and joint_dim for scatter_add.
    grad_flat = einops.rearrange(grad, 'b w t d -> b (w t) d')
    idx_flat = einops.rearrange(window_indices, 'w t -> (w t)')
    idx_flat = einops.repeat(idx_flat, 'n -> b n d', b=batch, d=joint_dim)

    # Overlapping windows accumulate their contributions.
    grad_full = torch.zeros(batch, seq_len, joint_dim, device=grad.device)
    grad_full = grad_full.scatter_add(dim=1, index=idx_flat, src=grad_flat)

    return grad_full
def _calculate_local_direction_from_waypoint(self, cost: torch.Tensor, joint_states: torch.Tensor, alpha_local: float = 3.0):
|
| 699 |
+
"""
|
| 700 |
+
Calculate the local direction for each timestep using a waypoint.
|
| 701 |
+
"""
|
| 702 |
+
batch, seq_len = cost.shape
|
| 703 |
+
# 2. Calculate local direction for each timestep
|
| 704 |
+
local_direction_list = []
|
| 705 |
+
for t in range(seq_len):
|
| 706 |
+
# Get cost for all samples at this waypoint
|
| 707 |
+
cost_t = cost[:, t] # [batch]
|
| 708 |
+
# Normalize: softmin, i.e. exp(-alpha_local * (cost_t - min))
|
| 709 |
+
cost_t_norm = cost_t - cost_t.min()
|
| 710 |
+
weights_local = torch.exp(-alpha_local * cost_t_norm)
|
| 711 |
+
weights_local = weights_local / (weights_local.sum() + 1e-8) # [batch]
|
| 712 |
+
# States at current timestep
|
| 713 |
+
states_t = joint_states[:, t, :] # [batch, joint_dim]
|
| 714 |
+
mean_state = states_t.mean(dim=0)
|
| 715 |
+
# Weighted direction relative to the mean state
|
| 716 |
+
direction = (weights_local[:, None] * (states_t - mean_state)).sum(dim=0) # [joint_dim]
|
| 717 |
+
local_direction_list.append(direction)
|
| 718 |
+
local_direction = torch.stack(local_direction_list, dim=0) # [seq_len, joint_dim]
|
| 719 |
+
return local_direction
|
| 720 |
+
|
| 721 |
+
def _calculate_mixed_gradient(
    self,
    cost: torch.Tensor,
    joint_states: torch.Tensor,
    alpha_local: float = 3.0,
    alpha_global: float = 3.0,
    sliding_window: int = 4,
    max_window_nums: int = 3,
) -> torch.Tensor:
    """
    Combine a per-waypoint global weight with a local update direction into
    a mixed STOMP-style gradient.

    Args:
        cost: [batch, seq_len, cost_per_sphere] collision cost per sphere.
        joint_states: [batch, seq_len, joint_dim] sampled joint trajectories.
        alpha_local: softmin temperature for the local direction.
        alpha_global: softmax temperature for the global waypoint weights.
        sliding_window: window width for the windowed cost sum.
        max_window_nums: number of top-cost windows used by resample modes.

    Returns:
        grads: [seq_len, joint_dim] update direction; first and last
        waypoints are zeroed so trajectory endpoints stay fixed.
    """
    batch, seq_len, sphere_nums = cost.shape
    device = cost.device

    # ------ GLOBAL weight per waypoint from a sliding-window cost sum ------
    cost_per_sample = cost.sum(dim=-1)  # [batch, seq_len]
    window_costs = self._sliding_window_sum(cost_per_sample, sliding_window)  # TODO: assumes no multi modes (will fix)

    # Normalize the window costs.
    # NOTE(review): dividing by max() rather than (max() - min()) is kept
    # from the original; confirm whether min-max normalization was intended.
    window_costs_norm = (window_costs - window_costs.min()) / (window_costs.max() + 1e-8)

    # Softmax weights: waypoints inside higher-cost windows get larger weight.
    softmax_weights = torch.exp(alpha_global * window_costs_norm)
    global_weight = softmax_weights / (softmax_weights.sum() + 1e-8)  # [seq_len]

    # ------ Calculate local update direction ------
    if self._local_direction_mode == self.LocalDirectionMode.RESAMPLE_SPLINE:
        window_indices = self._get_window_indices(seq_len, sliding_window, device=device)
        top_k_values, top_k_window_indices = self._get_top_k_windows(window_costs, window_indices, k=max_window_nums)
        local_direction = self._calculate_local_direction_from_resample_spline(joint_states,
                                                                               top_k_window_indices,
                                                                               alpha_local)
    elif self._local_direction_mode == self.LocalDirectionMode.RESAMPLE_WAYPOINT:
        window_indices = self._get_window_indices(seq_len, sliding_window, device=device)
        top_k_values, top_k_window_indices = self._top_k_windows_with_low_overlap(window_costs, window_indices, k=max_window_nums)
        local_direction = self._calculate_local_direction_from_resample_waypoint(joint_states,
                                                                                 top_k_window_indices,
                                                                                 alpha_local,
                                                                                 perturb_std=0.2)
    elif self._local_direction_mode == self.LocalDirectionMode.WAYPOINT:
        local_direction = self._calculate_local_direction_from_waypoint(cost_per_sample, joint_states, alpha_local)
    else:
        raise ValueError(f"Invalid local direction mode: {self._local_direction_mode}")

    # ------ Combine local direction and global weight for each waypoint ------
    # global_weight: [seq_len], local_direction: [seq_len, joint_dim]
    grads = global_weight.unsqueeze(-1) * local_direction  # [seq_len, joint_dim]

    # Fix the start and end waypoints (do not update).
    # Bug fix: grads is 2-D [seq_len, joint_dim]; the previous 3-D indexing
    # (grads[:, 0, :] / grads[:, -1, :]) raised IndexError at runtime.
    grads[0, :] = 0.0
    grads[-1, :] = 0.0

    return grads
|
| 794 |
+
|
| 795 |
+
def _sliding_window_sum(self, cost_per_sample, sliding_window_size):
|
| 796 |
+
# Add a channel dimension for conv1d: [batch, 1, seq_len]
|
| 797 |
+
cost = cost_per_sample.unsqueeze(1)
|
| 798 |
+
weight = torch.ones(1, 1, sliding_window_size, device=cost.device) #kernel[1,1,sliding_window]
|
| 799 |
+
# Pad both sides
|
| 800 |
+
if sliding_window_size % 2 == 1: # odd
|
| 801 |
+
pad_left = pad_right = sliding_window_size // 2
|
| 802 |
+
else: # even
|
| 803 |
+
pad_left = sliding_window_size // 2 - 1
|
| 804 |
+
pad_right = sliding_window_size // 2
|
| 805 |
+
cost_padded = F.pad(cost, (pad_left, pad_right), mode='constant', value=0)
|
| 806 |
+
window_sum = F.conv1d(cost_padded, weight) # output: [batch, 1, seq_len]
|
| 807 |
+
window_sum = window_sum.sum(dim=0).squeeze(0) # sum across batch dimension[seq_len]
|
| 808 |
+
return window_sum
|
| 809 |
+
|
| 810 |
+
def _get_window_indices(self, seq_len, window_size, fill_value=-1, device=None):
|
| 811 |
+
t = torch.arange(seq_len, device=device) # [seq_len]
|
| 812 |
+
half = window_size // 2
|
| 813 |
+
offsets = torch.arange(-half, window_size - half, device=device).view(1, -1)
|
| 814 |
+
window_indices = t.view(-1, 1) + offsets
|
| 815 |
+
window_indices = torch.where(
|
| 816 |
+
(window_indices >= 0) & (window_indices < seq_len),
|
| 817 |
+
window_indices,
|
| 818 |
+
torch.full_like(window_indices, fill_value)
|
| 819 |
+
)
|
| 820 |
+
return window_indices # [seq_len, window_size]
|
| 821 |
+
|
| 822 |
+
def _smooth_grads(grads: torch.Tensor, weight: float = 0.4) -> torch.Tensor:
|
| 823 |
+
"""
|
| 824 |
+
Apply a simple smoothing filter to the gradient updates by blending each waypoint with its immediate neighbors.
|
| 825 |
+
Args:
|
| 826 |
+
grads: torch.Tensor of shape [seq_len, joint_dim]
|
| 827 |
+
The gradient updates to be smoothed.
|
| 828 |
+
weight: float, optional (default=0.4)
|
| 829 |
+
Smoothing strength. 0 means no smoothing, higher values result in smoother gradients.
|
| 830 |
+
Returns:
|
| 831 |
+
grads_smooth: torch.Tensor of shape [seq_len, joint_dim]
|
| 832 |
+
The smoothed gradient updates.
|
| 833 |
+
"""
|
| 834 |
+
grads_smooth = grads.clone()
|
| 835 |
+
# Smooth all inner waypoints by blending them with the ir left and right neighbors
|
| 836 |
+
grads_smooth[1:-1] = (
|
| 837 |
+
(1 - weight) * grads[1:-1]
|
| 838 |
+
+ (weight / 2) * (grads[:-2] + grads[2:])
|
| 839 |
+
)
|
| 840 |
+
return grads_smooth
|
| 841 |
+
|
| 842 |
+
def _calculate_sdf_cost(self, sphere_poses_tensor, sphere_radii_tensor):
|
| 843 |
+
assert sphere_poses_tensor.shape[:3] == sphere_radii_tensor.shape
|
| 844 |
+
batch_size, seq_len, num_spheres, _ = sphere_poses_tensor.shape
|
| 845 |
+
sphere_positions_flat = einops.rearrange(sphere_poses_tensor, 'b t s d -> b (t s) d')
|
| 846 |
+
sdf_distance_center = self._sdf_model(sphere_positions_flat)
|
| 847 |
+
sdf_distance_center = einops.rearrange(sdf_distance_center, 'b (t s) -> b t s', b=batch_size, t=seq_len)
|
| 848 |
+
# Distance from sphere surface
|
| 849 |
+
sdf_distance_surface = sdf_distance_center - sphere_radii_tensor
|
| 850 |
+
|
| 851 |
+
alpha = 8.0
|
| 852 |
+
max_distance = self._max_distance # should be scalar
|
| 853 |
+
clipped_distance = torch.clamp_max(sdf_distance_surface, max_distance)
|
| 854 |
+
sdf_cost = torch.exp(alpha * (max_distance - clipped_distance)) - 1
|
| 855 |
+
sdf_cost = torch.where(sdf_cost > 0, sdf_cost, torch.zeros_like(sdf_cost))
|
| 856 |
+
# print("sdf_cost mean:", sdf_cost.mean())
|
| 857 |
+
# print("sdf_cost max:", sdf_cost.max())
|
| 858 |
+
# print("sdf_cost min:", sdf_cost.min())
|
| 859 |
+
|
| 860 |
+
# # Clip at max_distance
|
| 861 |
+
# max_distance = self._max_distance # should be scalar
|
| 862 |
+
# clipped_distance = torch.clamp_max(sdf_distance_surface, max_distance)
|
| 863 |
+
# sdf_cost = torch.clamp(max_distance - clipped_distance, min=0)
|
| 864 |
+
return sdf_cost
|
| 865 |
+
|
| 866 |
+
def _restructure_sphere_data(self,
                             batch_sphere_poses_list: List[List[Dict[str, List[np.ndarray]]]],
                             batch_sphere_radii_list: List[List[Dict[str, List[float]]]]):
    """
    Pack per-link sphere dictionaries into dense batched tensors.

    Args:
        batch_sphere_poses_list: [batch][time] dicts mapping link name ->
            list of 3-D sphere centers (np.ndarray).
        batch_sphere_radii_list: [batch][time] dicts mapping link name ->
            list of sphere radii.

    Returns:
        dict with:
            'all_sphere_poses': [batch, seq_len, total_spheres, 3]
            'all_sphere_radii': [batch, seq_len, total_spheres]
            'all_sphere_mask':  [batch, seq_len, total_spheres] bool; True
                for spheres on links in self._selected_links.

    NOTE(review): assumes every batch element and timestep shares the link
    layout of batch_sphere_poses_list[0][0] (same links, same sphere counts)
    — confirm with the kinematics solver that this holds.
    """
    batch_size = len(batch_sphere_poses_list)
    seq_len = len(batch_sphere_poses_list[0])
    # Link layout is taken from the first (batch, timestep) entry.
    spheres_per_link = {link: len(spheres) for link, spheres in batch_sphere_poses_list[0][0].items()}
    total_spheres = sum(spheres_per_link.values())

    def create_single_qpos_sphere(sphere_pose_list, sphere_radii_list):
        # Flatten one trajectory's per-link sphere dicts into dense tensors,
        # concatenating links in dict-iteration order.
        sphere_poses = torch.zeros((seq_len, total_spheres, 3), device=self.device, dtype=self.dtype)
        sphere_radii = torch.zeros((seq_len, total_spheres), device=self.device, dtype=self.dtype)
        for t in range(seq_len):
            idx = 0
            for link_name, spheres in sphere_pose_list[t].items():
                n = len(spheres)
                if n == 0:
                    continue
                radii = sphere_radii_list[t][link_name]
                assert n == len(radii), f"Mismatch: {n} spheres vs {len(radii)} radii for {link_name} at t={t}"
                sphere_poses[t, idx:idx+n] = torch.from_numpy(np.stack(spheres)).to(device=self.device, dtype=self.dtype)
                sphere_radii[t, idx:idx+n] = torch.tensor(radii, device=self.device, dtype=self.dtype)
                idx += n
            assert idx == total_spheres, f"Total spheres mismatch at t={t}"
        return sphere_poses, sphere_radii

    def create_sphere_mask(sphere_pose_list):
        # Mark spheres that belong to the links selected for cost filtering.
        mask = torch.zeros((seq_len, total_spheres), device=self.device, dtype=torch.bool)
        for t in range(seq_len):
            idx = 0
            for link_name, spheres in sphere_pose_list[t].items():
                n = len(spheres)
                if n == 0:
                    continue
                if hasattr(self, '_selected_links') and link_name in self._selected_links:
                    mask[t, idx:idx+n] = True
                idx += n
            assert idx == total_spheres, f"Total spheres mismatch at t={t}"
        return mask

    all_sphere_poses_tensor = torch.zeros((batch_size, seq_len, total_spheres, 3), device=self.device, dtype=self.dtype)
    all_sphere_radii_tensor = torch.zeros((batch_size, seq_len, total_spheres), device=self.device, dtype=self.dtype)
    # Create the mask only once, using the first batch's structure
    mask = create_sphere_mask(batch_sphere_poses_list[0])
    all_sphere_mask_tensor = mask.unsqueeze(0).expand(batch_size, -1, -1).clone()

    for b in range(batch_size):
        poses, radii = create_single_qpos_sphere(batch_sphere_poses_list[b], batch_sphere_radii_list[b])
        all_sphere_poses_tensor[b] = poses
        all_sphere_radii_tensor[b] = radii
        # No need to recompute mask for each batch

    return {
        'all_sphere_poses': all_sphere_poses_tensor,
        'all_sphere_radii': all_sphere_radii_tensor,
        'all_sphere_mask': all_sphere_mask_tensor,
    }
|
| 922 |
+
|
| 923 |
+
def _top_k_windows_with_low_overlap(self, window_costs: torch.Tensor,
|
| 924 |
+
window_indices: torch.Tensor,
|
| 925 |
+
k: int = 3):
|
| 926 |
+
"""
|
| 927 |
+
Returns up to top k non-overlapping windows (no two windows share more than 1 index).
|
| 928 |
+
"""
|
| 929 |
+
valid_mask = (window_indices >= 0).all(dim=1)
|
| 930 |
+
valid_costs = window_costs[valid_mask]
|
| 931 |
+
valid_indices = window_indices[valid_mask] # [num_valid, window_size]
|
| 932 |
+
|
| 933 |
+
# Sort windows by cost descending (or ascending as you need)
|
| 934 |
+
sorted_costs, sorted_idx = torch.topk(valid_costs, min(len(valid_costs), k*4)) # oversample for filter
|
| 935 |
+
sorted_indices = valid_indices[sorted_idx]
|
| 936 |
+
|
| 937 |
+
selected = []
|
| 938 |
+
selected_indices = []
|
| 939 |
+
|
| 940 |
+
for idx, indices in enumerate(sorted_indices):
|
| 941 |
+
indices_set = set(indices.tolist())
|
| 942 |
+
# Check overlap with all previously selected
|
| 943 |
+
overlap = False
|
| 944 |
+
for prev in selected_indices:
|
| 945 |
+
prev_set = set(prev.tolist())
|
| 946 |
+
if len(indices_set & prev_set) > 1:
|
| 947 |
+
overlap = True
|
| 948 |
+
break
|
| 949 |
+
if not overlap:
|
| 950 |
+
selected.append(sorted_costs[idx])
|
| 951 |
+
selected_indices.append(indices)
|
| 952 |
+
if len(selected) == k:
|
| 953 |
+
break
|
| 954 |
+
|
| 955 |
+
if selected:
|
| 956 |
+
return (torch.stack(selected), torch.stack(selected_indices))
|
| 957 |
+
else:
|
| 958 |
+
# No valid windows
|
| 959 |
+
return (
|
| 960 |
+
torch.empty(0, dtype=window_costs.dtype, device=window_costs.device),
|
| 961 |
+
torch.empty(0, window_indices.shape[1], dtype=window_indices.dtype, device=window_indices.device)
|
| 962 |
+
)
|
| 963 |
+
|
| 964 |
+
def _get_top_k_windows(self, window_costs: torch.Tensor,
|
| 965 |
+
window_indices: torch.Tensor,
|
| 966 |
+
k: int = 3):
|
| 967 |
+
"""
|
| 968 |
+
Returns top k windows where all indices are in [0, seq_len).
|
| 969 |
+
Handles cases where invalid indices may be -1 or any other out-of-bounds value.
|
| 970 |
+
"""
|
| 971 |
+
# valid_mask is True for windows where all indices are valid
|
| 972 |
+
valid_mask = (window_indices >= 0).all(dim=1)
|
| 973 |
+
# print("valid_mask:", valid_mask)
|
| 974 |
+
# print("window_indices.shape:", window_indices.shape)
|
| 975 |
+
# print("window_costs.shape:", window_costs.shape)
|
| 976 |
+
valid_costs = window_costs[valid_mask]
|
| 977 |
+
valid_indices = window_indices[valid_mask]
|
| 978 |
+
|
| 979 |
+
if valid_costs.numel() == 0:
|
| 980 |
+
return (
|
| 981 |
+
torch.empty(0, dtype=window_costs.dtype, device=window_costs.device),
|
| 982 |
+
torch.empty(0, *window_indices.shape[1:], dtype=window_indices.dtype, device=window_indices.device)
|
| 983 |
+
)
|
| 984 |
+
# Top-k over valid windows only
|
| 985 |
+
top_k = min(k, len(valid_costs))
|
| 986 |
+
top_k_values, top_k_idx = torch.topk(valid_costs, top_k)
|
| 987 |
+
top_k_window_indices = valid_indices[top_k_idx]
|
| 988 |
+
return top_k_values, top_k_window_indices
|
| 989 |
+
|
| 990 |
+
def reset(self):
    """Reset per-episode state; currently just clears the debug buffer."""
    self.reset_debug_state()
|
| 992 |
+
|
| 993 |
+
class TrajectoryRanker():
    """
    Rank and recombine candidate trajectories by the SDF collision cost of
    the robot's collision spheres against an observed point cloud.

    Trajectories arrive in the dataset's normalized space; they are
    unnormalized, converted to joint space (via IK for "ee" input), mapped
    to collision-sphere positions with forward kinematics, and scored with
    an exponential SDF cost.

    NOTE(review): _calculate_sdf_cost and _restructure_sphere_data here
    duplicate the earlier implementations in this file — consider sharing.
    """
    def __init__(self,
                 dataset,
                 robot_model,
                 get_current_qpos,
                 tensor_args):
        """ Initialize the TrajectoryRanker with dataset and tensor arguments."""
        self.dataset = dataset
        self.device = tensor_args['device']
        self.dtype = tensor_args['dtype']
        # Latest observed point cloud; set via update_observation().
        self._point_cloud = None
        # Activation distance [m] for the exponential SDF cost.
        self._max_distance = 0.20
        self._sdf_model = PointCloud_CSDF(sphere_radius=0.05,
                                          max_distance=None,
                                          device=self.device,
                                          pcd=None)
        self._kin_solver = AdvancedKinematicsSolver(robot_model)
        # Callable returning the robot's current joint positions (IK seed).
        self._get_current_qpos = get_current_qpos
        # self._selected_links = ["panda_link0", "panda_link1", "panda_link2", "panda_link3",
        #                         "panda_link4", "panda_link5", "panda_link6", "panda_link7",
        #                         "panda_link8", "panda_hand", "panda_leftfinger", "panda_rightfinger"]
        # Only spheres on these links contribute to the cost (see _filter_cost_by_mask).
        self._selected_links = ["panda_link5", "panda_link6", "panda_link7", "panda_link8",
                                "panda_hand", "panda_leftfinger", "panda_rightfinger"]

        # Dictionary to store debug values that can be accessed from outside
        self.debug_state = []

    def rank_trajectory(self, traj: torch.Tensor, traj_type: str = "joint"):
        """
        Unified entrypoint: sort trajectories by total SDF cost (ascending).

        Args:
            traj: [B, T, D] normalized trajectory (D = 7 for EE, D = joint_dim for joint)
            traj_type: "ee" or "joint"
        Returns:
            tiled_trajs: [B, T, D] trajectories sorted best-first (still in
            normalized space); invalid ones are dropped and the survivors
            are repeated to refill the original batch size.
        """
        traj, joint_traj, valid_mask = self._prepare_trajectory_inputs(
            traj,
            traj_type
        )

        original_B = valid_mask.shape[0]
        valid_B = traj.shape[0]  # after masking

        if valid_B == 0:
            raise RuntimeError("All trajectories invalid.")

        ranking_ids, sdf_cost = self._rank_joint_trajectory_by_cost(joint_traj)
        sorted_trajs = traj[ranking_ids]  # [valid_B, T, D]

        # Repeat fill: tile the survivors back up to the original batch size.
        repeat_factor = (original_B + valid_B - 1) // valid_B
        tiled_trajs = sorted_trajs.repeat((repeat_factor, 1, 1))[:original_B]

        return tiled_trajs

        # ranking_ids, sdf_cost = self._rank_joint_trajectory_by_cost(joint_traj)
        # print(f"Ranking IDs: {ranking_ids}")
        # total_cost = sdf_cost.sum(dim=(1, 2))
        # print(f"Total Cost: {total_cost[ranking_ids]}")
        # return ranking_ids, traj[ranking_ids] # return in normalized space

    def inpaint_trajectory(self, traj: torch.Tensor, traj_type: str = "joint",
                           num_windows: int = 4, top_k: int = 4):
        """
        Public interface: Inpaint/reassemble top-k ranked trajectories from windowed cost.

        Args:
            traj: [B, T, D] normalized trajectory
            traj_type: "joint" or "ee"
        Returns:
            filled_trajs: [B, T, D] recombined trajectories (normalized),
            tiled back up to the original batch size.
        """
        traj, joint_traj, valid_mask = self._prepare_trajectory_inputs(
            traj,
            traj_type
        )

        # Compute cost and call recombination logic
        ranking_ids, sdf_cost = self._rank_joint_trajectory_by_cost(joint_traj)
        recombined_trajs = self._reassemble_from_windows(traj, sdf_cost, num_windows, top_k)

        # Repeat and fill
        original_B = valid_mask.shape[0]
        valid_B = recombined_trajs.shape[0]

        if valid_B == 0:
            raise RuntimeError("All trajectories are invalid. Cannot inpaint.")

        repeat_factor = (original_B + valid_B - 1) // valid_B
        tiled_trajs = recombined_trajs.repeat((repeat_factor, 1, 1))
        filled_trajs = tiled_trajs[:original_B]
        return filled_trajs

    def keep_top_k_and_expand(self, x_sorted: torch.Tensor, k: int) -> torch.Tensor:
        """
        Args:
            x_sorted: [B, T, D] batch of trajectories sorted by cost (ascending)
            k: number of top trajectories to keep
        Returns:
            x_top: [B, T, D] — top-k repeated to fill B if k < B
        """
        B = x_sorted.shape[0]
        x_top_k = x_sorted[:k]  # [k, T, D]

        if k == B:
            return x_top_k

        # Repeat to fill B
        repeat_factor = (B + k - 1) // k  # ceil(B / k)
        x_repeated = x_top_k.repeat((repeat_factor, 1, 1))  # [ceil* k, T, D]
        x_padded = x_repeated[:B]  # Trim to [B, T, D]

        return x_padded

    def _prepare_trajectory_inputs(
        self,
        traj: torch.Tensor,
        traj_type: str,
    ) -> tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        """
        Unnormalize the trajectory and convert it to joint space.

        Args:
            traj: [B, T, D] normalized
            traj_type: "ee" or "joint"
        Returns:
            traj: filtered normalized traj [B', T, D]
            joint_traj: [B', T, joint_dim]
            valid_mask: [B] (per-trajectory IK validity; all True for "joint")
        """
        traj_unnorm = self.dataset.unnormalize_trajectory(traj.clone())

        if traj_type == "ee":
            B, T = traj_unnorm.shape[:2]
            # First 9 dims are position (3) + ortho6d rotation (6); convert to quat pose.
            ee_pose_ortho6d = traj_unnorm[..., :9].contiguous()
            ee_pose_quat = transform_ortho6d_to_quat(ee_pose_ortho6d.view(B * T, 9)).view(B, T, 7)
            joint_traj, valid_mask = self._batch_ik(ee_pose_quat)
        elif traj_type == "joint":
            joint_traj = traj_unnorm
            valid_mask = torch.ones(traj.shape[0], dtype=torch.bool, device=traj.device)
        else:
            raise ValueError(f"Unsupported traj_type: {traj_type}")
        # Fallback: if IK invalidated everything, keep all rather than return empty.
        if not valid_mask.any():
            print("\033[91m[Warning] All trajectories are invalid! Forcing valid_mask = all True.\033[0m")
            valid_mask = torch.ones_like(valid_mask, dtype=torch.bool)

        return traj[valid_mask], joint_traj[valid_mask], valid_mask

    def _reassemble_from_windows(self, traj: torch.Tensor, sdf_cost: torch.Tensor, num_windows: int, top_k: int):
        """
        Internal logic: select top-k windows based on cost and recombine.

        Args:
            traj: [B, T, D] normalized trajectory
            sdf_cost: [B, T, S] full sdf cost per trajectory
        Returns:
            recombined_trajs: [B, T, D] (normalized); at most B of the
            top_k**num_windows possible recombinations are returned.
        """
        B, T, D = traj.shape
        window_size = T // num_windows
        assert T % num_windows == 0, "Trajectory length T must be divisible by num_windows"

        top_segments_per_window = []

        for w in range(num_windows):
            t_start = w * window_size
            t_end = (w + 1) * window_size

            # Lowest-cost segments win within each time window.
            window_cost = sdf_cost[:, t_start:t_end, :].sum(dim=(1, 2))  # [B]
            top_k_ids = torch.argsort(window_cost, dim=0)[:top_k]
            top_k_segments = traj[top_k_ids, t_start:t_end, :]  # from normalized traj
            top_segments_per_window.append(top_k_segments)

        # Cartesian product recombination
        # NOTE(review): function-level imports kept as-is; top_k**num_windows
        # combinations are materialized, which grows fast — confirm the
        # intended operating range of (top_k, num_windows).
        import itertools
        import random

        B = traj.shape[0]  # original batch size
        all_combinations = list(itertools.product(*top_segments_per_window))
        random.shuffle(all_combinations)  # optional: for stochasticity

        # Select only the first B combinations
        selected_combinations = all_combinations[:B]
        recombined_trajs = [torch.cat(segments, dim=0) for segments in selected_combinations]
        recombined_trajs = torch.stack(recombined_trajs, dim=0)  # [B, T, D]

        return recombined_trajs.to(traj.device)

    def _rank_joint_trajectory_by_cost(self, joint_traj: torch.Tensor):
        """
        Score a joint trajectory batch with the SDF cost and sort it.

        Returns:
            ranking_ids: [B] indices sorted by total cost (ascending).
            sdf_cost: [B, T, S] masked per-sphere cost.
        """
        sphere_data = self._get_cartesian_positions(joint_traj)
        sdf_cost = self._calculate_sdf_cost(sphere_data['all_sphere_poses'], sphere_data['all_sphere_radii'])
        sdf_cost = self._filter_cost_by_mask(sdf_cost, sphere_data['all_sphere_mask'])
        # Record intermediate tensors for external inspection.
        debug_entry = DebugEntry(
            sphere_poses=sphere_data['all_sphere_poses'],
            sphere_radii=sphere_data['all_sphere_radii'],
            sdf_costs=sdf_cost,
        )
        self.debug_state.append(debug_entry)

        total_cost = sdf_cost.sum(dim=(1, 2))  # [B]
        ranking_ids = torch.argsort(total_cost, dim=0)
        return ranking_ids, sdf_cost

    def update_observation(self, obs):
        """Refresh the SDF model with the latest observed point cloud."""
        self._point_cloud = obs['point_cloud']
        self._sdf_model.update_pcd(self._point_cloud)

    def reset(self):
        """Reset per-episode state; currently just clears the debug buffer."""
        self.reset_debug_state()

    def reset_debug_state(self):
        # Drop all recorded DebugEntry objects.
        self.debug_state = []

    def _batch_ik(self, ee_trajectory: torch.Tensor, max_invalid: int = 3):
        """
        Convert end-effector trajectory to joint trajectory using IK.
        Input: ee_trajectory [B, T, 7]
        Output: joint_trajs [B, T, D], valid_mask [B] (False when a
        trajectory has more than max_invalid failed IK waypoints).
        """
        ee_np = ee_trajectory.detach().cpu().numpy()
        B, T = ee_np.shape[:2]
        joint_dim = len(self._kin_solver.joint_indices)
        q_trajs = np.zeros((B, T, joint_dim), dtype=np.float32)
        invalid_ik_counts = np.zeros(B, dtype=np.int32)  # count each trajectory invalid ik waypoints, if too much, remove it

        for b in range(B):
            last_valid_q = None
            for t in range(T):
                pose = ee_np[b, t]
                pose = sapien.Pose(pose[:3], pose[3:])

                if t == 0:
                    # Seed the first waypoint's IK with the robot's current joint state.
                    # ik_sol = self._kin_solver.compute_ik(target_pose=pose, initial_qpos=self._get_current_qpos().detach().cpu().numpy())
                    initial_qpos = self._get_current_qpos().detach().cpu().numpy().reshape(-1)
                    ik_sol = self._kin_solver.compute_ik(target_pose=pose, initial_qpos=initial_qpos)
                else:
                    # Seed subsequent waypoints with the previous solution for continuity.
                    ik_sol = self._kin_solver.compute_ik(target_pose=pose, initial_qpos=last_valid_q)

                if ik_sol is None:

                    # print(f"[IK FAIL] Batch {b}, t={t}, pose={pose}")
                    # On failure, reuse the last valid solution (or zeros at t=0).
                    invalid_ik_counts[b] += 1
                    ik_sol = last_valid_q.copy() if last_valid_q is not None else np.zeros(joint_dim, dtype=np.float32)

                q_trajs[b, t] = ik_sol
                last_valid_q = ik_sol

        valid_mask = invalid_ik_counts <= max_invalid
        # print(f"Invalid IK counts: {invalid_ik_counts}, valid mask: {valid_mask}")
        return torch.tensor(q_trajs, device=self.device, dtype=self.dtype), torch.from_numpy(valid_mask).to(self.device)

    def _filter_cost_by_mask(self, cost, sphere_mask):
        # Zero out cost for spheres not on the selected links.
        filtered_cost = cost * sphere_mask
        return filtered_cost

    def _calculate_sdf_cost(self, sphere_poses_tensor, sphere_radii_tensor):
        """
        Exponential collision cost from SDF distances of collision spheres.

        Args:
            sphere_poses_tensor: [batch, seq_len, num_spheres, 3]
            sphere_radii_tensor: [batch, seq_len, num_spheres]
        Returns:
            sdf_cost: [batch, seq_len, num_spheres]; zero beyond
            self._max_distance, growing exponentially as spheres approach.
        """
        assert sphere_poses_tensor.shape[:3] == sphere_radii_tensor.shape
        batch_size, seq_len, num_spheres, _ = sphere_poses_tensor.shape
        sphere_positions_flat = einops.rearrange(sphere_poses_tensor, 'b t s d -> b (t s) d')
        sdf_distance_center = self._sdf_model(sphere_positions_flat)
        sdf_distance_center = einops.rearrange(sdf_distance_center, 'b (t s) -> b t s', b=batch_size, t=seq_len)
        # Distance from sphere surface
        sdf_distance_surface = sdf_distance_center - sphere_radii_tensor

        alpha = 8.0  # steepness of the exponential penalty
        max_distance = self._max_distance  # should be scalar
        clipped_distance = torch.clamp_max(sdf_distance_surface, max_distance)
        sdf_cost = torch.exp(alpha * (max_distance - clipped_distance)) - 1
        sdf_cost = torch.where(sdf_cost > 0, sdf_cost, torch.zeros_like(sdf_cost))
        # print("sdf_cost mean:", sdf_cost.mean())
        # print("sdf_cost max:", sdf_cost.max())
        # print("sdf_cost min:", sdf_cost.min())
        return sdf_cost

    def _get_cartesian_positions(self, joint_states):
        """Run FK per (batch, timestep) and pack sphere data into tensors."""
        # what is the best way to get end-effector poses? shall I wrap the restructure function in this?
        if isinstance(joint_states, torch.Tensor):  # this is not efficient, but let's keep it for now for MVP
            joint_states = joint_states.detach().cpu().numpy()
        sphere_poses_list = []
        sphere_radii_list = []
        for b in range(joint_states.shape[0]):
            batch_sphere_poses = []
            batch_sphere_radii = []
            for t in range(joint_states.shape[1]):
                sphere_poses, sphere_radii = self._kin_solver.calculate_all_sphere_positions_and_radii(joint_states[b, t, :])
                batch_sphere_poses.append(sphere_poses)
                batch_sphere_radii.append(sphere_radii)
            sphere_poses_list.append(batch_sphere_poses)
            sphere_radii_list.append(batch_sphere_radii)

        restructured_data = self._restructure_sphere_data(sphere_poses_list, sphere_radii_list)
        return restructured_data

    def _restructure_sphere_data(self,
                                 batch_sphere_poses_list: List[List[Dict[str, List[np.ndarray]]]],
                                 batch_sphere_radii_list: List[List[Dict[str, List[float]]]]):
        """
        Pack per-link sphere dictionaries into dense batched tensors.

        Returns a dict with 'all_sphere_poses' [B, T, S, 3],
        'all_sphere_radii' [B, T, S], and 'all_sphere_mask' [B, T, S]
        (True for spheres on self._selected_links).

        NOTE(review): assumes all batches/timesteps share the link layout
        of batch_sphere_poses_list[0][0] — confirm this holds.
        """
        batch_size = len(batch_sphere_poses_list)
        seq_len = len(batch_sphere_poses_list[0])
        spheres_per_link = {link: len(spheres) for link, spheres in batch_sphere_poses_list[0][0].items()}
        total_spheres = sum(spheres_per_link.values())

        def create_single_qpos_sphere(sphere_pose_list, sphere_radii_list):
            # Flatten one trajectory's per-link sphere dicts into dense tensors.
            sphere_poses = torch.zeros((seq_len, total_spheres, 3), device=self.device, dtype=self.dtype)
            sphere_radii = torch.zeros((seq_len, total_spheres), device=self.device, dtype=self.dtype)
            for t in range(seq_len):
                idx = 0
                for link_name, spheres in sphere_pose_list[t].items():
                    n = len(spheres)
                    if n == 0:
                        continue
                    radii = sphere_radii_list[t][link_name]
                    assert n == len(radii), f"Mismatch: {n} spheres vs {len(radii)} radii for {link_name} at t={t}"
                    sphere_poses[t, idx:idx+n] = torch.from_numpy(np.stack(spheres)).to(device=self.device, dtype=self.dtype)
                    sphere_radii[t, idx:idx+n] = torch.tensor(radii, device=self.device, dtype=self.dtype)
                    idx += n
                assert idx == total_spheres, f"Total spheres mismatch at t={t}"
            return sphere_poses, sphere_radii

        def create_sphere_mask(sphere_pose_list):
            # Mark spheres that belong to links selected for cost filtering.
            mask = torch.zeros((seq_len, total_spheres), device=self.device, dtype=torch.bool)
            for t in range(seq_len):
                idx = 0
                for link_name, spheres in sphere_pose_list[t].items():
                    n = len(spheres)
                    if n == 0:
                        continue
                    if hasattr(self, '_selected_links') and link_name in self._selected_links:
                        mask[t, idx:idx+n] = True
                    idx += n
                assert idx == total_spheres, f"Total spheres mismatch at t={t}"
            return mask

        all_sphere_poses_tensor = torch.zeros((batch_size, seq_len, total_spheres, 3), device=self.device, dtype=self.dtype)
        all_sphere_radii_tensor = torch.zeros((batch_size, seq_len, total_spheres), device=self.device, dtype=self.dtype)
        # Create the mask only once, using the first batch's structure
        mask = create_sphere_mask(batch_sphere_poses_list[0])
        all_sphere_mask_tensor = mask.unsqueeze(0).expand(batch_size, -1, -1).clone()

        for b in range(batch_size):
            poses, radii = create_single_qpos_sphere(batch_sphere_poses_list[b], batch_sphere_radii_list[b])
            all_sphere_poses_tensor[b] = poses
            all_sphere_radii_tensor[b] = radii
            # No need to recompute mask for each batch

        return {
            'all_sphere_poses': all_sphere_poses_tensor,
            'all_sphere_radii': all_sphere_radii_tensor,
            'all_sphere_mask': all_sphere_mask_tensor,
        }
|
| 1343 |
+
|
| 1344 |
+
|
| 1345 |
+
|
| 1346 |
+
class GuideManagerJacobian(GuideManager):
    """
    Jacobian-based SDF guidance over joint-state trajectories.

    This class is not working, keep here in case we need it later.

    Maps joint-space trajectories to collision-sphere positions via forward
    kinematics, evaluates a point-cloud SDF at each sphere, and pulls the SDF
    gradient back into joint space through per-sphere jacobians.
    """
    def __init__(self,
                 dataset,
                 robot_model,
                 tensor_args,
                 clip_grad=False, clip_grad_rule='norm',
                 max_grad_norm=1.0, max_grad_value=1.0,
                 guidance_weight=0.15
                 ):
        super().__init__(dataset,
                         tensor_args,
                         clip_grad, clip_grad_rule,
                         max_grad_norm, max_grad_value,
                         guidance_weight)
        # Point cloud of obstacles; filled in by update_observation().
        self._point_cloud = None
        self._sdf_model = PointCloud_CSDF(sphere_radius=0.10, max_distance=0.04, device=self.device, pcd=None)
        # print("GUIDANCE STRENGTH: ", self.guidance_weight)
        self._advanced_kinematics_solver = AdvancedKinematicsSolver(robot_model)
        # self._selected_links = ["panda_link0", "panda_link1", "panda_link2", "panda_link3",
        # "panda_link4", "panda_link5", "panda_link6", "panda_link7", "panda_link8", "panda_hand"]
        # Only spheres belonging to these links contribute to the guidance mask.
        self._selected_links = ["panda_link5", "panda_link6", "panda_link7", "panda_link8", "panda_hand", "panda_leftfinger", "panda_rightfinger"]
        # self._selected_links = ["panda_hand"]

        # all the links in the robot: "panda_link0", "panda_link1", "panda_link2", "panda_link3",
        # "panda_link4", "panda_link5", "panda_link6", "panda_link7", "panda_link8", "panda_hand"
        # panda_leftfinger, panda_rightfinger, camera_base_link, camera_link
        # Dictionary to store debug values that can be accessed from outside
        self.debug_state = []

    def update_observation(self, obs):
        # Refresh the SDF model with the latest observed point cloud.
        # NOTE(review): assumes obs contains a 'point_cloud' entry — confirm against caller.
        self._point_cloud = obs['point_cloud']
        self._sdf_model.update_pcd(self._point_cloud)

    def forward(self, x_normalized):
        '''
        In this class, x has to be joint states, not end-effector poses
        x_normalized: [batch, length, joint_dim]

        Define the pipeline before heading to submodules
        1. calculate sdf on each sphere
            a. calculate sphere position
            b. get direction based on each sphere
        2. derivative each sphere's direction to delta joint
            a. get jacobian of each sphere based on current joint position (x)
            b. calculate derivative
        3. add joint derivative to each joint
        '''
        # start_time = time.time()
        x_norm = x_normalized.clone() # ⚠️ IMPORTANT: x should be joint states, not end-effector poses
        x = self.dataset.unnormalize_trajectory(x_norm)

        # Convert tensor x to numpy array for sphere & jacobian calculation
        x_np = x.detach().cpu().numpy() #TODO: this copy waste resouce, but...will find a better way after MVP works
        # x shape: [batch, length, joint_dim], joint_dim = 9
        # CPU version, only choose the first batch
        sphere_poses_list = []
        sphere_radii_list = []
        sphere_jacobians_list = []
        # NOTE(review): only batch index 0 is processed here, even though x may
        # have batch > 1 — confirm this is intentional (see the comment above).
        for i in range(x_np.shape[1]):
            sphere_poses, sphere_radii = self._advanced_kinematics_solver.calculate_all_sphere_positions_and_radii(x_np[0, i, :]) # un-normalized x_norm, sample spheres
            sphere_jacobians = self._advanced_kinematics_solver.calculate_all_sphere_jacobian(x_np[0, i, :])
            sphere_poses_list.append(sphere_poses)
            sphere_radii_list.append(sphere_radii)
            sphere_jacobians_list.append(sphere_jacobians)

        self.debug_state.append({
            'sphere_poses': sphere_poses_list,
            'sphere_radii': sphere_radii_list,
        })

        joint_guidance = self._calculate_sdf_guidance(sphere_poses_list, sphere_radii_list, sphere_jacobians_list) #[batch, length, joint_dim]

        # Calculate statistics for each joint dimension (debug only; results unused)
        joint_guidance_mean = torch.mean(joint_guidance, dim=(0,1)) # Average across batch and time
        joint_guidance_std = torch.std(joint_guidance, dim=(0,1)) # Std across batch and time
        joint_guidance_max = torch.amax(joint_guidance, dim=(0,1))
        joint_guidance_min = torch.amin(joint_guidance, dim=(0,1))

        # print("\nJoint Guidance Statistics (UnNormalized):")
        # if torch.any(joint_guidance_mean != 0):
        #     print(f"Mean per joint: {joint_guidance_mean}")
        # if torch.any(joint_guidance_std != 0):
        #     print(f"Std per joint: {joint_guidance_std}")
        # if torch.any(joint_guidance_max != 0):
        #     print(f"Max per joint: {joint_guidance_max}")
        # if torch.any(joint_guidance_min != 0):
        #     print(f"Min per joint: {joint_guidance_min}")

        # NOTE(review): the normalized guidance below is computed but the raw
        # (unnormalized) guidance is returned — confirm which one is intended.
        joint_guidance_normalized = self.dataset.normalizer.normalize(joint_guidance, self.dataset.data_key_traj)

        # print("\nJoint Guidance Statistics (Normalized):")
        joint_guidance_norm_mean = torch.mean(joint_guidance_normalized, dim=(0,1))
        joint_guidance_norm_std = torch.std(joint_guidance_normalized, dim=(0,1))
        joint_guidance_norm_max = torch.amax(joint_guidance_normalized, dim=(0,1))
        joint_guidance_norm_min = torch.amin(joint_guidance_normalized, dim=(0,1))

        # if torch.any(joint_guidance_norm_mean != 0):
        #     print(f"Mean per joint: {joint_guidance_norm_mean}")
        # if torch.any(joint_guidance_norm_std != 0):
        #     print(f"Std per joint: {joint_guidance_norm_std}")
        # if torch.any(joint_guidance_norm_max != 0):
        #     print(f"Max per joint: {joint_guidance_norm_max}")
        # if torch.any(joint_guidance_norm_min != 0):
        #     print(f"Min per joint: {joint_guidance_norm_min}")

        # end_time = time.time()
        # print(f"GuideManagerJointStates.forward execution time: {end_time - start_time:.4f} seconds")
        # return joint_guidance_normalized
        return self.guidance_weight*joint_guidance

    def _calculate_sdf_guidance(self,
                                sphere_poses_list: List[Dict[str, List[np.ndarray]]],
                                sphere_radii_list: List[Dict[str, List[float]]],
                                sphere_jacobians_list: List[Dict[str, List[np.ndarray]]]):
        # Evaluate the SDF at every sphere and convert its spatial gradient into
        # a per-timestep joint-space guidance term.
        batch_size = 1

        restructured_data = self._restructure_sphere_data(batch_size, sphere_poses_list, sphere_jacobians_list, sphere_radii_list)

        # calculate sdf distance
        all_sphere_positions = restructured_data['sphere_poses']
        # reshape to [batch, seq_len*spheres_per_timestep, 3]
        all_sphere_positions = einops.rearrange(all_sphere_positions, 'b s n d -> b (s n) d')

        # Only set requires_grad for the positions tensor
        all_sphere_positions.requires_grad_(True)

        # print("all_sphere_positions shape: ", all_sphere_positions.shape)
        with torch.enable_grad():
            sdf_values = self._sdf_model(all_sphere_positions) #[batch, seq_len*spheres_per_timestep]
            # Reshape sdf_values back to [batch, seq_len, spheres_per_timestep]
            # sdf_values = einops.rearrange(sdf_values, 'b (s n) -> b s n',
            #                               b=batch_size, # batch size
            #                               s=len(sphere_poses_list)) # sequence length
            # print("sdf_values shape: ", sdf_values.shape)

            # Compute gradients only for the positions
            sdf_values.sum().backward()
        sdf_gradient = all_sphere_positions.grad # Get gradient from positions tensor, shape: [batch, seq_len*spheres_per_timestep, 3]

        sdf_gradient = einops.rearrange(sdf_gradient, 'b (s n) d -> b s n d',
                                        b=batch_size, # batch size
                                        s=len(sphere_poses_list)) # sequence length

        sphere_mask = restructured_data['sphere_mask']
        # print("sdf gradient shape: ", sdf_gradient.shape)
        # print("sphere mask shape: ", sphere_mask.shape)
        # Zero out gradients for spheres of links not in _selected_links.
        sdf_gradient = sdf_gradient * sphere_mask.unsqueeze(-1)

        # Down-weight large gradients: weight decays exponentially with gradient norm.
        alpha = 2.5
        norms = torch.norm(sdf_gradient, dim=-1, keepdim=True)
        weights = torch.exp(-alpha * norms)
        # print("weights shape: ", weights.shape)
        # print("sdf_gradient shape: ", sdf_gradient.shape)
        sdf_gradient_weighted = sdf_gradient * weights
        sdf_gradient_weighted = torch.clamp(sdf_gradient_weighted, min=-0.1, max=0.1)

        # Debug feature: zero out gradients for sphere indices <= 10
        # self.debug_only_spheres_gt_10 = True
        # if hasattr(self, 'debug_only_spheres_gt_10') and self.debug_only_spheres_gt_10:
        #     mask = torch.zeros_like(sdf_gradient_weighted)
        #     mask[..., 19:, :] = 1 # keep only spheres with index > 10
        #     sdf_gradient_weighted = sdf_gradient_weighted * mask

        self.debug_state.append({
            'sdf_gradients': sdf_gradient_weighted,
        })

        # Only rearrange if not already 5D
        if sdf_gradient_weighted.dim() == 4:
            sdf_gradient_weighted = einops.rearrange(sdf_gradient_weighted, 'b t s d -> b t s 1 d')

        joint_guidance = self._calculate_joint_guidance_from_sphere_sdf(restructured_data, sdf_gradient_weighted)
        return joint_guidance

    def _restructure_sphere_data(self,
                                 batch_size: int,
                                 sphere_poses_list: List[Dict[str, List[np.ndarray]]],
                                 sphere_jacobians_list: List[Dict[str, List[np.ndarray]]],
                                 sphere_radii_list: List[Dict[str, List[float]]]) -> Dict[str, torch.Tensor]:
        # Pack per-timestep {link -> [spheres]} dicts into dense batched tensors.
        # NOTE(review): only batch index 0 is ever written even though tensors are
        # allocated with batch_size — consistent with the batch-0-only pipeline above.
        seq_len = len(sphere_poses_list)
        # Get unique link names and create mappings
        link_names = list(sphere_poses_list[0].keys())

        # Count total spheres per link across all timesteps
        # (assumes the sphere layout at t=0 is the same for all timesteps)
        spheres_per_link = {link: len(spheres) for link, spheres in sphere_poses_list[0].items()}
        total_spheres = sum(spheres_per_link.values())

        # Create link to sphere mapping
        # link_to_spheres = {}
        # current_idx = 0
        # for link, num_spheres in spheres_per_link.items():
        #     link_to_spheres[link] = list(range(current_idx, current_idx + num_spheres))
        #     current_idx += num_spheres

        # Initialize tensors
        sphere_poses = torch.zeros((batch_size, seq_len, total_spheres, 3),
                                   device=self.device, dtype=self.dtype)
        jacobians = torch.zeros((batch_size, seq_len, total_spheres, 6, sphere_jacobians_list[0][link_names[0]][0].shape[-1]),
                                device=self.device, dtype=self.dtype)
        sphere_radii = torch.zeros((batch_size, seq_len, total_spheres),
                                   device=self.device, dtype=self.dtype)
        sphere_mask = torch.zeros((batch_size, seq_len, total_spheres),
                                  device=self.device, dtype=torch.bool)

        # Fill tensors
        for t in range(seq_len):
            current_sphere_idx = 0
            for link_name, spheres in sphere_poses_list[t].items():
                if len(spheres) == 0:
                    continue

                # Convert positions to tensor
                spheres_tensor = torch.from_numpy(np.stack(spheres)).to(
                    device=self.device, dtype=self.dtype)
                sphere_poses[0, t, current_sphere_idx:current_sphere_idx + len(spheres)] = spheres_tensor

                # Get corresponding jacobians
                link_jacobians = sphere_jacobians_list[t][link_name]
                jacobians[0, t, current_sphere_idx:current_sphere_idx + len(spheres)] = torch.from_numpy(
                    np.stack(link_jacobians)
                ).to(device=self.device, dtype=self.dtype)

                # Get corresponding radii
                radii = sphere_radii_list[t][link_name]
                sphere_radii[0, t, current_sphere_idx:current_sphere_idx + len(spheres)] = torch.tensor(
                    radii, device=self.device, dtype=self.dtype)

                # Set mask to True if link is in selected_links
                if hasattr(self, '_selected_links') and link_name in self._selected_links:
                    sphere_mask[0, t, current_sphere_idx:current_sphere_idx + len(spheres)] = True

                current_sphere_idx += len(spheres)

        return {
            'sphere_poses': sphere_poses, # [batch, timestep, sphere_number, 3]
            'jacobians': jacobians, # [batch, timestep, sphere_number, 6, n_joints]
            'sphere_radii': sphere_radii, # [batch, timestep, sphere_number]
            'sphere_mask': sphere_mask # [batch, timestep, sphere_number]
        }

    def _calculate_joint_guidance_from_sphere_sdf(self,
                                                  restructured_data: Dict[str, torch.Tensor],
                                                  sdf_gradient_weighted: torch.Tensor) -> torch.Tensor:
        """
        Calculate joint guidance from sphere SDF gradients using parallel computation.

        Returns:
            Joint guidance tensor of shape [batch, timestep, num_joints]
            (clamped elementwise to [-0.1, 0.1]).
        """
        # Extract data
        sphere_jacobians = restructured_data['jacobians'] # [batch, timestep, sphere_number, 6, n_joints]
        sphere_mask = restructured_data['sphere_mask'] # unused here; masking already applied upstream

        # Keep only the positional (translational) rows of the 6-DoF jacobian.
        position_jacobian = sphere_jacobians[:, :, :, :3, :] # [batch, timestep, sphere_number, 3, 9]
        # From [batch, timestep, sphere_number, 3] to [batch, timestep, sphere_number, 1, 3]

        # Print statistics about sdf_gradients_unsq
        # print("SDF gradients shape:", sdf_gradient_weighted.shape)
        # print("SDF gradients mean (per component):", sdf_gradient_weighted.mean(dim=(0,1,2)))
        # print("SDF gradients std (per component):", sdf_gradient_weighted.std(dim=(0,1,2)))
        # print("SDF gradients min (per component):", sdf_gradient_weighted.amin(dim=(0,1,2)))
        # print("SDF gradients max (per component):", sdf_gradient_weighted.amax(dim=(0,1,2)))

        # The multiplication / mask stuff here needs to be double checked, I don't think it is correct
        # 2. Matrix multiply (no einops needed here)
        joint_grads = torch.matmul(sdf_gradient_weighted, position_jacobian) # [b, t, s, 1, j]

        # 3. Remove singleton (now [b, t, s, j])
        joint_grads = einops.rearrange(joint_grads, 'b t s 1 j -> b t s j')

        # a no mask version
        # Sum over sphere_number (axis s) to get final joint deltas:
        delta_joint_states = einops.reduce(joint_grads, 'b t s j -> b t j', 'sum')

        delta_joint_states = torch.clamp(delta_joint_states, min=-0.1, max=0.1)

        # # 4. Mask broadcast ([s, j] -> [1, 1, s, j])
        # # TODO: jacobian mask
        # mask_broadcast = einops.rearrange(mask, 's j -> 1 1 s j')
        # joint_grads = joint_grads * mask_broadcast

        return delta_joint_states
|
| 1634 |
+
|
| 1635 |
+
|
| 1636 |
+
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/helpers.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Adapted from https://github.com/joaoamcarvalho/mpd-public
|
| 3 |
+
"""
|
| 4 |
+
import numpy as np
|
| 5 |
+
import torch
|
| 6 |
+
from torch import nn
|
| 7 |
+
import torch.nn.functional as F
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
#-----------------------------------------------------------------------------#
|
| 11 |
+
#---------------------------- variance schedules -----------------------------#
|
| 12 |
+
#-----------------------------------------------------------------------------#
|
| 13 |
+
|
| 14 |
+
def linear_beta_schedule(n_diffusion_steps, beta_start=0.0001, beta_end=0.02):
    """Linearly increasing noise variances from beta_start to beta_end."""
    schedule = torch.linspace(beta_start, beta_end, n_diffusion_steps)
    return schedule
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def quadratic_beta_schedule(n_diffusion_steps, beta_start=0.0001, beta_end=0.02):
    """Quadratic schedule: betas are the squares of a linear ramp in sqrt-space."""
    sqrt_ramp = torch.linspace(beta_start ** 0.5, beta_end ** 0.5, n_diffusion_steps)
    return sqrt_ramp ** 2
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def sigmoid_beta_schedule(n_diffusion_steps, beta_start=0.0001, beta_end=0.02):
    """S-shaped schedule: a sigmoid over [-6, 6] rescaled to [beta_start, beta_end]."""
    grid = torch.linspace(-6, 6, n_diffusion_steps)
    scaled = torch.sigmoid(grid) * (beta_end - beta_start)
    return scaled + beta_start
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def cosine_beta_schedule(n_diffusion_steps, s=0.008, a_min=0, a_max=0.999, dtype=torch.float32):
    """
    cosine schedule
    as proposed in https://openreview.net/forum?id=-NEXDKk8gZ

    Derives betas from the ratio of consecutive cumulative alphas of a squared
    cosine curve, then clips to [a_min, a_max].
    """
    steps = n_diffusion_steps + 1
    grid = np.linspace(0, steps, steps)
    cum_alphas = np.cos(((grid / steps) + s) / (1 + s) * np.pi * 0.5) ** 2
    # Normalize so the cumulative product starts at 1.
    cum_alphas = cum_alphas / cum_alphas[0]
    raw_betas = 1 - (cum_alphas[1:] / cum_alphas[:-1])
    clipped = np.clip(raw_betas, a_min=a_min, a_max=a_max)
    return torch.tensor(clipped, dtype=dtype)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def exponential_beta_schedule(n_diffusion_steps, beta_start=1e-4, beta_end=1.0):
    """Exponentially increasing noise from t=0 to t=T, capped at beta_end."""
    grid = torch.linspace(0, n_diffusion_steps, n_diffusion_steps)
    start = torch.tensor(beta_start)
    end = torch.tensor(beta_end)
    # Growth rate chosen so the (unclamped) curve reaches beta_end at t=T.
    rate = 1 / n_diffusion_steps * torch.log(end / start)
    betas = start * torch.exp(rate * grid)
    # Ensure values don't exceed beta_end
    return torch.clamp(betas, max=beta_end)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def constant_fraction_beta_schedule(n_diffusion_steps):
    """Schedule beta_t = 1 / (T - t + 1): rises toward 1 as t approaches T."""
    grid = torch.linspace(0, n_diffusion_steps, n_diffusion_steps)
    denominator = n_diffusion_steps - grid + 1
    return 1 / denominator
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def variance_preserving_beta_schedule(n_diffusion_steps, beta_start=1e-4, beta_end=1.0):
    """
    Variance-preserving schedule.

    Works only with a small number of diffusion steps.
    References:
      https://arxiv.org/abs/2112.07804
      https://openreview.net/pdf?id=AHvFDPi-FA
    """
    grid = torch.linspace(0, n_diffusion_steps, n_diffusion_steps)
    exponent = -beta_start*(1/n_diffusion_steps) - 0.5*(beta_end-beta_start)*(2*grid-1)/(n_diffusion_steps**2)
    alphas = torch.exp(exponent)
    return 1 - alphas
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
#-----------------------------------------------------------------------------#
|
| 70 |
+
#---------------------------------- losses -----------------------------------#
|
| 71 |
+
#-----------------------------------------------------------------------------#
|
| 72 |
+
|
| 73 |
+
class WeightedLoss(nn.Module):
    """
    Base class for elementwise losses with optional per-element weighting.

    Subclasses implement ``_loss(pred, targ)`` returning an unreduced tensor;
    ``forward`` applies the (optional) weights and reduces with a mean.
    """

    def __init__(self, weights=None):
        super().__init__()
        # Registered as a buffer so the weights follow the module across devices.
        self.register_buffer('weights', weights)

    def forward(self, pred, targ):
        '''
        pred, targ : tensor
            [ batch_size x horizon x transition_dim ]
        '''
        elementwise = self._loss(pred, targ)
        if self.weights is None:
            reduced = elementwise.mean()
        else:
            reduced = (elementwise * self.weights).mean()
        return reduced, {}
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class WeightedL1(WeightedLoss):
    """Weighted mean absolute error; reduction happens in WeightedLoss.forward."""

    def _loss(self, pred, targ):
        # Elementwise absolute difference.
        return (pred - targ).abs()
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class WeightedL2(WeightedLoss):
    """Weighted mean squared error; reduction happens in WeightedLoss.forward."""

    def _loss(self, pred, targ):
        # Per-element squared error (no reduction here).
        return F.mse_loss(pred, targ, reduction='none')
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
# Registry mapping loss-name strings to their loss classes.
Losses = {
    'l1': WeightedL1,
    'l2': WeightedL2,
}
|
| 108 |
+
|
| 109 |
+
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/mlp_model.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from torch import nn
|
| 2 |
+
|
| 3 |
+
import numpy as np
|
| 4 |
+
import torch
|
| 5 |
+
import torch.nn as nn
|
| 6 |
+
import torch.nn.functional as F
|
| 7 |
+
from einops.layers.torch import Rearrange
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
## Fully connected Neural Network block - Multi Layer Perceptron
|
| 11 |
+
class MLP(nn.Module):
    """
    Fully connected feed-forward block.

    Architecture: Linear(in_dim -> hidden_dim) + activation, then ``n_layers``
    repetitions of [Linear(hidden_dim -> hidden_dim), optional BatchNorm1d,
    activation], and a final Linear(hidden_dim -> out_dim).
    """
    def __init__(self, in_dim, out_dim, hidden_dim=16, n_layers=1, act='relu', batch_norm=True):
        super(MLP, self).__init__()
        # Supported activation names -> module classes.
        activations = {'relu': nn.ReLU, 'sigmoid': nn.Sigmoid, 'tanh': nn.Tanh, 'leaky_relu': nn.LeakyReLU,
                       'elu': nn.ELU, 'prelu': nn.PReLU, 'softplus': nn.Softplus, 'mish': nn.Mish,
                       'identity': nn.Identity
                       }

        act_cls = activations[act]
        modules = [nn.Linear(in_dim, hidden_dim), act_cls()]
        for _ in range(n_layers):
            modules.append(nn.Linear(hidden_dim, hidden_dim))
            modules.append(nn.BatchNorm1d(hidden_dim) if batch_norm else nn.Identity())
            modules.append(act_cls())
        modules.append(nn.Linear(hidden_dim, out_dim))

        self._network = nn.Sequential(*modules)

    def forward(self, x):
        """Apply the network to x."""
        return self._network(x)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class MLPModel(nn.Module):
    """
    Dict-in / dict-out wrapper around MLP.

    Reads the tensor stored under ``input_field`` from the input mapping and
    returns ``{output_field: net(x)}``. Extra kwargs are forwarded to MLP.
    ``pretrained_dir`` is accepted for interface compatibility but unused here.
    """
    def __init__(self, in_dim=16, out_dim=16, input_field='x',
                 output_field='y',
                 pretrained_dir=None,
                 **kwargs):
        super().__init__()

        self.input_field = input_field
        self.output_field = output_field

        self.in_dim = in_dim
        self.out_dim = out_dim

        self._net = MLP(in_dim, out_dim, **kwargs)

    def forward(self, input):
        features = input[self.input_field]
        prediction = self._net(features)
        return {self.output_field: prediction}
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/sample_functions.py
ADDED
|
@@ -0,0 +1,286 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from matplotlib import pyplot as plt
|
| 3 |
+
import time
|
| 4 |
+
from cfdp.diffusion_policy.utils.utils import to_numpy
|
| 5 |
+
|
| 6 |
+
def apply_hard_conditioning(x, conditions):
    """
    Overwrite fixed timesteps of x in place with their conditioning values.

    Args:
        x: tensor [batch, horizon, dim], modified in place.
        conditions: mapping {timestep index -> value tensor}.
    Returns:
        The same tensor x (mutated).
    """
    for timestep, value in conditions.items():
        # print("x: ", x.shape)
        # print("val: ", val.shape)
        x[:, timestep, :] = value.clone()
    return x
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def extract(a, t, x_shape):
    """
    Gather per-timestep coefficients from a and reshape for broadcasting.

    Returns a[t] reshaped to [batch, 1, ..., 1] with as many trailing singleton
    dims as x_shape has beyond the batch dim.
    """
    batch = t.shape[0]
    gathered = a.gather(-1, t)
    trailing = (1,) * (len(x_shape) - 1)
    return gathered.reshape(batch, *trailing)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@torch.no_grad()
def ddpm_sample_fn(
        model, x, hard_conds, context, t,
        guide=None,
        n_guide_steps=1,
        scale_grad_by_std=False,
        t_start_guide=torch.inf,
        noise_std_extra_schedule_fn=None, # 'linear'
        debug=False,
        **kwargs
):
    """
    One reverse DDPM sampling step with optional gradient guidance.

    Computes the posterior mean for timestep t, optionally applies guidance
    gradient steps once t has dropped below t_start_guide, then adds the
    posterior noise (none at t == 0).

    Returns:
        (next sample tensor, values) where values is always None here.
    """
    # All entries of t share the same timestep; use the first as scalar.
    t_single = t[0]
    if t_single < 0:
        t = torch.zeros_like(t)

    model_mean, _, model_log_variance = model.p_mean_variance(x=x, hard_conds=hard_conds, context=context, t=t)
    x = model_mean
    # DEBUG Store the current state, timestep, and reconstruction todo: if debug
    if debug:
        if context is not None:
            context_temp = model.context_model(context)
            noise_temp = model.model(x, t, context_temp['condition'])
            model.diffusion_history.append({
                't': to_numpy(t),
                'x': to_numpy(model_mean),
                'noise': to_numpy(-1*noise_temp)
            })

    # Re-derive std/var from the clipped posterior log-variance.
    model_log_variance = extract(model.posterior_log_variance_clipped, t, x.shape)
    model_std = torch.exp(0.5 * model_log_variance)
    model_var = torch.exp(model_log_variance)

    if guide is not None and t_single < t_start_guide:
        # NOTE(review): scale_grad_by_std is accepted as a parameter but
        # hard-coded to False in this call (the stomp variant forwards it) —
        # confirm whether that is intentional.
        x, grad_scaled_lst = guide_gradient_steps(
            x,
            hard_conds=hard_conds,
            guide=guide,
            n_guide_steps=n_guide_steps,
            scale_grad_by_std=False,
            model_var=model_var,
        )
        if debug:
            model.grad_scaled_history.append({
                't': to_numpy(t_single),
                'x': to_numpy(model_mean),
                'grad_scaled': to_numpy(grad_scaled_lst[0])
            })
    # no noise when t == 0
    noise = torch.randn_like(x)
    noise[t == 0] = 0

    # For smoother results, we can decay the noise standard deviation throughout the diffusion
    # this is roughly equivalent to using a temperature in the prior distribution
    if noise_std_extra_schedule_fn is None:
        noise_std = 1.0
    else:
        noise_std = noise_std_extra_schedule_fn(t_single)
    values = None
    return x + model_std * noise * noise_std, values
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
@torch.no_grad()
def ddpm_sample_fn_stomp(
        model, x, hard_conds, context, t,
        guide=None,
        n_guide_steps=1,
        scale_grad_by_std=False,
        t_start_guide=torch.inf,
        noise_std_extra_schedule_fn=None, # 'linear'
        debug=False,
        **kwargs
):
    """
    STOMP-flavored variant of ddpm_sample_fn.

    Identical to ddpm_sample_fn except that scale_grad_by_std is forwarded to
    the guidance steps (the plain version hard-codes it to False); the
    commented block below sketches trajectory selection / noise injection.
    """
    # All entries of t share the same timestep; use the first as scalar.
    t_single = t[0]
    if t_single < 0:
        t = torch.zeros_like(t)

    model_mean, _, model_log_variance = model.p_mean_variance(x=x, hard_conds=hard_conds, context=context, t=t)
    x = model_mean
    # DEBUG Store the current state, timestep, and reconstruction todo: if debug
    if debug:
        if context is not None:
            context_temp = model.context_model(context)
            noise_temp = model.model(x, t, context_temp['condition'])
            model.diffusion_history.append({
                't': to_numpy(t),
                'x': to_numpy(model_mean),
                'noise': to_numpy(-1*noise_temp)
            })

    # Re-derive std/var from the clipped posterior log-variance.
    model_log_variance = extract(model.posterior_log_variance_clipped, t, x.shape)
    model_std = torch.exp(0.5 * model_log_variance)
    model_var = torch.exp(model_log_variance)


    # if guide is not None and t_single in [1, 0]:
    #     k = min(2, x.shape[0])
    #     # x = inject_noise_to_mean(x, std=0.08)
    #     x = select_best_trajectories(x, guide, k)
    #     # x = smooth_trajectory_batch(x, weight=0.99)
    #     print("x.shape:", x.shape)

    if guide is not None and t_single < t_start_guide:
        x, grad_scaled_lst = guide_gradient_steps(
            x,
            hard_conds=hard_conds,
            guide=guide,
            n_guide_steps=n_guide_steps,
            scale_grad_by_std=scale_grad_by_std,
            model_var=model_var,
        )

        if debug:
            model.grad_scaled_history.append({
                't': to_numpy(t_single),
                'x': to_numpy(model_mean),
                'grad_scaled': to_numpy(grad_scaled_lst[0])
            })

    # no noise when t == 0
    noise = torch.randn_like(x)
    noise[t == 0] = 0

    # For smoother results, we can decay the noise standard deviation throughout the diffusion
    # this is roughly equivalent to using a temperature in the prior distribution
    if noise_std_extra_schedule_fn is None:
        noise_std = 1.0
    else:
        noise_std = noise_std_extra_schedule_fn(t_single)
    values = None

    return x + model_std * noise * noise_std, values
|
| 152 |
+
|
| 153 |
+
def select_best_trajectories(x, guide, k=2):
    """
    Keep the k lowest-cost trajectories and resample them to fill the batch.

    Calls guide(x, get_cost_rank=True) for its side effect of populating
    guide.sdf_cost_sorted_indices, takes the top-k indices, and builds a new
    batch by sampling uniformly (with replacement) from those k trajectories.

    Args:
        x: tensor [batch, seq_len, joint_dim].
        guide: callable that ranks trajectories and exposes
            sdf_cost_sorted_indices after being called with get_cost_rank=True.
        k: number of best trajectories to keep (must be >= 1).

    Returns:
        Tensor of the same shape as x, composed only of the k best trajectories.

    Raises:
        ValueError: if k <= 0.
    """
    # Validate before doing any work — the original checked k only after
    # slicing/indexing had already run.
    if k <= 0:
        raise ValueError("No trajectories selected (k=0).")

    guide(x, get_cost_rank=True)  # side effect: fills guide.sdf_cost_sorted_indices
    sorted_indices = guide.sdf_cost_sorted_indices[:k]  # [k]
    x_selected = x[sorted_indices]  # [k, seq_len, joint_dim]
    batch = x.shape[0]

    # Randomly sample from the k best trajectories to rebuild a full batch.
    indices = torch.randint(0, k, (batch,), device=x_selected.device)  # [batch]
    x_new = x_selected[indices]  # [batch, seq_len, joint_dim]
    return x_new
|
| 165 |
+
|
| 166 |
+
def guide_with_selection(x,
                         hard_conds=None,
                         guide=None,
                         n_guide_steps=1, scale_grad_by_std=False,
                         model_var=None,
                         k=2,
                         **kwargs
                         ):
    """
    Resample the batch from its k best trajectories, then apply guidance steps.

    Fix vs. original: the original computed the resampled batch as ``x_new``
    and evaluated the guide on it, but then updated and returned the stale
    ``x`` — the selection result was silently discarded. The selected batch is
    now the one that is updated and returned.

    Args:
        x: tensor [batch, seq_len, joint_dim].
        hard_conds: mapping {timestep -> value} re-imposed after every step.
        guide: callable returning a scaled gradient for a batch of trajectories.
        n_guide_steps: number of gradient steps to apply.
        scale_grad_by_std: if True, multiply each gradient by model_var.
        model_var: posterior variance tensor (used when scale_grad_by_std).
        k: number of best trajectories to keep in the selection.

    Returns:
        (updated batch, list of gradients applied at each step).
    """
    grad_scaled_lst = []
    # Replace the batch with resampled copies of its k best trajectories.
    x = select_best_trajectories(x, guide, k)

    # Apply gradient steps to the selected batch.
    for step in range(n_guide_steps):
        grad_scaled = guide(x, get_cost_rank=True)
        if scale_grad_by_std:
            grad_scaled = model_var * grad_scaled
        x = x + grad_scaled
        x = apply_hard_conditioning(x, hard_conds)
        grad_scaled_lst.append(grad_scaled)

    return x, grad_scaled_lst
|
| 187 |
+
|
| 188 |
+
def guide_gradient_steps(
        x,
        hard_conds=None,
        guide=None,
        n_guide_steps=1, scale_grad_by_std=False,
        model_var=None,
        **kwargs
):
    """Apply n_guide_steps guided gradient updates to x.

    After each update the hard conditions are re-imposed via
    apply_hard_conditioning. Returns the updated batch and the list of
    gradients applied at each step.
    """
    gradient_history = []
    for _ in range(n_guide_steps):
        step_grad = guide(x)
        if scale_grad_by_std:
            # Scale the guide gradient by the model variance.
            step_grad = model_var * step_grad
        x = apply_hard_conditioning(x + step_grad, hard_conds)
        gradient_history.append(step_grad)

    return x, gradient_history
|
| 208 |
+
|
| 209 |
+
def smooth_trajectory_batch(traj: torch.Tensor, weight: float = 0.4) -> torch.Tensor:
    """Blend every interior waypoint with its two neighbors.

    Endpoints are kept fixed; interior point i becomes
    (1 - weight) * traj[i] + (weight / 2) * (traj[i - 1] + traj[i + 1]).
    The input tensor is not modified.
    """
    smoothed = traj.clone()
    prev_pts = traj[:, :-2, :]
    next_pts = traj[:, 2:, :]
    half_w = weight / 2
    smoothed[:, 1:-1, :] = (1 - weight) * traj[:, 1:-1, :] + half_w * (prev_pts + next_pts)
    return smoothed
|
| 217 |
+
|
| 218 |
+
def inject_stomp_noise(x: torch.Tensor, num_samples: int = 2,
                       std: float = 0.1) -> "tuple[torch.Tensor, torch.Tensor]":
    """
    Randomly select a subset of trajectories from x and add Gaussian noise (STOMP-style).
    Args:
        x: torch.Tensor of shape [batch, seq_len, joint_dim] or similar
            The batch of trajectories or states to be perturbed.
        num_samples: int, optional (default=2)
            Number of trajectories to add noise to (clamped to the batch size).
        std: float, optional (default=0.1)
            Standard deviation of the Gaussian noise to be added.
    Returns:
        x_noisy: torch.Tensor of the same shape as x,
            with selected trajectories perturbed.
        selected_indices: torch.Tensor containing the indices of the perturbed trajectories.
    """
    x_noisy = x.clone()
    batch_size, seq_len, joint_dim = x.shape
    device = x.device

    # Clamp so the noise tensor matches the number of selected rows:
    # randperm(batch)[:n] yields at most batch_size indices, so an
    # unclamped num_samples > batch_size previously caused a shape mismatch.
    num_samples = min(num_samples, batch_size)

    # Select random indices
    selected_indices = torch.randperm(batch_size, device=device)[:num_samples]

    # Create time weights: [seq_len, 1], e.g., sin(pi * t)
    # Zero at both endpoints, so start and goal waypoints stay unperturbed.
    t = torch.linspace(0, 1, seq_len, device=device)
    time_weights = torch.sin(t * torch.pi).unsqueeze(1)  # shape: [seq_len, 1]

    # Generate Gaussian noise and apply time-based weights
    noise = torch.randn((num_samples, seq_len, joint_dim), device=device) * std
    noise *= time_weights  # Broadcast [seq_len, 1] to [num_samples, seq_len, joint_dim]

    # Inject into selected trajectories
    x_noisy[selected_indices] += noise

    return x_noisy, selected_indices
|
| 251 |
+
|
| 252 |
+
def inject_noise_to_mean(x: torch.Tensor, std: float = 0.1) -> torch.Tensor:
    """
    Compute mean trajectory and add noise to copies of it.
    Args:
        x: torch.Tensor of shape [batch, seq_len, joint_dim] or similar
            The batch of trajectories or states.
        std: float, optional (default=0.1)
            Standard deviation of the Gaussian noise to be added.
    Returns:
        x_noisy: torch.Tensor of the same shape as x,
            with noisy copies of the mean trajectory.
    """
    batch_size, seq_len, joint_dim = x.shape
    device = x.device

    # Compute mean trajectory across batch dimension
    mean_traj = x.mean(dim=0, keepdim=True)  # [1, seq_len, joint_dim]

    # Expand mean trajectory to match batch size
    mean_traj = mean_traj.expand(batch_size, -1, -1)  # [batch, seq_len, joint_dim]

    # Create time weights: [seq_len, 1], e.g., sin(pi * t)
    # Zero at both endpoints, so start and goal waypoints stay unperturbed.
    t = torch.linspace(0, 1, seq_len, device=device)
    time_weights = torch.sin(t * torch.pi).unsqueeze(1)  # shape: [seq_len, 1]
    # (removed leftover debug print of time_weights)

    # Generate Gaussian noise and apply time-based weights
    noise = torch.randn((batch_size, seq_len, joint_dim), device=device) * std
    noise *= time_weights  # Broadcast [seq_len, 1] to [batch_size, seq_len, joint_dim]

    # Add noise to the mean trajectory copies
    x_noisy = mean_traj + noise

    return x_noisy
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/models/temporal_unet.py
ADDED
|
@@ -0,0 +1,370 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Adapted from https://github.com/joaoamcarvalho/mpd-public
|
| 3 |
+
"""
|
| 4 |
+
import einops
|
| 5 |
+
import numpy as np
|
| 6 |
+
import torch
|
| 7 |
+
import torch.nn as nn
|
| 8 |
+
from abc import ABC
|
| 9 |
+
from einops import rearrange
|
| 10 |
+
from torch.nn import DataParallel
|
| 11 |
+
|
| 12 |
+
from cfdp.diffusion_policy.layers.layers import Downsample1d, Conv1dBlock, Upsample1d, \
|
| 13 |
+
ResidualTemporalBlock, FiLMResidualTemporalBlock, TimeEncoder, MLP, group_norm_n_groups, LinearAttention, PreNorm, Residual, TemporalBlockMLP
|
| 14 |
+
from cfdp.diffusion_policy.layers.layers_attention import SpatialTransformer
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# Preset channel-multiplier schedules for the U-Net, keyed by a depth level:
# each tuple scales the base channel count at successive resolutions.
UNET_DIM_MULTS = {
    0: (1, 2, 4),
    1: (1, 2, 4, 8),
    2: (1, 2, 4, 8, 16)
}
|
| 22 |
+
|
| 23 |
+
class TemporalUnet(nn.Module):
    """1D (temporal) U-Net denoiser over trajectories.

    Operates on tensors of shape [batch, horizon, state_dim], convolving
    along the horizon axis. Always conditioned on the diffusion time step;
    optionally conditioned on an external context vector via one of:
      - 'concatenate': context is tiled along the horizon and concatenated
        to (an embedding of) the state features,
      - 'attention': cross-attention to the context at every resolution,
      - 'default': context is concatenated to the time embedding.
    """

    def __init__(
            self,
            n_support_points=None,      # trajectory length (horizon); halved per down level
            state_dim=None,             # per-step state dimensionality
            unet_input_dim=32,          # base channel count, scaled by dim_mults
            dim_mults=(1, 2, 4, 8),     # channel multipliers per resolution
            time_emb_dim=32,            # diffusion-time embedding size
            self_attention=False,       # add linear self-attention at each level
            conditioning_embed_dim=4,   # size of the external context embedding
            conditioning_type=None,     # None | 'concatenate' | 'attention' | 'default'
            attention_num_heads=2,
            attention_dim_head=32,
            **kwargs
    ):
        super().__init__()

        self.state_dim = state_dim
        input_dim = state_dim

        # Conditioning
        if conditioning_type is None or conditioning_type == 'None':
            conditioning_type = None
        elif conditioning_type == 'concatenate':
            if self.state_dim < conditioning_embed_dim // 4:
                # Embed the state in a latent space HxF if the conditioning embedding is much larger than the state
                state_emb_dim = conditioning_embed_dim // 4
                self.state_encoder = MLP(state_dim, state_emb_dim, hidden_dim=state_emb_dim//2, n_layers=1, act='mish')
            else:
                state_emb_dim = state_dim
                self.state_encoder = nn.Identity()
            # Per-step input becomes [state embedding ; context embedding].
            input_dim = state_emb_dim + conditioning_embed_dim
        elif conditioning_type == 'attention':
            pass
        elif conditioning_type == 'default':
            pass
        else:
            raise NotImplementedError
        self.conditioning_type = conditioning_type

        dims = [input_dim, *map(lambda m: unet_input_dim * m, dim_mults)]
        in_out = list(zip(dims[:-1], dims[1:]))
        print(f'[ models/temporal ] Channel dimensions: {in_out}')

        # Networks
        self.time_mlp = TimeEncoder(32, time_emb_dim)

        # conditioning dimension (time + context)
        cond_dim = time_emb_dim + (conditioning_embed_dim if conditioning_type == 'default' else 0)

        # Unet
        self.downs = nn.ModuleList([])
        self.ups = nn.ModuleList([])
        num_resolutions = len(in_out)

        # Encoder: two FiLM residual blocks (+ optional attentions), then downsample.
        for ind, (dim_in, dim_out) in enumerate(in_out):
            is_last = ind >= (num_resolutions - 1)

            self.downs.append(nn.ModuleList([
                FiLMResidualTemporalBlock(dim_in, dim_out, cond_dim, n_support_points=n_support_points),
                FiLMResidualTemporalBlock(dim_out, dim_out, cond_dim, n_support_points=n_support_points),
                Residual(PreNorm(dim_out, LinearAttention(dim_out))) if self_attention else nn.Identity(),
                SpatialTransformer(dim_out, attention_num_heads, attention_dim_head, depth=1,
                                   context_dim=conditioning_embed_dim) if conditioning_type == 'attention' else None,
                Downsample1d(dim_out) if not is_last else nn.Identity()
            ]))

            if not is_last:
                # Downsampling halves the temporal resolution.
                n_support_points = n_support_points // 2

        mid_dim = dims[-1]
        self.mid_block1 = FiLMResidualTemporalBlock(mid_dim, mid_dim, cond_dim, n_support_points=n_support_points)
        self.mid_attn = Residual(PreNorm(mid_dim, LinearAttention(mid_dim))) if self_attention else nn.Identity()
        self.mid_attention = SpatialTransformer(mid_dim, attention_num_heads, attention_dim_head, depth=1,
                                                context_dim=conditioning_embed_dim) if conditioning_type == 'attention' else nn.Identity()
        # NOTE(review): mid_block2 uses ResidualTemporalBlock while every other
        # block here is FiLMResidualTemporalBlock — confirm this is intentional.
        self.mid_block2 = ResidualTemporalBlock(mid_dim, mid_dim, cond_dim, n_support_points=n_support_points)

        # Decoder: mirrors the encoder; dim_out * 2 accounts for the skip concat.
        for ind, (dim_in, dim_out) in enumerate(reversed(in_out[1:])):
            is_last = ind >= (num_resolutions - 1)

            self.ups.append(nn.ModuleList([
                FiLMResidualTemporalBlock(dim_out * 2, dim_in, cond_dim, n_support_points=n_support_points),
                FiLMResidualTemporalBlock(dim_in, dim_in, cond_dim, n_support_points=n_support_points),
                Residual(PreNorm(dim_in, LinearAttention(dim_in))) if self_attention else nn.Identity(),
                SpatialTransformer(dim_in, attention_num_heads, attention_dim_head, depth=1,
                                   context_dim=conditioning_embed_dim) if conditioning_type == 'attention' else None,
                Upsample1d(dim_in) if not is_last else nn.Identity()
            ]))

            if not is_last:
                # Upsampling doubles the temporal resolution.
                n_support_points = n_support_points * 2

        # Project back from the base channel count to the state dimension.
        self.final_conv = nn.Sequential(
            Conv1dBlock(unet_input_dim, unet_input_dim, kernel_size=5, n_groups=group_norm_n_groups(unet_input_dim)),
            nn.Conv1d(unet_input_dim, state_dim, 1),
        )

    def forward(self, x, time, context):
        """
        x : [ batch x horizon x state_dim ]
        context: [batch x context_dim]

        Returns a tensor of the same shape as x.
        """
        b, h, d = x.shape

        t_emb = self.time_mlp(time)
        c_emb = t_emb

        if self.conditioning_type == 'concatenate':
            x_emb = self.state_encoder(x)
            # Tile the context along the horizon and concatenate per time-step.
            context = einops.repeat(context, 'm n -> m h n', h=h)
            x = torch.cat((x_emb, context), dim=-1)
        elif self.conditioning_type == 'attention':
            # reshape to keep the interface
            context = einops.rearrange(context, 'b d -> b 1 d')
        elif self.conditioning_type == 'default':
            # Ensure context has the same shape as t_emb for concatenation
            c_emb = torch.cat((t_emb, context), dim=-1)


        # swap horizon and channels (state_dim)
        x = einops.rearrange(x, 'b h c -> b c h')  # batch, horizon, channels (state_dim)

        # NOTE: rebinds `h` (previously the horizon) as the skip-connection stack.
        h = []
        for resnet, resnet2, attn_self, attn_conditioning, downsample in self.downs:
            x = resnet(x, c_emb)
            # if self.conditioning_type == 'attention':
            #     x = attention1(x, context=conditioning_emb)
            x = resnet2(x, c_emb)
            x = attn_self(x)
            if self.conditioning_type == 'attention':
                x = attn_conditioning(x, context=context)
            h.append(x)
            x = downsample(x)

        x = self.mid_block1(x, c_emb)
        x = self.mid_attn(x)
        if self.conditioning_type == 'attention':
            x = self.mid_attention(x, context=context)
        x = self.mid_block2(x, c_emb)

        for resnet, resnet2, attn_self, attn_conditioning, upsample in self.ups:
            # Concatenate the matching encoder activation (skip connection).
            x = torch.cat((x, h.pop()), dim=1)
            x = resnet(x, c_emb)
            x = resnet2(x, c_emb)
            x = attn_self(x)
            if self.conditioning_type == 'attention':
                x = attn_conditioning(x, context=context)
            x = upsample(x)

        x = self.final_conv(x)

        x = einops.rearrange(x, 'b c h -> b h c')

        return x
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
class EnvModel(nn.Module):
    """Placeholder environment encoder: passes the 'env' entry through unchanged."""

    def __init__(
            self,
            in_dim=16,
            out_dim=16,
            **kwargs
    ):
        super().__init__()
        self.in_dim = in_dim
        self.out_dim = out_dim
        # Identity for now; swap in a real encoder here later.
        self.net = nn.Identity()

    def forward(self, input_d):
        """Encode the environment stored under input_d['env']."""
        return self.net(input_d['env'])
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
class TaskModel(nn.Module):
    """Placeholder task encoder: passes the 'tasks' entry through unchanged."""

    def __init__(
            self,
            in_dim=16,
            out_dim=32,
            **kwargs
    ):
        super().__init__()
        self.in_dim = in_dim
        self.out_dim = out_dim
        # Identity for now; swap in a real encoder here later.
        self.net = nn.Identity()

    def forward(self, input_d):
        """Encode the task stored under input_d['tasks']."""
        return self.net(input_d['tasks'])
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
class TaskModelNew(nn.Module):
    """Placeholder task encoder taking the task tensor directly (not a dict)."""

    def __init__(
            self,
            in_dim=16,
            out_dim=32,
            **kwargs
    ):
        super().__init__()
        self.in_dim = in_dim
        self.out_dim = out_dim
        # Identity for now; swap in a real encoder here later.
        self.net = nn.Identity()

    def forward(self, task):
        """Encode the task tensor."""
        return self.net(task)
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
class ContextModel(nn.Module):
    """Concatenate environment and task embeddings into a single context vector."""

    def __init__(
            self,
            env_model=None,
            task_model=None,
            out_dim=32,
            **kwargs
    ):
        super().__init__()

        self.env_model = env_model
        self.task_model = task_model

        self.in_dim = self.env_model.out_dim + self.task_model.out_dim

        # An earlier variant projected the concatenation through an MLP of
        # size out_dim; currently the context is passed through unchanged,
        # so out_dim mirrors in_dim.
        self.out_dim = self.in_dim
        self.net = nn.Identity()

    def forward(self, input_d=None):
        """Return the concatenated context embedding, or None when no input is given."""
        if input_d is None:
            return None
        joined = torch.cat((self.env_model(input_d), self.task_model(input_d)), dim=-1)
        return self.net(joined)
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
class PointUnet(nn.Module):
    """MLP-based U-Net for single points (horizon of 1) rather than trajectories.

    Same conditioning interface as TemporalUnet ('concatenate' / 'default'),
    but each level is a single TemporalBlockMLP and x is a flat vector.
    """

    def __init__(
            self,
            n_support_points=None,      # unused here; kept for interface parity with TemporalUnet
            state_dim=None,             # state dimensionality
            dim=32,                     # base width, scaled by dim_mults
            dim_mults=(1, 2, 4),        # width multipliers per level
            time_emb_dim=32,            # diffusion-time embedding size
            conditioning_embed_dim=4,   # size of the external context embedding
            conditioning_type=None,     # None | 'concatenate' | 'default'
            **kwargs
    ):
        super().__init__()

        self.dim_mults = dim_mults

        self.state_dim = state_dim
        input_dim = state_dim

        # Conditioning
        if conditioning_type is None or conditioning_type == 'None':
            conditioning_type = None
        elif conditioning_type == 'concatenate':
            if self.state_dim < conditioning_embed_dim // 4:
                # Embed the state in a latent space HxF if the conditioning embedding is much larger than the state
                state_emb_dim = conditioning_embed_dim // 4
                self.state_encoder = MLP(state_dim, state_emb_dim, hidden_dim=state_emb_dim//2, n_layers=1, act='mish')
            else:
                state_emb_dim = state_dim
                self.state_encoder = nn.Identity()
            # Input becomes [state embedding ; context embedding].
            input_dim = state_emb_dim + conditioning_embed_dim
        elif conditioning_type == 'default':
            pass
        else:
            raise NotImplementedError
        self.conditioning_type = conditioning_type

        dims = [input_dim, *map(lambda m: dim * m, self.dim_mults)]
        in_out = list(zip(dims[:-1], dims[1:]))
        print(f'[ models/temporal ] Channel dimensions: {in_out}')

        # Networks
        self.time_mlp = TimeEncoder(32, time_emb_dim)

        # conditioning dimension (time + context)
        cond_dim = time_emb_dim + (conditioning_embed_dim if conditioning_type == 'default' else 0)

        # Unet
        self.downs = nn.ModuleList([])
        self.ups = nn.ModuleList([])

        # Each level wraps a single block in a one-element ModuleList, which
        # forward() unpacks with `for resnet, in ...`.
        for ind, (dim_in, dim_out) in enumerate(in_out):
            self.downs.append(nn.ModuleList([
                TemporalBlockMLP(dim_in, dim_out, cond_dim)
            ]))

        mid_dim = dims[-1]
        self.mid_block1 = TemporalBlockMLP(mid_dim, mid_dim, cond_dim)

        # Decoder levels; dim_out * 2 accounts for the skip concatenation.
        for ind, (dim_in, dim_out) in enumerate(reversed(in_out[1:])):
            self.ups.append(nn.ModuleList([
                TemporalBlockMLP(dim_out * 2, dim_in, cond_dim)
            ]))

        # Project back from the base width to the state dimension.
        self.final_layer = nn.Sequential(
            MLP(dim, state_dim, hidden_dim=dim, n_layers=0, act='identity')
        )

    def forward(self, x, time, context):
        """
        x : [ batch x horizon x state_dim ]
        context: [batch x context_dim]

        Note: requires horizon == 1; the singleton axis is squeezed on entry
        and restored on exit.
        """
        x = einops.rearrange(x, 'b 1 d -> b d')

        t_emb = self.time_mlp(time)
        c_emb = t_emb
        if self.conditioning_type == 'concatenate':
            x_emb = self.state_encoder(x)
            x = torch.cat((x_emb, context), dim=-1)
        elif self.conditioning_type == 'default':
            # Context rides along with the time embedding.
            c_emb = torch.cat((t_emb, context), dim=-1)

        h = []  # skip-connection stack
        for resnet, in self.downs:
            x = resnet(x, c_emb)
            h.append(x)

        x = self.mid_block1(x, c_emb)

        for resnet, in self.ups:
            # Concatenate the matching encoder activation (skip connection).
            x = torch.cat((x, h.pop()), dim=1)
            x = resnet(x, c_emb)

        x = self.final_layer(x)

        x = einops.rearrange(x, 'b d -> b 1 d')

        return x
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/trainer/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .trainer import train, get_num_epochs
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/trainer/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (270 Bytes). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/trainer/__pycache__/trainer.cpython-310.pyc
ADDED
|
Binary file (8.64 kB). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/trainer/trainer.py
ADDED
|
@@ -0,0 +1,385 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import copy
|
| 2 |
+
from math import ceil
|
| 3 |
+
|
| 4 |
+
import numpy as np
|
| 5 |
+
import os
|
| 6 |
+
import time
|
| 7 |
+
import collections
|
| 8 |
+
import torch
|
| 9 |
+
import wandb
|
| 10 |
+
from collections import defaultdict
|
| 11 |
+
from tqdm.autonotebook import tqdm
|
| 12 |
+
from cfdp.diffusion_policy.utils.utils import TimerCUDA
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
#TODO: select which GPU
|
| 16 |
+
DEFAULT_TENSOR_ARGS = {'device': 'cuda', 'dtype': torch.float32}
|
| 17 |
+
|
| 18 |
+
def get_torch_device(device='cuda'):
    """Resolve a device string to a torch.device, falling back to CPU.

    Any string containing 'cuda' maps to the CUDA device when available;
    everything else (or no CUDA) maps to CPU.
    """
    use_cuda = 'cuda' in device and torch.cuda.is_available()
    return torch.device('cuda' if use_cuda else 'cpu')
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def dict_to_device(ob, device):
    """Recursively move a (possibly nested) mapping of tensors onto device.

    Non-mapping leaves must expose a .to(device) method (e.g. torch tensors).
    """
    if not isinstance(ob, (dict, collections.abc.Mapping)):
        return ob.to(device)
    return {key: dict_to_device(val, device) for key, val in ob.items()}
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def to_numpy(x, dtype=np.float32, clone=False):
    """Convert a torch tensor, ndarray, or array-like to a numpy array of dtype.

    Tensors are detached and moved to CPU; ndarrays are cast (copying only
    when clone=True or the dtype changes); anything else goes through np.array.
    """
    if torch.is_tensor(x):
        return x.detach().cpu().numpy().astype(dtype)
    if isinstance(x, np.ndarray):
        return x.astype(dtype, copy=clone)
    return np.array(x).astype(dtype)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def get_num_epochs(num_train_steps, batch_size, dataset_len):
    """Epochs needed for num_train_steps optimizer steps at the given batch size."""
    total_samples = num_train_steps * batch_size
    return ceil(total_samples / dataset_len)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def save_models_to_disk(models_prefix_l, epoch, total_steps, checkpoints_dir=None):
    """Save each (model, prefix) pair, plus every entry in model.submodules."""
    for net, tag in models_prefix_l:
        if net is None:
            continue
        save_model_to_disk(net, epoch, total_steps, checkpoints_dir, prefix=f'{tag}_')
        # Each submodule gets its own checkpoint, namespaced under the parent prefix.
        for sub_name, sub_module in net.submodules.items():
            save_model_to_disk(sub_module, epoch, total_steps, checkpoints_dir,
                               prefix=f'{tag}_{sub_name}_')
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def save_model_to_disk(model, epoch, total_steps, checkpoints_dir=None, prefix='model_'):
    """Write state-dict and full-model checkpoints, both 'current' and per-iteration."""
    # Frozen models did not change, so re-saving them would be redundant.
    if getattr(model, 'is_frozen', False):
        return

    tag = f'epoch_{epoch:04d}_iter_{total_steps:06d}'
    state = model.state_dict()
    torch.save(state, os.path.join(checkpoints_dir, f'{prefix}current_state_dict.pth'))
    torch.save(state, os.path.join(checkpoints_dir, f'{prefix}{tag}_state_dict.pth'))
    torch.save(model, os.path.join(checkpoints_dir, f'{prefix}current.pth'))
    torch.save(model, os.path.join(checkpoints_dir, f'{prefix}{tag}.pth'))
| 64 |
+
|
| 65 |
+
def save_losses_to_disk(train_losses, val_losses, checkpoints_dir=None):
    """Persist training and validation loss histories as .npy files."""
    for fname, losses in (('train_losses.npy', train_losses),
                          ('val_losses.npy', val_losses)):
        np.save(os.path.join(checkpoints_dir, fname), losses)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class EarlyStopper:
    # https://stackoverflow.com/questions/71998978/early-stopping-in-pytorch
    """Signal early stopping after `patience` validation losses above the best."""

    def __init__(self, patience=10, min_delta=0):
        self.patience = patience  # use -1 to deactivate it
        self.min_delta = min_delta
        self.counter = 0
        self.min_validation_loss = torch.inf

    def early_stop(self, validation_loss):
        """Return True once the loss has exceeded the best by min_delta
        for `patience` consecutive calls; None when deactivated."""
        if self.patience == -1:
            # Deactivated: returns None, which callers treat as falsy.
            return
        if validation_loss < self.min_validation_loss:
            # New best: remember it and reset the failure streak.
            self.min_validation_loss = validation_loss
            self.counter = 0
            return False
        if validation_loss > self.min_validation_loss + self.min_delta:
            self.counter += 1
            if self.counter >= self.patience:
                return True
        return False
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class EMA:
    """
    https://github.com/jannerm/diffuser
    (empirical) exponential moving average parameters
    """

    def __init__(self, beta=0.995):
        super().__init__()
        self.beta = beta

    def update_model_average(self, ema_model, current_model):
        """In-place EMA update of ema_model's parameters toward current_model's."""
        param_pairs = zip(ema_model.parameters(), current_model.parameters())
        for ema_p, cur_p in param_pairs:
            ema_p.data = self.update_average(ema_p.data, cur_p.data)

    def update_average(self, old, new):
        """Blend old toward new by factor beta; pass new through when old is None."""
        if old is None:
            return new
        return self.beta * old + (1 - self.beta) * new
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def do_summary(
        summary_fn,
        train_steps_current,
        model,
        batch_dict,
        loss_info,
        datasubset,
        **kwargs
):
    """Run summary_fn on the model in eval mode (no grad), then restore train mode.

    No-op when summary_fn is None.
    """
    if summary_fn is None:
        return

    with torch.no_grad():
        # Evaluation mode: disables dropout / batch-norm updates for the summary.
        model.eval()

        summary_fn(train_steps_current,
                   model,
                   batch_dict=batch_dict,
                   loss_info=loss_info,
                   datasubset=datasubset,
                   **kwargs)

        # Back to training mode for the caller.
        model.train()
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def train(model=None, train_dataloader=None, epochs=None, lr=None, steps_til_summary=None, model_dir=None, loss_fn=None,
|
| 142 |
+
train_subset=None,
|
| 143 |
+
summary_fn=None, steps_til_checkpoint=None,
|
| 144 |
+
val_dataloader=None, val_subset=None,
|
| 145 |
+
clip_grad=False,
|
| 146 |
+
clip_grad_max_norm=1.0,
|
| 147 |
+
val_loss_fn=None,
|
| 148 |
+
optimizers=None, steps_per_validation=10, max_steps=None,
|
| 149 |
+
use_ema: bool = True,
|
| 150 |
+
ema_decay: float = 0.995, step_start_ema: int = 1000, update_ema_every: int = 10,
|
| 151 |
+
use_amp=False,
|
| 152 |
+
early_stopper_patience=-1,
|
| 153 |
+
debug=False,
|
| 154 |
+
tensor_args=DEFAULT_TENSOR_ARGS,
|
| 155 |
+
**kwargs
|
| 156 |
+
):
|
| 157 |
+
|
| 158 |
+
print(f'\n------- TRAINING STARTED -------\n')
|
| 159 |
+
|
| 160 |
+
ema_model = None
|
| 161 |
+
if use_ema:
|
| 162 |
+
# Exponential moving average model
|
| 163 |
+
ema = EMA(beta=ema_decay)
|
| 164 |
+
ema_model = copy.deepcopy(model)
|
| 165 |
+
|
| 166 |
+
# Model optimizers
|
| 167 |
+
if optimizers is None:
|
| 168 |
+
optimizers = [torch.optim.Adam(lr=lr, params=model.parameters())]
|
| 169 |
+
|
| 170 |
+
# Automatic Mixed Precision
|
| 171 |
+
scaler = torch.amp.GradScaler('cuda', enabled=use_amp)
|
| 172 |
+
|
| 173 |
+
if val_dataloader is not None:
|
| 174 |
+
assert val_loss_fn is not None, "If validation set is passed, have to pass a validation loss_fn!"
|
| 175 |
+
|
| 176 |
+
## Build saving directories
|
| 177 |
+
os.makedirs(model_dir, exist_ok=True)
|
| 178 |
+
|
| 179 |
+
summaries_dir = os.path.join(model_dir, 'summaries')
|
| 180 |
+
os.makedirs(summaries_dir, exist_ok=True)
|
| 181 |
+
|
| 182 |
+
checkpoints_dir = os.path.join(model_dir, 'checkpoints')
|
| 183 |
+
os.makedirs(checkpoints_dir, exist_ok=True)
|
| 184 |
+
|
| 185 |
+
# Early stopping
|
| 186 |
+
early_stopper = EarlyStopper(patience=early_stopper_patience, min_delta=0)
|
| 187 |
+
|
| 188 |
+
stop_training = False
|
| 189 |
+
train_steps_current = 0
|
| 190 |
+
|
| 191 |
+
# save models before training
|
| 192 |
+
save_models_to_disk([(model, 'model'), (ema_model, 'ema_model')], 0, 0, checkpoints_dir)
|
| 193 |
+
|
| 194 |
+
with tqdm(total=len(train_dataloader) * epochs, mininterval=1 if debug else 60) as pbar:
|
| 195 |
+
train_losses_l = []
|
| 196 |
+
validation_losses_l = []
|
| 197 |
+
for epoch in range(epochs):
|
| 198 |
+
model.train() # set model to training mode
|
| 199 |
+
for step, train_batch_dict in enumerate(train_dataloader):
|
| 200 |
+
####################################################################################################
|
| 201 |
+
# TRAINING LOSS
|
| 202 |
+
####################################################################################################
|
| 203 |
+
with TimerCUDA() as t_training_loss:
|
| 204 |
+
train_batch_dict = dict_to_device(train_batch_dict, tensor_args['device'])
|
| 205 |
+
|
| 206 |
+
# Compute losses
|
| 207 |
+
with torch.autocast(device_type='cuda', dtype=torch.float16, enabled=use_amp):
|
| 208 |
+
train_losses, train_losses_info = loss_fn(model, train_batch_dict, train_subset.dataset)
|
| 209 |
+
|
| 210 |
+
train_loss_batch = 0.
|
| 211 |
+
train_losses_log = {}
|
| 212 |
+
for loss_name, loss in train_losses.items():
|
| 213 |
+
single_loss = loss.mean()
|
| 214 |
+
train_loss_batch += single_loss
|
| 215 |
+
train_losses_log[loss_name] = to_numpy(single_loss).item()
|
| 216 |
+
|
| 217 |
+
####################################################################################################
|
| 218 |
+
# SUMMARY
|
| 219 |
+
if train_steps_current % steps_til_summary == 0:
|
| 220 |
+
# TRAINING
|
| 221 |
+
print(f"\n-----------------------------------------")
|
| 222 |
+
print(f"train_steps_current: {train_steps_current}")
|
| 223 |
+
print(f"t_training_loss: {t_training_loss.elapsed:.4f} sec")
|
| 224 |
+
print(f"Total training loss {train_loss_batch:.4f}")
|
| 225 |
+
print(f"Training losses {train_losses}")
|
| 226 |
+
|
| 227 |
+
train_losses_l.append((train_steps_current, train_losses_log))
|
| 228 |
+
|
| 229 |
+
with TimerCUDA() as t_training_summary:
|
| 230 |
+
do_summary(
|
| 231 |
+
summary_fn,
|
| 232 |
+
train_steps_current,
|
| 233 |
+
ema_model if ema_model is not None else model,
|
| 234 |
+
train_batch_dict,
|
| 235 |
+
train_losses_info,
|
| 236 |
+
train_subset,
|
| 237 |
+
prefix='TRAINING ',
|
| 238 |
+
debug=debug,
|
| 239 |
+
tensor_args=tensor_args
|
| 240 |
+
)
|
| 241 |
+
print(f"t_training_summary: {t_training_summary.elapsed:.4f} sec")
|
| 242 |
+
|
| 243 |
+
################################################################################################
|
| 244 |
+
# VALIDATION LOSS and SUMMARY
|
| 245 |
+
validation_losses_log = {}
|
| 246 |
+
if val_dataloader is not None:
|
| 247 |
+
|
| 248 |
+
#Remove time cuda
|
| 249 |
+
print("Running validation...")
|
| 250 |
+
val_losses = defaultdict(list)
|
| 251 |
+
total_val_loss = 0.
|
| 252 |
+
for step_val, batch_dict_val in enumerate(val_dataloader):
|
| 253 |
+
batch_dict_val = dict_to_device(batch_dict_val, tensor_args['device'])
|
| 254 |
+
val_loss, val_loss_info = loss_fn(
|
| 255 |
+
model, batch_dict_val, val_subset.dataset, step=train_steps_current)
|
| 256 |
+
for name, value in val_loss.items():
|
| 257 |
+
single_loss = to_numpy(value)
|
| 258 |
+
val_losses[name].append(single_loss)
|
| 259 |
+
total_val_loss += np.mean(single_loss).item()
|
| 260 |
+
|
| 261 |
+
if step_val == steps_per_validation:
|
| 262 |
+
break
|
| 263 |
+
|
| 264 |
+
validation_losses = {}
|
| 265 |
+
for loss_name, loss in val_losses.items():
|
| 266 |
+
single_loss = np.mean(loss).item()
|
| 267 |
+
validation_losses[f'VALIDATION {loss_name}'] = single_loss
|
| 268 |
+
print(f"Validation losses {validation_losses}")
|
| 269 |
+
|
| 270 |
+
# with TimerCUDA() as t_validation_loss:
|
| 271 |
+
# print("Running validation...")
|
| 272 |
+
# val_losses = defaultdict(list)
|
| 273 |
+
# total_val_loss = 0.
|
| 274 |
+
# for step_val, batch_dict_val in enumerate(val_dataloader):
|
| 275 |
+
# batch_dict_val = dict_to_device(batch_dict_val, tensor_args['device'])
|
| 276 |
+
# val_loss, val_loss_info = loss_fn(
|
| 277 |
+
# model, batch_dict_val, val_subset.dataset, step=train_steps_current)
|
| 278 |
+
# for name, value in val_loss.items():
|
| 279 |
+
# single_loss = to_numpy(value)
|
| 280 |
+
# val_losses[name].append(single_loss)
|
| 281 |
+
# total_val_loss += np.mean(single_loss).item()
|
| 282 |
+
|
| 283 |
+
# if step_val == steps_per_validation:
|
| 284 |
+
# break
|
| 285 |
+
|
| 286 |
+
# validation_losses = {}
|
| 287 |
+
# for loss_name, loss in val_losses.items():
|
| 288 |
+
# single_loss = np.mean(loss).item()
|
| 289 |
+
# validation_losses[f'VALIDATION {loss_name}'] = single_loss
|
| 290 |
+
# print("... finished validation.")
|
| 291 |
+
|
| 292 |
+
# print(f"t_validation_loss: {t_validation_loss.elapsed:.4f} sec")
|
| 293 |
+
# print(f"Validation losses {validation_losses}")
|
| 294 |
+
|
| 295 |
+
validation_losses_log = validation_losses
|
| 296 |
+
validation_losses_l.append((train_steps_current, validation_losses_log))
|
| 297 |
+
|
| 298 |
+
# The validation summary is done only on one batch of the validation data
|
| 299 |
+
with TimerCUDA() as t_validation_summary:
|
| 300 |
+
do_summary(
|
| 301 |
+
summary_fn,
|
| 302 |
+
train_steps_current,
|
| 303 |
+
ema_model if ema_model is not None else model,
|
| 304 |
+
batch_dict_val,
|
| 305 |
+
val_loss_info,
|
| 306 |
+
val_subset,
|
| 307 |
+
prefix='VALIDATION ',
|
| 308 |
+
debug=debug,
|
| 309 |
+
tensor_args=tensor_args
|
| 310 |
+
)
|
| 311 |
+
print(f"t_valididation_summary: {t_validation_summary.elapsed:.4f} sec")
|
| 312 |
+
|
| 313 |
+
wandb.log({**train_losses_log, **validation_losses_log}, step=train_steps_current)
|
| 314 |
+
|
| 315 |
+
####################################################################################################
|
| 316 |
+
# Early stopping
|
| 317 |
+
if early_stopper.early_stop(total_val_loss):
|
| 318 |
+
print(f'Early stopped training at {train_steps_current} steps.')
|
| 319 |
+
stop_training = True
|
| 320 |
+
|
| 321 |
+
####################################################################################################
|
| 322 |
+
# OPTIMIZE TRAIN LOSS BATCH
|
| 323 |
+
with TimerCUDA() as t_training_optimization:
|
| 324 |
+
for optim in optimizers:
|
| 325 |
+
optim.zero_grad()
|
| 326 |
+
|
| 327 |
+
scaler.scale(train_loss_batch).backward()
|
| 328 |
+
|
| 329 |
+
if clip_grad:
|
| 330 |
+
for optim in optimizers:
|
| 331 |
+
scaler.unscale_(optim)
|
| 332 |
+
torch.nn.utils.clip_grad_norm_(
|
| 333 |
+
model.parameters(),
|
| 334 |
+
max_norm=clip_grad_max_norm if isinstance(clip_grad, bool) else clip_grad
|
| 335 |
+
)
|
| 336 |
+
|
| 337 |
+
for optim in optimizers:
|
| 338 |
+
scaler.step(optim)
|
| 339 |
+
|
| 340 |
+
scaler.update()
|
| 341 |
+
|
| 342 |
+
if ema_model is not None:
|
| 343 |
+
if train_steps_current % update_ema_every == 0:
|
| 344 |
+
# update ema
|
| 345 |
+
if train_steps_current < step_start_ema:
|
| 346 |
+
# reset parameters ema
|
| 347 |
+
ema_model.load_state_dict(model.state_dict())
|
| 348 |
+
ema.update_model_average(ema_model, model)
|
| 349 |
+
|
| 350 |
+
if train_steps_current % steps_til_summary == 0:
|
| 351 |
+
print(f"t_training_optimization: {t_training_optimization.elapsed:.4f} sec")
|
| 352 |
+
|
| 353 |
+
####################################################################################################
|
| 354 |
+
# SAVING
|
| 355 |
+
####################################################################################################
|
| 356 |
+
pbar.update(1)
|
| 357 |
+
train_steps_current += 1
|
| 358 |
+
|
| 359 |
+
if (steps_til_checkpoint is not None) and (train_steps_current % steps_til_checkpoint == 0):
|
| 360 |
+
save_models_to_disk([(model, 'model'), (ema_model, 'ema_model')],
|
| 361 |
+
epoch, train_steps_current, checkpoints_dir)
|
| 362 |
+
save_losses_to_disk(train_losses_l, validation_losses_l, checkpoints_dir)
|
| 363 |
+
|
| 364 |
+
if stop_training or (max_steps is not None and train_steps_current == max_steps):
|
| 365 |
+
break
|
| 366 |
+
|
| 367 |
+
if max_steps is not None and train_steps_current == max_steps:
|
| 368 |
+
break
|
| 369 |
+
|
| 370 |
+
# Update ema model at the end of training
|
| 371 |
+
if ema_model is not None:
|
| 372 |
+
# update ema
|
| 373 |
+
if train_steps_current < step_start_ema:
|
| 374 |
+
# reset parameters ema
|
| 375 |
+
ema_model.load_state_dict(model.state_dict())
|
| 376 |
+
ema.update_model_average(ema_model, model)
|
| 377 |
+
|
| 378 |
+
# Save model at end of training
|
| 379 |
+
save_models_to_disk([(model, 'model'), (ema_model, 'ema_model')],
|
| 380 |
+
epoch, train_steps_current, checkpoints_dir)
|
| 381 |
+
save_losses_to_disk(train_losses_l, validation_losses_l, checkpoints_dir)
|
| 382 |
+
|
| 383 |
+
print(f'\n------- TRAINING FINISHED -------')
|
| 384 |
+
|
| 385 |
+
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/utils/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .utils import TimerCUDA
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/utils/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (245 Bytes). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/utils/__pycache__/loaders.cpython-310.pyc
ADDED
|
Binary file (2.44 kB). View file
|
|
|
project/ManiSkill3/src/maniskill2_benchmark/cfdp_experiment/cfdp/diffusion_policy/utils/__pycache__/summary_trajectory.cpython-310.pyc
ADDED
|
Binary file (2.75 kB). View file
|
|
|