diff --git a/maniskill3_environment_assets/.gitignore b/maniskill3_environment_assets/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..6f66c74b0e21f83535e723a3886b6d5838811278 --- /dev/null +++ b/maniskill3_environment_assets/.gitignore @@ -0,0 +1 @@ +*.zip \ No newline at end of file diff --git a/maniskill3_environment_assets/racks/bin.gltf b/maniskill3_environment_assets/racks/bin.gltf new file mode 100644 index 0000000000000000000000000000000000000000..de6b4b629971d6b54adac20dccadb9fbaad7a8da --- /dev/null +++ b/maniskill3_environment_assets/racks/bin.gltf @@ -0,0 +1,207 @@ +{ + "asset":{ + "generator":"Khronos glTF Blender I/O v4.3.47", + "version":"2.0" + }, + "scene":0, + "scenes":[ + { + "name":"Scene", + "nodes":[ + 0, + 1 + ] + } + ], + "nodes":[ + { + "mesh":0, + "name":"Model" + }, + { + "mesh":1, + "name":"Mesh_0", + "rotation":[ + 0.7071068286895752, + 0, + 0, + 0.7071068286895752 + ] + } + ], + "materials":[ + { + "name":"Material.001" + }, + { + "doubleSided":true, + "name":"Material_0.006", + "pbrMetallicRoughness":{ + "baseColorFactor":[ + 0.800000011920929, + 0.800000011920929, + 0.800000011920929, + 1 + ], + "metallicFactor":0, + "roughnessFactor":0.5 + } + } + ], + "meshes":[ + { + "name":"Model.002", + "primitives":[ + { + "attributes":{ + "POSITION":0, + "NORMAL":1, + "TEXCOORD_0":2 + }, + "indices":3, + "material":0 + } + ] + }, + { + "name":"Mesh_0.005", + "primitives":[ + { + "attributes":{ + "POSITION":4, + "NORMAL":5, + "TEXCOORD_0":6 + }, + "indices":7, + "material":1 + } + ] + } + ], + "accessors":[ + { + "bufferView":0, + "componentType":5126, + "count":96746, + "max":[ + 366.4075012207031, + 1.4419732093811035, + 192.4498291015625 + ], + "min":[ + 352.80267333984375, + -9.999999974752427e-07, + 174.88624572753906 + ], + "type":"VEC3" + }, + { + "bufferView":1, + "componentType":5126, + "count":96746, + "type":"VEC3" + }, + { + "bufferView":2, + "componentType":5126, + "count":96746, + "type":"VEC2" + }, 
+ { + "bufferView":3, + "componentType":5125, + "count":398523, + "type":"SCALAR" + }, + { + "bufferView":4, + "componentType":5126, + "count":18533, + "max":[ + 0.12296000123023987, + 0.11509200185537338, + 0.08574099838733673 + ], + "min":[ + -0.12296000123023987, + 9.999999974752427e-07, + -0.08569499850273132 + ], + "type":"VEC3" + }, + { + "bufferView":5, + "componentType":5126, + "count":18533, + "type":"VEC3" + }, + { + "bufferView":6, + "componentType":5126, + "count":18533, + "type":"VEC2" + }, + { + "bufferView":7, + "componentType":5123, + "count":89001, + "type":"SCALAR" + } + ], + "bufferViews":[ + { + "buffer":0, + "byteLength":1160952, + "byteOffset":0, + "target":34962 + }, + { + "buffer":0, + "byteLength":1160952, + "byteOffset":1160952, + "target":34962 + }, + { + "buffer":0, + "byteLength":773968, + "byteOffset":2321904, + "target":34962 + }, + { + "buffer":0, + "byteLength":1594092, + "byteOffset":3095872, + "target":34963 + }, + { + "buffer":0, + "byteLength":222396, + "byteOffset":4689964, + "target":34962 + }, + { + "buffer":0, + "byteLength":222396, + "byteOffset":4912360, + "target":34962 + }, + { + "buffer":0, + "byteLength":148264, + "byteOffset":5134756, + "target":34962 + }, + { + "buffer":0, + "byteLength":178002, + "byteOffset":5283020, + "target":34963 + } + ], + "buffers":[ + { + "byteLength":5461024, + "uri":"bin.bin" + } + ] +} diff --git a/maniskill3_environment_assets/racks/dish_rack.mtl b/maniskill3_environment_assets/racks/dish_rack.mtl new file mode 100644 index 0000000000000000000000000000000000000000..24c0d6a75182bf171d926211e26a16590eb6316d --- /dev/null +++ b/maniskill3_environment_assets/racks/dish_rack.mtl @@ -0,0 +1,2 @@ +# Blender 4.3.2 MTL File: 'None' +# www.blender.org diff --git a/maniskill3_environment_assets/racks/dish_rack.obj b/maniskill3_environment_assets/racks/dish_rack.obj new file mode 100644 index 0000000000000000000000000000000000000000..c3ed890fdc2ed5a27ae8110192957d7be3989450 --- /dev/null +++ 
b/maniskill3_environment_assets/racks/dish_rack.obj @@ -0,0 +1,38402 @@ +# Blender 4.3.2 +# www.blender.org +mtllib dish_rack.mtl +o dish_rack +v 0.173503 0.183685 0.061584 +v 0.173250 -0.183581 0.061913 +v 0.173503 -0.183685 0.061584 +v 0.173250 0.183581 0.061913 +v 0.136060 -0.221127 0.061584 +v 0.135956 -0.220875 0.061913 +v 0.173831 -0.183821 0.061332 +v 0.173831 0.183821 0.061332 +v 0.136060 0.221128 0.061584 +v 0.135956 0.220875 0.061913 +v 0.173092 -0.183515 0.062296 +v 0.173092 0.183515 0.062296 +v -0.136060 -0.221127 0.061584 +v -0.135956 -0.220875 0.061913 +v 0.136196 -0.221456 0.061332 +v 0.135890 -0.220717 0.062296 +v 0.174214 -0.183980 0.061173 +v 0.174214 0.183980 0.061173 +v 0.136196 0.221456 0.061332 +v -0.135956 0.220875 0.061913 +v -0.136060 0.221128 0.061584 +v 0.135890 0.220717 0.062296 +v 0.173038 -0.183492 0.062706 +v 0.173038 -0.177164 0.062706 +v 0.173038 0.183492 0.062706 +v -0.173250 -0.183581 0.061913 +v -0.173502 -0.183685 0.061584 +v -0.136196 -0.221456 0.061332 +v -0.135890 -0.220717 0.062296 +v 0.136355 -0.221839 0.061173 +v 0.135867 -0.220663 0.062706 +v 0.174625 -0.184150 0.061119 +v 0.174625 0.184150 0.061119 +v 0.136355 0.221839 0.061173 +v -0.136196 0.221456 0.061332 +v -0.173250 0.183581 0.061913 +v -0.173502 0.183685 0.061584 +v -0.135890 0.220717 0.062296 +v 0.135867 0.220662 0.062706 +v 0.173092 0.183515 0.063117 +v 0.173092 -0.183515 0.063117 +v -0.173831 -0.183821 0.061332 +v -0.173092 -0.183515 0.062296 +v -0.136355 -0.221839 0.061173 +v -0.135867 -0.220663 0.062706 +v 0.136525 -0.222250 0.061119 +v 0.135890 -0.220717 0.063117 +v 0.175036 -0.184320 0.061173 +v 0.175036 0.184320 0.061173 +v 0.136525 0.222250 0.061119 +v -0.136355 0.221839 0.061173 +v -0.173831 0.183821 0.061332 +v -0.173092 0.183515 0.062296 +v -0.135867 0.220662 0.062706 +v 0.129689 0.220671 0.062706 +v 0.129628 0.220671 0.062706 +v 0.129752 0.220670 0.062706 +v 0.135890 0.220717 0.063117 +v 0.173250 0.183581 0.063500 +v 0.173250 -0.183581 0.063500 +v 
-0.174214 -0.183980 0.061173 +v -0.173038 -0.183492 0.062706 +v -0.136525 -0.222250 0.061119 +v -0.135890 -0.220717 0.063117 +v 0.136695 -0.222661 0.061173 +v 0.135956 -0.220875 0.063500 +v 0.175419 -0.184479 0.061332 +v 0.175419 0.184479 0.061332 +v 0.136695 0.222661 0.061173 +v -0.136525 0.222250 0.061119 +v -0.174214 0.183980 0.061173 +v -0.173038 0.183492 0.062706 +v 0.129689 0.220717 0.063117 +v -0.135890 0.220717 0.063117 +v 0.135956 0.220875 0.063500 +v 0.173503 0.183685 0.063829 +v 0.173503 -0.183685 0.063829 +v -0.173038 -0.177164 0.062706 +v -0.174625 -0.184150 0.061119 +v -0.173092 -0.183515 0.063117 +v -0.136695 -0.222661 0.061173 +v -0.135956 -0.220875 0.063500 +v 0.136854 -0.223044 0.061332 +v 0.136060 -0.221127 0.063829 +v 0.175747 -0.184615 0.061584 +v 0.175747 0.184615 0.061584 +v 0.136854 0.223044 0.061332 +v -0.136695 0.222661 0.061173 +v -0.174625 0.184150 0.061119 +v -0.173092 0.183515 0.063117 +v -0.135956 0.220875 0.063500 +v 0.136060 0.221128 0.063829 +v 0.173831 0.183821 0.064081 +v 0.173831 -0.183821 0.064081 +v -0.175036 -0.184320 0.061173 +v -0.173250 -0.183581 0.063500 +v -0.136854 -0.223044 0.061332 +v -0.136060 -0.221127 0.063829 +v 0.136990 -0.223373 0.061584 +v 0.136196 -0.221456 0.064081 +v 0.176000 0.184719 0.061913 +v 0.176000 -0.184719 0.061913 +v 0.136990 0.223372 0.061584 +v -0.136854 0.223044 0.061332 +v -0.175036 0.184320 0.061173 +v -0.173250 0.183581 0.063500 +v -0.136060 0.221128 0.063829 +v 0.136196 0.221456 0.064081 +v 0.174214 0.183980 0.064240 +v 0.174214 -0.183980 0.064240 +v -0.175419 -0.184479 0.061332 +v -0.173502 -0.183685 0.063829 +v -0.136990 -0.223373 0.061584 +v -0.136196 -0.221456 0.064081 +v 0.137094 -0.223625 0.061913 +v 0.136355 -0.221839 0.064240 +v 0.176158 0.184785 0.062296 +v 0.176158 -0.184785 0.062296 +v 0.137094 0.223625 0.061913 +v -0.136990 0.223372 0.061584 +v -0.175419 0.184479 0.061332 +v -0.173502 0.183685 0.063829 +v -0.136196 0.221456 0.064081 +v 0.136355 0.221839 0.064240 +v 0.174625 
0.184150 0.064294 +v 0.174625 -0.184150 0.064294 +v -0.175748 -0.184615 0.061584 +v -0.173831 -0.183821 0.064081 +v -0.137094 -0.223625 0.061913 +v -0.136355 -0.221839 0.064240 +v 0.137160 -0.223783 0.062296 +v 0.136525 -0.222250 0.064294 +v 0.176212 0.184808 0.062706 +v 0.176212 -0.184808 0.062706 +v 0.137160 0.223783 0.062296 +v -0.137094 0.223625 0.061913 +v -0.175748 0.184615 0.061584 +v -0.173831 0.183821 0.064081 +v -0.136355 0.221839 0.064240 +v 0.136525 0.222250 0.064294 +v 0.175036 0.184320 0.064240 +v 0.175036 -0.184320 0.064240 +v -0.176000 -0.184719 0.061913 +v -0.174214 -0.183980 0.064240 +v -0.137160 -0.223783 0.062296 +v -0.136525 -0.222250 0.064294 +v 0.137183 -0.223837 0.062706 +v 0.136695 -0.222661 0.064240 +v 0.176158 -0.184785 0.063117 +v 0.176158 0.184785 0.063117 +v 0.137183 0.223838 0.062706 +v -0.137160 0.223783 0.062296 +v -0.176000 0.184719 0.061913 +v -0.174214 0.183980 0.064240 +v -0.136525 0.222250 0.064294 +v 0.136695 0.222661 0.064240 +v 0.175419 0.184479 0.064081 +v 0.175419 -0.184479 0.064081 +v -0.176158 -0.184785 0.062296 +v -0.174625 -0.184150 0.064294 +v -0.137183 -0.223837 0.062706 +v -0.136695 -0.222661 0.064240 +v 0.137160 -0.223783 0.063117 +v 0.136854 -0.223044 0.064081 +v 0.176000 -0.184719 0.063500 +v 0.176000 0.184719 0.063500 +v 0.137160 0.223783 0.063117 +v -0.137183 0.223838 0.062706 +v -0.176158 0.184785 0.062296 +v -0.174625 0.184150 0.064294 +v -0.136695 0.222661 0.064240 +v 0.136854 0.223044 0.064081 +v 0.175747 0.184615 0.063829 +v 0.175747 -0.184615 0.063829 +v -0.176212 -0.184808 0.062706 +v -0.175036 -0.184320 0.064240 +v -0.137160 -0.223783 0.063117 +v -0.136854 -0.223044 0.064081 +v 0.137094 -0.223625 0.063500 +v 0.136990 -0.223373 0.063829 +v 0.137094 0.223625 0.063500 +v -0.137160 0.223783 0.063117 +v -0.176212 0.184808 0.062706 +v -0.175036 0.184320 0.064240 +v -0.136854 0.223044 0.064081 +v 0.136990 0.223372 0.063829 +v -0.176158 -0.184785 0.063117 +v -0.175419 -0.184479 0.064081 +v -0.137094 -0.223625 
0.063500 +v -0.136990 -0.223373 0.063829 +v -0.137094 0.223625 0.063500 +v -0.176158 0.184785 0.063117 +v -0.175419 0.184479 0.064081 +v -0.136990 0.223372 0.063829 +v -0.176000 -0.184719 0.063500 +v -0.175748 -0.184615 0.063829 +v -0.176000 0.184719 0.063500 +v -0.175748 0.184615 0.063829 +v 0.132071 -0.176562 -0.064294 +v 0.127308 -0.182912 -0.064294 +v 0.127308 -0.176562 -0.064294 +v 0.132071 -0.182912 -0.064294 +v 0.126379 -0.175324 -0.057944 +v 0.126379 -0.184150 -0.057944 +v 0.132999 -0.175324 -0.057944 +v 0.132999 -0.184150 -0.057944 +v 0.126379 0.184150 -0.057944 +v 0.127308 0.176562 -0.064294 +v 0.126379 0.175323 -0.057944 +v 0.127308 0.182912 -0.064294 +v 0.132999 0.175323 -0.057944 +v 0.132999 0.184150 -0.057944 +v 0.132071 0.182912 -0.064294 +v 0.132071 0.176562 -0.064294 +v -0.133971 0.184150 -0.057943 +v -0.128279 0.182912 -0.064293 +v -0.133042 0.182912 -0.064293 +v -0.127351 0.184150 -0.057943 +v -0.133042 0.176562 -0.064293 +v -0.133971 0.175323 -0.057943 +v -0.127351 0.175323 -0.057943 +v -0.128279 0.176562 -0.064293 +v -0.128279 -0.182912 -0.064293 +v -0.127351 -0.175324 -0.057943 +v -0.127351 -0.184150 -0.057943 +v -0.128279 -0.176562 -0.064293 +v -0.133042 -0.182912 -0.064293 +v -0.133042 -0.176562 -0.064293 +v -0.133971 -0.175324 -0.057943 +v -0.133971 -0.184150 -0.057943 +v -0.131072 0.217594 0.063021 +v -0.130661 0.206682 -0.054789 +v -0.131072 0.206731 -0.054843 +v -0.130661 0.217541 0.063027 +v -0.131072 -0.206715 -0.054843 +v -0.130661 -0.206666 -0.054789 +v -0.131454 0.217752 0.063005 +v -0.131454 0.206875 -0.055001 +v -0.132194 0.219521 0.062825 +v -0.132194 0.218707 0.062908 +v -0.132248 0.219114 0.062866 +v -0.132035 0.219900 0.062786 +v -0.132035 0.218327 0.062946 +v -0.131783 0.220226 0.062753 +v -0.131783 0.218002 0.062979 +v -0.131454 0.220476 0.062727 +v -0.131072 0.220633 0.062711 +v -0.130722 0.220678 0.062706 +v -0.130661 0.220678 0.062706 +v -0.130598 0.220678 0.062706 +v -0.130250 0.217594 0.063021 +v -0.130250 0.220633 
0.062711 +v -0.129867 0.220476 0.062727 +v -0.129867 0.217752 0.063005 +v -0.129538 0.220226 0.062753 +v -0.129538 0.218002 0.062979 +v -0.129286 0.219900 0.062786 +v -0.129286 0.218327 0.062946 +v -0.129127 0.219521 0.062825 +v -0.129127 0.218707 0.062908 +v -0.129073 0.219114 0.062866 +v -0.130250 0.206731 -0.054843 +v -0.131072 -0.217576 0.062991 +v -0.130661 -0.217522 0.062996 +v -0.131454 -0.206859 -0.055001 +v -0.130250 -0.206715 -0.054843 +v -0.131783 0.207104 -0.055252 +v -0.129286 0.207402 -0.055580 +v -0.129538 0.207104 -0.055252 +v -0.129867 0.206875 -0.055001 +v -0.132035 0.207402 -0.055580 +v -0.132194 0.207750 -0.055961 +v -0.132248 0.208123 -0.056370 +v -0.132194 0.208496 -0.056779 +v -0.132035 0.208844 -0.057161 +v -0.131783 0.209143 -0.057488 +v -0.131454 0.209372 -0.057739 +v -0.131072 0.209516 -0.057897 +v -0.130722 0.209558 -0.057943 +v -0.130661 0.209558 -0.057943 +v -0.130250 0.209516 -0.057897 +v -0.130599 0.209558 -0.057943 +v -0.129867 0.209372 -0.057739 +v -0.129538 0.209143 -0.057488 +v -0.129286 0.208844 -0.057161 +v -0.129127 0.208496 -0.056779 +v -0.129073 0.208123 -0.056370 +v -0.129127 0.207750 -0.055961 +v -0.132194 -0.218689 0.062888 +v -0.132194 -0.219504 0.062813 +v -0.132248 -0.219097 0.062851 +v -0.132035 -0.218310 0.062923 +v -0.132035 -0.219884 0.062778 +v -0.131783 -0.217984 0.062953 +v -0.131783 -0.220210 0.062748 +v -0.131454 -0.217733 0.062976 +v -0.131454 -0.220460 0.062725 +v -0.131072 -0.220617 0.062711 +v -0.130722 -0.220663 0.062706 +v -0.130661 -0.220663 0.062706 +v -0.130250 -0.217576 0.062991 +v -0.130598 -0.220663 0.062706 +v -0.130250 -0.220617 0.062711 +v -0.129867 -0.217733 0.062976 +v -0.129867 -0.220460 0.062725 +v -0.129538 -0.217984 0.062953 +v -0.129538 -0.220210 0.062748 +v -0.129286 -0.218310 0.062923 +v -0.129286 -0.219884 0.062778 +v -0.129127 -0.218689 0.062888 +v -0.129127 -0.219504 0.062813 +v -0.129073 -0.219097 0.062851 +v -0.131783 -0.207088 -0.055252 +v -0.129867 -0.206859 -0.055001 +v 
-0.129538 -0.207088 -0.055252 +v -0.129286 -0.207387 -0.055580 +v -0.132035 -0.207387 -0.055580 +v -0.132194 -0.207735 -0.055961 +v -0.132248 -0.208108 -0.056370 +v -0.132194 -0.208481 -0.056779 +v -0.132035 -0.208829 -0.057161 +v -0.131783 -0.209127 -0.057488 +v -0.131454 -0.209356 -0.057739 +v -0.131072 -0.209500 -0.057897 +v -0.130722 -0.209542 -0.057943 +v -0.130661 -0.209542 -0.057943 +v -0.130599 -0.209542 -0.057943 +v -0.130250 -0.209500 -0.057897 +v -0.129867 -0.209356 -0.057739 +v -0.129538 -0.209127 -0.057488 +v -0.129286 -0.208829 -0.057161 +v -0.129127 -0.208481 -0.056779 +v -0.129073 -0.208108 -0.056370 +v -0.129127 -0.207735 -0.055961 +v 0.129278 0.217588 0.063020 +v 0.129689 0.206675 -0.054790 +v 0.129278 0.206724 -0.054844 +v 0.129689 0.217534 0.063026 +v 0.129278 -0.206722 -0.054844 +v 0.129689 -0.206673 -0.054790 +v 0.128896 0.217745 0.063004 +v 0.128896 0.206868 -0.055002 +v 0.128156 0.219514 0.062824 +v 0.128156 0.218700 0.062907 +v 0.128102 0.219107 0.062865 +v 0.128315 0.219893 0.062785 +v 0.128315 0.218321 0.062946 +v 0.128567 0.220219 0.062752 +v 0.128567 0.217995 0.062979 +v 0.128896 0.220469 0.062726 +v 0.129278 0.220626 0.062710 +v 0.129628 0.220672 0.062706 +v 0.129689 0.220672 0.062706 +v 0.129752 0.220672 0.062706 +v 0.130100 0.217588 0.063020 +v 0.130100 0.220626 0.062710 +v 0.130483 0.220469 0.062726 +v 0.130483 0.217745 0.063004 +v 0.130812 0.220219 0.062752 +v 0.130812 0.217995 0.062979 +v 0.131064 0.219893 0.062785 +v 0.131064 0.218321 0.062946 +v 0.131223 0.219514 0.062824 +v 0.131223 0.218700 0.062907 +v 0.131277 0.219107 0.062865 +v 0.130100 0.206724 -0.054844 +v 0.129278 -0.217583 0.062990 +v 0.129689 -0.217529 0.062995 +v 0.128896 -0.206866 -0.055002 +v 0.130100 -0.206722 -0.054844 +v 0.128567 0.207097 -0.055253 +v 0.131064 0.207396 -0.055580 +v 0.130812 0.207097 -0.055253 +v 0.130483 0.206868 -0.055002 +v 0.128315 0.207396 -0.055580 +v 0.128156 0.207743 -0.055962 +v 0.128102 0.208117 -0.056371 +v 0.128156 0.208490 -0.056780 
+v 0.128315 0.208837 -0.057161 +v 0.128567 0.209136 -0.057489 +v 0.128896 0.209365 -0.057740 +v 0.129278 0.209509 -0.057898 +v 0.129628 0.209551 -0.057944 +v 0.129689 0.209551 -0.057944 +v 0.130100 0.209509 -0.057898 +v 0.129751 0.209551 -0.057944 +v 0.130483 0.209365 -0.057740 +v 0.130812 0.209136 -0.057489 +v 0.131064 0.208837 -0.057161 +v 0.131223 0.208490 -0.056780 +v 0.131277 0.208117 -0.056371 +v 0.131223 0.207743 -0.055962 +v 0.128156 -0.218696 0.062888 +v 0.128156 -0.219511 0.062812 +v 0.128102 -0.219103 0.062850 +v 0.128315 -0.218316 0.062923 +v 0.128315 -0.219890 0.062777 +v 0.128567 -0.217990 0.062953 +v 0.128567 -0.220216 0.062747 +v 0.128896 -0.217740 0.062976 +v 0.128896 -0.220467 0.062724 +v 0.129278 -0.220624 0.062710 +v 0.129628 -0.220669 0.062706 +v 0.129689 -0.220669 0.062706 +v 0.130100 -0.217583 0.062990 +v 0.129752 -0.220669 0.062706 +v 0.130100 -0.220624 0.062710 +v 0.130483 -0.217740 0.062976 +v 0.130483 -0.220467 0.062724 +v 0.130812 -0.217990 0.062953 +v 0.130812 -0.220216 0.062747 +v 0.131064 -0.218316 0.062923 +v 0.131064 -0.219890 0.062777 +v 0.131223 -0.218696 0.062888 +v 0.131223 -0.219511 0.062812 +v 0.131277 -0.219103 0.062850 +v 0.128567 -0.207095 -0.055253 +v 0.130483 -0.206866 -0.055002 +v 0.130812 -0.207095 -0.055253 +v 0.131064 -0.207394 -0.055580 +v 0.128315 -0.207394 -0.055580 +v 0.128156 -0.207741 -0.055962 +v 0.128102 -0.208114 -0.056371 +v 0.128156 -0.208488 -0.056780 +v 0.128315 -0.208835 -0.057161 +v 0.128567 -0.209134 -0.057489 +v 0.128896 -0.209363 -0.057740 +v 0.129278 -0.209507 -0.057898 +v 0.129628 -0.209549 -0.057944 +v 0.129689 -0.209549 -0.057944 +v 0.129751 -0.209549 -0.057944 +v 0.130100 -0.209507 -0.057898 +v 0.130483 -0.209363 -0.057740 +v 0.130812 -0.209134 -0.057489 +v 0.131064 -0.208835 -0.057161 +v 0.131223 -0.208488 -0.056780 +v 0.131277 -0.208114 -0.056371 +v 0.131223 -0.207741 -0.055962 +v -0.092972 0.217587 0.063020 +v -0.092561 0.206674 -0.054790 +v -0.092972 0.206723 -0.054844 +v -0.092561 0.217533 
0.063026 +v -0.092972 -0.206723 -0.054844 +v -0.092561 -0.206674 -0.054790 +v -0.093354 0.217744 0.063004 +v -0.093354 0.206867 -0.055002 +v -0.094094 0.219513 0.062824 +v -0.094094 0.218699 0.062907 +v -0.094148 0.219106 0.062865 +v -0.093935 0.219892 0.062785 +v -0.093935 0.218319 0.062946 +v -0.093683 0.220218 0.062752 +v -0.093683 0.217994 0.062979 +v -0.093354 0.220468 0.062726 +v -0.092972 0.220625 0.062710 +v -0.092622 0.220670 0.062706 +v -0.092561 0.220670 0.062706 +v -0.092498 0.220670 0.062706 +v -0.092150 0.217587 0.063020 +v -0.092150 0.220625 0.062710 +v -0.091767 0.220468 0.062726 +v -0.091767 0.217744 0.063004 +v -0.091438 0.220218 0.062752 +v -0.091438 0.217994 0.062979 +v -0.091186 0.219892 0.062785 +v -0.091186 0.218319 0.062946 +v -0.091027 0.219513 0.062824 +v -0.091027 0.218699 0.062907 +v -0.090973 0.219106 0.062865 +v -0.092150 0.206723 -0.054844 +v -0.092972 -0.217584 0.062990 +v -0.092561 -0.217530 0.062995 +v -0.093354 -0.206867 -0.055002 +v -0.092150 -0.206723 -0.054844 +v -0.093683 0.207096 -0.055253 +v -0.091186 0.207395 -0.055580 +v -0.091438 0.207096 -0.055253 +v -0.091767 0.206867 -0.055002 +v -0.093935 0.207395 -0.055580 +v -0.094094 0.207742 -0.055962 +v -0.094148 0.208116 -0.056371 +v -0.094094 0.208489 -0.056780 +v -0.093935 0.208836 -0.057161 +v -0.093683 0.209135 -0.057489 +v -0.093354 0.209364 -0.057740 +v -0.092972 0.209508 -0.057898 +v -0.092622 0.209550 -0.057944 +v -0.092561 0.209550 -0.057944 +v -0.092150 0.209508 -0.057898 +v -0.092499 0.209550 -0.057944 +v -0.091767 0.209364 -0.057740 +v -0.091438 0.209135 -0.057489 +v -0.091186 0.208836 -0.057161 +v -0.091027 0.208489 -0.056780 +v -0.090973 0.208116 -0.056371 +v -0.091027 0.207742 -0.055962 +v -0.094094 -0.218697 0.062888 +v -0.094094 -0.219512 0.062812 +v -0.094148 -0.219104 0.062850 +v -0.093935 -0.218317 0.062923 +v -0.093935 -0.219892 0.062777 +v -0.093683 -0.217991 0.062953 +v -0.093683 -0.220218 0.062747 +v -0.093354 -0.217741 0.062976 +v -0.093354 -0.220468 
0.062724 +v -0.092972 -0.220625 0.062710 +v -0.092622 -0.220671 0.062706 +v -0.092561 -0.220671 0.062706 +v -0.092150 -0.217584 0.062990 +v -0.092498 -0.220670 0.062706 +v -0.092150 -0.220625 0.062710 +v -0.091767 -0.217741 0.062976 +v -0.091767 -0.220468 0.062724 +v -0.091438 -0.217991 0.062953 +v -0.091438 -0.220218 0.062747 +v -0.091186 -0.218317 0.062923 +v -0.091186 -0.219892 0.062777 +v -0.091027 -0.218697 0.062888 +v -0.091027 -0.219512 0.062812 +v -0.090973 -0.219104 0.062850 +v -0.093683 -0.207096 -0.055253 +v -0.091767 -0.206867 -0.055002 +v -0.091438 -0.207096 -0.055253 +v -0.091186 -0.207395 -0.055580 +v -0.093935 -0.207395 -0.055580 +v -0.094094 -0.207742 -0.055962 +v -0.094148 -0.208116 -0.056371 +v -0.094094 -0.208489 -0.056780 +v -0.093935 -0.208836 -0.057161 +v -0.093683 -0.209135 -0.057489 +v -0.093354 -0.209364 -0.057740 +v -0.092972 -0.209508 -0.057898 +v -0.092622 -0.209550 -0.057944 +v -0.092561 -0.209550 -0.057944 +v -0.092499 -0.209550 -0.057944 +v -0.092150 -0.209508 -0.057898 +v -0.091767 -0.209364 -0.057740 +v -0.091438 -0.209135 -0.057489 +v -0.091186 -0.208836 -0.057161 +v -0.091027 -0.208489 -0.056780 +v -0.090973 -0.208116 -0.056371 +v -0.091027 -0.207742 -0.055962 +v 0.065717 0.217588 0.063020 +v 0.066128 0.206675 -0.054790 +v 0.065717 0.206724 -0.054844 +v 0.066128 0.217534 0.063026 +v 0.065717 -0.206722 -0.054844 +v 0.066128 -0.206673 -0.054790 +v 0.065334 0.217745 0.063004 +v 0.065334 0.206868 -0.055002 +v 0.064594 0.219514 0.062824 +v 0.064594 0.218700 0.062907 +v 0.064540 0.219107 0.062865 +v 0.064753 0.219893 0.062785 +v 0.064753 0.218321 0.062946 +v 0.065005 0.220219 0.062752 +v 0.065005 0.217995 0.062979 +v 0.065334 0.220469 0.062726 +v 0.065717 0.220626 0.062710 +v 0.066066 0.220672 0.062706 +v 0.066128 0.220672 0.062706 +v 0.066190 0.220672 0.062706 +v 0.066538 0.217588 0.063020 +v 0.066538 0.220626 0.062710 +v 0.066921 0.220469 0.062726 +v 0.066921 0.217745 0.063004 +v 0.067250 0.220219 0.062752 +v 0.067250 0.217995 
0.062979 +v 0.067502 0.219893 0.062785 +v 0.067502 0.218321 0.062946 +v 0.067661 0.219514 0.062824 +v 0.067661 0.218700 0.062907 +v 0.067715 0.219107 0.062865 +v 0.066538 0.206724 -0.054844 +v 0.065717 -0.217583 0.062990 +v 0.066128 -0.217529 0.062995 +v 0.065334 -0.206866 -0.055002 +v 0.066538 -0.206722 -0.054844 +v 0.065005 0.207097 -0.055253 +v 0.067502 0.207396 -0.055580 +v 0.067250 0.207097 -0.055253 +v 0.066921 0.206868 -0.055002 +v 0.064753 0.207396 -0.055580 +v 0.064594 0.207743 -0.055962 +v 0.064540 0.208117 -0.056371 +v 0.064594 0.208490 -0.056780 +v 0.064753 0.208837 -0.057161 +v 0.065005 0.209136 -0.057489 +v 0.065334 0.209365 -0.057740 +v 0.065717 0.209509 -0.057898 +v 0.066066 0.209551 -0.057944 +v 0.066128 0.209551 -0.057944 +v 0.066538 0.209509 -0.057898 +v 0.066189 0.209551 -0.057944 +v 0.066921 0.209365 -0.057740 +v 0.067250 0.209136 -0.057489 +v 0.067502 0.208837 -0.057161 +v 0.067661 0.208490 -0.056780 +v 0.067715 0.208117 -0.056371 +v 0.067661 0.207743 -0.055962 +v 0.064594 -0.218696 0.062888 +v 0.064594 -0.219511 0.062812 +v 0.064540 -0.219103 0.062850 +v 0.064753 -0.218316 0.062923 +v 0.064753 -0.219890 0.062777 +v 0.065005 -0.217990 0.062953 +v 0.065005 -0.220216 0.062747 +v 0.065334 -0.217740 0.062976 +v 0.065334 -0.220467 0.062724 +v 0.065717 -0.220624 0.062710 +v 0.066066 -0.220669 0.062706 +v 0.066128 -0.220669 0.062706 +v 0.066538 -0.217583 0.062990 +v 0.066190 -0.220669 0.062706 +v 0.066538 -0.220624 0.062710 +v 0.066921 -0.217740 0.062976 +v 0.066921 -0.220467 0.062724 +v 0.067250 -0.217990 0.062953 +v 0.067250 -0.220216 0.062747 +v 0.067502 -0.218316 0.062923 +v 0.067502 -0.219890 0.062777 +v 0.067661 -0.218696 0.062888 +v 0.067661 -0.219511 0.062812 +v 0.067715 -0.219103 0.062850 +v 0.065005 -0.207095 -0.055253 +v 0.066921 -0.206866 -0.055002 +v 0.067250 -0.207095 -0.055253 +v 0.067502 -0.207394 -0.055580 +v 0.064753 -0.207394 -0.055580 +v 0.064594 -0.207741 -0.055962 +v 0.064540 -0.208114 -0.056371 +v 0.064594 -0.208488 -0.056780 
+v 0.064753 -0.208835 -0.057161 +v 0.065005 -0.209134 -0.057489 +v 0.065334 -0.209363 -0.057740 +v 0.065717 -0.209507 -0.057898 +v 0.066066 -0.209549 -0.057944 +v 0.066128 -0.209549 -0.057944 +v 0.066189 -0.209549 -0.057944 +v 0.066538 -0.209507 -0.057898 +v 0.066921 -0.209363 -0.057740 +v 0.067250 -0.209134 -0.057489 +v 0.067502 -0.208835 -0.057161 +v 0.067661 -0.208488 -0.056780 +v 0.067715 -0.208114 -0.056371 +v 0.067661 -0.207741 -0.055962 +v 0.002278 0.217587 0.063020 +v 0.002689 0.206674 -0.054790 +v 0.002278 0.206723 -0.054844 +v 0.002689 0.217533 0.063026 +v 0.002278 -0.206723 -0.054844 +v 0.002689 -0.206674 -0.054790 +v 0.001896 0.217744 0.063004 +v 0.001896 0.206867 -0.055002 +v 0.001156 0.219513 0.062824 +v 0.001156 0.218699 0.062907 +v 0.001102 0.219106 0.062865 +v 0.001315 0.219892 0.062785 +v 0.001315 0.218319 0.062946 +v 0.001567 0.220218 0.062752 +v 0.001567 0.217994 0.062979 +v 0.001896 0.220468 0.062726 +v 0.002278 0.220625 0.062710 +v 0.002628 0.220670 0.062706 +v 0.002689 0.220670 0.062706 +v 0.002752 0.220670 0.062706 +v 0.003100 0.217587 0.063020 +v 0.003100 0.220625 0.062710 +v 0.003483 0.220468 0.062726 +v 0.003483 0.217744 0.063004 +v 0.003812 0.220218 0.062752 +v 0.003812 0.217994 0.062979 +v 0.004064 0.219892 0.062785 +v 0.004064 0.218319 0.062946 +v 0.004223 0.219513 0.062824 +v 0.004223 0.218699 0.062907 +v 0.004277 0.219106 0.062865 +v 0.003100 0.206723 -0.054844 +v 0.002278 -0.217584 0.062990 +v 0.002689 -0.217530 0.062995 +v 0.001896 -0.206867 -0.055002 +v 0.003100 -0.206723 -0.054844 +v 0.001567 0.207096 -0.055253 +v 0.004064 0.207395 -0.055580 +v 0.003812 0.207096 -0.055253 +v 0.003483 0.206867 -0.055002 +v 0.001315 0.207395 -0.055580 +v 0.001156 0.207742 -0.055962 +v 0.001102 0.208116 -0.056371 +v 0.001156 0.208489 -0.056780 +v 0.001315 0.208836 -0.057161 +v 0.001567 0.209135 -0.057489 +v 0.001896 0.209364 -0.057740 +v 0.002278 0.209508 -0.057898 +v 0.002628 0.209550 -0.057944 +v 0.002689 0.209550 -0.057944 +v 0.003100 0.209508 
-0.057898 +v 0.002751 0.209550 -0.057944 +v 0.003483 0.209364 -0.057740 +v 0.003812 0.209135 -0.057489 +v 0.004064 0.208836 -0.057161 +v 0.004223 0.208489 -0.056780 +v 0.004277 0.208116 -0.056371 +v 0.004223 0.207742 -0.055962 +v 0.001156 -0.218697 0.062888 +v 0.001156 -0.219512 0.062812 +v 0.001102 -0.219104 0.062850 +v 0.001315 -0.218317 0.062923 +v 0.001315 -0.219892 0.062777 +v 0.001567 -0.217991 0.062953 +v 0.001567 -0.220218 0.062747 +v 0.001896 -0.217741 0.062976 +v 0.001896 -0.220468 0.062724 +v 0.002278 -0.220625 0.062710 +v 0.002628 -0.220671 0.062706 +v 0.002689 -0.220671 0.062706 +v 0.003100 -0.217584 0.062990 +v 0.002752 -0.220670 0.062706 +v 0.003100 -0.220625 0.062710 +v 0.003483 -0.217741 0.062976 +v 0.003483 -0.220468 0.062724 +v 0.003812 -0.217991 0.062953 +v 0.003812 -0.220218 0.062747 +v 0.004064 -0.218317 0.062923 +v 0.004064 -0.219892 0.062777 +v 0.004223 -0.218697 0.062888 +v 0.004223 -0.219512 0.062812 +v 0.004277 -0.219104 0.062850 +v 0.001567 -0.207096 -0.055253 +v 0.003483 -0.206867 -0.055002 +v 0.003812 -0.207096 -0.055253 +v 0.004064 -0.207395 -0.055580 +v 0.001315 -0.207395 -0.055580 +v 0.001156 -0.207742 -0.055962 +v 0.001102 -0.208116 -0.056371 +v 0.001156 -0.208489 -0.056780 +v 0.001315 -0.208836 -0.057161 +v 0.001567 -0.209135 -0.057489 +v 0.001896 -0.209364 -0.057740 +v 0.002278 -0.209508 -0.057898 +v 0.002628 -0.209550 -0.057944 +v 0.002689 -0.209550 -0.057944 +v 0.002751 -0.209550 -0.057944 +v 0.003100 -0.209508 -0.057898 +v 0.003483 -0.209364 -0.057740 +v 0.003812 -0.209135 -0.057489 +v 0.004064 -0.208836 -0.057161 +v 0.004223 -0.208489 -0.056780 +v 0.004277 -0.208116 -0.056371 +v 0.004223 -0.207742 -0.055962 +v -0.092561 -0.176370 -0.054574 +v -0.120941 -0.176042 -0.054319 +v -0.120941 -0.176370 -0.054570 +v -0.092561 -0.176042 -0.054323 +v -0.161794 -0.176042 -0.054319 +v -0.162023 -0.176370 -0.054570 +v -0.092561 -0.176753 -0.054732 +v -0.120941 -0.176753 -0.054728 +v -0.086220 -0.176370 -0.054574 +v -0.086342 -0.176042 
-0.054323 +v -0.092561 -0.175789 -0.053996 +v -0.120941 -0.175789 -0.053992 +v -0.172827 -0.176370 0.062726 +v -0.172576 -0.176042 0.062749 +v -0.162167 -0.176753 -0.054728 +v -0.161495 -0.175789 -0.053992 +v -0.092561 -0.177164 -0.054786 +v -0.120941 -0.177164 -0.054782 +v -0.086176 -0.176753 -0.054732 +v -0.070467 -0.188742 -0.003524 +v -0.070345 -0.189070 -0.003775 +v -0.086532 -0.175789 -0.053996 +v -0.092561 -0.175631 -0.053614 +v -0.120941 -0.175631 -0.053610 +v -0.172984 -0.176753 0.062711 +v -0.172984 -0.177575 0.062711 +v -0.172827 -0.177958 0.062726 +v -0.172576 -0.178287 0.062749 +v -0.172250 -0.175789 0.062779 +v -0.172250 -0.178539 0.062779 +v -0.171871 -0.175631 0.062814 +v -0.171871 -0.178698 0.062814 +v -0.171463 -0.175577 0.062851 +v -0.171463 -0.178752 0.062851 +v -0.171056 -0.175631 0.062889 +v -0.171056 -0.178698 0.062889 +v -0.170676 -0.175789 0.062924 +v -0.170676 -0.178539 0.062924 +v -0.170350 -0.176042 0.062954 +v -0.170350 -0.178287 0.062954 +v -0.170100 -0.176370 0.062977 +v -0.170100 -0.177958 0.062977 +v -0.169943 -0.176753 0.062991 +v -0.169943 -0.177575 0.062991 +v -0.169889 -0.177164 0.062996 +v -0.162216 -0.177164 -0.054782 +v -0.161147 -0.175631 -0.053610 +v -0.120941 -0.177575 -0.054728 +v -0.092561 -0.177575 -0.054732 +v -0.086211 -0.177164 -0.054786 +v -0.070301 -0.189453 -0.003933 +v -0.019404 -0.188742 -0.003524 +v -0.019526 -0.189070 -0.003775 +v -0.070657 -0.188489 -0.003197 +v -0.086778 -0.175631 -0.053614 +v -0.087063 -0.175577 -0.053205 +v -0.092561 -0.175577 -0.053205 +v -0.160774 -0.175577 -0.053201 +v -0.120941 -0.175577 -0.053201 +v -0.162167 -0.177575 -0.054728 +v -0.162023 -0.177958 -0.054570 +v -0.160401 -0.175631 -0.052792 +v -0.160053 -0.175789 -0.052411 +v -0.159755 -0.176042 -0.052083 +v -0.159526 -0.176370 -0.051832 +v -0.159382 -0.176753 -0.051674 +v -0.159332 -0.177164 -0.051620 +v -0.159382 -0.177575 -0.051674 +v -0.159526 -0.177958 -0.051832 +v -0.159755 -0.178287 -0.052083 +v -0.160053 -0.178539 -0.052411 
+v -0.160401 -0.178698 -0.052792 +v -0.160774 -0.178752 -0.053201 +v -0.161147 -0.178698 -0.053610 +v -0.161495 -0.178539 -0.053992 +v -0.161794 -0.178287 -0.054319 +v -0.120941 -0.177958 -0.054570 +v -0.092561 -0.177958 -0.054574 +v -0.086323 -0.177575 -0.054732 +v -0.070336 -0.189864 -0.003987 +v -0.019571 -0.189453 -0.003933 +v -0.003651 -0.176370 -0.054576 +v -0.003529 -0.176042 -0.054325 +v -0.019214 -0.188489 -0.003197 +v -0.070903 -0.188331 -0.002815 +v -0.071188 -0.188277 -0.002406 +v -0.087367 -0.175631 -0.052796 +v -0.092561 -0.175631 -0.052796 +v -0.120941 -0.175631 -0.052792 +v -0.120941 -0.175789 -0.052411 +v -0.120941 -0.176042 -0.052083 +v -0.120941 -0.176370 -0.051832 +v -0.120941 -0.176753 -0.051674 +v -0.120941 -0.177164 -0.051620 +v -0.120941 -0.177575 -0.051674 +v -0.120941 -0.177958 -0.051832 +v -0.120941 -0.178287 -0.052083 +v -0.120941 -0.178539 -0.052411 +v -0.120941 -0.178698 -0.052792 +v -0.120941 -0.178752 -0.053201 +v -0.092561 -0.178698 -0.052796 +v -0.087919 -0.178698 -0.052796 +v -0.092561 -0.178752 -0.053205 +v -0.087634 -0.178752 -0.053205 +v -0.120941 -0.178698 -0.053610 +v -0.092561 -0.178698 -0.053614 +v -0.087330 -0.178698 -0.053614 +v -0.120941 -0.178539 -0.053992 +v -0.120941 -0.178287 -0.054319 +v -0.092561 -0.178287 -0.054323 +v -0.086506 -0.177958 -0.054574 +v -0.070448 -0.190275 -0.003933 +v -0.019536 -0.189864 -0.003987 +v -0.003696 -0.176753 -0.054734 +v 0.002689 -0.176370 -0.054576 +v 0.002689 -0.176042 -0.054325 +v -0.003339 -0.175789 -0.053998 +v -0.018968 -0.188331 -0.002815 +v -0.018684 -0.188277 -0.002406 +v -0.071492 -0.188331 -0.001997 +v -0.087670 -0.175789 -0.052415 +v -0.092560 -0.175789 -0.052415 +v -0.092560 -0.176042 -0.052087 +v -0.092560 -0.176370 -0.051836 +v -0.092560 -0.176753 -0.051678 +v -0.092560 -0.177164 -0.051624 +v -0.092560 -0.177575 -0.051678 +v -0.092560 -0.177958 -0.051836 +v -0.092560 -0.178287 -0.052087 +v -0.092560 -0.178539 -0.052415 +v -0.088164 -0.178539 -0.052415 +v -0.071759 
-0.191452 -0.002406 +v -0.072044 -0.191398 -0.001997 +v -0.071455 -0.191398 -0.002815 +v -0.092561 -0.178539 -0.053996 +v -0.087027 -0.178539 -0.053996 +v -0.086746 -0.178287 -0.054323 +v -0.070631 -0.190658 -0.003775 +v -0.019423 -0.190275 -0.003933 +v -0.003661 -0.177164 -0.054788 +v 0.002689 -0.176753 -0.054734 +v 0.161794 -0.176042 -0.054319 +v 0.162023 -0.176370 -0.054570 +v 0.002689 -0.175789 -0.053998 +v -0.003093 -0.175631 -0.053616 +v -0.002809 -0.175577 -0.053207 +v -0.018380 -0.188331 -0.001997 +v -0.071795 -0.188489 -0.001616 +v -0.087951 -0.176042 -0.052088 +v -0.088191 -0.176370 -0.051836 +v -0.088373 -0.176753 -0.051678 +v -0.088486 -0.177164 -0.051625 +v -0.088521 -0.177575 -0.051678 +v -0.088476 -0.177958 -0.051836 +v -0.088355 -0.178287 -0.052088 +v -0.072289 -0.191239 -0.001616 +v -0.018112 -0.191452 -0.002406 +v -0.017828 -0.191398 -0.001997 +v -0.018417 -0.191398 -0.002815 +v -0.071152 -0.191239 -0.003197 +v -0.070871 -0.190987 -0.003524 +v -0.019240 -0.190658 -0.003775 +v -0.003548 -0.177575 -0.054734 +v 0.002689 -0.177164 -0.054788 +v 0.162167 -0.176753 -0.054728 +v 0.172576 -0.176042 0.062749 +v 0.172827 -0.176370 0.062726 +v 0.161495 -0.175789 -0.053992 +v 0.002689 -0.175631 -0.053616 +v 0.160774 -0.175577 -0.053201 +v 0.002689 -0.175577 -0.053207 +v 0.161148 -0.175631 -0.053610 +v -0.002504 -0.175631 -0.052798 +v -0.018077 -0.188489 -0.001616 +v -0.072076 -0.188742 -0.001288 +v -0.072316 -0.189070 -0.001037 +v -0.072498 -0.189453 -0.000879 +v -0.072611 -0.189864 -0.000825 +v -0.072646 -0.190275 -0.000879 +v -0.072601 -0.190658 -0.001037 +v -0.072480 -0.190987 -0.001288 +v -0.017582 -0.191239 -0.001616 +v -0.001953 -0.178698 -0.052798 +v -0.002237 -0.178752 -0.053207 +v -0.002542 -0.178698 -0.053616 +v -0.018720 -0.191239 -0.003197 +v -0.019001 -0.190987 -0.003524 +v -0.003365 -0.177958 -0.054576 +v 0.002689 -0.177575 -0.054734 +v 0.162216 -0.177164 -0.054782 +v 0.172984 -0.176753 0.062711 +v 0.169943 -0.176753 0.062991 +v 0.169943 
-0.177575 0.062991 +v 0.169889 -0.177164 0.062996 +v 0.170100 -0.177958 0.062977 +v 0.170100 -0.176370 0.062977 +v 0.170350 -0.178287 0.062954 +v 0.170350 -0.176042 0.062954 +v 0.170676 -0.178539 0.062924 +v 0.170676 -0.175789 0.062924 +v 0.171056 -0.178698 0.062889 +v 0.171056 -0.175631 0.062889 +v 0.171463 -0.178752 0.062851 +v 0.171463 -0.175577 0.062851 +v 0.171871 -0.178698 0.062814 +v 0.171871 -0.175631 0.062814 +v 0.172250 -0.178539 0.062779 +v 0.172250 -0.175789 0.062779 +v 0.172576 -0.178287 0.062749 +v 0.172827 -0.177958 0.062726 +v 0.172984 -0.177575 0.062711 +v 0.160401 -0.175631 -0.052792 +v 0.002689 -0.175631 -0.052798 +v -0.002202 -0.175789 -0.052417 +v -0.017796 -0.188742 -0.001288 +v -0.017556 -0.189070 -0.001037 +v -0.017373 -0.189453 -0.000879 +v -0.017260 -0.189864 -0.000825 +v -0.017225 -0.190275 -0.000879 +v -0.017270 -0.190658 -0.001037 +v -0.017392 -0.190987 -0.001288 +v -0.001707 -0.178539 -0.052417 +v 0.160774 -0.178752 -0.053201 +v 0.002689 -0.178752 -0.053207 +v 0.002689 -0.178698 -0.052798 +v 0.160401 -0.178698 -0.052792 +v 0.161148 -0.178698 -0.053610 +v 0.002689 -0.178698 -0.053616 +v -0.002845 -0.178539 -0.053998 +v -0.003126 -0.178287 -0.054325 +v 0.002689 -0.177958 -0.054576 +v 0.162167 -0.177575 -0.054728 +v 0.162023 -0.177958 -0.054570 +v 0.161794 -0.178287 -0.054319 +v 0.161495 -0.178539 -0.053992 +v 0.160053 -0.178539 -0.052411 +v 0.159755 -0.178287 -0.052083 +v 0.159525 -0.177958 -0.051832 +v 0.159381 -0.177575 -0.051674 +v 0.159332 -0.177164 -0.051620 +v 0.159381 -0.176753 -0.051674 +v 0.159525 -0.176370 -0.051832 +v 0.159755 -0.176042 -0.052083 +v 0.160053 -0.175789 -0.052411 +v 0.002689 -0.175789 -0.052417 +v -0.001921 -0.176042 -0.052089 +v -0.001681 -0.176370 -0.051838 +v -0.001498 -0.176753 -0.051680 +v -0.001385 -0.177164 -0.051626 +v -0.001350 -0.177575 -0.051680 +v -0.001395 -0.177958 -0.051838 +v -0.001517 -0.178287 -0.052089 +v 0.002689 -0.178539 -0.052417 +v 0.002689 -0.178539 -0.053998 +v 0.002689 -0.178287 
-0.054325 +v 0.002689 -0.178287 -0.052089 +v 0.002689 -0.177958 -0.051838 +v 0.002689 -0.177575 -0.051680 +v 0.002689 -0.177164 -0.051626 +v 0.002689 -0.176753 -0.051680 +v 0.002689 -0.176370 -0.051838 +v 0.002689 -0.176042 -0.052089 +v -0.092561 -0.150970 -0.054574 +v -0.120941 -0.150642 -0.054319 +v -0.120941 -0.150970 -0.054570 +v -0.092561 -0.150642 -0.054323 +v -0.161794 -0.150642 -0.054319 +v -0.162023 -0.150970 -0.054570 +v -0.092561 -0.151353 -0.054732 +v -0.120941 -0.151353 -0.054728 +v -0.086220 -0.150970 -0.054574 +v -0.086342 -0.150642 -0.054323 +v -0.092561 -0.150389 -0.053996 +v -0.120941 -0.150389 -0.053992 +v -0.172827 -0.150970 0.062726 +v -0.172576 -0.150642 0.062749 +v -0.162167 -0.151353 -0.054728 +v -0.161495 -0.150389 -0.053992 +v -0.092561 -0.151764 -0.054786 +v -0.120941 -0.151764 -0.054782 +v -0.086176 -0.151353 -0.054732 +v -0.070467 -0.163342 -0.003524 +v -0.070345 -0.163670 -0.003775 +v -0.086532 -0.150389 -0.053996 +v -0.092561 -0.150231 -0.053614 +v -0.120941 -0.150231 -0.053610 +v -0.172984 -0.151353 0.062711 +v -0.172984 -0.152175 0.062711 +v -0.173038 -0.151764 0.062706 +v -0.172827 -0.152558 0.062726 +v -0.172576 -0.152887 0.062749 +v -0.172250 -0.150389 0.062779 +v -0.172250 -0.153139 0.062779 +v -0.171871 -0.150231 0.062814 +v -0.171871 -0.153298 0.062814 +v -0.171463 -0.150177 0.062851 +v -0.171463 -0.153352 0.062851 +v -0.171056 -0.150231 0.062889 +v -0.171056 -0.153298 0.062889 +v -0.170676 -0.150389 0.062924 +v -0.170676 -0.153139 0.062924 +v -0.170350 -0.150642 0.062954 +v -0.170350 -0.152887 0.062954 +v -0.170100 -0.150970 0.062977 +v -0.170100 -0.152558 0.062977 +v -0.169943 -0.151353 0.062991 +v -0.169943 -0.152175 0.062991 +v -0.169889 -0.151764 0.062996 +v -0.162216 -0.151764 -0.054782 +v -0.161147 -0.150231 -0.053610 +v -0.120941 -0.152175 -0.054728 +v -0.092561 -0.152175 -0.054732 +v -0.086211 -0.151764 -0.054786 +v -0.070301 -0.164053 -0.003933 +v -0.019404 -0.163342 -0.003524 +v -0.019526 -0.163670 -0.003775 +v 
-0.070657 -0.163089 -0.003197 +v -0.086778 -0.150231 -0.053614 +v -0.087063 -0.150177 -0.053205 +v -0.092561 -0.150177 -0.053205 +v -0.160774 -0.150177 -0.053201 +v -0.120941 -0.150177 -0.053201 +v -0.162167 -0.152175 -0.054728 +v -0.162023 -0.152558 -0.054570 +v -0.160401 -0.150231 -0.052792 +v -0.160053 -0.150389 -0.052411 +v -0.159755 -0.150642 -0.052083 +v -0.159526 -0.150970 -0.051832 +v -0.159382 -0.151353 -0.051674 +v -0.159332 -0.151764 -0.051620 +v -0.159382 -0.152175 -0.051674 +v -0.159526 -0.152558 -0.051832 +v -0.159755 -0.152887 -0.052083 +v -0.160053 -0.153139 -0.052411 +v -0.160401 -0.153298 -0.052792 +v -0.160774 -0.153352 -0.053201 +v -0.161147 -0.153298 -0.053610 +v -0.161495 -0.153139 -0.053992 +v -0.161794 -0.152887 -0.054319 +v -0.120941 -0.152558 -0.054570 +v -0.092561 -0.152558 -0.054574 +v -0.086323 -0.152175 -0.054732 +v -0.070336 -0.164464 -0.003987 +v -0.019571 -0.164053 -0.003933 +v -0.003651 -0.150970 -0.054576 +v -0.003529 -0.150642 -0.054325 +v -0.019214 -0.163089 -0.003197 +v -0.070903 -0.162931 -0.002815 +v -0.071188 -0.162877 -0.002406 +v -0.087367 -0.150231 -0.052796 +v -0.092561 -0.150231 -0.052796 +v -0.120941 -0.150231 -0.052792 +v -0.120941 -0.150389 -0.052411 +v -0.120941 -0.150642 -0.052083 +v -0.120941 -0.150970 -0.051832 +v -0.120941 -0.151353 -0.051674 +v -0.120941 -0.151764 -0.051620 +v -0.120941 -0.152175 -0.051674 +v -0.120941 -0.152558 -0.051832 +v -0.120941 -0.152887 -0.052083 +v -0.120941 -0.153139 -0.052411 +v -0.120941 -0.153298 -0.052792 +v -0.120941 -0.153352 -0.053201 +v -0.092561 -0.153298 -0.052796 +v -0.087919 -0.153298 -0.052796 +v -0.092561 -0.153352 -0.053205 +v -0.087634 -0.153352 -0.053205 +v -0.120941 -0.153298 -0.053610 +v -0.092561 -0.153298 -0.053614 +v -0.087330 -0.153298 -0.053614 +v -0.120941 -0.153139 -0.053992 +v -0.120941 -0.152887 -0.054319 +v -0.092561 -0.152887 -0.054323 +v -0.086506 -0.152558 -0.054574 +v -0.070448 -0.164875 -0.003933 +v -0.019536 -0.164464 -0.003987 +v -0.003696 -0.151353 
-0.054734 +v 0.002689 -0.150970 -0.054576 +v 0.002689 -0.150642 -0.054325 +v -0.003339 -0.150389 -0.053998 +v -0.018968 -0.162931 -0.002815 +v -0.018684 -0.162877 -0.002406 +v -0.071492 -0.162931 -0.001997 +v -0.087670 -0.150389 -0.052415 +v -0.092560 -0.150389 -0.052415 +v -0.092560 -0.150642 -0.052087 +v -0.092560 -0.150970 -0.051836 +v -0.092560 -0.151353 -0.051678 +v -0.092560 -0.151764 -0.051624 +v -0.092560 -0.152175 -0.051678 +v -0.092560 -0.152558 -0.051836 +v -0.092560 -0.152887 -0.052087 +v -0.092560 -0.153139 -0.052415 +v -0.088164 -0.153139 -0.052415 +v -0.071759 -0.166052 -0.002406 +v -0.072044 -0.165998 -0.001997 +v -0.071455 -0.165998 -0.002815 +v -0.092561 -0.153139 -0.053996 +v -0.087027 -0.153139 -0.053996 +v -0.086746 -0.152887 -0.054323 +v -0.070631 -0.165258 -0.003775 +v -0.019423 -0.164875 -0.003933 +v -0.003661 -0.151764 -0.054788 +v 0.002689 -0.151353 -0.054734 +v 0.161794 -0.150642 -0.054319 +v 0.162023 -0.150970 -0.054570 +v 0.002689 -0.150389 -0.053998 +v -0.003093 -0.150231 -0.053616 +v -0.002809 -0.150177 -0.053207 +v -0.018380 -0.162931 -0.001997 +v -0.071795 -0.163089 -0.001616 +v -0.087951 -0.150642 -0.052088 +v -0.088191 -0.150970 -0.051836 +v -0.088373 -0.151353 -0.051678 +v -0.088486 -0.151764 -0.051625 +v -0.088521 -0.152175 -0.051678 +v -0.088476 -0.152558 -0.051836 +v -0.088355 -0.152887 -0.052088 +v -0.072289 -0.165839 -0.001616 +v -0.018112 -0.166052 -0.002406 +v -0.017828 -0.165998 -0.001997 +v -0.018417 -0.165998 -0.002815 +v -0.071152 -0.165839 -0.003197 +v -0.070871 -0.165587 -0.003524 +v -0.019240 -0.165258 -0.003775 +v -0.003548 -0.152175 -0.054734 +v 0.002689 -0.151764 -0.054788 +v 0.162167 -0.151353 -0.054728 +v 0.172576 -0.150642 0.062749 +v 0.172827 -0.150970 0.062726 +v 0.161495 -0.150389 -0.053992 +v 0.002689 -0.150231 -0.053616 +v 0.160774 -0.150177 -0.053201 +v 0.002689 -0.150177 -0.053207 +v 0.161148 -0.150231 -0.053610 +v -0.002504 -0.150231 -0.052798 +v -0.018077 -0.163089 -0.001616 +v -0.072076 -0.163342 
-0.001288 +v -0.072316 -0.163670 -0.001037 +v -0.072498 -0.164053 -0.000879 +v -0.072611 -0.164464 -0.000825 +v -0.072646 -0.164875 -0.000879 +v -0.072601 -0.165258 -0.001037 +v -0.072480 -0.165587 -0.001288 +v -0.017582 -0.165839 -0.001616 +v -0.001953 -0.153298 -0.052798 +v -0.002237 -0.153352 -0.053207 +v -0.002542 -0.153298 -0.053616 +v -0.018720 -0.165839 -0.003197 +v -0.019001 -0.165587 -0.003524 +v -0.003365 -0.152558 -0.054576 +v 0.002689 -0.152175 -0.054734 +v 0.162216 -0.151764 -0.054782 +v 0.172984 -0.151353 0.062711 +v 0.169943 -0.151353 0.062991 +v 0.169943 -0.152175 0.062991 +v 0.169889 -0.151764 0.062996 +v 0.170100 -0.152558 0.062977 +v 0.170100 -0.150970 0.062977 +v 0.170350 -0.152887 0.062954 +v 0.170350 -0.150642 0.062954 +v 0.170676 -0.153139 0.062924 +v 0.170676 -0.150389 0.062924 +v 0.171056 -0.153298 0.062889 +v 0.171056 -0.150231 0.062889 +v 0.171463 -0.153352 0.062851 +v 0.171463 -0.150177 0.062851 +v 0.171871 -0.153298 0.062814 +v 0.171871 -0.150231 0.062814 +v 0.172250 -0.153139 0.062779 +v 0.172250 -0.150389 0.062779 +v 0.172576 -0.152887 0.062749 +v 0.172827 -0.152558 0.062726 +v 0.172984 -0.152175 0.062711 +v 0.173038 -0.151764 0.062706 +v 0.160401 -0.150231 -0.052792 +v 0.002689 -0.150231 -0.052798 +v -0.002202 -0.150389 -0.052417 +v -0.017796 -0.163342 -0.001288 +v -0.017556 -0.163670 -0.001037 +v -0.017373 -0.164053 -0.000879 +v -0.017260 -0.164464 -0.000825 +v -0.017225 -0.164875 -0.000879 +v -0.017270 -0.165258 -0.001037 +v -0.017392 -0.165587 -0.001288 +v -0.001707 -0.153139 -0.052417 +v 0.160774 -0.153352 -0.053201 +v 0.002689 -0.153352 -0.053207 +v 0.002689 -0.153298 -0.052798 +v 0.160401 -0.153298 -0.052792 +v 0.161148 -0.153298 -0.053610 +v 0.002689 -0.153298 -0.053616 +v -0.002845 -0.153139 -0.053998 +v -0.003126 -0.152887 -0.054325 +v 0.002689 -0.152558 -0.054576 +v 0.162167 -0.152175 -0.054728 +v 0.162023 -0.152558 -0.054570 +v 0.161794 -0.152887 -0.054319 +v 0.161495 -0.153139 -0.053992 +v 0.160053 -0.153139 -0.052411 +v 
0.159755 -0.152887 -0.052083 +v 0.159525 -0.152558 -0.051832 +v 0.159381 -0.152175 -0.051674 +v 0.159332 -0.151764 -0.051620 +v 0.159381 -0.151353 -0.051674 +v 0.159525 -0.150970 -0.051832 +v 0.159755 -0.150642 -0.052083 +v 0.160053 -0.150389 -0.052411 +v 0.002689 -0.150389 -0.052417 +v -0.001921 -0.150642 -0.052089 +v -0.001681 -0.150970 -0.051838 +v -0.001498 -0.151353 -0.051680 +v -0.001385 -0.151764 -0.051626 +v -0.001350 -0.152175 -0.051680 +v -0.001395 -0.152558 -0.051838 +v -0.001517 -0.152887 -0.052089 +v 0.002689 -0.153139 -0.052417 +v 0.002689 -0.153139 -0.053998 +v 0.002689 -0.152887 -0.054325 +v 0.002689 -0.152887 -0.052089 +v 0.002689 -0.152558 -0.051838 +v 0.002689 -0.152175 -0.051680 +v 0.002689 -0.151764 -0.051626 +v 0.002689 -0.151353 -0.051680 +v 0.002689 -0.150970 -0.051838 +v 0.002689 -0.150642 -0.052089 +v -0.092561 0.153405 -0.054574 +v -0.120941 0.153734 -0.054319 +v -0.120941 0.153405 -0.054570 +v -0.092561 0.153734 -0.054323 +v -0.161794 0.153734 -0.054319 +v -0.162023 0.153405 -0.054570 +v -0.092561 0.153022 -0.054732 +v -0.120941 0.153022 -0.054728 +v -0.086220 0.153405 -0.054574 +v -0.086342 0.153734 -0.054323 +v -0.092561 0.153986 -0.053996 +v -0.120941 0.153986 -0.053992 +v -0.172827 0.153405 0.062726 +v -0.172576 0.153734 0.062749 +v -0.162167 0.153022 -0.054728 +v -0.161495 0.153986 -0.053992 +v -0.092561 0.152611 -0.054786 +v -0.120941 0.152611 -0.054782 +v -0.086176 0.153022 -0.054732 +v -0.070467 0.141034 -0.003524 +v -0.070345 0.140705 -0.003775 +v -0.086532 0.153986 -0.053996 +v -0.092561 0.154144 -0.053614 +v -0.120941 0.154144 -0.053610 +v -0.172984 0.153022 0.062711 +v -0.172984 0.152200 0.062711 +v -0.173038 0.152611 0.062706 +v -0.172827 0.151817 0.062726 +v -0.172576 0.151489 0.062749 +v -0.172250 0.153986 0.062779 +v -0.172250 0.151236 0.062779 +v -0.171871 0.154144 0.062814 +v -0.171871 0.151078 0.062814 +v -0.171463 0.154199 0.062851 +v -0.171463 0.151024 0.062851 +v -0.171056 0.154144 0.062889 +v -0.171056 0.151078 
0.062889 +v -0.170676 0.153986 0.062924 +v -0.170676 0.151236 0.062924 +v -0.170350 0.153734 0.062954 +v -0.170350 0.151489 0.062954 +v -0.170100 0.153405 0.062977 +v -0.170100 0.151817 0.062977 +v -0.169943 0.153022 0.062991 +v -0.169943 0.152200 0.062991 +v -0.169889 0.152611 0.062996 +v -0.162216 0.152611 -0.054782 +v -0.161147 0.154144 -0.053610 +v -0.120941 0.152200 -0.054728 +v -0.092561 0.152200 -0.054732 +v -0.086211 0.152611 -0.054786 +v -0.070301 0.140322 -0.003933 +v -0.019404 0.141034 -0.003524 +v -0.019526 0.140705 -0.003775 +v -0.070657 0.141286 -0.003197 +v -0.086778 0.154144 -0.053614 +v -0.087063 0.154199 -0.053205 +v -0.092561 0.154199 -0.053205 +v -0.160774 0.154199 -0.053201 +v -0.120941 0.154199 -0.053201 +v -0.162167 0.152200 -0.054728 +v -0.162023 0.151817 -0.054570 +v -0.160401 0.154144 -0.052792 +v -0.160053 0.153986 -0.052411 +v -0.159755 0.153734 -0.052083 +v -0.159526 0.153405 -0.051832 +v -0.159382 0.153022 -0.051674 +v -0.159332 0.152611 -0.051620 +v -0.159382 0.152200 -0.051674 +v -0.159526 0.151817 -0.051832 +v -0.159755 0.151489 -0.052083 +v -0.160053 0.151236 -0.052411 +v -0.160401 0.151078 -0.052792 +v -0.160774 0.151024 -0.053201 +v -0.161147 0.151078 -0.053610 +v -0.161495 0.151236 -0.053992 +v -0.161794 0.151489 -0.054319 +v -0.120941 0.151817 -0.054570 +v -0.092561 0.151817 -0.054574 +v -0.086323 0.152200 -0.054732 +v -0.070336 0.139911 -0.003987 +v -0.019571 0.140322 -0.003933 +v -0.003651 0.153405 -0.054576 +v -0.003529 0.153734 -0.054325 +v -0.019214 0.141286 -0.003197 +v -0.070903 0.141444 -0.002815 +v -0.071188 0.141499 -0.002406 +v -0.087367 0.154144 -0.052796 +v -0.092561 0.154144 -0.052796 +v -0.120941 0.154144 -0.052792 +v -0.120941 0.153986 -0.052411 +v -0.120941 0.153734 -0.052083 +v -0.120941 0.153405 -0.051832 +v -0.120941 0.153022 -0.051674 +v -0.120941 0.152611 -0.051620 +v -0.120941 0.152200 -0.051674 +v -0.120941 0.151817 -0.051832 +v -0.120941 0.151489 -0.052083 +v -0.120941 0.151236 -0.052411 +v -0.120941 
0.151078 -0.052792 +v -0.120941 0.151024 -0.053201 +v -0.092561 0.151078 -0.052796 +v -0.087919 0.151078 -0.052796 +v -0.092561 0.151024 -0.053205 +v -0.087634 0.151024 -0.053205 +v -0.120941 0.151078 -0.053610 +v -0.092561 0.151078 -0.053614 +v -0.087330 0.151078 -0.053614 +v -0.120941 0.151236 -0.053992 +v -0.120941 0.151489 -0.054319 +v -0.092561 0.151489 -0.054323 +v -0.086506 0.151817 -0.054574 +v -0.070448 0.139500 -0.003933 +v -0.019536 0.139911 -0.003987 +v -0.003696 0.153022 -0.054734 +v 0.002689 0.153405 -0.054576 +v 0.002689 0.153734 -0.054325 +v -0.003339 0.153986 -0.053998 +v -0.018968 0.141444 -0.002815 +v -0.018684 0.141499 -0.002406 +v -0.071492 0.141444 -0.001997 +v -0.087670 0.153986 -0.052415 +v -0.092560 0.153986 -0.052415 +v -0.092560 0.153734 -0.052087 +v -0.092560 0.153405 -0.051836 +v -0.092560 0.153022 -0.051678 +v -0.092560 0.152611 -0.051624 +v -0.092560 0.152200 -0.051678 +v -0.092560 0.151817 -0.051836 +v -0.092560 0.151489 -0.052087 +v -0.092560 0.151236 -0.052415 +v -0.088164 0.151236 -0.052415 +v -0.071759 0.138324 -0.002406 +v -0.072044 0.138378 -0.001997 +v -0.071455 0.138378 -0.002815 +v -0.092561 0.151236 -0.053996 +v -0.087027 0.151236 -0.053996 +v -0.086746 0.151489 -0.054323 +v -0.070631 0.139117 -0.003775 +v -0.019423 0.139500 -0.003933 +v -0.003661 0.152611 -0.054788 +v 0.002689 0.153022 -0.054734 +v 0.161794 0.153734 -0.054319 +v 0.162023 0.153405 -0.054570 +v 0.002689 0.153986 -0.053998 +v -0.003093 0.154144 -0.053616 +v -0.002809 0.154199 -0.053207 +v -0.018380 0.141444 -0.001997 +v -0.071795 0.141286 -0.001616 +v -0.087951 0.153734 -0.052088 +v -0.088191 0.153405 -0.051836 +v -0.088373 0.153022 -0.051678 +v -0.088486 0.152611 -0.051625 +v -0.088521 0.152200 -0.051678 +v -0.088476 0.151817 -0.051836 +v -0.088355 0.151489 -0.052088 +v -0.072289 0.138536 -0.001616 +v -0.018112 0.138324 -0.002406 +v -0.017828 0.138378 -0.001997 +v -0.018417 0.138378 -0.002815 +v -0.071152 0.138536 -0.003197 +v -0.070871 0.138789 -0.003524 +v 
-0.019240 0.139117 -0.003775 +v -0.003548 0.152200 -0.054734 +v 0.002689 0.152611 -0.054788 +v 0.162167 0.153022 -0.054728 +v 0.172576 0.153734 0.062749 +v 0.172827 0.153405 0.062726 +v 0.161495 0.153986 -0.053992 +v 0.002689 0.154144 -0.053616 +v 0.160774 0.154199 -0.053201 +v 0.002689 0.154199 -0.053207 +v 0.161148 0.154144 -0.053610 +v -0.002504 0.154144 -0.052798 +v -0.018077 0.141286 -0.001616 +v -0.072076 0.141034 -0.001288 +v -0.072316 0.140705 -0.001037 +v -0.072498 0.140322 -0.000879 +v -0.072611 0.139911 -0.000825 +v -0.072646 0.139500 -0.000879 +v -0.072601 0.139117 -0.001037 +v -0.072480 0.138789 -0.001288 +v -0.017582 0.138536 -0.001616 +v -0.001953 0.151078 -0.052798 +v -0.002237 0.151024 -0.053207 +v -0.002542 0.151078 -0.053616 +v -0.018720 0.138536 -0.003197 +v -0.019001 0.138789 -0.003524 +v -0.003365 0.151817 -0.054576 +v 0.002689 0.152200 -0.054734 +v 0.162216 0.152611 -0.054782 +v 0.172984 0.153022 0.062711 +v 0.169943 0.153022 0.062991 +v 0.169943 0.152200 0.062991 +v 0.169889 0.152611 0.062996 +v 0.170100 0.151817 0.062977 +v 0.170100 0.153405 0.062977 +v 0.170350 0.151489 0.062954 +v 0.170350 0.153734 0.062954 +v 0.170676 0.151236 0.062924 +v 0.170676 0.153986 0.062924 +v 0.171056 0.151078 0.062889 +v 0.171056 0.154144 0.062889 +v 0.171463 0.151024 0.062851 +v 0.171463 0.154199 0.062851 +v 0.171871 0.151078 0.062814 +v 0.171871 0.154144 0.062814 +v 0.172250 0.151236 0.062779 +v 0.172250 0.153986 0.062779 +v 0.172576 0.151489 0.062749 +v 0.172827 0.151817 0.062726 +v 0.172984 0.152200 0.062711 +v 0.173038 0.152611 0.062706 +v 0.160401 0.154144 -0.052792 +v 0.002689 0.154144 -0.052798 +v -0.002202 0.153986 -0.052417 +v -0.017796 0.141034 -0.001288 +v -0.017556 0.140705 -0.001037 +v -0.017373 0.140322 -0.000879 +v -0.017260 0.139911 -0.000825 +v -0.017225 0.139500 -0.000879 +v -0.017270 0.139117 -0.001037 +v -0.017392 0.138789 -0.001288 +v -0.001707 0.151236 -0.052417 +v 0.160774 0.151024 -0.053201 +v 0.002689 0.151024 -0.053207 +v 0.002689 
0.151078 -0.052798 +v 0.160401 0.151078 -0.052792 +v 0.161148 0.151078 -0.053610 +v 0.002689 0.151078 -0.053616 +v -0.002845 0.151236 -0.053998 +v -0.003126 0.151489 -0.054325 +v 0.002689 0.151817 -0.054576 +v 0.162167 0.152200 -0.054728 +v 0.162023 0.151817 -0.054570 +v 0.161794 0.151489 -0.054319 +v 0.161495 0.151236 -0.053992 +v 0.160053 0.151236 -0.052411 +v 0.159755 0.151489 -0.052083 +v 0.159525 0.151817 -0.051832 +v 0.159381 0.152200 -0.051674 +v 0.159332 0.152611 -0.051620 +v 0.159381 0.153022 -0.051674 +v 0.159525 0.153405 -0.051832 +v 0.159755 0.153734 -0.052083 +v 0.160053 0.153986 -0.052411 +v 0.002689 0.153986 -0.052417 +v -0.001921 0.153734 -0.052089 +v -0.001681 0.153405 -0.051838 +v -0.001498 0.153022 -0.051680 +v -0.001385 0.152611 -0.051626 +v -0.001350 0.152200 -0.051680 +v -0.001395 0.151817 -0.051838 +v -0.001517 0.151489 -0.052089 +v 0.002689 0.151236 -0.052417 +v 0.002689 0.151236 -0.053998 +v 0.002689 0.151489 -0.054325 +v 0.002689 0.151489 -0.052089 +v 0.002689 0.151817 -0.051838 +v 0.002689 0.152200 -0.051680 +v 0.002689 0.152611 -0.051626 +v 0.002689 0.153022 -0.051680 +v 0.002689 0.153405 -0.051838 +v 0.002689 0.153734 -0.052089 +v -0.092561 0.178805 -0.054574 +v -0.120941 0.179134 -0.054319 +v -0.120941 0.178805 -0.054570 +v -0.092561 0.179134 -0.054323 +v -0.161794 0.179134 -0.054319 +v -0.162023 0.178805 -0.054570 +v -0.092561 0.178422 -0.054732 +v -0.120941 0.178422 -0.054728 +v -0.086220 0.178805 -0.054574 +v -0.086342 0.179134 -0.054323 +v -0.092561 0.179386 -0.053996 +v -0.120941 0.179386 -0.053992 +v -0.172827 0.178805 0.062726 +v -0.172576 0.179134 0.062749 +v -0.162167 0.178422 -0.054728 +v -0.161495 0.179386 -0.053992 +v -0.092561 0.178011 -0.054786 +v -0.120941 0.178011 -0.054782 +v -0.086176 0.178422 -0.054732 +v -0.070467 0.166434 -0.003524 +v -0.070345 0.166105 -0.003775 +v -0.086532 0.179386 -0.053996 +v -0.092561 0.179544 -0.053614 +v -0.120941 0.179544 -0.053610 +v -0.172984 0.178422 0.062711 +v -0.172984 0.177600 
0.062711 +v -0.173038 0.178011 0.062706 +v -0.172827 0.177217 0.062726 +v -0.172576 0.176889 0.062749 +v -0.172250 0.179386 0.062779 +v -0.172250 0.176636 0.062779 +v -0.171871 0.179544 0.062814 +v -0.171871 0.176478 0.062814 +v -0.171463 0.179599 0.062851 +v -0.171463 0.176424 0.062851 +v -0.171056 0.179544 0.062889 +v -0.171056 0.176478 0.062889 +v -0.170676 0.179386 0.062924 +v -0.170676 0.176636 0.062924 +v -0.170350 0.179134 0.062954 +v -0.170350 0.176889 0.062954 +v -0.170100 0.178805 0.062977 +v -0.170100 0.177217 0.062977 +v -0.169943 0.178422 0.062991 +v -0.169943 0.177600 0.062991 +v -0.169889 0.178011 0.062996 +v -0.162216 0.178011 -0.054782 +v -0.161147 0.179544 -0.053610 +v -0.120941 0.177600 -0.054728 +v -0.092561 0.177600 -0.054732 +v -0.086211 0.178011 -0.054786 +v -0.070301 0.165722 -0.003933 +v -0.019404 0.166434 -0.003524 +v -0.019526 0.166105 -0.003775 +v -0.070657 0.166686 -0.003197 +v -0.086778 0.179544 -0.053614 +v -0.087063 0.179599 -0.053205 +v -0.092561 0.179599 -0.053205 +v -0.160774 0.179599 -0.053201 +v -0.120941 0.179599 -0.053201 +v -0.162167 0.177600 -0.054728 +v -0.162023 0.177217 -0.054570 +v -0.160401 0.179544 -0.052792 +v -0.160053 0.179386 -0.052411 +v -0.159755 0.179134 -0.052083 +v -0.159526 0.178805 -0.051832 +v -0.159382 0.178422 -0.051674 +v -0.159332 0.178011 -0.051620 +v -0.159382 0.177600 -0.051674 +v -0.159526 0.177217 -0.051832 +v -0.159755 0.176889 -0.052083 +v -0.160053 0.176636 -0.052411 +v -0.160401 0.176478 -0.052792 +v -0.160774 0.176424 -0.053201 +v -0.161147 0.176478 -0.053610 +v -0.161495 0.176636 -0.053992 +v -0.161794 0.176889 -0.054319 +v -0.120941 0.177217 -0.054570 +v -0.092561 0.177217 -0.054574 +v -0.086323 0.177600 -0.054732 +v -0.070336 0.165311 -0.003987 +v -0.019571 0.165722 -0.003933 +v -0.003651 0.178805 -0.054576 +v -0.003529 0.179134 -0.054325 +v -0.019214 0.166686 -0.003197 +v -0.070903 0.166844 -0.002815 +v -0.071188 0.166899 -0.002406 +v -0.087367 0.179544 -0.052796 +v -0.092561 0.179544 
-0.052796 +v -0.120941 0.179544 -0.052792 +v -0.120941 0.179386 -0.052411 +v -0.120941 0.179134 -0.052083 +v -0.120941 0.178805 -0.051832 +v -0.120941 0.178422 -0.051674 +v -0.120941 0.178011 -0.051620 +v -0.120941 0.177600 -0.051674 +v -0.120941 0.177217 -0.051832 +v -0.120941 0.176889 -0.052083 +v -0.120941 0.176636 -0.052411 +v -0.120941 0.176478 -0.052792 +v -0.120941 0.176424 -0.053201 +v -0.092561 0.176478 -0.052796 +v -0.087919 0.176478 -0.052796 +v -0.092561 0.176424 -0.053205 +v -0.087634 0.176424 -0.053205 +v -0.120941 0.176478 -0.053610 +v -0.092561 0.176478 -0.053614 +v -0.087330 0.176478 -0.053614 +v -0.120941 0.176636 -0.053992 +v -0.120941 0.176889 -0.054319 +v -0.092561 0.176889 -0.054323 +v -0.086506 0.177217 -0.054574 +v -0.070448 0.164900 -0.003933 +v -0.019536 0.165311 -0.003987 +v -0.003696 0.178422 -0.054734 +v 0.002689 0.178805 -0.054576 +v 0.002689 0.179134 -0.054325 +v -0.003339 0.179386 -0.053998 +v -0.018968 0.166844 -0.002815 +v -0.018684 0.166899 -0.002406 +v -0.071492 0.166844 -0.001997 +v -0.087670 0.179386 -0.052415 +v -0.092560 0.179386 -0.052415 +v -0.092560 0.179134 -0.052087 +v -0.092560 0.178805 -0.051836 +v -0.092560 0.178422 -0.051678 +v -0.092560 0.178011 -0.051624 +v -0.092560 0.177600 -0.051678 +v -0.092560 0.177217 -0.051836 +v -0.092560 0.176889 -0.052087 +v -0.092560 0.176636 -0.052415 +v -0.088164 0.176636 -0.052415 +v -0.071759 0.163724 -0.002406 +v -0.072044 0.163778 -0.001997 +v -0.071455 0.163778 -0.002815 +v -0.092561 0.176636 -0.053996 +v -0.087027 0.176636 -0.053996 +v -0.086746 0.176889 -0.054323 +v -0.070631 0.164517 -0.003775 +v -0.019423 0.164900 -0.003933 +v -0.003661 0.178011 -0.054788 +v 0.002689 0.178422 -0.054734 +v 0.161794 0.179134 -0.054319 +v 0.162023 0.178805 -0.054570 +v 0.002689 0.179386 -0.053998 +v -0.003093 0.179544 -0.053616 +v -0.002809 0.179599 -0.053207 +v -0.018380 0.166844 -0.001997 +v -0.071795 0.166686 -0.001616 +v -0.087951 0.179134 -0.052088 +v -0.088191 0.178805 -0.051836 +v 
-0.088373 0.178422 -0.051678 +v -0.088486 0.178011 -0.051625 +v -0.088521 0.177600 -0.051678 +v -0.088476 0.177217 -0.051836 +v -0.088355 0.176889 -0.052088 +v -0.072289 0.163936 -0.001616 +v -0.018112 0.163724 -0.002406 +v -0.017828 0.163778 -0.001997 +v -0.018417 0.163778 -0.002815 +v -0.071152 0.163936 -0.003197 +v -0.070871 0.164189 -0.003524 +v -0.019240 0.164517 -0.003775 +v -0.003548 0.177600 -0.054734 +v 0.002689 0.178011 -0.054788 +v 0.162167 0.178422 -0.054728 +v 0.172576 0.179134 0.062749 +v 0.172827 0.178805 0.062726 +v 0.161495 0.179386 -0.053992 +v 0.002689 0.179544 -0.053616 +v 0.160774 0.179599 -0.053201 +v 0.002689 0.179599 -0.053207 +v 0.161148 0.179544 -0.053610 +v -0.002504 0.179544 -0.052798 +v -0.018077 0.166686 -0.001616 +v -0.072076 0.166434 -0.001288 +v -0.072316 0.166105 -0.001037 +v -0.072498 0.165722 -0.000879 +v -0.072611 0.165311 -0.000825 +v -0.072646 0.164900 -0.000879 +v -0.072601 0.164517 -0.001037 +v -0.072480 0.164189 -0.001288 +v -0.017582 0.163936 -0.001616 +v -0.001953 0.176478 -0.052798 +v -0.002237 0.176424 -0.053207 +v -0.002542 0.176478 -0.053616 +v -0.018720 0.163936 -0.003197 +v -0.019001 0.164189 -0.003524 +v -0.003365 0.177217 -0.054576 +v 0.002689 0.177600 -0.054734 +v 0.162216 0.178011 -0.054782 +v 0.172984 0.178422 0.062711 +v 0.169943 0.178422 0.062991 +v 0.169943 0.177600 0.062991 +v 0.169889 0.178011 0.062996 +v 0.170100 0.177217 0.062977 +v 0.170100 0.178805 0.062977 +v 0.170350 0.176889 0.062954 +v 0.170350 0.179134 0.062954 +v 0.170676 0.176636 0.062924 +v 0.170676 0.179386 0.062924 +v 0.171056 0.176478 0.062889 +v 0.171056 0.179544 0.062889 +v 0.171463 0.176424 0.062851 +v 0.171463 0.179599 0.062851 +v 0.171871 0.176478 0.062814 +v 0.171871 0.179544 0.062814 +v 0.172250 0.176636 0.062779 +v 0.172250 0.179386 0.062779 +v 0.172576 0.176889 0.062749 +v 0.172827 0.177217 0.062726 +v 0.172984 0.177600 0.062711 +v 0.173038 0.178011 0.062706 +v 0.160401 0.179544 -0.052792 +v 0.002689 0.179544 -0.052798 +v -0.002202 
0.179386 -0.052417 +v -0.017796 0.166434 -0.001288 +v -0.017556 0.166105 -0.001037 +v -0.017373 0.165722 -0.000879 +v -0.017260 0.165311 -0.000825 +v -0.017225 0.164900 -0.000879 +v -0.017270 0.164517 -0.001037 +v -0.017392 0.164189 -0.001288 +v -0.001707 0.176636 -0.052417 +v 0.160774 0.176424 -0.053201 +v 0.002689 0.176424 -0.053207 +v 0.002689 0.176478 -0.052798 +v 0.160401 0.176478 -0.052792 +v 0.161148 0.176478 -0.053610 +v 0.002689 0.176478 -0.053616 +v -0.002845 0.176636 -0.053998 +v -0.003126 0.176889 -0.054325 +v 0.002689 0.177217 -0.054576 +v 0.162167 0.177600 -0.054728 +v 0.162023 0.177217 -0.054570 +v 0.161794 0.176889 -0.054319 +v 0.161495 0.176636 -0.053992 +v 0.160053 0.176636 -0.052411 +v 0.159755 0.176889 -0.052083 +v 0.159525 0.177217 -0.051832 +v 0.159381 0.177600 -0.051674 +v 0.159332 0.178011 -0.051620 +v 0.159381 0.178422 -0.051674 +v 0.159525 0.178805 -0.051832 +v 0.159755 0.179134 -0.052083 +v 0.160053 0.179386 -0.052411 +v 0.002689 0.179386 -0.052417 +v -0.001921 0.179134 -0.052089 +v -0.001681 0.178805 -0.051838 +v -0.001498 0.178422 -0.051680 +v -0.001385 0.178011 -0.051626 +v -0.001350 0.177600 -0.051680 +v -0.001395 0.177217 -0.051838 +v -0.001517 0.176889 -0.052089 +v 0.002689 0.176636 -0.052417 +v 0.002689 0.176636 -0.053998 +v 0.002689 0.176889 -0.054325 +v 0.002689 0.176889 -0.052089 +v 0.002689 0.177217 -0.051838 +v 0.002689 0.177600 -0.051680 +v 0.002689 0.178011 -0.051626 +v 0.002689 0.178422 -0.051680 +v 0.002689 0.178805 -0.051838 +v 0.002689 0.179134 -0.052089 +v -0.092561 -0.098583 -0.054574 +v -0.120941 -0.098254 -0.054319 +v -0.120941 -0.098583 -0.054570 +v -0.092561 -0.098254 -0.054323 +v -0.161794 -0.098254 -0.054319 +v -0.162023 -0.098583 -0.054570 +v -0.092561 -0.098966 -0.054732 +v -0.120941 -0.098966 -0.054728 +v -0.086220 -0.098583 -0.054574 +v -0.086342 -0.098254 -0.054323 +v -0.092561 -0.098002 -0.053996 +v -0.120941 -0.098002 -0.053992 +v -0.172827 -0.098583 0.062726 +v -0.172576 -0.098254 0.062749 +v -0.162167 
-0.098966 -0.054728 +v -0.161495 -0.098002 -0.053992 +v -0.092561 -0.099377 -0.054786 +v -0.120941 -0.099377 -0.054782 +v -0.086176 -0.098966 -0.054732 +v -0.070467 -0.110954 -0.003524 +v -0.070345 -0.111283 -0.003775 +v -0.086532 -0.098002 -0.053996 +v -0.092561 -0.097843 -0.053614 +v -0.120941 -0.097843 -0.053610 +v -0.172984 -0.098966 0.062711 +v -0.172984 -0.099788 0.062711 +v -0.173038 -0.099377 0.062706 +v -0.172827 -0.100170 0.062726 +v -0.172576 -0.100499 0.062749 +v -0.172250 -0.098002 0.062779 +v -0.172250 -0.100751 0.062779 +v -0.171871 -0.097843 0.062814 +v -0.171871 -0.100910 0.062814 +v -0.171463 -0.097789 0.062851 +v -0.171463 -0.100964 0.062851 +v -0.171056 -0.097843 0.062889 +v -0.171056 -0.100910 0.062889 +v -0.170676 -0.098002 0.062924 +v -0.170676 -0.100751 0.062924 +v -0.170350 -0.098254 0.062954 +v -0.170350 -0.100499 0.062954 +v -0.170100 -0.098583 0.062977 +v -0.170100 -0.100170 0.062977 +v -0.169943 -0.098966 0.062991 +v -0.169943 -0.099788 0.062991 +v -0.169889 -0.099377 0.062996 +v -0.162216 -0.099377 -0.054782 +v -0.161147 -0.097843 -0.053610 +v -0.120941 -0.099788 -0.054728 +v -0.092561 -0.099788 -0.054732 +v -0.086211 -0.099377 -0.054786 +v -0.070301 -0.111666 -0.003933 +v -0.019404 -0.110954 -0.003524 +v -0.019526 -0.111283 -0.003775 +v -0.070657 -0.110702 -0.003197 +v -0.086778 -0.097843 -0.053614 +v -0.087063 -0.097789 -0.053205 +v -0.092561 -0.097789 -0.053205 +v -0.160774 -0.097789 -0.053201 +v -0.120941 -0.097789 -0.053201 +v -0.162167 -0.099788 -0.054728 +v -0.162023 -0.100170 -0.054570 +v -0.160401 -0.097843 -0.052792 +v -0.160053 -0.098002 -0.052411 +v -0.159755 -0.098254 -0.052083 +v -0.159526 -0.098583 -0.051832 +v -0.159382 -0.098966 -0.051674 +v -0.159332 -0.099377 -0.051620 +v -0.159382 -0.099788 -0.051674 +v -0.159526 -0.100170 -0.051832 +v -0.159755 -0.100499 -0.052083 +v -0.160053 -0.100751 -0.052411 +v -0.160401 -0.100910 -0.052792 +v -0.160774 -0.100964 -0.053201 +v -0.161147 -0.100910 -0.053610 +v -0.161495 
-0.100751 -0.053992 +v -0.161794 -0.100499 -0.054319 +v -0.120941 -0.100170 -0.054570 +v -0.092561 -0.100170 -0.054574 +v -0.086323 -0.099788 -0.054732 +v -0.070336 -0.112077 -0.003987 +v -0.019571 -0.111666 -0.003933 +v -0.003651 -0.098583 -0.054576 +v -0.003529 -0.098254 -0.054325 +v -0.019214 -0.110702 -0.003197 +v -0.070903 -0.110543 -0.002815 +v -0.071188 -0.110489 -0.002406 +v -0.087367 -0.097843 -0.052796 +v -0.092561 -0.097843 -0.052796 +v -0.120941 -0.097843 -0.052792 +v -0.120941 -0.098002 -0.052411 +v -0.120941 -0.098254 -0.052083 +v -0.120941 -0.098583 -0.051832 +v -0.120941 -0.098966 -0.051674 +v -0.120941 -0.099377 -0.051620 +v -0.120941 -0.099788 -0.051674 +v -0.120941 -0.100170 -0.051832 +v -0.120941 -0.100499 -0.052083 +v -0.120941 -0.100751 -0.052411 +v -0.120941 -0.100910 -0.052792 +v -0.120941 -0.100964 -0.053201 +v -0.092561 -0.100910 -0.052796 +v -0.087919 -0.100910 -0.052796 +v -0.092561 -0.100964 -0.053205 +v -0.087634 -0.100964 -0.053205 +v -0.120941 -0.100910 -0.053610 +v -0.092561 -0.100910 -0.053614 +v -0.087330 -0.100910 -0.053614 +v -0.120941 -0.100751 -0.053992 +v -0.120941 -0.100499 -0.054319 +v -0.092561 -0.100499 -0.054323 +v -0.086506 -0.100170 -0.054574 +v -0.070448 -0.112488 -0.003933 +v -0.019536 -0.112077 -0.003987 +v -0.003696 -0.098966 -0.054734 +v 0.002689 -0.098583 -0.054576 +v 0.002689 -0.098254 -0.054325 +v -0.003339 -0.098002 -0.053998 +v -0.018968 -0.110543 -0.002815 +v -0.018684 -0.110489 -0.002406 +v -0.071492 -0.110543 -0.001997 +v -0.087670 -0.098002 -0.052415 +v -0.092560 -0.098002 -0.052415 +v -0.092560 -0.098254 -0.052087 +v -0.092560 -0.098583 -0.051836 +v -0.092560 -0.098966 -0.051678 +v -0.092560 -0.099377 -0.051624 +v -0.092560 -0.099788 -0.051678 +v -0.092560 -0.100170 -0.051836 +v -0.092560 -0.100499 -0.052087 +v -0.092560 -0.100751 -0.052415 +v -0.088164 -0.100751 -0.052415 +v -0.071759 -0.113664 -0.002406 +v -0.072044 -0.113610 -0.001997 +v -0.071455 -0.113610 -0.002815 +v -0.092561 -0.100751 -0.053996 
+v -0.087027 -0.100751 -0.053996 +v -0.086746 -0.100499 -0.054323 +v -0.070631 -0.112870 -0.003775 +v -0.019423 -0.112488 -0.003933 +v -0.003661 -0.099377 -0.054788 +v 0.002689 -0.098966 -0.054734 +v 0.161794 -0.098254 -0.054319 +v 0.162023 -0.098583 -0.054570 +v 0.002689 -0.098002 -0.053998 +v -0.003093 -0.097843 -0.053616 +v -0.002809 -0.097789 -0.053207 +v -0.018380 -0.110543 -0.001997 +v -0.071795 -0.110702 -0.001616 +v -0.087951 -0.098254 -0.052088 +v -0.088191 -0.098583 -0.051836 +v -0.088373 -0.098966 -0.051678 +v -0.088486 -0.099377 -0.051625 +v -0.088521 -0.099788 -0.051678 +v -0.088476 -0.100170 -0.051836 +v -0.088355 -0.100499 -0.052088 +v -0.072289 -0.113451 -0.001616 +v -0.018112 -0.113664 -0.002406 +v -0.017828 -0.113610 -0.001997 +v -0.018417 -0.113610 -0.002815 +v -0.071152 -0.113451 -0.003197 +v -0.070871 -0.113199 -0.003524 +v -0.019240 -0.112870 -0.003775 +v -0.003548 -0.099788 -0.054734 +v 0.002689 -0.099377 -0.054788 +v 0.162167 -0.098966 -0.054728 +v 0.172576 -0.098254 0.062749 +v 0.172827 -0.098583 0.062726 +v 0.161495 -0.098002 -0.053992 +v 0.002689 -0.097843 -0.053616 +v 0.160774 -0.097789 -0.053201 +v 0.002689 -0.097789 -0.053207 +v 0.161148 -0.097843 -0.053610 +v -0.002504 -0.097843 -0.052798 +v -0.018077 -0.110702 -0.001616 +v -0.072076 -0.110954 -0.001288 +v -0.072316 -0.111283 -0.001037 +v -0.072498 -0.111666 -0.000879 +v -0.072611 -0.112077 -0.000825 +v -0.072646 -0.112488 -0.000879 +v -0.072601 -0.112870 -0.001037 +v -0.072480 -0.113199 -0.001288 +v -0.017582 -0.113451 -0.001616 +v -0.001953 -0.100910 -0.052798 +v -0.002237 -0.100964 -0.053207 +v -0.002542 -0.100910 -0.053616 +v -0.018720 -0.113451 -0.003197 +v -0.019001 -0.113199 -0.003524 +v -0.003365 -0.100170 -0.054576 +v 0.002689 -0.099788 -0.054734 +v 0.162216 -0.099377 -0.054782 +v 0.172984 -0.098966 0.062711 +v 0.169943 -0.098966 0.062991 +v 0.169943 -0.099787 0.062991 +v 0.169889 -0.099377 0.062996 +v 0.170100 -0.100170 0.062977 +v 0.170100 -0.098583 0.062977 +v 0.170350 
-0.100499 0.062954 +v 0.170350 -0.098254 0.062954 +v 0.170676 -0.100751 0.062924 +v 0.170676 -0.098002 0.062924 +v 0.171056 -0.100910 0.062889 +v 0.171056 -0.097843 0.062889 +v 0.171463 -0.100964 0.062851 +v 0.171463 -0.097789 0.062851 +v 0.171871 -0.100910 0.062814 +v 0.171871 -0.097843 0.062814 +v 0.172250 -0.100751 0.062779 +v 0.172250 -0.098002 0.062779 +v 0.172576 -0.100499 0.062749 +v 0.172827 -0.100170 0.062726 +v 0.172984 -0.099788 0.062711 +v 0.173038 -0.099377 0.062706 +v 0.160401 -0.097843 -0.052792 +v 0.002689 -0.097843 -0.052798 +v -0.002202 -0.098002 -0.052417 +v -0.017796 -0.110954 -0.001288 +v -0.017556 -0.111283 -0.001037 +v -0.017373 -0.111666 -0.000879 +v -0.017260 -0.112077 -0.000825 +v -0.017225 -0.112488 -0.000879 +v -0.017270 -0.112870 -0.001037 +v -0.017392 -0.113199 -0.001288 +v -0.001707 -0.100751 -0.052417 +v 0.160774 -0.100964 -0.053201 +v 0.002689 -0.100964 -0.053207 +v 0.002689 -0.100910 -0.052798 +v 0.160401 -0.100910 -0.052792 +v 0.161148 -0.100910 -0.053610 +v 0.002689 -0.100910 -0.053616 +v -0.002845 -0.100751 -0.053998 +v -0.003126 -0.100499 -0.054325 +v 0.002689 -0.100170 -0.054576 +v 0.162167 -0.099788 -0.054728 +v 0.162023 -0.100170 -0.054570 +v 0.161794 -0.100499 -0.054319 +v 0.161495 -0.100751 -0.053992 +v 0.160053 -0.100751 -0.052411 +v 0.159755 -0.100499 -0.052083 +v 0.159525 -0.100170 -0.051832 +v 0.159381 -0.099787 -0.051674 +v 0.159332 -0.099377 -0.051620 +v 0.159381 -0.098966 -0.051674 +v 0.159525 -0.098583 -0.051832 +v 0.159755 -0.098254 -0.052083 +v 0.160053 -0.098002 -0.052411 +v 0.002689 -0.098002 -0.052417 +v -0.001921 -0.098254 -0.052089 +v -0.001681 -0.098583 -0.051838 +v -0.001498 -0.098966 -0.051680 +v -0.001385 -0.099377 -0.051626 +v -0.001350 -0.099787 -0.051680 +v -0.001395 -0.100170 -0.051838 +v -0.001517 -0.100499 -0.052089 +v 0.002689 -0.100751 -0.052417 +v 0.002689 -0.100751 -0.053998 +v 0.002689 -0.100499 -0.054325 +v 0.002689 -0.100499 -0.052089 +v 0.002689 -0.100170 -0.051838 +v 0.002689 -0.099787 
-0.051680 +v 0.002689 -0.099377 -0.051626 +v 0.002689 -0.098966 -0.051680 +v 0.002689 -0.098583 -0.051838 +v 0.002689 -0.098254 -0.052089 +v -0.092561 -0.073183 -0.054574 +v -0.120941 -0.072854 -0.054319 +v -0.120941 -0.073183 -0.054570 +v -0.092561 -0.072854 -0.054323 +v -0.161794 -0.072854 -0.054319 +v -0.162023 -0.073183 -0.054570 +v -0.092561 -0.073566 -0.054732 +v -0.120941 -0.073566 -0.054728 +v -0.086220 -0.073183 -0.054574 +v -0.086342 -0.072854 -0.054323 +v -0.092561 -0.072602 -0.053996 +v -0.120941 -0.072602 -0.053992 +v -0.172827 -0.073183 0.062726 +v -0.172576 -0.072854 0.062749 +v -0.162167 -0.073566 -0.054728 +v -0.161495 -0.072602 -0.053992 +v -0.092561 -0.073977 -0.054786 +v -0.120941 -0.073977 -0.054782 +v -0.086176 -0.073566 -0.054732 +v -0.070467 -0.085554 -0.003524 +v -0.070345 -0.085883 -0.003775 +v -0.086532 -0.072602 -0.053996 +v -0.092561 -0.072443 -0.053614 +v -0.120941 -0.072443 -0.053610 +v -0.172984 -0.073566 0.062711 +v -0.172984 -0.074388 0.062711 +v -0.173038 -0.073977 0.062706 +v -0.172827 -0.074770 0.062726 +v -0.172576 -0.075099 0.062749 +v -0.172250 -0.072602 0.062779 +v -0.172250 -0.075351 0.062779 +v -0.171871 -0.072443 0.062814 +v -0.171871 -0.075510 0.062814 +v -0.171463 -0.072389 0.062851 +v -0.171463 -0.075564 0.062851 +v -0.171056 -0.072443 0.062889 +v -0.171056 -0.075510 0.062889 +v -0.170676 -0.072602 0.062924 +v -0.170676 -0.075351 0.062924 +v -0.170350 -0.072854 0.062954 +v -0.170350 -0.075099 0.062954 +v -0.170100 -0.073183 0.062977 +v -0.170100 -0.074770 0.062977 +v -0.169943 -0.073566 0.062991 +v -0.169943 -0.074388 0.062991 +v -0.169889 -0.073977 0.062996 +v -0.162216 -0.073977 -0.054782 +v -0.161147 -0.072443 -0.053610 +v -0.120941 -0.074388 -0.054728 +v -0.092561 -0.074388 -0.054732 +v -0.086211 -0.073977 -0.054786 +v -0.070301 -0.086266 -0.003933 +v -0.019404 -0.085554 -0.003524 +v -0.019526 -0.085883 -0.003775 +v -0.070657 -0.085302 -0.003197 +v -0.086778 -0.072443 -0.053614 +v -0.087063 -0.072389 -0.053205 +v 
-0.092561 -0.072389 -0.053205 +v -0.160774 -0.072389 -0.053201 +v -0.120941 -0.072389 -0.053201 +v -0.162167 -0.074388 -0.054728 +v -0.162023 -0.074770 -0.054570 +v -0.160401 -0.072443 -0.052792 +v -0.160053 -0.072602 -0.052411 +v -0.159755 -0.072854 -0.052083 +v -0.159526 -0.073183 -0.051832 +v -0.159382 -0.073566 -0.051674 +v -0.159332 -0.073977 -0.051620 +v -0.159382 -0.074388 -0.051674 +v -0.159526 -0.074770 -0.051832 +v -0.159755 -0.075099 -0.052083 +v -0.160053 -0.075351 -0.052411 +v -0.160401 -0.075510 -0.052792 +v -0.160774 -0.075564 -0.053201 +v -0.161147 -0.075510 -0.053610 +v -0.161495 -0.075351 -0.053992 +v -0.161794 -0.075099 -0.054319 +v -0.120941 -0.074770 -0.054570 +v -0.092561 -0.074770 -0.054574 +v -0.086323 -0.074388 -0.054732 +v -0.070336 -0.086677 -0.003987 +v -0.019571 -0.086266 -0.003933 +v -0.003651 -0.073183 -0.054576 +v -0.003529 -0.072854 -0.054325 +v -0.019214 -0.085302 -0.003197 +v -0.070903 -0.085143 -0.002815 +v -0.071188 -0.085089 -0.002406 +v -0.087367 -0.072443 -0.052796 +v -0.092561 -0.072443 -0.052796 +v -0.120941 -0.072443 -0.052792 +v -0.120941 -0.072602 -0.052411 +v -0.120941 -0.072854 -0.052083 +v -0.120941 -0.073183 -0.051832 +v -0.120941 -0.073566 -0.051674 +v -0.120941 -0.073977 -0.051620 +v -0.120941 -0.074388 -0.051674 +v -0.120941 -0.074770 -0.051832 +v -0.120941 -0.075099 -0.052083 +v -0.120941 -0.075351 -0.052411 +v -0.120941 -0.075510 -0.052792 +v -0.120941 -0.075564 -0.053201 +v -0.092561 -0.075510 -0.052796 +v -0.087919 -0.075510 -0.052796 +v -0.092561 -0.075564 -0.053205 +v -0.087634 -0.075564 -0.053205 +v -0.120941 -0.075510 -0.053610 +v -0.092561 -0.075510 -0.053614 +v -0.087330 -0.075510 -0.053614 +v -0.120941 -0.075351 -0.053992 +v -0.120941 -0.075099 -0.054319 +v -0.092561 -0.075099 -0.054323 +v -0.086506 -0.074770 -0.054574 +v -0.070448 -0.087088 -0.003933 +v -0.019536 -0.086677 -0.003987 +v -0.003696 -0.073566 -0.054734 +v 0.002689 -0.073183 -0.054576 +v 0.002689 -0.072854 -0.054325 +v -0.003339 -0.072602 
-0.053998 +v -0.018968 -0.085143 -0.002815 +v -0.018684 -0.085089 -0.002406 +v -0.071492 -0.085143 -0.001997 +v -0.087670 -0.072602 -0.052415 +v -0.092560 -0.072602 -0.052415 +v -0.092560 -0.072854 -0.052087 +v -0.092560 -0.073183 -0.051836 +v -0.092560 -0.073566 -0.051678 +v -0.092560 -0.073977 -0.051624 +v -0.092560 -0.074388 -0.051678 +v -0.092560 -0.074770 -0.051836 +v -0.092560 -0.075099 -0.052087 +v -0.092560 -0.075351 -0.052415 +v -0.088164 -0.075351 -0.052415 +v -0.071759 -0.088264 -0.002406 +v -0.072044 -0.088210 -0.001997 +v -0.071455 -0.088210 -0.002815 +v -0.092561 -0.075351 -0.053996 +v -0.087027 -0.075351 -0.053996 +v -0.086746 -0.075099 -0.054323 +v -0.070631 -0.087470 -0.003775 +v -0.019423 -0.087088 -0.003933 +v -0.003661 -0.073977 -0.054788 +v 0.002689 -0.073566 -0.054734 +v 0.161794 -0.072854 -0.054319 +v 0.162023 -0.073183 -0.054570 +v 0.002689 -0.072602 -0.053998 +v -0.003093 -0.072443 -0.053616 +v -0.002809 -0.072389 -0.053207 +v -0.018380 -0.085143 -0.001997 +v -0.071795 -0.085302 -0.001616 +v -0.087951 -0.072854 -0.052088 +v -0.088191 -0.073183 -0.051836 +v -0.088373 -0.073566 -0.051678 +v -0.088486 -0.073977 -0.051625 +v -0.088521 -0.074388 -0.051678 +v -0.088476 -0.074770 -0.051836 +v -0.088355 -0.075099 -0.052088 +v -0.072289 -0.088051 -0.001616 +v -0.018112 -0.088264 -0.002406 +v -0.017828 -0.088210 -0.001997 +v -0.018417 -0.088210 -0.002815 +v -0.071152 -0.088051 -0.003197 +v -0.070871 -0.087799 -0.003524 +v -0.019240 -0.087470 -0.003775 +v -0.003548 -0.074388 -0.054734 +v 0.002689 -0.073977 -0.054788 +v 0.162167 -0.073566 -0.054728 +v 0.172576 -0.072854 0.062749 +v 0.172827 -0.073183 0.062726 +v 0.161495 -0.072602 -0.053992 +v 0.002689 -0.072443 -0.053616 +v 0.160774 -0.072389 -0.053201 +v 0.002689 -0.072389 -0.053207 +v 0.161148 -0.072443 -0.053610 +v -0.002504 -0.072443 -0.052798 +v -0.018077 -0.085302 -0.001616 +v -0.072076 -0.085554 -0.001288 +v -0.072316 -0.085883 -0.001037 +v -0.072498 -0.086266 -0.000879 +v -0.072611 -0.086677 
-0.000825 +v -0.072646 -0.087088 -0.000879 +v -0.072601 -0.087470 -0.001037 +v -0.072480 -0.087799 -0.001288 +v -0.017582 -0.088051 -0.001616 +v -0.001953 -0.075510 -0.052798 +v -0.002237 -0.075564 -0.053207 +v -0.002542 -0.075510 -0.053616 +v -0.018720 -0.088051 -0.003197 +v -0.019001 -0.087799 -0.003524 +v -0.003365 -0.074770 -0.054576 +v 0.002689 -0.074388 -0.054734 +v 0.162216 -0.073977 -0.054782 +v 0.172984 -0.073566 0.062711 +v 0.169943 -0.073566 0.062991 +v 0.169943 -0.074387 0.062991 +v 0.169889 -0.073977 0.062996 +v 0.170100 -0.074770 0.062977 +v 0.170100 -0.073183 0.062977 +v 0.170350 -0.075099 0.062954 +v 0.170350 -0.072854 0.062954 +v 0.170676 -0.075351 0.062924 +v 0.170676 -0.072602 0.062924 +v 0.171056 -0.075510 0.062889 +v 0.171056 -0.072443 0.062889 +v 0.171463 -0.075564 0.062851 +v 0.171463 -0.072389 0.062851 +v 0.171871 -0.075510 0.062814 +v 0.171871 -0.072443 0.062814 +v 0.172250 -0.075351 0.062779 +v 0.172250 -0.072602 0.062779 +v 0.172576 -0.075099 0.062749 +v 0.172827 -0.074770 0.062726 +v 0.172984 -0.074388 0.062711 +v 0.173038 -0.073977 0.062706 +v 0.160401 -0.072443 -0.052792 +v 0.002689 -0.072443 -0.052798 +v -0.002202 -0.072602 -0.052417 +v -0.017796 -0.085554 -0.001288 +v -0.017556 -0.085883 -0.001037 +v -0.017373 -0.086266 -0.000879 +v -0.017260 -0.086677 -0.000825 +v -0.017225 -0.087088 -0.000879 +v -0.017270 -0.087470 -0.001037 +v -0.017392 -0.087799 -0.001288 +v -0.001707 -0.075351 -0.052417 +v 0.160774 -0.075564 -0.053201 +v 0.002689 -0.075564 -0.053207 +v 0.002689 -0.075510 -0.052798 +v 0.160401 -0.075510 -0.052792 +v 0.161148 -0.075510 -0.053610 +v 0.002689 -0.075510 -0.053616 +v -0.002845 -0.075351 -0.053998 +v -0.003126 -0.075099 -0.054325 +v 0.002689 -0.074770 -0.054576 +v 0.162167 -0.074388 -0.054728 +v 0.162023 -0.074770 -0.054570 +v 0.161794 -0.075099 -0.054319 +v 0.161495 -0.075351 -0.053992 +v 0.160053 -0.075351 -0.052411 +v 0.159755 -0.075099 -0.052083 +v 0.159525 -0.074770 -0.051832 +v 0.159381 -0.074387 -0.051674 +v 
0.159332 -0.073977 -0.051620 +v 0.159381 -0.073566 -0.051674 +v 0.159525 -0.073183 -0.051832 +v 0.159755 -0.072854 -0.052083 +v 0.160053 -0.072602 -0.052411 +v 0.002689 -0.072602 -0.052417 +v -0.001921 -0.072854 -0.052089 +v -0.001681 -0.073183 -0.051838 +v -0.001498 -0.073566 -0.051680 +v -0.001385 -0.073977 -0.051626 +v -0.001350 -0.074387 -0.051680 +v -0.001395 -0.074770 -0.051838 +v -0.001517 -0.075099 -0.052089 +v 0.002689 -0.075351 -0.052417 +v 0.002689 -0.075351 -0.053998 +v 0.002689 -0.075099 -0.054325 +v 0.002689 -0.075099 -0.052089 +v 0.002689 -0.074770 -0.051838 +v 0.002689 -0.074387 -0.051680 +v 0.002689 -0.073977 -0.051626 +v 0.002689 -0.073566 -0.051680 +v 0.002689 -0.073183 -0.051838 +v 0.002689 -0.072854 -0.052089 +v -0.092561 0.102605 -0.054574 +v -0.120941 0.102934 -0.054319 +v -0.120941 0.102605 -0.054570 +v -0.092561 0.102934 -0.054323 +v -0.161794 0.102934 -0.054319 +v -0.162023 0.102605 -0.054570 +v -0.092561 0.102222 -0.054732 +v -0.120941 0.102222 -0.054728 +v -0.086220 0.102605 -0.054574 +v -0.086342 0.102934 -0.054323 +v -0.092561 0.103186 -0.053996 +v -0.120941 0.103186 -0.053992 +v -0.172827 0.102605 0.062726 +v -0.172576 0.102934 0.062749 +v -0.162167 0.102222 -0.054728 +v -0.161495 0.103186 -0.053992 +v -0.092561 0.101811 -0.054786 +v -0.120941 0.101811 -0.054782 +v -0.086176 0.102222 -0.054732 +v -0.070467 0.090234 -0.003524 +v -0.070345 0.089905 -0.003775 +v -0.086532 0.103186 -0.053996 +v -0.092561 0.103344 -0.053614 +v -0.120941 0.103344 -0.053610 +v -0.172984 0.102222 0.062711 +v -0.172984 0.101400 0.062711 +v -0.173038 0.101811 0.062706 +v -0.172827 0.101017 0.062726 +v -0.172576 0.100689 0.062749 +v -0.172250 0.103186 0.062779 +v -0.172250 0.100436 0.062779 +v -0.171871 0.103344 0.062814 +v -0.171871 0.100278 0.062814 +v -0.171463 0.103399 0.062851 +v -0.171463 0.100224 0.062851 +v -0.171056 0.103344 0.062889 +v -0.171056 0.100278 0.062889 +v -0.170676 0.103186 0.062924 +v -0.170676 0.100436 0.062924 +v -0.170350 0.102934 
0.062954 +v -0.170350 0.100689 0.062954 +v -0.170100 0.102605 0.062977 +v -0.170100 0.101017 0.062977 +v -0.169943 0.102222 0.062991 +v -0.169943 0.101400 0.062991 +v -0.169889 0.101811 0.062996 +v -0.162216 0.101811 -0.054782 +v -0.161147 0.103344 -0.053610 +v -0.120941 0.101400 -0.054728 +v -0.092561 0.101400 -0.054732 +v -0.086211 0.101811 -0.054786 +v -0.070301 0.089522 -0.003933 +v -0.019404 0.090234 -0.003524 +v -0.019526 0.089905 -0.003775 +v -0.070657 0.090486 -0.003197 +v -0.086778 0.103344 -0.053614 +v -0.087063 0.103399 -0.053205 +v -0.092561 0.103399 -0.053205 +v -0.160774 0.103399 -0.053201 +v -0.120941 0.103399 -0.053201 +v -0.162167 0.101400 -0.054728 +v -0.162023 0.101017 -0.054570 +v -0.160401 0.103344 -0.052792 +v -0.160053 0.103186 -0.052411 +v -0.159755 0.102934 -0.052083 +v -0.159526 0.102605 -0.051832 +v -0.159382 0.102222 -0.051674 +v -0.159332 0.101811 -0.051620 +v -0.159382 0.101400 -0.051674 +v -0.159526 0.101017 -0.051832 +v -0.159755 0.100689 -0.052083 +v -0.160053 0.100436 -0.052411 +v -0.160401 0.100278 -0.052792 +v -0.160774 0.100224 -0.053201 +v -0.161147 0.100278 -0.053610 +v -0.161495 0.100436 -0.053992 +v -0.161794 0.100689 -0.054319 +v -0.120941 0.101017 -0.054570 +v -0.092561 0.101017 -0.054574 +v -0.086323 0.101400 -0.054732 +v -0.070336 0.089111 -0.003987 +v -0.019571 0.089522 -0.003933 +v -0.003651 0.102605 -0.054576 +v -0.003529 0.102934 -0.054325 +v -0.019214 0.090486 -0.003197 +v -0.070903 0.090644 -0.002815 +v -0.071188 0.090699 -0.002406 +v -0.087367 0.103344 -0.052796 +v -0.092561 0.103344 -0.052796 +v -0.120941 0.103344 -0.052792 +v -0.120941 0.103186 -0.052411 +v -0.120941 0.102934 -0.052083 +v -0.120941 0.102605 -0.051832 +v -0.120941 0.102222 -0.051674 +v -0.120941 0.101811 -0.051620 +v -0.120941 0.101400 -0.051674 +v -0.120941 0.101017 -0.051832 +v -0.120941 0.100689 -0.052083 +v -0.120941 0.100436 -0.052411 +v -0.120941 0.100278 -0.052792 +v -0.120941 0.100224 -0.053201 +v -0.092561 0.100278 -0.052796 +v -0.087919 
0.100278 -0.052796 +v -0.092561 0.100224 -0.053205 +v -0.087634 0.100224 -0.053205 +v -0.120941 0.100278 -0.053610 +v -0.092561 0.100278 -0.053614 +v -0.087330 0.100278 -0.053614 +v -0.120941 0.100436 -0.053992 +v -0.120941 0.100689 -0.054319 +v -0.092561 0.100689 -0.054323 +v -0.086506 0.101017 -0.054574 +v -0.070448 0.088700 -0.003933 +v -0.019536 0.089111 -0.003987 +v -0.003696 0.102222 -0.054734 +v 0.002689 0.102605 -0.054576 +v 0.002689 0.102934 -0.054325 +v -0.003339 0.103186 -0.053998 +v -0.018968 0.090644 -0.002815 +v -0.018684 0.090699 -0.002406 +v -0.071492 0.090644 -0.001997 +v -0.087670 0.103186 -0.052415 +v -0.092560 0.103186 -0.052415 +v -0.092560 0.102934 -0.052087 +v -0.092560 0.102605 -0.051836 +v -0.092560 0.102222 -0.051678 +v -0.092560 0.101811 -0.051624 +v -0.092560 0.101400 -0.051678 +v -0.092560 0.101017 -0.051836 +v -0.092560 0.100689 -0.052087 +v -0.092560 0.100436 -0.052415 +v -0.088164 0.100436 -0.052415 +v -0.071759 0.087524 -0.002406 +v -0.072044 0.087578 -0.001997 +v -0.071455 0.087578 -0.002815 +v -0.092561 0.100436 -0.053996 +v -0.087027 0.100436 -0.053996 +v -0.086746 0.100689 -0.054323 +v -0.070631 0.088317 -0.003775 +v -0.019423 0.088700 -0.003933 +v -0.003661 0.101811 -0.054788 +v 0.002689 0.102222 -0.054734 +v 0.161794 0.102934 -0.054319 +v 0.162023 0.102605 -0.054570 +v 0.002689 0.103186 -0.053998 +v -0.003093 0.103344 -0.053616 +v -0.002809 0.103399 -0.053207 +v -0.018380 0.090644 -0.001997 +v -0.071795 0.090486 -0.001616 +v -0.087951 0.102934 -0.052088 +v -0.088191 0.102605 -0.051836 +v -0.088373 0.102222 -0.051678 +v -0.088486 0.101811 -0.051625 +v -0.088521 0.101400 -0.051678 +v -0.088476 0.101017 -0.051836 +v -0.088355 0.100689 -0.052088 +v -0.072289 0.087736 -0.001616 +v -0.018112 0.087524 -0.002406 +v -0.017828 0.087578 -0.001997 +v -0.018417 0.087578 -0.002815 +v -0.071152 0.087736 -0.003197 +v -0.070871 0.087989 -0.003524 +v -0.019240 0.088317 -0.003775 +v -0.003548 0.101400 -0.054734 +v 0.002689 0.101811 -0.054788 +v 
0.162167 0.102222 -0.054728 +v 0.172576 0.102934 0.062749 +v 0.172827 0.102605 0.062726 +v 0.161495 0.103186 -0.053992 +v 0.002689 0.103344 -0.053616 +v 0.160774 0.103399 -0.053201 +v 0.002689 0.103399 -0.053207 +v 0.161148 0.103344 -0.053610 +v -0.002504 0.103344 -0.052798 +v -0.018077 0.090486 -0.001616 +v -0.072076 0.090234 -0.001288 +v -0.072316 0.089905 -0.001037 +v -0.072498 0.089522 -0.000879 +v -0.072611 0.089111 -0.000825 +v -0.072646 0.088700 -0.000879 +v -0.072601 0.088317 -0.001037 +v -0.072480 0.087989 -0.001288 +v -0.017582 0.087736 -0.001616 +v -0.001953 0.100278 -0.052798 +v -0.002237 0.100224 -0.053207 +v -0.002542 0.100278 -0.053616 +v -0.018720 0.087736 -0.003197 +v -0.019001 0.087989 -0.003524 +v -0.003365 0.101017 -0.054576 +v 0.002689 0.101400 -0.054734 +v 0.162216 0.101811 -0.054782 +v 0.172984 0.102222 0.062711 +v 0.169943 0.102222 0.062991 +v 0.169943 0.101400 0.062991 +v 0.169889 0.101811 0.062996 +v 0.170100 0.101017 0.062977 +v 0.170100 0.102605 0.062977 +v 0.170350 0.100689 0.062954 +v 0.170350 0.102934 0.062954 +v 0.170676 0.100436 0.062924 +v 0.170676 0.103186 0.062924 +v 0.171056 0.100278 0.062889 +v 0.171056 0.103344 0.062889 +v 0.171463 0.100224 0.062851 +v 0.171463 0.103399 0.062851 +v 0.171871 0.100278 0.062814 +v 0.171871 0.103344 0.062814 +v 0.172250 0.100436 0.062779 +v 0.172250 0.103186 0.062779 +v 0.172576 0.100689 0.062749 +v 0.172827 0.101017 0.062726 +v 0.172984 0.101400 0.062711 +v 0.173038 0.101811 0.062706 +v 0.160401 0.103344 -0.052792 +v 0.002689 0.103344 -0.052798 +v -0.002202 0.103186 -0.052417 +v -0.017796 0.090234 -0.001288 +v -0.017556 0.089905 -0.001037 +v -0.017373 0.089522 -0.000879 +v -0.017260 0.089111 -0.000825 +v -0.017225 0.088700 -0.000879 +v -0.017270 0.088317 -0.001037 +v -0.017392 0.087989 -0.001288 +v -0.001707 0.100436 -0.052417 +v 0.160774 0.100224 -0.053201 +v 0.002689 0.100224 -0.053207 +v 0.002689 0.100278 -0.052798 +v 0.160401 0.100278 -0.052792 +v 0.161148 0.100278 -0.053610 +v 0.002689 
0.100278 -0.053616 +v -0.002845 0.100436 -0.053998 +v -0.003126 0.100689 -0.054325 +v 0.002689 0.101017 -0.054576 +v 0.162167 0.101400 -0.054728 +v 0.162023 0.101017 -0.054570 +v 0.161794 0.100689 -0.054319 +v 0.161495 0.100436 -0.053992 +v 0.160053 0.100436 -0.052411 +v 0.159755 0.100689 -0.052083 +v 0.159525 0.101017 -0.051832 +v 0.159381 0.101400 -0.051674 +v 0.159332 0.101811 -0.051620 +v 0.159381 0.102222 -0.051674 +v 0.159525 0.102605 -0.051832 +v 0.159755 0.102934 -0.052083 +v 0.160053 0.103186 -0.052411 +v 0.002689 0.103186 -0.052417 +v -0.001921 0.102934 -0.052089 +v -0.001681 0.102605 -0.051838 +v -0.001498 0.102222 -0.051680 +v -0.001385 0.101811 -0.051626 +v -0.001350 0.101400 -0.051680 +v -0.001395 0.101017 -0.051838 +v -0.001517 0.100689 -0.052089 +v 0.002689 0.100436 -0.052417 +v 0.002689 0.100436 -0.053998 +v 0.002689 0.100689 -0.054325 +v 0.002689 0.100689 -0.052089 +v 0.002689 0.101017 -0.051838 +v 0.002689 0.101400 -0.051680 +v 0.002689 0.101811 -0.051626 +v 0.002689 0.102222 -0.051680 +v 0.002689 0.102605 -0.051838 +v 0.002689 0.102934 -0.052089 +v -0.092561 0.077205 -0.054574 +v -0.120941 0.077534 -0.054319 +v -0.120941 0.077205 -0.054570 +v -0.092561 0.077534 -0.054323 +v -0.161794 0.077534 -0.054319 +v -0.162023 0.077205 -0.054570 +v -0.092561 0.076822 -0.054732 +v -0.120941 0.076822 -0.054728 +v -0.086220 0.077205 -0.054574 +v -0.086342 0.077534 -0.054323 +v -0.092561 0.077786 -0.053996 +v -0.120941 0.077786 -0.053992 +v -0.172827 0.077205 0.062726 +v -0.172576 0.077534 0.062749 +v -0.162167 0.076822 -0.054728 +v -0.161495 0.077786 -0.053992 +v -0.092561 0.076411 -0.054786 +v -0.120941 0.076411 -0.054782 +v -0.086176 0.076822 -0.054732 +v -0.070467 0.064834 -0.003524 +v -0.070345 0.064505 -0.003775 +v -0.086532 0.077786 -0.053996 +v -0.092561 0.077944 -0.053614 +v -0.120941 0.077944 -0.053610 +v -0.172984 0.076822 0.062711 +v -0.172984 0.076000 0.062711 +v -0.173038 0.076411 0.062706 +v -0.172827 0.075617 0.062726 +v -0.172576 0.075289 
0.062749 +v -0.172250 0.077786 0.062779 +v -0.172250 0.075036 0.062779 +v -0.171871 0.077944 0.062814 +v -0.171871 0.074878 0.062814 +v -0.171463 0.077999 0.062851 +v -0.171463 0.074824 0.062851 +v -0.171056 0.077944 0.062889 +v -0.171056 0.074878 0.062889 +v -0.170676 0.077786 0.062924 +v -0.170676 0.075036 0.062924 +v -0.170350 0.077534 0.062954 +v -0.170350 0.075289 0.062954 +v -0.170100 0.077205 0.062977 +v -0.170100 0.075617 0.062977 +v -0.169943 0.076822 0.062991 +v -0.169943 0.076000 0.062991 +v -0.169889 0.076411 0.062996 +v -0.162216 0.076411 -0.054782 +v -0.161147 0.077944 -0.053610 +v -0.120941 0.076000 -0.054728 +v -0.092561 0.076000 -0.054732 +v -0.086211 0.076411 -0.054786 +v -0.070301 0.064122 -0.003933 +v -0.019404 0.064834 -0.003524 +v -0.019526 0.064505 -0.003775 +v -0.070657 0.065086 -0.003197 +v -0.086778 0.077944 -0.053614 +v -0.087063 0.077999 -0.053205 +v -0.092561 0.077999 -0.053205 +v -0.160774 0.077999 -0.053201 +v -0.120941 0.077999 -0.053201 +v -0.162167 0.076000 -0.054728 +v -0.162023 0.075617 -0.054570 +v -0.160401 0.077944 -0.052792 +v -0.160053 0.077786 -0.052411 +v -0.159755 0.077534 -0.052083 +v -0.159526 0.077205 -0.051832 +v -0.159382 0.076822 -0.051674 +v -0.159332 0.076411 -0.051620 +v -0.159382 0.076000 -0.051674 +v -0.159526 0.075617 -0.051832 +v -0.159755 0.075289 -0.052083 +v -0.160053 0.075036 -0.052411 +v -0.160401 0.074878 -0.052792 +v -0.160774 0.074824 -0.053201 +v -0.161147 0.074878 -0.053610 +v -0.161495 0.075036 -0.053992 +v -0.161794 0.075289 -0.054319 +v -0.120941 0.075617 -0.054570 +v -0.092561 0.075617 -0.054574 +v -0.086323 0.076000 -0.054732 +v -0.070336 0.063711 -0.003987 +v -0.019571 0.064122 -0.003933 +v -0.003651 0.077205 -0.054576 +v -0.003529 0.077534 -0.054325 +v -0.019214 0.065086 -0.003197 +v -0.070903 0.065244 -0.002815 +v -0.071188 0.065299 -0.002406 +v -0.087367 0.077944 -0.052796 +v -0.092561 0.077944 -0.052796 +v -0.120941 0.077944 -0.052792 +v -0.120941 0.077786 -0.052411 +v -0.120941 0.077534 
-0.052083 +v -0.120941 0.077205 -0.051832 +v -0.120941 0.076822 -0.051674 +v -0.120941 0.076411 -0.051620 +v -0.120941 0.076000 -0.051674 +v -0.120941 0.075617 -0.051832 +v -0.120941 0.075289 -0.052083 +v -0.120941 0.075036 -0.052411 +v -0.120941 0.074878 -0.052792 +v -0.120941 0.074824 -0.053201 +v -0.092561 0.074878 -0.052796 +v -0.087919 0.074878 -0.052796 +v -0.092561 0.074824 -0.053205 +v -0.087634 0.074824 -0.053205 +v -0.120941 0.074878 -0.053610 +v -0.092561 0.074878 -0.053614 +v -0.087330 0.074878 -0.053614 +v -0.120941 0.075036 -0.053992 +v -0.120941 0.075289 -0.054319 +v -0.092561 0.075289 -0.054323 +v -0.086506 0.075617 -0.054574 +v -0.070448 0.063300 -0.003933 +v -0.019536 0.063711 -0.003987 +v -0.003696 0.076822 -0.054734 +v 0.002689 0.077205 -0.054576 +v 0.002689 0.077534 -0.054325 +v -0.003339 0.077786 -0.053998 +v -0.018968 0.065244 -0.002815 +v -0.018684 0.065299 -0.002406 +v -0.071492 0.065244 -0.001997 +v -0.087670 0.077786 -0.052415 +v -0.092560 0.077786 -0.052415 +v -0.092560 0.077534 -0.052087 +v -0.092560 0.077205 -0.051836 +v -0.092560 0.076822 -0.051678 +v -0.092560 0.076411 -0.051624 +v -0.092560 0.076000 -0.051678 +v -0.092560 0.075617 -0.051836 +v -0.092560 0.075289 -0.052087 +v -0.092560 0.075036 -0.052415 +v -0.088164 0.075036 -0.052415 +v -0.071759 0.062124 -0.002406 +v -0.072044 0.062178 -0.001997 +v -0.071455 0.062178 -0.002815 +v -0.092561 0.075036 -0.053996 +v -0.087027 0.075036 -0.053996 +v -0.086746 0.075289 -0.054323 +v -0.070631 0.062917 -0.003775 +v -0.019423 0.063300 -0.003933 +v -0.003661 0.076411 -0.054788 +v 0.002689 0.076822 -0.054734 +v 0.161794 0.077534 -0.054319 +v 0.162023 0.077205 -0.054570 +v 0.002689 0.077786 -0.053998 +v -0.003093 0.077944 -0.053616 +v -0.002809 0.077999 -0.053207 +v -0.018380 0.065244 -0.001997 +v -0.071795 0.065086 -0.001616 +v -0.087951 0.077534 -0.052088 +v -0.088191 0.077205 -0.051836 +v -0.088373 0.076822 -0.051678 +v -0.088486 0.076411 -0.051625 +v -0.088521 0.076000 -0.051678 +v 
-0.088476 0.075617 -0.051836 +v -0.088355 0.075289 -0.052088 +v -0.072289 0.062336 -0.001616 +v -0.018112 0.062124 -0.002406 +v -0.017828 0.062178 -0.001997 +v -0.018417 0.062178 -0.002815 +v -0.071152 0.062336 -0.003197 +v -0.070871 0.062589 -0.003524 +v -0.019240 0.062917 -0.003775 +v -0.003548 0.076000 -0.054734 +v 0.002689 0.076411 -0.054788 +v 0.162167 0.076822 -0.054728 +v 0.172576 0.077534 0.062749 +v 0.172827 0.077205 0.062726 +v 0.161495 0.077786 -0.053992 +v 0.002689 0.077944 -0.053616 +v 0.160774 0.077999 -0.053201 +v 0.002689 0.077999 -0.053207 +v 0.161148 0.077944 -0.053610 +v -0.002504 0.077944 -0.052798 +v -0.018077 0.065086 -0.001616 +v -0.072076 0.064834 -0.001288 +v -0.072316 0.064505 -0.001037 +v -0.072498 0.064122 -0.000879 +v -0.072611 0.063711 -0.000825 +v -0.072646 0.063300 -0.000879 +v -0.072601 0.062917 -0.001037 +v -0.072480 0.062589 -0.001288 +v -0.017582 0.062336 -0.001616 +v -0.001953 0.074878 -0.052798 +v -0.002237 0.074824 -0.053207 +v -0.002542 0.074878 -0.053616 +v -0.018720 0.062336 -0.003197 +v -0.019001 0.062589 -0.003524 +v -0.003365 0.075617 -0.054576 +v 0.002689 0.076000 -0.054734 +v 0.162216 0.076411 -0.054782 +v 0.172984 0.076822 0.062711 +v 0.169943 0.076822 0.062991 +v 0.169943 0.076000 0.062991 +v 0.169889 0.076411 0.062996 +v 0.170100 0.075617 0.062977 +v 0.170100 0.077205 0.062977 +v 0.170350 0.075289 0.062954 +v 0.170350 0.077534 0.062954 +v 0.170676 0.075036 0.062924 +v 0.170676 0.077786 0.062924 +v 0.171056 0.074878 0.062889 +v 0.171056 0.077944 0.062889 +v 0.171463 0.074824 0.062851 +v 0.171463 0.077999 0.062851 +v 0.171871 0.074878 0.062814 +v 0.171871 0.077944 0.062814 +v 0.172250 0.075036 0.062779 +v 0.172250 0.077786 0.062779 +v 0.172576 0.075289 0.062749 +v 0.172827 0.075617 0.062726 +v 0.172984 0.076000 0.062711 +v 0.173038 0.076411 0.062706 +v 0.160401 0.077944 -0.052792 +v 0.002689 0.077944 -0.052798 +v -0.002202 0.077786 -0.052417 +v -0.017796 0.064834 -0.001288 +v -0.017556 0.064505 -0.001037 +v -0.017373 
0.064122 -0.000879 +v -0.017260 0.063711 -0.000825 +v -0.017225 0.063300 -0.000879 +v -0.017270 0.062917 -0.001037 +v -0.017392 0.062589 -0.001288 +v -0.001707 0.075036 -0.052417 +v 0.160774 0.074824 -0.053201 +v 0.002689 0.074824 -0.053207 +v 0.002689 0.074878 -0.052798 +v 0.160401 0.074878 -0.052792 +v 0.161148 0.074878 -0.053610 +v 0.002689 0.074878 -0.053616 +v -0.002845 0.075036 -0.053998 +v -0.003126 0.075289 -0.054325 +v 0.002689 0.075617 -0.054576 +v 0.162167 0.076000 -0.054728 +v 0.162023 0.075617 -0.054570 +v 0.161794 0.075289 -0.054319 +v 0.161495 0.075036 -0.053992 +v 0.160053 0.075036 -0.052411 +v 0.159755 0.075289 -0.052083 +v 0.159525 0.075617 -0.051832 +v 0.159381 0.076000 -0.051674 +v 0.159332 0.076411 -0.051620 +v 0.159381 0.076822 -0.051674 +v 0.159525 0.077205 -0.051832 +v 0.159755 0.077534 -0.052083 +v 0.160053 0.077786 -0.052411 +v 0.002689 0.077786 -0.052417 +v -0.001921 0.077534 -0.052089 +v -0.001681 0.077205 -0.051838 +v -0.001498 0.076822 -0.051680 +v -0.001385 0.076411 -0.051626 +v -0.001350 0.076000 -0.051680 +v -0.001395 0.075617 -0.051838 +v -0.001517 0.075289 -0.052089 +v 0.002689 0.075036 -0.052417 +v 0.002689 0.075036 -0.053998 +v 0.002689 0.075289 -0.054325 +v 0.002689 0.075289 -0.052089 +v 0.002689 0.075617 -0.051838 +v 0.002689 0.076000 -0.051680 +v 0.002689 0.076411 -0.051626 +v 0.002689 0.076822 -0.051680 +v 0.002689 0.077205 -0.051838 +v 0.002689 0.077534 -0.052089 +v -0.092561 -0.013283 -0.054574 +v -0.120941 -0.012954 -0.054319 +v -0.120941 -0.013283 -0.054570 +v -0.092561 -0.012954 -0.054323 +v -0.161794 -0.012954 -0.054319 +v -0.162023 -0.013283 -0.054570 +v -0.092561 -0.013666 -0.054732 +v -0.120941 -0.013666 -0.054728 +v -0.086220 -0.013283 -0.054574 +v -0.086342 -0.012954 -0.054323 +v -0.092561 -0.012702 -0.053996 +v -0.120941 -0.012702 -0.053992 +v -0.172827 -0.013283 0.062726 +v -0.172576 -0.012954 0.062749 +v -0.162167 -0.013666 -0.054728 +v -0.161495 -0.012702 -0.053992 +v -0.092561 -0.014076 -0.054786 +v -0.120941 
-0.014076 -0.054782 +v -0.086176 -0.013666 -0.054732 +v -0.070467 -0.025654 -0.003524 +v -0.070345 -0.025983 -0.003775 +v -0.086532 -0.012702 -0.053996 +v -0.092561 -0.012543 -0.053614 +v -0.120941 -0.012543 -0.053610 +v -0.172984 -0.013666 0.062711 +v -0.172984 -0.014487 0.062711 +v -0.173038 -0.014076 0.062706 +v -0.172827 -0.014870 0.062726 +v -0.172576 -0.015199 0.062749 +v -0.172250 -0.012702 0.062779 +v -0.172250 -0.015451 0.062779 +v -0.171871 -0.012543 0.062814 +v -0.171871 -0.015610 0.062814 +v -0.171463 -0.012489 0.062851 +v -0.171463 -0.015664 0.062851 +v -0.171056 -0.012543 0.062889 +v -0.171056 -0.015610 0.062889 +v -0.170676 -0.012702 0.062924 +v -0.170676 -0.015451 0.062924 +v -0.170350 -0.012954 0.062954 +v -0.170350 -0.015199 0.062954 +v -0.170100 -0.013283 0.062977 +v -0.170100 -0.014870 0.062977 +v -0.169943 -0.013666 0.062991 +v -0.169943 -0.014487 0.062991 +v -0.169889 -0.014076 0.062996 +v -0.162216 -0.014076 -0.054782 +v -0.161147 -0.012543 -0.053610 +v -0.120941 -0.014487 -0.054728 +v -0.092561 -0.014487 -0.054732 +v -0.086211 -0.014076 -0.054786 +v -0.070301 -0.026366 -0.003933 +v -0.019404 -0.025654 -0.003524 +v -0.019526 -0.025983 -0.003775 +v -0.070657 -0.025402 -0.003197 +v -0.086778 -0.012543 -0.053614 +v -0.087063 -0.012489 -0.053205 +v -0.092561 -0.012489 -0.053205 +v -0.160774 -0.012489 -0.053201 +v -0.120941 -0.012489 -0.053201 +v -0.162167 -0.014487 -0.054728 +v -0.162023 -0.014870 -0.054570 +v -0.160401 -0.012543 -0.052792 +v -0.160053 -0.012702 -0.052411 +v -0.159755 -0.012954 -0.052083 +v -0.159526 -0.013283 -0.051832 +v -0.159382 -0.013666 -0.051674 +v -0.159332 -0.014076 -0.051620 +v -0.159382 -0.014487 -0.051674 +v -0.159526 -0.014870 -0.051832 +v -0.159755 -0.015199 -0.052083 +v -0.160053 -0.015451 -0.052411 +v -0.160401 -0.015610 -0.052792 +v -0.160774 -0.015664 -0.053201 +v -0.161147 -0.015610 -0.053610 +v -0.161495 -0.015451 -0.053992 +v -0.161794 -0.015199 -0.054319 +v -0.120941 -0.014870 -0.054570 +v -0.092561 
-0.014870 -0.054574 +v -0.086323 -0.014487 -0.054732 +v -0.070336 -0.026776 -0.003987 +v -0.019571 -0.026366 -0.003933 +v -0.003651 -0.013283 -0.054576 +v -0.003529 -0.012954 -0.054325 +v -0.019214 -0.025402 -0.003197 +v -0.070903 -0.025243 -0.002815 +v -0.071188 -0.025189 -0.002406 +v -0.087367 -0.012543 -0.052796 +v -0.092561 -0.012543 -0.052796 +v -0.120941 -0.012543 -0.052792 +v -0.120941 -0.012702 -0.052411 +v -0.120941 -0.012954 -0.052083 +v -0.120941 -0.013283 -0.051832 +v -0.120941 -0.013666 -0.051674 +v -0.120941 -0.014076 -0.051620 +v -0.120941 -0.014487 -0.051674 +v -0.120941 -0.014870 -0.051832 +v -0.120941 -0.015199 -0.052083 +v -0.120941 -0.015451 -0.052411 +v -0.120941 -0.015610 -0.052792 +v -0.120941 -0.015664 -0.053201 +v -0.092561 -0.015610 -0.052796 +v -0.087919 -0.015610 -0.052796 +v -0.092561 -0.015664 -0.053205 +v -0.087634 -0.015664 -0.053205 +v -0.120941 -0.015610 -0.053610 +v -0.092561 -0.015610 -0.053614 +v -0.087330 -0.015610 -0.053614 +v -0.120941 -0.015451 -0.053992 +v -0.120941 -0.015199 -0.054319 +v -0.092561 -0.015199 -0.054323 +v -0.086506 -0.014870 -0.054574 +v -0.070448 -0.027187 -0.003933 +v -0.019536 -0.026776 -0.003987 +v -0.003696 -0.013666 -0.054734 +v 0.002689 -0.013283 -0.054576 +v 0.002689 -0.012954 -0.054325 +v -0.003339 -0.012702 -0.053998 +v -0.018968 -0.025243 -0.002815 +v -0.018684 -0.025189 -0.002406 +v -0.071492 -0.025243 -0.001997 +v -0.087670 -0.012702 -0.052415 +v -0.092560 -0.012702 -0.052415 +v -0.092560 -0.012954 -0.052087 +v -0.092560 -0.013283 -0.051836 +v -0.092560 -0.013666 -0.051678 +v -0.092560 -0.014076 -0.051624 +v -0.092560 -0.014487 -0.051678 +v -0.092560 -0.014870 -0.051836 +v -0.092560 -0.015199 -0.052087 +v -0.092560 -0.015451 -0.052415 +v -0.088164 -0.015451 -0.052415 +v -0.071759 -0.028364 -0.002406 +v -0.072044 -0.028310 -0.001997 +v -0.071455 -0.028310 -0.002815 +v -0.092561 -0.015451 -0.053996 +v -0.087027 -0.015451 -0.053996 +v -0.086746 -0.015199 -0.054323 +v -0.070631 -0.027570 -0.003775 
+v -0.019423 -0.027187 -0.003933 +v -0.003661 -0.014076 -0.054788 +v 0.002689 -0.013666 -0.054734 +v 0.161794 -0.012954 -0.054319 +v 0.162023 -0.013283 -0.054570 +v 0.002689 -0.012702 -0.053998 +v -0.003093 -0.012543 -0.053616 +v -0.002809 -0.012489 -0.053207 +v -0.018380 -0.025243 -0.001997 +v -0.071795 -0.025402 -0.001616 +v -0.087951 -0.012954 -0.052088 +v -0.088191 -0.013283 -0.051836 +v -0.088373 -0.013666 -0.051678 +v -0.088486 -0.014076 -0.051625 +v -0.088521 -0.014487 -0.051678 +v -0.088476 -0.014870 -0.051836 +v -0.088355 -0.015199 -0.052088 +v -0.072289 -0.028151 -0.001616 +v -0.018112 -0.028364 -0.002406 +v -0.017828 -0.028310 -0.001997 +v -0.018417 -0.028310 -0.002815 +v -0.071152 -0.028151 -0.003197 +v -0.070871 -0.027899 -0.003524 +v -0.019240 -0.027570 -0.003775 +v -0.003548 -0.014487 -0.054734 +v 0.002689 -0.014076 -0.054788 +v 0.162167 -0.013666 -0.054728 +v 0.172576 -0.012954 0.062749 +v 0.172827 -0.013283 0.062726 +v 0.161495 -0.012702 -0.053992 +v 0.002689 -0.012543 -0.053616 +v 0.160774 -0.012489 -0.053201 +v 0.002689 -0.012489 -0.053207 +v 0.161148 -0.012543 -0.053610 +v -0.002504 -0.012543 -0.052798 +v -0.018077 -0.025402 -0.001616 +v -0.072076 -0.025654 -0.001288 +v -0.072316 -0.025983 -0.001037 +v -0.072498 -0.026366 -0.000879 +v -0.072611 -0.026776 -0.000825 +v -0.072646 -0.027187 -0.000879 +v -0.072601 -0.027570 -0.001037 +v -0.072480 -0.027899 -0.001288 +v -0.017582 -0.028151 -0.001616 +v -0.001953 -0.015610 -0.052798 +v -0.002237 -0.015664 -0.053207 +v -0.002542 -0.015610 -0.053616 +v -0.018720 -0.028151 -0.003197 +v -0.019001 -0.027899 -0.003524 +v -0.003365 -0.014870 -0.054576 +v 0.002689 -0.014487 -0.054734 +v 0.162216 -0.014076 -0.054782 +v 0.172984 -0.013666 0.062711 +v 0.169943 -0.013666 0.062991 +v 0.169943 -0.014487 0.062991 +v 0.169889 -0.014076 0.062996 +v 0.170100 -0.014870 0.062977 +v 0.170100 -0.013283 0.062977 +v 0.170350 -0.015199 0.062954 +v 0.170350 -0.012954 0.062954 +v 0.170676 -0.015451 0.062924 +v 0.170676 -0.012702 
0.062924 +v 0.171056 -0.015610 0.062889 +v 0.171056 -0.012543 0.062889 +v 0.171463 -0.015664 0.062851 +v 0.171463 -0.012489 0.062851 +v 0.171871 -0.015610 0.062814 +v 0.171871 -0.012543 0.062814 +v 0.172250 -0.015451 0.062779 +v 0.172250 -0.012702 0.062779 +v 0.172576 -0.015199 0.062749 +v 0.172827 -0.014870 0.062726 +v 0.172984 -0.014487 0.062711 +v 0.173038 -0.014076 0.062706 +v 0.160401 -0.012543 -0.052792 +v 0.002689 -0.012543 -0.052798 +v -0.002202 -0.012702 -0.052417 +v -0.017796 -0.025654 -0.001288 +v -0.017556 -0.025983 -0.001037 +v -0.017373 -0.026366 -0.000879 +v -0.017260 -0.026776 -0.000825 +v -0.017225 -0.027187 -0.000879 +v -0.017270 -0.027570 -0.001037 +v -0.017392 -0.027899 -0.001288 +v -0.001707 -0.015451 -0.052417 +v 0.160774 -0.015664 -0.053201 +v 0.002689 -0.015664 -0.053207 +v 0.002689 -0.015610 -0.052798 +v 0.160401 -0.015610 -0.052792 +v 0.161148 -0.015610 -0.053610 +v 0.002689 -0.015610 -0.053616 +v -0.002845 -0.015451 -0.053998 +v -0.003126 -0.015199 -0.054325 +v 0.002689 -0.014870 -0.054576 +v 0.162167 -0.014487 -0.054728 +v 0.162023 -0.014870 -0.054570 +v 0.161794 -0.015199 -0.054319 +v 0.161495 -0.015451 -0.053992 +v 0.160053 -0.015451 -0.052411 +v 0.159755 -0.015199 -0.052083 +v 0.159525 -0.014870 -0.051832 +v 0.159381 -0.014487 -0.051674 +v 0.159332 -0.014076 -0.051620 +v 0.159381 -0.013666 -0.051674 +v 0.159525 -0.013283 -0.051832 +v 0.159755 -0.012954 -0.052083 +v 0.160053 -0.012702 -0.052411 +v 0.002689 -0.012702 -0.052417 +v -0.001921 -0.012954 -0.052089 +v -0.001681 -0.013283 -0.051838 +v -0.001498 -0.013666 -0.051680 +v -0.001385 -0.014076 -0.051626 +v -0.001350 -0.014487 -0.051680 +v -0.001395 -0.014870 -0.051838 +v -0.001517 -0.015199 -0.052089 +v 0.002689 -0.015451 -0.052417 +v 0.002689 -0.015451 -0.053998 +v 0.002689 -0.015199 -0.054325 +v 0.002689 -0.015199 -0.052089 +v 0.002689 -0.014870 -0.051838 +v 0.002689 -0.014487 -0.051680 +v 0.002689 -0.014076 -0.051626 +v 0.002689 -0.013666 -0.051680 +v 0.002689 -0.013283 -0.051838 
+v 0.002689 -0.012954 -0.052089 +v -0.092561 0.012117 -0.054574 +v -0.120941 0.012446 -0.054319 +v -0.120941 0.012117 -0.054570 +v -0.092561 0.012446 -0.054323 +v -0.161794 0.012446 -0.054319 +v -0.162023 0.012117 -0.054570 +v -0.092561 0.011734 -0.054732 +v -0.120941 0.011734 -0.054728 +v -0.086220 0.012117 -0.054574 +v -0.086342 0.012446 -0.054323 +v -0.092561 0.012698 -0.053996 +v -0.120941 0.012698 -0.053992 +v -0.172827 0.012117 0.062726 +v -0.172576 0.012446 0.062749 +v -0.162167 0.011734 -0.054728 +v -0.161495 0.012698 -0.053992 +v -0.092561 0.011324 -0.054786 +v -0.120941 0.011324 -0.054782 +v -0.086176 0.011734 -0.054732 +v -0.070467 -0.000254 -0.003524 +v -0.070345 -0.000583 -0.003775 +v -0.086532 0.012698 -0.053996 +v -0.092561 0.012857 -0.053614 +v -0.120941 0.012857 -0.053610 +v -0.172984 0.011734 0.062711 +v -0.172984 0.010913 0.062711 +v -0.173038 0.011324 0.062706 +v -0.172827 0.010530 0.062726 +v -0.172576 0.010201 0.062749 +v -0.172250 0.012698 0.062779 +v -0.172250 0.009949 0.062779 +v -0.171871 0.012857 0.062814 +v -0.171871 0.009790 0.062814 +v -0.171463 0.012911 0.062851 +v -0.171463 0.009736 0.062851 +v -0.171056 0.012857 0.062889 +v -0.171056 0.009790 0.062889 +v -0.170676 0.012698 0.062924 +v -0.170676 0.009949 0.062924 +v -0.170350 0.012446 0.062954 +v -0.170350 0.010201 0.062954 +v -0.170100 0.012117 0.062977 +v -0.170100 0.010530 0.062977 +v -0.169943 0.011734 0.062991 +v -0.169943 0.010913 0.062991 +v -0.169889 0.011324 0.062996 +v -0.162216 0.011324 -0.054782 +v -0.161147 0.012857 -0.053610 +v -0.120941 0.010913 -0.054728 +v -0.092561 0.010913 -0.054732 +v -0.086211 0.011324 -0.054786 +v -0.070301 -0.000966 -0.003933 +v -0.019404 -0.000254 -0.003524 +v -0.019526 -0.000583 -0.003775 +v -0.070657 -0.000002 -0.003197 +v -0.086778 0.012857 -0.053614 +v -0.087063 0.012911 -0.053205 +v -0.092561 0.012911 -0.053205 +v -0.160774 0.012911 -0.053201 +v -0.120941 0.012911 -0.053201 +v -0.162167 0.010913 -0.054728 +v -0.162023 0.010530 -0.054570 
+v -0.160401 0.012857 -0.052792 +v -0.160053 0.012698 -0.052411 +v -0.159755 0.012446 -0.052083 +v -0.159526 0.012117 -0.051832 +v -0.159382 0.011734 -0.051674 +v -0.159332 0.011324 -0.051620 +v -0.159382 0.010913 -0.051674 +v -0.159526 0.010530 -0.051832 +v -0.159755 0.010201 -0.052083 +v -0.160053 0.009949 -0.052411 +v -0.160401 0.009790 -0.052792 +v -0.160774 0.009736 -0.053201 +v -0.161147 0.009790 -0.053610 +v -0.161495 0.009949 -0.053992 +v -0.161794 0.010201 -0.054319 +v -0.120941 0.010530 -0.054570 +v -0.092561 0.010530 -0.054574 +v -0.086323 0.010913 -0.054732 +v -0.070336 -0.001376 -0.003987 +v -0.019571 -0.000966 -0.003933 +v -0.003651 0.012117 -0.054576 +v -0.003529 0.012446 -0.054325 +v -0.019214 -0.000002 -0.003197 +v -0.070903 0.000157 -0.002815 +v -0.071188 0.000211 -0.002406 +v -0.087367 0.012857 -0.052796 +v -0.092561 0.012857 -0.052796 +v -0.120941 0.012857 -0.052792 +v -0.120941 0.012698 -0.052411 +v -0.120941 0.012446 -0.052083 +v -0.120941 0.012117 -0.051832 +v -0.120941 0.011734 -0.051674 +v -0.120941 0.011324 -0.051620 +v -0.120941 0.010913 -0.051674 +v -0.120941 0.010530 -0.051832 +v -0.120941 0.010201 -0.052083 +v -0.120941 0.009949 -0.052411 +v -0.120941 0.009790 -0.052792 +v -0.120941 0.009736 -0.053201 +v -0.092561 0.009790 -0.052796 +v -0.087919 0.009790 -0.052796 +v -0.092561 0.009736 -0.053205 +v -0.087634 0.009736 -0.053205 +v -0.120941 0.009790 -0.053610 +v -0.092561 0.009790 -0.053614 +v -0.087330 0.009790 -0.053614 +v -0.120941 0.009949 -0.053992 +v -0.120941 0.010201 -0.054319 +v -0.092561 0.010201 -0.054323 +v -0.086506 0.010530 -0.054574 +v -0.070448 -0.001787 -0.003933 +v -0.019536 -0.001376 -0.003987 +v -0.003696 0.011734 -0.054734 +v 0.002689 0.012117 -0.054576 +v 0.002689 0.012446 -0.054325 +v -0.003339 0.012698 -0.053998 +v -0.018968 0.000157 -0.002815 +v -0.018684 0.000211 -0.002406 +v -0.071492 0.000157 -0.001997 +v -0.087670 0.012698 -0.052415 +v -0.092560 0.012698 -0.052415 +v -0.092560 0.012446 -0.052087 +v -0.092560 
0.012117 -0.051836 +v -0.092560 0.011734 -0.051678 +v -0.092560 0.011324 -0.051624 +v -0.092560 0.010913 -0.051678 +v -0.092560 0.010530 -0.051836 +v -0.092560 0.010201 -0.052087 +v -0.092560 0.009949 -0.052415 +v -0.088164 0.009949 -0.052415 +v -0.071759 -0.002964 -0.002406 +v -0.072044 -0.002910 -0.001997 +v -0.071455 -0.002910 -0.002815 +v -0.092561 0.009949 -0.053996 +v -0.087027 0.009949 -0.053996 +v -0.086746 0.010201 -0.054323 +v -0.070631 -0.002170 -0.003775 +v -0.019423 -0.001787 -0.003933 +v -0.003661 0.011324 -0.054788 +v 0.002689 0.011734 -0.054734 +v 0.161794 0.012446 -0.054319 +v 0.162023 0.012117 -0.054570 +v 0.002689 0.012698 -0.053998 +v -0.003093 0.012857 -0.053616 +v -0.002809 0.012911 -0.053207 +v -0.018380 0.000157 -0.001997 +v -0.071795 -0.000002 -0.001616 +v -0.087951 0.012446 -0.052088 +v -0.088191 0.012117 -0.051836 +v -0.088373 0.011734 -0.051678 +v -0.088486 0.011324 -0.051625 +v -0.088521 0.010913 -0.051678 +v -0.088476 0.010530 -0.051836 +v -0.088355 0.010201 -0.052088 +v -0.072289 -0.002751 -0.001616 +v -0.018112 -0.002964 -0.002406 +v -0.017828 -0.002910 -0.001997 +v -0.018417 -0.002910 -0.002815 +v -0.071152 -0.002751 -0.003197 +v -0.070871 -0.002499 -0.003524 +v -0.019240 -0.002170 -0.003775 +v -0.003548 0.010913 -0.054734 +v 0.002689 0.011324 -0.054788 +v 0.162167 0.011734 -0.054728 +v 0.172576 0.012446 0.062749 +v 0.172827 0.012117 0.062726 +v 0.161495 0.012698 -0.053992 +v 0.002689 0.012857 -0.053616 +v 0.160774 0.012911 -0.053201 +v 0.002689 0.012911 -0.053207 +v 0.161148 0.012857 -0.053610 +v -0.002504 0.012857 -0.052798 +v -0.018077 -0.000002 -0.001616 +v -0.072076 -0.000254 -0.001288 +v -0.072316 -0.000583 -0.001037 +v -0.072498 -0.000966 -0.000879 +v -0.072611 -0.001376 -0.000825 +v -0.072646 -0.001787 -0.000879 +v -0.072601 -0.002170 -0.001037 +v -0.072480 -0.002499 -0.001288 +v -0.017582 -0.002751 -0.001616 +v -0.001953 0.009790 -0.052798 +v -0.002237 0.009736 -0.053207 +v -0.002542 0.009790 -0.053616 +v -0.018720 
-0.002751 -0.003197 +v -0.019001 -0.002499 -0.003524 +v -0.003365 0.010530 -0.054576 +v 0.002689 0.010913 -0.054734 +v 0.162216 0.011324 -0.054782 +v 0.172984 0.011734 0.062711 +v 0.169943 0.011734 0.062991 +v 0.169943 0.010913 0.062991 +v 0.169889 0.011324 0.062996 +v 0.170100 0.010530 0.062977 +v 0.170100 0.012117 0.062977 +v 0.170350 0.010201 0.062954 +v 0.170350 0.012446 0.062954 +v 0.170676 0.009949 0.062924 +v 0.170676 0.012698 0.062924 +v 0.171056 0.009790 0.062889 +v 0.171056 0.012857 0.062889 +v 0.171463 0.009736 0.062851 +v 0.171463 0.012911 0.062851 +v 0.171871 0.009790 0.062814 +v 0.171871 0.012857 0.062814 +v 0.172250 0.009949 0.062779 +v 0.172250 0.012698 0.062779 +v 0.172576 0.010201 0.062749 +v 0.172827 0.010530 0.062726 +v 0.172984 0.010913 0.062711 +v 0.173038 0.011324 0.062706 +v 0.160401 0.012857 -0.052792 +v 0.002689 0.012857 -0.052798 +v -0.002202 0.012698 -0.052417 +v -0.017796 -0.000254 -0.001288 +v -0.017556 -0.000583 -0.001037 +v -0.017373 -0.000966 -0.000879 +v -0.017260 -0.001376 -0.000825 +v -0.017225 -0.001787 -0.000879 +v -0.017270 -0.002170 -0.001037 +v -0.017392 -0.002499 -0.001288 +v -0.001707 0.009949 -0.052417 +v 0.160774 0.009736 -0.053201 +v 0.002689 0.009736 -0.053207 +v 0.002689 0.009790 -0.052798 +v 0.160401 0.009790 -0.052792 +v 0.161148 0.009790 -0.053610 +v 0.002689 0.009790 -0.053616 +v -0.002845 0.009949 -0.053998 +v -0.003126 0.010201 -0.054325 +v 0.002689 0.010530 -0.054576 +v 0.162167 0.010913 -0.054728 +v 0.162023 0.010530 -0.054570 +v 0.161794 0.010201 -0.054319 +v 0.161495 0.009949 -0.053992 +v 0.160053 0.009949 -0.052411 +v 0.159755 0.010201 -0.052083 +v 0.159525 0.010530 -0.051832 +v 0.159381 0.010913 -0.051674 +v 0.159332 0.011324 -0.051620 +v 0.159381 0.011734 -0.051674 +v 0.159525 0.012117 -0.051832 +v 0.159755 0.012446 -0.052083 +v 0.160053 0.012698 -0.052411 +v 0.002689 0.012698 -0.052417 +v -0.001921 0.012446 -0.052089 +v -0.001681 0.012117 -0.051838 +v -0.001498 0.011734 -0.051680 +v -0.001385 0.011324 
-0.051626 +v -0.001350 0.010913 -0.051680 +v -0.001395 0.010530 -0.051838 +v -0.001517 0.010201 -0.052089 +v 0.002689 0.009949 -0.052417 +v 0.002689 0.009949 -0.053998 +v 0.002689 0.010201 -0.054325 +v 0.002689 0.010201 -0.052089 +v 0.002689 0.010530 -0.051838 +v 0.002689 0.010913 -0.051680 +v 0.002689 0.011324 -0.051626 +v 0.002689 0.011734 -0.051680 +v 0.002689 0.012117 -0.051838 +v 0.002689 0.012446 -0.052089 +v 0.167884 -0.136153 0.024626 +v 0.167166 -0.138078 0.021382 +v 0.167578 -0.138089 0.021349 +v 0.167471 -0.136143 0.024657 +v 0.166827 -0.138985 0.017758 +v 0.167239 -0.138997 0.017720 +v 0.168283 -0.136096 0.024544 +v 0.167978 -0.138022 0.021284 +v 0.167724 -0.133331 0.027360 +v 0.168137 -0.133339 0.027331 +v 0.166768 -0.137988 0.021383 +v 0.167072 -0.136068 0.024635 +v 0.166480 -0.138791 0.014044 +v 0.166892 -0.138804 0.014003 +v 0.167641 -0.138925 0.017676 +v 0.166432 -0.138889 0.017785 +v 0.168639 -0.135976 0.024416 +v 0.168338 -0.137881 0.021192 +v 0.168534 -0.133295 0.027236 +v 0.167907 -0.129843 0.029318 +v 0.168320 -0.129848 0.029290 +v 0.167323 -0.133274 0.027320 +v 0.166712 -0.135931 0.024561 +v 0.166412 -0.137828 0.021350 +v 0.166150 -0.137513 0.010513 +v 0.166562 -0.137524 0.010469 +v 0.167296 -0.138732 0.013980 +v 0.166087 -0.138697 0.014100 +v 0.168004 -0.138773 0.017627 +v 0.166081 -0.138716 0.017801 +v 0.168929 -0.135802 0.024252 +v 0.168632 -0.137675 0.021080 +v 0.168888 -0.133204 0.027080 +v 0.168716 -0.129820 0.029186 +v 0.168009 -0.125916 0.030411 +v 0.168422 -0.125918 0.030385 +v 0.167504 -0.129806 0.029266 +v 0.166960 -0.133170 0.027214 +v 0.166418 -0.135744 0.024440 +v 0.166123 -0.137606 0.021286 +v 0.165860 -0.135253 0.007415 +v 0.166272 -0.135261 0.007368 +v 0.166967 -0.137460 0.010465 +v 0.165760 -0.137428 0.010595 +v 0.167663 -0.138583 0.013976 +v 0.165741 -0.138527 0.014167 +v 0.168305 -0.138552 0.017578 +v 0.165798 -0.138478 0.017805 +v 0.168842 -0.137420 0.020956 +v 0.169132 -0.135586 0.024061 +v 0.169174 -0.133072 0.026875 +v 
0.169068 -0.129762 0.029012 +v 0.168818 -0.125907 0.030276 +v 0.168024 -0.121809 0.030576 +v 0.168438 -0.121808 0.030549 +v 0.167606 -0.125901 0.030354 +v 0.167140 -0.129740 0.029139 +v 0.166662 -0.133028 0.027050 +v 0.165921 -0.137339 0.021196 +v 0.166209 -0.135517 0.024280 +v 0.166051 -0.132306 0.005003 +v 0.165640 -0.132299 0.005051 +v 0.166679 -0.135209 0.007380 +v 0.165472 -0.135183 0.007518 +v 0.167339 -0.137325 0.010502 +v 0.165418 -0.137275 0.010708 +v 0.167970 -0.138365 0.013991 +v 0.165465 -0.138292 0.014240 +v 0.168522 -0.138277 0.017531 +v 0.165603 -0.138190 0.017795 +v 0.168953 -0.137131 0.020827 +v 0.169236 -0.135341 0.023858 +v 0.169373 -0.132907 0.026635 +v 0.169352 -0.129677 0.028781 +v 0.169169 -0.125883 0.030093 +v 0.168834 -0.121814 0.030440 +v 0.168365 -0.117786 0.029775 +v 0.167952 -0.117789 0.029802 +v 0.167621 -0.121816 0.030517 +v 0.167241 -0.125874 0.030216 +v 0.166839 -0.129649 0.028946 +v 0.166448 -0.132855 0.026838 +v 0.165818 -0.137044 0.021084 +v 0.166099 -0.135268 0.024093 +v 0.166459 -0.132261 0.005021 +v 0.166834 -0.132167 0.005104 +v 0.165252 -0.132239 0.005162 +v 0.167153 -0.132029 0.005246 +v 0.164916 -0.132131 0.005328 +v 0.167392 -0.131858 0.005438 +v 0.164652 -0.131983 0.005538 +v 0.167537 -0.131665 0.005666 +v 0.164480 -0.131804 0.005777 +v 0.167576 -0.131462 0.005915 +v 0.164411 -0.131607 0.006030 +v 0.167507 -0.131265 0.006168 +v 0.164450 -0.131404 0.006279 +v 0.167335 -0.131086 0.006407 +v 0.164594 -0.131211 0.006507 +v 0.167071 -0.130938 0.006617 +v 0.164834 -0.131040 0.006699 +v 0.166734 -0.130830 0.006783 +v 0.165152 -0.130902 0.006841 +v 0.166347 -0.130770 0.006894 +v 0.165528 -0.130808 0.006924 +v 0.165936 -0.130763 0.006942 +v 0.167054 -0.135099 0.007451 +v 0.165134 -0.135057 0.007670 +v 0.167651 -0.137129 0.010577 +v 0.165148 -0.137063 0.010846 +v 0.168195 -0.138094 0.014025 +v 0.165278 -0.138009 0.014315 +v 0.168641 -0.137966 0.017489 +v 0.165508 -0.137873 0.017773 +v 0.168957 -0.136829 0.020703 +v 0.169233 
-0.135085 0.023655 +v 0.169471 -0.132722 0.026375 +v 0.169548 -0.129572 0.028507 +v 0.169451 -0.125849 0.029847 +v 0.169184 -0.121825 0.030255 +v 0.168761 -0.117808 0.029669 +v 0.168209 -0.114113 0.028107 +v 0.167796 -0.114120 0.028135 +v 0.167549 -0.117819 0.029747 +v 0.167256 -0.121829 0.030378 +v 0.166938 -0.125837 0.030008 +v 0.166622 -0.129539 0.028700 +v 0.166332 -0.132666 0.026593 +v 0.165822 -0.136742 0.020960 +v 0.166096 -0.135012 0.023890 +v 0.165356 -0.133624 0.009020 +v 0.165730 -0.133514 0.009090 +v 0.166138 -0.133461 0.009102 +v 0.166549 -0.133470 0.009055 +v 0.166937 -0.133540 0.008953 +v 0.167275 -0.133666 0.008801 +v 0.167540 -0.133839 0.008611 +v 0.167715 -0.134047 0.008395 +v 0.167786 -0.134277 0.008168 +v 0.167749 -0.134513 0.007946 +v 0.167608 -0.134739 0.007743 +v 0.167371 -0.134938 0.007574 +v 0.164869 -0.134884 0.007860 +v 0.164695 -0.134676 0.008076 +v 0.164624 -0.134446 0.008303 +v 0.164660 -0.134210 0.008525 +v 0.164802 -0.133984 0.008727 +v 0.165039 -0.133785 0.008896 +v 0.167883 -0.136885 0.010685 +v 0.164968 -0.136808 0.010999 +v 0.168322 -0.137789 0.014075 +v 0.165192 -0.137697 0.014386 +v 0.168654 -0.137642 0.017457 +v 0.165521 -0.137548 0.017740 +v 0.168854 -0.136534 0.020591 +v 0.169123 -0.134835 0.023468 +v 0.169463 -0.132527 0.026114 +v 0.169643 -0.129453 0.028210 +v 0.169646 -0.125806 0.029556 +v 0.169466 -0.121842 0.030007 +v 0.169113 -0.117854 0.029491 +v 0.168606 -0.114151 0.028008 +v 0.167980 -0.111038 0.025645 +v 0.167567 -0.111047 0.025675 +v 0.167394 -0.114170 0.028090 +v 0.167185 -0.117871 0.029616 +v 0.166953 -0.121847 0.030168 +v 0.166719 -0.125792 0.029743 +v 0.166503 -0.129417 0.028416 +v 0.166324 -0.132471 0.026332 +v 0.165933 -0.136454 0.020831 +v 0.166199 -0.134767 0.023687 +v 0.165599 -0.135524 0.011623 +v 0.165971 -0.135389 0.011660 +v 0.166376 -0.135325 0.011656 +v 0.166788 -0.135336 0.011612 +v 0.167178 -0.135421 0.011530 +v 0.167520 -0.135574 0.011417 +v 0.167790 -0.135786 0.011279 +v 0.167970 -0.136041 
0.011126 +v 0.168048 -0.136322 0.010970 +v 0.168018 -0.136610 0.010819 +v 0.164890 -0.136527 0.011155 +v 0.164920 -0.136239 0.011306 +v 0.165055 -0.135964 0.011440 +v 0.165287 -0.135720 0.011548 +v 0.168344 -0.137469 0.014138 +v 0.165214 -0.137377 0.014449 +v 0.168559 -0.137325 0.017435 +v 0.165640 -0.137238 0.017699 +v 0.168651 -0.136267 0.020501 +v 0.168914 -0.134609 0.023308 +v 0.169347 -0.132337 0.025869 +v 0.169630 -0.129328 0.027909 +v 0.169739 -0.125758 0.029239 +v 0.169660 -0.121863 0.029714 +v 0.169396 -0.117921 0.029253 +v 0.168959 -0.114231 0.027845 +v 0.168377 -0.111090 0.025558 +v 0.167277 -0.108786 0.022577 +v 0.167690 -0.108776 0.022544 +v 0.167166 -0.111117 0.025646 +v 0.167031 -0.114261 0.027976 +v 0.166883 -0.117944 0.029416 +v 0.166734 -0.121869 0.029900 +v 0.166598 -0.125743 0.029440 +v 0.166490 -0.129293 0.028116 +v 0.166422 -0.132285 0.026072 +v 0.166142 -0.136198 0.020707 +v 0.166403 -0.134550 0.023497 +v 0.165872 -0.136582 0.014548 +v 0.166240 -0.136433 0.014544 +v 0.166644 -0.136362 0.014521 +v 0.167056 -0.136374 0.014480 +v 0.167448 -0.136468 0.014424 +v 0.167794 -0.136639 0.014357 +v 0.168071 -0.136874 0.014284 +v 0.168258 -0.137157 0.014209 +v 0.165341 -0.137071 0.014499 +v 0.165566 -0.136800 0.014533 +v 0.168364 -0.137037 0.017425 +v 0.165857 -0.136963 0.017652 +v 0.168619 -0.134421 0.023187 +v 0.168362 -0.136046 0.020437 +v 0.169133 -0.132165 0.025657 +v 0.169511 -0.129207 0.027626 +v 0.169725 -0.125707 0.028918 +v 0.169753 -0.121886 0.029394 +v 0.169591 -0.118005 0.028971 +v 0.169244 -0.114347 0.027629 +v 0.168732 -0.111201 0.025419 +v 0.168089 -0.108840 0.022473 +v 0.166947 -0.107508 0.019046 +v 0.167360 -0.107496 0.019010 +v 0.166879 -0.108872 0.022569 +v 0.166806 -0.111242 0.025559 +v 0.166732 -0.114387 0.027800 +v 0.166665 -0.118032 0.029161 +v 0.166613 -0.121893 0.029594 +v 0.166584 -0.125692 0.029119 +v 0.166585 -0.129174 0.027819 +v 0.166621 -0.132121 0.025832 +v 0.166693 -0.134376 0.023332 +v 0.166437 -0.135993 0.020595 +v 
0.166521 -0.136590 0.017554 +v 0.166158 -0.136742 0.017603 +v 0.166923 -0.136518 0.017510 +v 0.167335 -0.136530 0.017472 +v 0.167730 -0.136626 0.017445 +v 0.168081 -0.136799 0.017429 +v 0.168260 -0.134285 0.023113 +v 0.168007 -0.135885 0.020404 +v 0.168835 -0.132023 0.025493 +v 0.169294 -0.129097 0.027379 +v 0.169604 -0.125657 0.028614 +v 0.169739 -0.121910 0.029070 +v 0.169685 -0.118100 0.028665 +v 0.169442 -0.114492 0.027375 +v 0.169021 -0.111361 0.025239 +v 0.168447 -0.108974 0.022368 +v 0.167761 -0.107567 0.018958 +v 0.166600 -0.107315 0.015333 +v 0.167012 -0.107302 0.015293 +v 0.166551 -0.107603 0.019064 +v 0.166522 -0.109025 0.022521 +v 0.166510 -0.111415 0.025422 +v 0.166517 -0.114538 0.027574 +v 0.166545 -0.118128 0.028869 +v 0.166598 -0.121917 0.029270 +v 0.166677 -0.125644 0.028802 +v 0.166781 -0.129068 0.027545 +v 0.166907 -0.131988 0.025627 +v 0.167049 -0.134257 0.023204 +v 0.166797 -0.135852 0.020503 +v 0.167197 -0.135784 0.020438 +v 0.167609 -0.135796 0.020405 +v 0.167860 -0.134209 0.023091 +v 0.168473 -0.131919 0.025387 +v 0.168993 -0.129005 0.027186 +v 0.169385 -0.125613 0.028350 +v 0.169618 -0.121934 0.028764 +v 0.169672 -0.118199 0.028355 +v 0.169539 -0.114655 0.027100 +v 0.169223 -0.111561 0.025029 +v 0.168740 -0.109170 0.022236 +v 0.168123 -0.107717 0.018894 +v 0.167415 -0.107375 0.015262 +v 0.166262 -0.108222 0.011708 +v 0.166674 -0.108211 0.011665 +v 0.166207 -0.107411 0.015379 +v 0.166199 -0.107773 0.019063 +v 0.166231 -0.109236 0.022435 +v 0.166299 -0.111624 0.025242 +v 0.166400 -0.114704 0.027313 +v 0.166532 -0.118227 0.028559 +v 0.166691 -0.121941 0.028950 +v 0.166872 -0.125601 0.028510 +v 0.167065 -0.128983 0.027313 +v 0.167261 -0.131897 0.025471 +v 0.167447 -0.134200 0.023122 +v 0.168071 -0.131862 0.025347 +v 0.168629 -0.128939 0.027059 +v 0.169082 -0.125576 0.028141 +v 0.169398 -0.121956 0.028496 +v 0.169552 -0.118295 0.028063 +v 0.169529 -0.114826 0.026823 +v 0.169325 -0.111786 0.024804 +v 0.168948 -0.109414 0.022087 +v 0.168421 
-0.107934 0.018822 +v 0.167782 -0.107526 0.015243 +v 0.167078 -0.108278 0.011654 +v 0.165956 -0.110156 0.008433 +v 0.166367 -0.110147 0.008387 +v 0.165871 -0.108311 0.011781 +v 0.165859 -0.107584 0.015428 +v 0.165914 -0.108008 0.019042 +v 0.166026 -0.109491 0.022319 +v 0.166187 -0.111854 0.025032 +v 0.166390 -0.114875 0.027036 +v 0.166626 -0.118322 0.028253 +v 0.166885 -0.121962 0.028656 +v 0.167154 -0.125567 0.028265 +v 0.167417 -0.128925 0.027139 +v 0.167658 -0.131854 0.025376 +v 0.168226 -0.128902 0.027008 +v 0.168717 -0.125549 0.028004 +v 0.169096 -0.121974 0.028286 +v 0.169334 -0.118383 0.027808 +v 0.169412 -0.114993 0.026562 +v 0.169319 -0.112022 0.024579 +v 0.169056 -0.109690 0.021930 +v 0.168636 -0.108205 0.018747 +v 0.168086 -0.107747 0.015236 +v 0.167449 -0.108419 0.011678 +v 0.166774 -0.110203 0.008394 +v 0.166114 -0.112961 0.005682 +v 0.165703 -0.112968 0.005730 +v 0.165567 -0.110232 0.008529 +v 0.165528 -0.108472 0.011879 +v 0.165581 -0.107822 0.015477 +v 0.165716 -0.108291 0.019003 +v 0.165920 -0.109772 0.022179 +v 0.166181 -0.112090 0.024807 +v 0.166487 -0.115038 0.026761 +v 0.166821 -0.118406 0.027971 +v 0.167167 -0.121978 0.028408 +v 0.167505 -0.125543 0.028081 +v 0.167813 -0.128897 0.027035 +v 0.168314 -0.125534 0.027946 +v 0.168730 -0.121987 0.028146 +v 0.169032 -0.118456 0.027608 +v 0.169197 -0.115144 0.026336 +v 0.169207 -0.112252 0.024369 +v 0.169057 -0.109978 0.021777 +v 0.168752 -0.108511 0.018675 +v 0.168308 -0.108023 0.015242 +v 0.167759 -0.108624 0.011733 +v 0.167147 -0.110323 0.008453 +v 0.166522 -0.113004 0.005702 +v 0.165932 -0.116452 0.003723 +v 0.165520 -0.116456 0.003773 +v 0.165316 -0.113026 0.005844 +v 0.165228 -0.110368 0.008668 +v 0.165255 -0.108694 0.011995 +v 0.165391 -0.108109 0.015523 +v 0.165618 -0.108603 0.018949 +v 0.165921 -0.110060 0.022026 +v 0.166283 -0.112315 0.024582 +v 0.166684 -0.115183 0.026507 +v 0.167104 -0.118473 0.027733 +v 0.167518 -0.121990 0.028224 +v 0.167900 -0.125532 0.027972 +v 0.168327 -0.121994 
0.028088 +v 0.168668 -0.118508 0.027476 +v 0.168897 -0.115269 0.026160 +v 0.168996 -0.112461 0.024189 +v 0.168952 -0.110259 0.021637 +v 0.168761 -0.108831 0.018609 +v 0.168433 -0.108333 0.015260 +v 0.167988 -0.108880 0.011816 +v 0.167463 -0.110497 0.008562 +v 0.166899 -0.113095 0.005789 +v 0.166340 -0.116479 0.003752 +v 0.165829 -0.120381 0.002628 +v 0.165418 -0.120383 0.002679 +v 0.165134 -0.116493 0.003898 +v 0.164980 -0.113129 0.006015 +v 0.164961 -0.110556 0.008842 +v 0.165073 -0.108961 0.012122 +v 0.165302 -0.108426 0.015562 +v 0.165628 -0.108923 0.018883 +v 0.166030 -0.110336 0.021869 +v 0.166485 -0.112515 0.024372 +v 0.166969 -0.115299 0.026291 +v 0.167456 -0.118519 0.027555 +v 0.167914 -0.121995 0.028114 +v 0.168265 -0.118538 0.027422 +v 0.168534 -0.115360 0.026046 +v 0.168700 -0.112634 0.024051 +v 0.168747 -0.110514 0.021521 +v 0.168664 -0.109143 0.018555 +v 0.168451 -0.108658 0.015290 +v 0.168121 -0.109169 0.011923 +v 0.167698 -0.110714 0.008711 +v 0.167218 -0.113227 0.005938 +v 0.166718 -0.116537 0.003857 +v 0.166238 -0.120393 0.002662 +v 0.165819 -0.123178 0.002516 +v 0.165408 -0.123194 0.002566 +v 0.165033 -0.120398 0.002810 +v 0.164800 -0.116559 0.004090 +v 0.164717 -0.113272 0.006232 +v 0.164785 -0.110782 0.009038 +v 0.164992 -0.109255 0.012250 +v 0.165320 -0.108751 0.015591 +v 0.165744 -0.109228 0.018810 +v 0.166237 -0.110580 0.021720 +v 0.166773 -0.112675 0.024192 +v 0.167322 -0.115379 0.026129 +v 0.167852 -0.118541 0.027449 +v 0.168132 -0.115410 0.026002 +v 0.168340 -0.112760 0.023965 +v 0.168456 -0.110725 0.021435 +v 0.168466 -0.109426 0.018516 +v 0.168363 -0.108975 0.015329 +v 0.168148 -0.109471 0.012044 +v 0.167838 -0.110959 0.008892 +v 0.167458 -0.113392 0.006137 +v 0.167040 -0.116622 0.004033 +v 0.166617 -0.120416 0.002777 +v 0.166228 -0.123245 0.002548 +v 0.163467 -0.123461 -0.020912 +v 0.163880 -0.123444 -0.020948 +v 0.165022 -0.123292 0.002694 +v 0.164699 -0.120425 0.003013 +v 0.164540 -0.116651 0.004335 +v 0.164546 -0.113444 0.006480 +v 
0.164711 -0.111032 0.009242 +v 0.165019 -0.109557 0.012372 +v 0.165445 -0.109062 0.015610 +v 0.165958 -0.109499 0.018735 +v 0.166530 -0.110776 0.021588 +v 0.167128 -0.112786 0.024053 +v 0.167719 -0.115417 0.026030 +v 0.167939 -0.112829 0.023936 +v 0.168099 -0.110879 0.021387 +v 0.168181 -0.109661 0.018495 +v 0.168172 -0.109262 0.015374 +v 0.168068 -0.109765 0.012172 +v 0.167872 -0.111214 0.009091 +v 0.167603 -0.113578 0.006374 +v 0.167283 -0.116728 0.004265 +v 0.166940 -0.120451 0.002966 +v 0.166606 -0.123390 0.002658 +v 0.164283 -0.123512 -0.020983 +v 0.160519 -0.123461 -0.052921 +v 0.160897 -0.123444 -0.053335 +v 0.163074 -0.123560 -0.020877 +v 0.164688 -0.123466 0.002891 +v 0.164441 -0.120462 0.003274 +v 0.164372 -0.116761 0.004618 +v 0.164478 -0.113634 0.006742 +v 0.164746 -0.111288 0.009441 +v 0.165152 -0.109846 0.012478 +v 0.165667 -0.109337 0.015616 +v 0.166257 -0.109717 0.018664 +v 0.166888 -0.110911 0.021483 +v 0.167526 -0.112838 0.023966 +v 0.167700 -0.110964 0.021379 +v 0.167828 -0.109831 0.018494 +v 0.167894 -0.109501 0.015424 +v 0.167885 -0.110032 0.012299 +v 0.167799 -0.111464 0.009296 +v 0.167643 -0.113772 0.006633 +v 0.167431 -0.116847 0.004540 +v 0.167185 -0.120494 0.003217 +v 0.166928 -0.123604 0.002840 +v 0.164650 -0.123659 -0.021014 +v 0.161266 -0.123512 -0.053740 +v -0.005122 -0.123461 -0.052927 +v -0.005422 -0.123444 -0.053341 +v 0.160159 -0.123560 -0.052526 +v 0.162726 -0.123736 -0.020845 +v 0.164428 -0.123703 0.003144 +v 0.164274 -0.120507 0.003575 +v 0.164307 -0.116882 0.004918 +v 0.164518 -0.113828 0.007000 +v 0.164885 -0.111532 0.009622 +v 0.165381 -0.110102 0.012561 +v 0.165971 -0.109558 0.015609 +v 0.166619 -0.109867 0.018599 +v 0.167287 -0.110975 0.021412 +v 0.167432 -0.109926 0.018511 +v 0.167546 -0.109674 0.015473 +v 0.167613 -0.110254 0.012415 +v 0.167623 -0.111691 0.009491 +v 0.167574 -0.113962 0.006895 +v 0.167475 -0.116971 0.004838 +v 0.167335 -0.120542 0.003511 +v 0.167172 -0.123873 0.003081 +v 0.164955 -0.123876 -0.021039 +v 
0.161601 -0.123659 -0.054108 +v -0.005702 -0.123512 -0.053746 +v -0.021297 -0.136144 -0.002540 +v -0.020997 -0.136161 -0.002126 +v -0.004820 -0.123560 -0.052532 +v 0.159840 -0.123736 -0.052177 +v 0.162448 -0.123975 -0.020820 +v 0.164261 -0.123988 0.003435 +v 0.164212 -0.120557 0.003895 +v 0.164351 -0.117007 0.005216 +v 0.164663 -0.114014 0.007237 +v 0.165121 -0.111749 0.009771 +v 0.165692 -0.110307 0.012617 +v 0.166338 -0.109710 0.015589 +v 0.167020 -0.109938 0.018548 +v 0.167153 -0.109770 0.015519 +v 0.167270 -0.110415 0.012513 +v 0.167356 -0.111878 0.009665 +v 0.167403 -0.114134 0.007143 +v 0.167410 -0.117093 0.005138 +v 0.167381 -0.120593 0.003829 +v 0.167322 -0.124177 0.003365 +v 0.165178 -0.124147 -0.021057 +v 0.161881 -0.123876 -0.054414 +v -0.005940 -0.123659 -0.054114 +v -0.021577 -0.136212 -0.002945 +v -0.071760 -0.136161 -0.002126 +v -0.071459 -0.136144 -0.002540 +v -0.020695 -0.136260 -0.001732 +v -0.004537 -0.123736 -0.052183 +v 0.159586 -0.123975 -0.051898 +v 0.162259 -0.124263 -0.020801 +v 0.164198 -0.124300 0.003745 +v 0.164257 -0.120607 0.004213 +v 0.164499 -0.117126 0.005490 +v 0.164903 -0.114179 0.007436 +v 0.165436 -0.111923 0.009880 +v 0.166062 -0.110448 0.012640 +v 0.166741 -0.109782 0.015559 +v 0.166879 -0.110504 0.012587 +v 0.167017 -0.112015 0.009804 +v 0.167141 -0.114277 0.007360 +v 0.167243 -0.117203 0.005420 +v 0.167318 -0.120642 0.004149 +v 0.167366 -0.124496 0.003673 +v 0.165302 -0.124454 -0.021066 +v 0.162084 -0.124147 -0.054638 +v -0.006121 -0.123876 -0.054420 +v -0.021815 -0.136359 -0.003313 +v -0.071180 -0.136212 -0.002945 +v -0.087635 -0.123461 -0.052926 +v -0.087334 -0.123444 -0.053340 +v -0.072062 -0.136260 -0.001732 +v -0.020412 -0.136436 -0.001382 +v -0.004293 -0.123975 -0.051904 +v 0.159412 -0.124263 -0.051708 +v 0.162170 -0.124579 -0.020792 +v 0.164242 -0.124619 0.004052 +v 0.164408 -0.120656 0.004507 +v 0.164743 -0.117231 0.005723 +v 0.165222 -0.114311 0.007585 +v 0.165810 -0.112043 0.009939 +v 0.166467 -0.110515 0.012630 +v 
0.166628 -0.112090 0.009900 +v 0.166804 -0.114381 0.007531 +v 0.166982 -0.117294 0.005666 +v 0.167152 -0.120687 0.004450 +v 0.167302 -0.124808 0.003982 +v 0.165322 -0.124777 -0.021066 +v 0.162199 -0.124454 -0.054763 +v -0.006233 -0.124147 -0.054644 +v -0.021996 -0.136576 -0.003619 +v -0.070942 -0.136359 -0.003313 +v -0.087055 -0.123512 -0.053744 +v -0.163782 -0.123444 -0.053335 +v -0.163405 -0.123461 -0.052921 +v -0.087937 -0.123560 -0.052531 +v -0.072345 -0.136436 -0.001382 +v -0.020168 -0.136675 -0.001103 +v -0.004104 -0.124263 -0.051713 +v 0.159331 -0.124579 -0.051619 +v 0.162189 -0.124901 -0.020792 +v 0.164392 -0.124923 0.004336 +v 0.164652 -0.120698 0.004758 +v 0.165064 -0.117316 0.005898 +v 0.165598 -0.114402 0.007672 +v 0.166216 -0.112100 0.009947 +v 0.166417 -0.114438 0.007644 +v 0.166648 -0.117361 0.005858 +v 0.166893 -0.120724 0.004711 +v 0.167135 -0.125093 0.004274 +v 0.165233 -0.125092 -0.021057 +v 0.162216 -0.124777 -0.054782 +v -0.006268 -0.124454 -0.054769 +v -0.022108 -0.136847 -0.003843 +v -0.070760 -0.136576 -0.003619 +v -0.086817 -0.123659 -0.054112 +v -0.164151 -0.123512 -0.053740 +v -0.168926 -0.123444 0.002517 +v -0.168516 -0.123461 0.002570 +v -0.163044 -0.123560 -0.052526 +v -0.088220 -0.123736 -0.052182 +v -0.072589 -0.136675 -0.001103 +v -0.019979 -0.136963 -0.000913 +v -0.003983 -0.124579 -0.051625 +v 0.159348 -0.124901 -0.051638 +v 0.162314 -0.125209 -0.020801 +v 0.164636 -0.125191 0.004578 +v 0.164975 -0.120733 0.004947 +v 0.165442 -0.117374 0.006004 +v 0.166006 -0.114445 0.007692 +v 0.166262 -0.117397 0.005983 +v 0.166560 -0.120751 0.004914 +v 0.166876 -0.125330 0.004526 +v 0.165044 -0.125380 -0.021038 +v 0.162135 -0.125092 -0.054693 +v -0.006224 -0.124777 -0.054788 +v -0.022143 -0.137154 -0.003968 +v -0.070649 -0.136847 -0.003843 +v -0.086636 -0.123876 -0.054419 +v -0.164487 -0.123659 -0.054108 +v -0.169335 -0.123512 0.002544 +v -0.168526 -0.120664 0.002682 +v -0.168936 -0.120661 0.002628 +v -0.168131 -0.123560 0.002702 +v -0.162726 
-0.123736 -0.052177 +v -0.088464 -0.123975 -0.051902 +v -0.072778 -0.136963 -0.000913 +v -0.019858 -0.137279 -0.000824 +v -0.003939 -0.124901 -0.051643 +v 0.159462 -0.125209 -0.051763 +v 0.162537 -0.125480 -0.020819 +v 0.164958 -0.125406 0.004760 +v 0.165354 -0.120757 0.005062 +v 0.165851 -0.117402 0.006033 +v 0.166174 -0.120766 0.005045 +v 0.166542 -0.125503 0.004723 +v 0.164766 -0.125620 -0.021013 +v 0.161961 -0.125380 -0.054503 +v -0.006103 -0.125092 -0.054699 +v -0.022099 -0.137477 -0.003987 +v -0.070614 -0.137154 -0.003968 +v -0.086524 -0.124147 -0.054642 +v -0.164766 -0.123876 -0.054414 +v -0.169715 -0.123659 0.002651 +v -0.169346 -0.120671 0.002658 +v -0.168622 -0.116738 0.003776 +v -0.169033 -0.116731 0.003723 +v -0.168141 -0.120679 0.002817 +v -0.167799 -0.123736 0.002901 +v -0.162471 -0.123975 -0.051898 +v -0.088653 -0.124263 -0.051712 +v -0.072899 -0.137279 -0.000824 +v -0.019814 -0.137601 -0.000843 +v -0.003974 -0.125209 -0.051769 +v 0.159666 -0.125480 -0.051986 +v 0.162842 -0.125696 -0.020844 +v 0.165336 -0.125551 0.004870 +v 0.165763 -0.120768 0.005096 +v 0.166156 -0.125602 0.004851 +v 0.164418 -0.125795 -0.020981 +v 0.161707 -0.125620 -0.054223 +v -0.005914 -0.125380 -0.054509 +v -0.021978 -0.137792 -0.003898 +v -0.070658 -0.137477 -0.003987 +v -0.086489 -0.124454 -0.054767 +v -0.164970 -0.124147 -0.054638 +v -0.170039 -0.123876 0.002830 +v -0.169725 -0.120694 0.002770 +v -0.169442 -0.116756 0.003749 +v -0.168796 -0.113250 0.005734 +v -0.169207 -0.113240 0.005683 +v -0.168238 -0.116776 0.003904 +v -0.167810 -0.120707 0.003023 +v -0.167542 -0.123975 0.003156 +v -0.162297 -0.124263 -0.051708 +v -0.088774 -0.124579 -0.051623 +v -0.072943 -0.137601 -0.000843 +v -0.019849 -0.137909 -0.000968 +v -0.004086 -0.125480 -0.051992 +v 0.159945 -0.125696 -0.052293 +v 0.163209 -0.125843 -0.020875 +v 0.165745 -0.125618 0.004901 +v 0.164024 -0.125895 -0.020946 +v 0.161388 -0.125795 -0.053874 +v -0.005670 -0.125620 -0.054229 +v -0.021789 -0.138080 -0.003708 +v 
-0.070779 -0.137792 -0.003898 +v -0.086533 -0.124777 -0.054786 +v -0.165084 -0.124454 -0.054763 +v -0.170285 -0.124147 0.003069 +v -0.170050 -0.120728 0.002957 +v -0.169821 -0.116813 0.003852 +v -0.169615 -0.113280 0.005701 +v -0.169035 -0.110439 0.008437 +v -0.169447 -0.110425 0.008390 +v -0.168410 -0.113311 0.005850 +v -0.167905 -0.116845 0.004099 +v -0.167553 -0.120745 0.003286 +v -0.167377 -0.124263 0.003449 +v -0.162216 -0.124579 -0.051619 +v -0.088818 -0.124901 -0.051642 +v -0.072908 -0.137909 -0.000968 +v -0.019960 -0.138180 -0.001191 +v -0.004267 -0.125696 -0.052298 +v 0.160281 -0.125843 -0.052661 +v 0.163612 -0.125911 -0.020910 +v 0.161028 -0.125895 -0.053479 +v -0.005387 -0.125795 -0.053880 +v -0.021545 -0.138320 -0.003428 +v -0.070968 -0.138080 -0.003708 +v -0.086654 -0.125092 -0.054697 +v -0.165102 -0.124777 -0.054782 +v -0.170437 -0.124454 0.003352 +v -0.170297 -0.120770 0.003205 +v -0.170144 -0.116896 0.004024 +v -0.169993 -0.113367 0.005786 +v -0.169854 -0.110477 0.008395 +v -0.169737 -0.108489 0.011670 +v -0.169326 -0.108505 0.011713 +v -0.168648 -0.110519 0.008535 +v -0.168075 -0.113417 0.006023 +v -0.167647 -0.116937 0.004347 +v -0.167390 -0.120790 0.003588 +v -0.167317 -0.124579 0.003759 +v -0.162233 -0.124901 -0.051638 +v -0.088783 -0.125209 -0.051767 +v -0.072796 -0.138180 -0.001191 +v -0.020142 -0.138396 -0.001498 +v -0.004505 -0.125843 -0.052666 +v 0.160650 -0.125911 -0.053065 +v -0.005085 -0.125895 -0.053485 +v -0.021262 -0.138495 -0.003079 +v -0.071212 -0.138320 -0.003428 +v -0.086843 -0.125380 -0.054507 +v -0.165021 -0.125092 -0.054693 +v -0.170484 -0.124777 0.003659 +v -0.170450 -0.120819 0.003497 +v -0.170390 -0.117000 0.004255 +v -0.170314 -0.113497 0.005932 +v -0.170229 -0.110593 0.008453 +v -0.170143 -0.108551 0.011659 +v -0.170059 -0.107580 0.015300 +v -0.169647 -0.107598 0.015338 +v -0.168936 -0.108600 0.011786 +v -0.168310 -0.110659 0.008675 +v -0.167815 -0.113562 0.006242 +v -0.167482 -0.117049 0.004630 +v -0.167330 -0.120840 
0.003909 +v -0.167364 -0.124901 0.004066 +v -0.162348 -0.125209 -0.051763 +v -0.088671 -0.125480 -0.051991 +v -0.072615 -0.138396 -0.001498 +v -0.020380 -0.138543 -0.001866 +v -0.004784 -0.125911 -0.053071 +v -0.020960 -0.138595 -0.002684 +v -0.071495 -0.138495 -0.003079 +v -0.087087 -0.125620 -0.054227 +v -0.164847 -0.125380 -0.054503 +v -0.170424 -0.125092 0.003969 +v -0.170498 -0.120869 0.003815 +v -0.170541 -0.117118 0.004528 +v -0.170556 -0.113660 0.006130 +v -0.170546 -0.110764 0.008560 +v -0.170515 -0.108687 0.011682 +v -0.170463 -0.107647 0.015271 +v -0.170388 -0.107774 0.019019 +v -0.169976 -0.107792 0.019052 +v -0.169254 -0.107700 0.015383 +v -0.168595 -0.108765 0.011884 +v -0.168046 -0.110850 0.008850 +v -0.167647 -0.113736 0.006490 +v -0.167421 -0.117171 0.004931 +v -0.167378 -0.120890 0.004227 +v -0.167516 -0.125209 0.004348 +v -0.162552 -0.125480 -0.051986 +v -0.088490 -0.125696 -0.052297 +v -0.072377 -0.138543 -0.001866 +v -0.020659 -0.138611 -0.002270 +v -0.071797 -0.138595 -0.002684 +v -0.087370 -0.125795 -0.053878 +v -0.164592 -0.125620 -0.054223 +v -0.170259 -0.125380 0.004261 +v -0.170439 -0.120919 0.004136 +v -0.170588 -0.117243 0.004826 +v -0.170704 -0.113845 0.006366 +v -0.170785 -0.110978 0.008709 +v -0.170828 -0.108889 0.011737 +v -0.170831 -0.107794 0.015252 +v -0.170791 -0.107840 0.018970 +v -0.170702 -0.109054 0.022556 +v -0.170289 -0.109070 0.022584 +v -0.169581 -0.107891 0.019067 +v -0.168909 -0.107877 0.015431 +v -0.168325 -0.108990 0.012000 +v -0.167873 -0.111078 0.009046 +v -0.167582 -0.113927 0.006753 +v -0.167467 -0.117295 0.005228 +v -0.167531 -0.120939 0.004520 +v -0.167762 -0.125480 0.004587 +v -0.162831 -0.125696 -0.052293 +v -0.088252 -0.125843 -0.052665 +v -0.072098 -0.138611 -0.002270 +v -0.087672 -0.125895 -0.053483 +v -0.164274 -0.125795 -0.053874 +v -0.170002 -0.125620 0.004516 +v -0.170275 -0.120964 0.004438 +v -0.170526 -0.117365 0.005126 +v -0.170747 -0.114039 0.006624 +v -0.170928 -0.111222 0.008889 +v -0.171060 
-0.109142 0.011820 +v -0.171139 -0.108011 0.015246 +v -0.171155 -0.107984 0.018909 +v -0.171102 -0.109113 0.022489 +v -0.170976 -0.111317 0.025658 +v -0.170563 -0.111330 0.025682 +v -0.169891 -0.109160 0.022571 +v -0.169231 -0.108067 0.019063 +v -0.168633 -0.108120 0.015478 +v -0.168146 -0.109259 0.012127 +v -0.167803 -0.111329 0.009250 +v -0.167625 -0.114121 0.007011 +v -0.167618 -0.117413 0.005502 +v -0.167778 -0.120981 0.004768 +v -0.168086 -0.125696 0.004766 +v -0.163167 -0.125843 -0.052661 +v -0.087972 -0.125911 -0.053069 +v -0.163913 -0.125895 -0.053479 +v -0.169670 -0.125795 0.004716 +v -0.170019 -0.121002 0.004701 +v -0.170361 -0.117476 0.005410 +v -0.170682 -0.114230 0.006887 +v -0.170965 -0.111477 0.009088 +v -0.171197 -0.109429 0.011926 +v -0.171364 -0.108283 0.015252 +v -0.171457 -0.108198 0.018839 +v -0.171463 -0.109243 0.022387 +v -0.171376 -0.111365 0.025576 +v -0.170781 -0.114402 0.028142 +v -0.171195 -0.114392 0.028121 +v -0.170164 -0.111403 0.025647 +v -0.169537 -0.109317 0.022519 +v -0.168949 -0.108305 0.019039 +v -0.168447 -0.108410 0.015523 +v -0.168069 -0.109555 0.012255 +v -0.167841 -0.111585 0.009449 +v -0.167773 -0.114306 0.007247 +v -0.167864 -0.117518 0.005733 +v -0.168103 -0.121015 0.004954 +v -0.168466 -0.125843 0.004873 +v -0.163536 -0.125911 -0.053065 +v -0.169286 -0.125895 0.004847 +v -0.169687 -0.121029 0.004907 +v -0.170103 -0.117569 0.005658 +v -0.170514 -0.114404 0.007135 +v -0.170895 -0.111728 0.009292 +v -0.171227 -0.109730 0.012047 +v -0.171493 -0.108592 0.015271 +v -0.171676 -0.108466 0.018765 +v -0.171760 -0.109435 0.022259 +v -0.171734 -0.111472 0.025443 +v -0.171593 -0.114427 0.028029 +v -0.170929 -0.118070 0.029810 +v -0.171343 -0.118065 0.029790 +v -0.170380 -0.114455 0.028091 +v -0.169806 -0.111532 0.025556 +v -0.169249 -0.109532 0.022430 +v -0.168755 -0.108591 0.018999 +v -0.168362 -0.108728 0.015561 +v -0.168100 -0.109857 0.012376 +v -0.167984 -0.111828 0.009629 +v -0.168016 -0.114469 0.007445 +v -0.168187 -0.117601 
0.005905 +v -0.168483 -0.121038 0.005066 +v -0.168875 -0.125911 0.004901 +v -0.169303 -0.121045 0.005042 +v -0.169771 -0.117637 0.005852 +v -0.170254 -0.114548 0.007354 +v -0.170722 -0.111956 0.009488 +v -0.171151 -0.110026 0.012175 +v -0.171516 -0.108916 0.015300 +v -0.171796 -0.108770 0.018693 +v -0.171972 -0.109677 0.022112 +v -0.172026 -0.111629 0.025267 +v -0.171949 -0.114504 0.027872 +v -0.171740 -0.118085 0.029691 +v -0.170997 -0.122089 0.030584 +v -0.171411 -0.122088 0.030565 +v -0.170527 -0.118101 0.029748 +v -0.170020 -0.114548 0.027971 +v -0.169514 -0.111708 0.025413 +v -0.169048 -0.109789 0.022311 +v -0.168662 -0.108904 0.018944 +v -0.168385 -0.109052 0.015591 +v -0.168237 -0.110144 0.012481 +v -0.168222 -0.112042 0.009778 +v -0.168337 -0.114599 0.007591 +v -0.168566 -0.117657 0.006008 +v -0.168892 -0.121048 0.005096 +v -0.169386 -0.117676 0.005980 +v -0.169919 -0.114655 0.007527 +v -0.170458 -0.112147 0.009662 +v -0.170972 -0.110296 0.012301 +v -0.171431 -0.109234 0.015339 +v -0.171810 -0.109089 0.018627 +v -0.172085 -0.109951 0.021956 +v -0.172233 -0.111826 0.025059 +v -0.172238 -0.114618 0.027660 +v -0.172095 -0.118130 0.029519 +v -0.171809 -0.122093 0.030463 +v -0.171397 -0.126198 0.030400 +v -0.170983 -0.126196 0.030419 +v -0.170595 -0.122097 0.030518 +v -0.170165 -0.118155 0.029610 +v -0.169724 -0.114676 0.027790 +v -0.169307 -0.111919 0.025230 +v -0.168948 -0.110071 0.022170 +v -0.168676 -0.109224 0.018878 +v -0.168514 -0.109361 0.015609 +v -0.168469 -0.110397 0.012565 +v -0.168540 -0.112213 0.009884 +v -0.168714 -0.114686 0.007676 +v -0.168975 -0.117683 0.006034 +v -0.169533 -0.114716 0.007643 +v -0.170121 -0.112287 0.009803 +v -0.170702 -0.110521 0.012418 +v -0.171245 -0.109524 0.015383 +v -0.171717 -0.109403 0.018572 +v -0.172092 -0.110239 0.021802 +v -0.172340 -0.112051 0.024835 +v -0.172441 -0.114761 0.027409 +v -0.172382 -0.118196 0.029286 +v -0.172163 -0.122104 0.030285 +v -0.171794 -0.126188 0.030299 +v -0.171300 -0.130129 0.029305 +v 
-0.170886 -0.130122 0.029325 +v -0.170581 -0.126180 0.030354 +v -0.170233 -0.122110 0.030372 +v -0.169867 -0.118229 0.029405 +v -0.169513 -0.114828 0.027560 +v -0.169200 -0.112150 0.025019 +v -0.168954 -0.110359 0.022016 +v -0.168797 -0.109528 0.018805 +v -0.168739 -0.109633 0.015616 +v -0.168782 -0.110598 0.012620 +v -0.168915 -0.112329 0.009943 +v -0.169122 -0.114726 0.007693 +v -0.169733 -0.112367 0.009901 +v -0.170361 -0.110686 0.012516 +v -0.170969 -0.109767 0.015431 +v -0.171523 -0.109688 0.018531 +v -0.171991 -0.110521 0.021661 +v -0.172340 -0.112286 0.024610 +v -0.172544 -0.114924 0.027136 +v -0.172583 -0.118279 0.029008 +v -0.172449 -0.122120 0.030042 +v -0.172148 -0.126165 0.030122 +v -0.171698 -0.130103 0.029208 +v -0.171126 -0.133620 0.027345 +v -0.170712 -0.133609 0.027367 +v -0.170484 -0.130083 0.029267 +v -0.170218 -0.126152 0.030210 +v -0.169934 -0.122129 0.030156 +v -0.169654 -0.118318 0.029146 +v -0.169402 -0.114995 0.027298 +v -0.169200 -0.112385 0.024794 +v -0.169067 -0.110633 0.021860 +v -0.169015 -0.109796 0.018732 +v -0.169047 -0.109850 0.015609 +v -0.169154 -0.110735 0.012643 +v -0.169322 -0.112381 0.009948 +v -0.169971 -0.110780 0.012589 +v -0.170624 -0.109944 0.015479 +v -0.171242 -0.109927 0.018508 +v -0.171790 -0.110778 0.021542 +v -0.172234 -0.112517 0.024399 +v -0.172540 -0.115094 0.026859 +v -0.172683 -0.118373 0.028704 +v -0.172649 -0.122141 0.029752 +v -0.172435 -0.126131 0.029882 +v -0.172053 -0.130047 0.029041 +v -0.171525 -0.133580 0.027256 +v -0.170886 -0.136434 0.024639 +v -0.170473 -0.136420 0.024664 +v -0.170312 -0.133548 0.027321 +v -0.170123 -0.130015 0.029134 +v -0.169920 -0.126114 0.029996 +v -0.169719 -0.122150 0.029885 +v -0.169540 -0.118415 0.028852 +v -0.169398 -0.115166 0.027021 +v -0.169307 -0.112609 0.024570 +v -0.169280 -0.110875 0.021713 +v -0.169317 -0.110009 0.018662 +v -0.169415 -0.109997 0.015591 +v -0.169560 -0.110797 0.012632 +v -0.170231 -0.110046 0.015523 +v -0.170892 -0.110102 0.018504 +v -0.171503 
-0.110993 0.021453 +v -0.172027 -0.112728 0.024216 +v -0.172428 -0.115262 0.026596 +v -0.172676 -0.118472 0.028394 +v -0.172748 -0.122164 0.029434 +v -0.172635 -0.126089 0.029594 +v -0.172341 -0.129964 0.028814 +v -0.171881 -0.133492 0.027106 +v -0.171286 -0.136382 0.024562 +v -0.170183 -0.138354 0.021389 +v -0.170596 -0.138371 0.021359 +v -0.170074 -0.136340 0.024637 +v -0.169952 -0.133442 0.027210 +v -0.169826 -0.129922 0.028935 +v -0.169705 -0.126069 0.029727 +v -0.169604 -0.122174 0.029576 +v -0.169532 -0.118514 0.028542 +v -0.169500 -0.115328 0.026747 +v -0.169514 -0.112807 0.024363 +v -0.169576 -0.111067 0.021585 +v -0.169682 -0.110154 0.018600 +v -0.169819 -0.110064 0.015562 +v -0.170497 -0.110202 0.018519 +v -0.171148 -0.111150 0.021401 +v -0.171735 -0.112904 0.024074 +v -0.172218 -0.115414 0.026367 +v -0.172561 -0.118569 0.028099 +v -0.172739 -0.122189 0.029110 +v -0.172734 -0.126041 0.029279 +v -0.172542 -0.129859 0.028543 +v -0.172172 -0.133362 0.026906 +v -0.171645 -0.136266 0.024439 +v -0.170997 -0.138309 0.021298 +v -0.169862 -0.139261 0.017764 +v -0.170274 -0.139279 0.017729 +v -0.169786 -0.138260 0.021385 +v -0.169717 -0.136200 0.024557 +v -0.169658 -0.133297 0.027040 +v -0.169613 -0.129811 0.028685 +v -0.169590 -0.126020 0.029422 +v -0.169595 -0.122199 0.029252 +v -0.169632 -0.118608 0.028238 +v -0.169703 -0.115471 0.026496 +v -0.169807 -0.112964 0.024186 +v -0.169937 -0.111197 0.021484 +v -0.170084 -0.110219 0.018551 +v -0.170751 -0.111240 0.021388 +v -0.171377 -0.113033 0.023982 +v -0.171922 -0.115541 0.026186 +v -0.172348 -0.118657 0.027841 +v -0.172624 -0.122213 0.028801 +v -0.172726 -0.125990 0.028957 +v -0.172643 -0.129741 0.028248 +v -0.172376 -0.133199 0.026668 +v -0.171939 -0.136095 0.024278 +v -0.171359 -0.138172 0.021210 +v -0.170677 -0.139212 0.017687 +v -0.169533 -0.139068 0.014049 +v -0.169945 -0.139085 0.014009 +v -0.169468 -0.139160 0.017789 +v -0.169433 -0.138094 0.021349 +v -0.169427 -0.136010 0.024432 +v -0.169448 -0.133123 
0.026825 +v -0.169500 -0.129689 0.028400 +v -0.169582 -0.125969 0.029101 +v -0.169694 -0.122222 0.028934 +v -0.169833 -0.118691 0.027959 +v -0.169993 -0.115585 0.026285 +v -0.170165 -0.113071 0.024053 +v -0.170338 -0.111256 0.021417 +v -0.170978 -0.113106 0.023947 +v -0.171561 -0.115635 0.026065 +v -0.172050 -0.118731 0.027635 +v -0.172409 -0.122234 0.028530 +v -0.172610 -0.125941 0.028652 +v -0.172636 -0.129616 0.027947 +v -0.172480 -0.133014 0.026410 +v -0.172147 -0.135881 0.024090 +v -0.171657 -0.137971 0.021101 +v -0.171043 -0.139066 0.017640 +v -0.170349 -0.139020 0.013987 +v -0.169220 -0.137790 0.010518 +v -0.169631 -0.137806 0.010473 +v -0.169141 -0.138968 0.014104 +v -0.169119 -0.138982 0.017802 +v -0.169148 -0.137869 0.021282 +v -0.169222 -0.135781 0.024270 +v -0.169338 -0.132933 0.026578 +v -0.169493 -0.129564 0.028099 +v -0.169681 -0.125921 0.028786 +v -0.169894 -0.122243 0.028644 +v -0.170120 -0.118757 0.027726 +v -0.170349 -0.115663 0.026128 +v -0.170564 -0.113119 0.023971 +v -0.171161 -0.115688 0.026014 +v -0.171688 -0.118786 0.027497 +v -0.172110 -0.122253 0.028314 +v -0.172396 -0.125895 0.028383 +v -0.172523 -0.129495 0.027662 +v -0.172477 -0.132820 0.026148 +v -0.172256 -0.135638 0.023887 +v -0.171872 -0.137718 0.020978 +v -0.171346 -0.138849 0.017592 +v -0.170719 -0.138875 0.013983 +v -0.170037 -0.137746 0.010469 +v -0.168945 -0.135530 0.007419 +v -0.169356 -0.135543 0.007371 +v -0.168831 -0.137700 0.010600 +v -0.168797 -0.138793 0.014170 +v -0.168839 -0.138740 0.017804 +v -0.168949 -0.137600 0.021189 +v -0.169117 -0.135530 0.024081 +v -0.169335 -0.132738 0.026317 +v -0.169594 -0.129446 0.027803 +v -0.169881 -0.125878 0.028498 +v -0.170180 -0.122259 0.028401 +v -0.170475 -0.118802 0.027555 +v -0.170747 -0.115698 0.026036 +v -0.171287 -0.118816 0.027436 +v -0.171748 -0.122266 0.028168 +v -0.172098 -0.125858 0.028169 +v -0.172310 -0.129383 0.027411 +v -0.172367 -0.132629 0.025902 +v -0.172258 -0.135382 0.023685 +v -0.171987 -0.137431 0.020850 +v 
-0.171568 -0.138577 0.017546 +v -0.171028 -0.138662 0.013999 +v -0.170411 -0.137616 0.010505 +v -0.169764 -0.135494 0.007381 +v -0.169147 -0.132587 0.005004 +v -0.168736 -0.132576 0.005054 +v -0.168558 -0.135456 0.007524 +v -0.168491 -0.137542 0.010714 +v -0.168524 -0.138554 0.014243 +v -0.168648 -0.138450 0.017793 +v -0.168851 -0.137304 0.021076 +v -0.169119 -0.135275 0.023878 +v -0.169439 -0.132553 0.026059 +v -0.169795 -0.129342 0.027533 +v -0.170167 -0.125845 0.028257 +v -0.170534 -0.122270 0.028223 +v -0.170873 -0.118822 0.027456 +v -0.171346 -0.122274 0.028102 +v -0.171735 -0.125830 0.028025 +v -0.172013 -0.129291 0.027213 +v -0.172157 -0.132456 0.025686 +v -0.172154 -0.135131 0.023496 +v -0.171996 -0.137129 0.020726 +v -0.171691 -0.138268 0.017505 +v -0.171256 -0.138394 0.014033 +v -0.170725 -0.137424 0.010579 +v -0.170140 -0.135388 0.007449 +v -0.169555 -0.132546 0.005020 +v -0.169932 -0.132454 0.005101 +v -0.168349 -0.132513 0.005167 +v -0.170252 -0.132319 0.005241 +v -0.168015 -0.132402 0.005335 +v -0.170494 -0.132150 0.005431 +v -0.167754 -0.132252 0.005547 +v -0.170641 -0.131958 0.005658 +v -0.167585 -0.132071 0.005787 +v -0.170683 -0.131756 0.005907 +v -0.167519 -0.131873 0.006040 +v -0.170618 -0.131558 0.006160 +v -0.167561 -0.131671 0.006289 +v -0.170449 -0.131377 0.006400 +v -0.167708 -0.131479 0.006516 +v -0.170188 -0.131226 0.006612 +v -0.167950 -0.131309 0.006706 +v -0.169853 -0.131116 0.006780 +v -0.168271 -0.131174 0.006847 +v -0.169467 -0.131052 0.006893 +v -0.168648 -0.131083 0.006927 +v -0.169056 -0.131041 0.006943 +v -0.168222 -0.135327 0.007677 +v -0.168224 -0.137327 0.010852 +v -0.168340 -0.138269 0.014317 +v -0.168558 -0.138132 0.017770 +v -0.168860 -0.137002 0.020952 +v -0.169228 -0.135031 0.023676 +v -0.169643 -0.132391 0.025821 +v -0.170083 -0.129258 0.027306 +v -0.170522 -0.125822 0.028080 +v -0.170932 -0.122275 0.028121 +v -0.171333 -0.125814 0.027960 +v -0.171651 -0.129222 0.027080 +v -0.171863 -0.132311 0.025517 +v -0.171949 
-0.134903 0.023333 +v -0.171898 -0.136833 0.020613 +v -0.171708 -0.137943 0.017472 +v -0.171388 -0.138089 0.014083 +v -0.170960 -0.137183 0.010687 +v -0.170459 -0.135230 0.007572 +v -0.168464 -0.133895 0.009025 +v -0.168840 -0.133788 0.009094 +v -0.168145 -0.134052 0.008903 +v -0.167905 -0.134250 0.008735 +v -0.167760 -0.134474 0.008534 +v -0.167720 -0.134710 0.008312 +v -0.167788 -0.134941 0.008085 +v -0.167959 -0.135151 0.007869 +v -0.170699 -0.135033 0.007739 +v -0.170844 -0.134809 0.007941 +v -0.170883 -0.134573 0.008162 +v -0.170815 -0.134342 0.008389 +v -0.170644 -0.134131 0.008606 +v -0.170381 -0.133956 0.008797 +v -0.170045 -0.133827 0.008950 +v -0.169658 -0.133753 0.009055 +v -0.169247 -0.133740 0.009104 +v -0.168047 -0.137070 0.011005 +v -0.168258 -0.137955 0.014387 +v -0.168575 -0.137807 0.017737 +v -0.168976 -0.136715 0.020824 +v -0.169437 -0.134817 0.023488 +v -0.169934 -0.132261 0.025620 +v -0.170438 -0.129202 0.027139 +v -0.170919 -0.125811 0.027979 +v -0.171250 -0.129184 0.027021 +v -0.171503 -0.132204 0.025405 +v -0.171658 -0.134712 0.023208 +v -0.171700 -0.136564 0.020520 +v -0.171618 -0.137625 0.017450 +v -0.171413 -0.137770 0.014145 +v -0.171099 -0.136909 0.010820 +v -0.168694 -0.135792 0.011627 +v -0.169067 -0.135662 0.011663 +v -0.168379 -0.135985 0.011552 +v -0.168145 -0.136226 0.011445 +v -0.168006 -0.136500 0.011311 +v -0.167973 -0.136788 0.011161 +v -0.171132 -0.136621 0.010970 +v -0.171058 -0.136339 0.011127 +v -0.170881 -0.136081 0.011279 +v -0.170614 -0.135867 0.011417 +v -0.170274 -0.135709 0.011531 +v -0.169885 -0.135619 0.011613 +v -0.169473 -0.135603 0.011658 +v -0.168284 -0.137636 0.014450 +v -0.168698 -0.137498 0.017696 +v -0.169190 -0.136462 0.020701 +v -0.169731 -0.134646 0.023327 +v -0.170290 -0.132173 0.025471 +v -0.170836 -0.129177 0.027041 +v -0.171103 -0.132144 0.025359 +v -0.171301 -0.134572 0.023129 +v -0.171414 -0.136339 0.020453 +v -0.171427 -0.137335 0.017438 +v -0.171331 -0.137457 0.014216 +v -0.168953 -0.136850 
0.014549 +v -0.169323 -0.136706 0.014546 +v -0.168644 -0.137064 0.014534 +v -0.168415 -0.137332 0.014500 +v -0.171148 -0.137171 0.014290 +v -0.170874 -0.136932 0.014363 +v -0.170530 -0.136757 0.014429 +v -0.170139 -0.136658 0.014483 +v -0.169727 -0.136640 0.014523 +v -0.168920 -0.137226 0.017650 +v -0.169488 -0.136261 0.020592 +v -0.170090 -0.134531 0.023204 +v -0.170689 -0.132133 0.025381 +v -0.170903 -0.134492 0.023101 +v -0.171061 -0.136173 0.020417 +v -0.171147 -0.137093 0.017440 +v -0.169589 -0.136862 0.017555 +v -0.169223 -0.137009 0.017602 +v -0.170798 -0.136915 0.017453 +v -0.170405 -0.136814 0.017478 +v -0.169992 -0.136796 0.017513 +v -0.169850 -0.136124 0.020504 +v -0.170489 -0.134478 0.023127 +v -0.170664 -0.136079 0.020413 +v -0.170252 -0.136063 0.020443 +v 0.167884 -0.055621 0.024626 +v 0.167166 -0.057546 0.021382 +v 0.167578 -0.057557 0.021349 +v 0.167471 -0.055612 0.024657 +v 0.166827 -0.058453 0.017758 +v 0.167239 -0.058466 0.017720 +v 0.168283 -0.055565 0.024544 +v 0.167978 -0.057490 0.021284 +v 0.167724 -0.052800 0.027360 +v 0.168137 -0.052807 0.027331 +v 0.166768 -0.057457 0.021383 +v 0.167072 -0.055536 0.024635 +v 0.166480 -0.058260 0.014044 +v 0.166892 -0.058272 0.014003 +v 0.167641 -0.058393 0.017676 +v 0.166432 -0.058357 0.017785 +v 0.168639 -0.055445 0.024416 +v 0.168338 -0.057349 0.021192 +v 0.168534 -0.052764 0.027236 +v 0.167907 -0.049312 0.029318 +v 0.168320 -0.049316 0.029290 +v 0.167323 -0.052742 0.027320 +v 0.166712 -0.055400 0.024561 +v 0.166412 -0.057296 0.021350 +v 0.166150 -0.056982 0.010513 +v 0.166562 -0.056992 0.010469 +v 0.167296 -0.058201 0.013980 +v 0.166087 -0.058165 0.014100 +v 0.168004 -0.058242 0.017627 +v 0.166081 -0.058184 0.017801 +v 0.168929 -0.055271 0.024252 +v 0.168632 -0.057144 0.021080 +v 0.168888 -0.052673 0.027080 +v 0.168716 -0.049289 0.029186 +v 0.168009 -0.045385 0.030411 +v 0.168422 -0.045387 0.030385 +v 0.167504 -0.049275 0.029266 +v 0.166960 -0.052639 0.027214 +v 0.166418 -0.055212 0.024440 +v 0.166123 
-0.057074 0.021286 +v 0.165860 -0.054721 0.007415 +v 0.166272 -0.054730 0.007368 +v 0.166967 -0.056928 0.010465 +v 0.165760 -0.056896 0.010595 +v 0.167663 -0.058051 0.013976 +v 0.165741 -0.057995 0.014167 +v 0.168305 -0.058021 0.017578 +v 0.165798 -0.057946 0.017805 +v 0.168842 -0.056888 0.020956 +v 0.169132 -0.055054 0.024061 +v 0.169174 -0.052541 0.026875 +v 0.169068 -0.049231 0.029012 +v 0.168818 -0.045375 0.030276 +v 0.168024 -0.041278 0.030576 +v 0.168438 -0.041277 0.030549 +v 0.167606 -0.045370 0.030354 +v 0.167140 -0.049209 0.029139 +v 0.166662 -0.052496 0.027050 +v 0.165921 -0.056807 0.021196 +v 0.166209 -0.054986 0.024280 +v 0.166051 -0.051775 0.005003 +v 0.165640 -0.051767 0.005051 +v 0.166679 -0.054678 0.007380 +v 0.165472 -0.054651 0.007518 +v 0.167339 -0.056794 0.010502 +v 0.165418 -0.056743 0.010708 +v 0.167970 -0.057834 0.013991 +v 0.165465 -0.057760 0.014240 +v 0.168522 -0.057745 0.017531 +v 0.165603 -0.057659 0.017795 +v 0.168953 -0.056599 0.020827 +v 0.169236 -0.054809 0.023858 +v 0.169373 -0.052376 0.026635 +v 0.169352 -0.049146 0.028781 +v 0.169169 -0.045352 0.030093 +v 0.168834 -0.041282 0.030440 +v 0.168365 -0.037254 0.029775 +v 0.167952 -0.037258 0.029802 +v 0.167621 -0.041285 0.030517 +v 0.167241 -0.045343 0.030216 +v 0.166839 -0.049117 0.028946 +v 0.166448 -0.052324 0.026838 +v 0.165818 -0.056513 0.021084 +v 0.166099 -0.054736 0.024093 +v 0.166459 -0.051730 0.005021 +v 0.166834 -0.051635 0.005104 +v 0.165252 -0.051707 0.005162 +v 0.167153 -0.051498 0.005246 +v 0.164916 -0.051600 0.005328 +v 0.167392 -0.051326 0.005438 +v 0.164652 -0.051451 0.005538 +v 0.167537 -0.051133 0.005666 +v 0.164480 -0.051272 0.005777 +v 0.167576 -0.050931 0.005915 +v 0.164411 -0.051075 0.006030 +v 0.167507 -0.050733 0.006168 +v 0.164450 -0.050873 0.006279 +v 0.167335 -0.050555 0.006407 +v 0.164594 -0.050679 0.006507 +v 0.167071 -0.050406 0.006617 +v 0.164834 -0.050508 0.006699 +v 0.166734 -0.050298 0.006783 +v 0.165152 -0.050371 0.006841 +v 0.166347 -0.050239 
0.006894 +v 0.165528 -0.050276 0.006924 +v 0.165936 -0.050231 0.006942 +v 0.167054 -0.054567 0.007451 +v 0.165134 -0.054526 0.007670 +v 0.167651 -0.056598 0.010577 +v 0.165148 -0.056532 0.010846 +v 0.168195 -0.057563 0.014025 +v 0.165278 -0.057477 0.014315 +v 0.168641 -0.057435 0.017489 +v 0.165508 -0.057342 0.017773 +v 0.168957 -0.056297 0.020703 +v 0.169233 -0.054554 0.023655 +v 0.169471 -0.052190 0.026375 +v 0.169548 -0.049040 0.028507 +v 0.169451 -0.045317 0.029847 +v 0.169184 -0.041294 0.030255 +v 0.168761 -0.037276 0.029669 +v 0.168209 -0.033582 0.028107 +v 0.167796 -0.033588 0.028135 +v 0.167549 -0.037287 0.029747 +v 0.167256 -0.041298 0.030378 +v 0.166938 -0.045306 0.030008 +v 0.166622 -0.049007 0.028700 +v 0.166332 -0.052134 0.026593 +v 0.165822 -0.056211 0.020960 +v 0.166096 -0.054480 0.023890 +v 0.165356 -0.053093 0.009020 +v 0.165730 -0.052982 0.009090 +v 0.166138 -0.052930 0.009102 +v 0.166549 -0.052939 0.009055 +v 0.166937 -0.053008 0.008953 +v 0.167275 -0.053134 0.008801 +v 0.167540 -0.053307 0.008611 +v 0.167715 -0.053516 0.008395 +v 0.167786 -0.053746 0.008168 +v 0.167749 -0.053982 0.007946 +v 0.167608 -0.054207 0.007743 +v 0.167371 -0.054407 0.007574 +v 0.164869 -0.054353 0.007860 +v 0.164695 -0.054144 0.008076 +v 0.164624 -0.053914 0.008303 +v 0.164660 -0.053678 0.008525 +v 0.164802 -0.053453 0.008727 +v 0.165039 -0.053253 0.008896 +v 0.167883 -0.056354 0.010685 +v 0.164968 -0.056277 0.010999 +v 0.168322 -0.057257 0.014075 +v 0.165192 -0.057165 0.014386 +v 0.168654 -0.057110 0.017457 +v 0.165521 -0.057017 0.017740 +v 0.168854 -0.056003 0.020591 +v 0.169123 -0.054304 0.023468 +v 0.169463 -0.051996 0.026114 +v 0.169643 -0.048921 0.028210 +v 0.169646 -0.045274 0.029556 +v 0.169466 -0.041310 0.030007 +v 0.169113 -0.037322 0.029491 +v 0.168606 -0.033620 0.028008 +v 0.167980 -0.030507 0.025645 +v 0.167567 -0.030515 0.025675 +v 0.167394 -0.033639 0.028090 +v 0.167185 -0.037340 0.029616 +v 0.166953 -0.041316 0.030168 +v 0.166719 -0.045261 0.029743 +v 
0.166503 -0.048886 0.028416 +v 0.166324 -0.051940 0.026332 +v 0.165933 -0.055922 0.020831 +v 0.166199 -0.054235 0.023687 +v 0.165599 -0.054992 0.011623 +v 0.165971 -0.054857 0.011660 +v 0.166376 -0.054793 0.011656 +v 0.166788 -0.054804 0.011612 +v 0.167178 -0.054889 0.011530 +v 0.167520 -0.055043 0.011417 +v 0.167790 -0.055254 0.011279 +v 0.167970 -0.055509 0.011126 +v 0.168048 -0.055790 0.010970 +v 0.168018 -0.056078 0.010819 +v 0.164890 -0.055996 0.011155 +v 0.164920 -0.055707 0.011306 +v 0.165055 -0.055432 0.011440 +v 0.165287 -0.055188 0.011548 +v 0.168344 -0.056937 0.014138 +v 0.165214 -0.056845 0.014449 +v 0.168559 -0.056793 0.017435 +v 0.165640 -0.056706 0.017699 +v 0.168651 -0.055736 0.020501 +v 0.168914 -0.054077 0.023308 +v 0.169347 -0.051806 0.025869 +v 0.169630 -0.048797 0.027909 +v 0.169739 -0.045226 0.029239 +v 0.169660 -0.041331 0.029714 +v 0.169396 -0.037390 0.029253 +v 0.168959 -0.033700 0.027845 +v 0.168377 -0.030559 0.025558 +v 0.167277 -0.028255 0.022577 +v 0.167690 -0.028244 0.022544 +v 0.167166 -0.030585 0.025646 +v 0.167031 -0.033730 0.027976 +v 0.166883 -0.037412 0.029416 +v 0.166734 -0.041338 0.029900 +v 0.166598 -0.045211 0.029440 +v 0.166490 -0.048761 0.028116 +v 0.166422 -0.051754 0.026072 +v 0.166142 -0.055666 0.020707 +v 0.166403 -0.054019 0.023497 +v 0.165872 -0.056051 0.014548 +v 0.166240 -0.055901 0.014544 +v 0.166644 -0.055830 0.014521 +v 0.167056 -0.055842 0.014480 +v 0.167448 -0.055937 0.014424 +v 0.167794 -0.056107 0.014357 +v 0.168071 -0.056342 0.014284 +v 0.168258 -0.056625 0.014209 +v 0.165341 -0.056540 0.014499 +v 0.165566 -0.056269 0.014533 +v 0.168364 -0.056506 0.017425 +v 0.165857 -0.056431 0.017652 +v 0.168619 -0.053890 0.023187 +v 0.168362 -0.055514 0.020437 +v 0.169133 -0.051634 0.025657 +v 0.169511 -0.048675 0.027626 +v 0.169725 -0.045175 0.028918 +v 0.169753 -0.041354 0.029394 +v 0.169591 -0.037474 0.028971 +v 0.169244 -0.033816 0.027629 +v 0.168732 -0.030669 0.025419 +v 0.168089 -0.028308 0.022473 +v 0.166947 
-0.026977 0.019046 +v 0.167360 -0.026964 0.019010 +v 0.166879 -0.028340 0.022569 +v 0.166806 -0.030711 0.025559 +v 0.166732 -0.033855 0.027800 +v 0.166665 -0.037500 0.029161 +v 0.166613 -0.041361 0.029594 +v 0.166584 -0.045161 0.029119 +v 0.166585 -0.048642 0.027819 +v 0.166621 -0.051589 0.025832 +v 0.166693 -0.053845 0.023332 +v 0.166437 -0.055461 0.020595 +v 0.166521 -0.056058 0.017554 +v 0.166158 -0.056210 0.017603 +v 0.166923 -0.055986 0.017510 +v 0.167335 -0.055998 0.017472 +v 0.167730 -0.056094 0.017445 +v 0.168081 -0.056267 0.017429 +v 0.168260 -0.053753 0.023113 +v 0.168007 -0.055353 0.020404 +v 0.168835 -0.051491 0.025493 +v 0.169294 -0.048565 0.027379 +v 0.169604 -0.045126 0.028614 +v 0.169739 -0.041379 0.029070 +v 0.169685 -0.037568 0.028665 +v 0.169442 -0.033960 0.027375 +v 0.169021 -0.030830 0.025239 +v 0.168447 -0.028443 0.022368 +v 0.167761 -0.027036 0.018958 +v 0.166600 -0.026783 0.015333 +v 0.167012 -0.026771 0.015293 +v 0.166551 -0.027071 0.019064 +v 0.166522 -0.028493 0.022521 +v 0.166510 -0.030884 0.025422 +v 0.166517 -0.034006 0.027574 +v 0.166545 -0.037597 0.028869 +v 0.166598 -0.041386 0.029270 +v 0.166677 -0.045112 0.028802 +v 0.166781 -0.048537 0.027545 +v 0.166907 -0.051457 0.025627 +v 0.167049 -0.053725 0.023204 +v 0.166797 -0.055320 0.020503 +v 0.167197 -0.055253 0.020438 +v 0.167609 -0.055264 0.020405 +v 0.167860 -0.053678 0.023091 +v 0.168473 -0.051387 0.025387 +v 0.168993 -0.048474 0.027186 +v 0.169385 -0.045081 0.028350 +v 0.169618 -0.041403 0.028764 +v 0.169672 -0.037667 0.028355 +v 0.169539 -0.034124 0.027100 +v 0.169223 -0.031029 0.025029 +v 0.168740 -0.028639 0.022236 +v 0.168123 -0.027185 0.018894 +v 0.167415 -0.026843 0.015262 +v 0.166262 -0.027690 0.011708 +v 0.166674 -0.027679 0.011665 +v 0.166207 -0.026879 0.015379 +v 0.166199 -0.027241 0.019063 +v 0.166231 -0.028705 0.022435 +v 0.166299 -0.031092 0.025242 +v 0.166400 -0.034173 0.027313 +v 0.166532 -0.037696 0.028559 +v 0.166691 -0.041409 0.028950 +v 0.166872 -0.045069 
0.028510 +v 0.167065 -0.048452 0.027313 +v 0.167261 -0.051366 0.025471 +v 0.167447 -0.053668 0.023122 +v 0.168071 -0.051330 0.025347 +v 0.168629 -0.048407 0.027059 +v 0.169082 -0.045044 0.028141 +v 0.169398 -0.041424 0.028496 +v 0.169552 -0.037764 0.028063 +v 0.169529 -0.034294 0.026823 +v 0.169325 -0.031255 0.024804 +v 0.168948 -0.028883 0.022087 +v 0.168421 -0.027403 0.018822 +v 0.167782 -0.026995 0.015243 +v 0.167078 -0.027746 0.011654 +v 0.165956 -0.029625 0.008433 +v 0.166367 -0.029615 0.008387 +v 0.165871 -0.027780 0.011781 +v 0.165859 -0.027052 0.015428 +v 0.165914 -0.027476 0.019042 +v 0.166026 -0.028960 0.022319 +v 0.166187 -0.031322 0.025032 +v 0.166390 -0.034343 0.027036 +v 0.166626 -0.037790 0.028253 +v 0.166885 -0.041430 0.028656 +v 0.167154 -0.045035 0.028265 +v 0.167417 -0.048394 0.027139 +v 0.167658 -0.051323 0.025376 +v 0.168226 -0.048371 0.027008 +v 0.168717 -0.045017 0.028004 +v 0.169096 -0.041442 0.028286 +v 0.169334 -0.037851 0.027808 +v 0.169412 -0.034461 0.026562 +v 0.169319 -0.031491 0.024579 +v 0.169056 -0.029158 0.021930 +v 0.168636 -0.027674 0.018747 +v 0.168086 -0.027216 0.015236 +v 0.167449 -0.027887 0.011678 +v 0.166774 -0.029672 0.008394 +v 0.166114 -0.032429 0.005682 +v 0.165703 -0.032437 0.005730 +v 0.165567 -0.029700 0.008529 +v 0.165528 -0.027940 0.011879 +v 0.165581 -0.027290 0.015477 +v 0.165716 -0.027759 0.019003 +v 0.165920 -0.029241 0.022179 +v 0.166181 -0.031558 0.024807 +v 0.166487 -0.034507 0.026761 +v 0.166821 -0.037874 0.027971 +v 0.167167 -0.041447 0.028408 +v 0.167505 -0.045011 0.028081 +v 0.167813 -0.048366 0.027035 +v 0.168314 -0.045002 0.027946 +v 0.168730 -0.041455 0.028146 +v 0.169032 -0.037924 0.027608 +v 0.169197 -0.034612 0.026336 +v 0.169207 -0.031721 0.024369 +v 0.169057 -0.029446 0.021777 +v 0.168752 -0.027979 0.018675 +v 0.168308 -0.027491 0.015242 +v 0.167759 -0.028093 0.011733 +v 0.167147 -0.029791 0.008453 +v 0.166522 -0.032473 0.005702 +v 0.165932 -0.035920 0.003723 +v 0.165520 -0.035925 0.003773 +v 
0.165316 -0.032494 0.005844 +v 0.165228 -0.029837 0.008668 +v 0.165255 -0.028162 0.011995 +v 0.165391 -0.027578 0.015523 +v 0.165618 -0.028071 0.018949 +v 0.165921 -0.029529 0.022026 +v 0.166283 -0.031784 0.024582 +v 0.166684 -0.034651 0.026507 +v 0.167104 -0.037942 0.027733 +v 0.167518 -0.041458 0.028224 +v 0.167900 -0.045000 0.027972 +v 0.168327 -0.041463 0.028088 +v 0.168668 -0.037977 0.027476 +v 0.168897 -0.034737 0.026160 +v 0.168996 -0.031929 0.024189 +v 0.168952 -0.029727 0.021637 +v 0.168761 -0.028299 0.018609 +v 0.168433 -0.027802 0.015260 +v 0.167988 -0.028348 0.011816 +v 0.167463 -0.029966 0.008562 +v 0.166899 -0.032563 0.005789 +v 0.166340 -0.035948 0.003752 +v 0.165829 -0.039850 0.002628 +v 0.165418 -0.039852 0.002679 +v 0.165134 -0.035961 0.003898 +v 0.164980 -0.032598 0.006015 +v 0.164961 -0.030024 0.008842 +v 0.165073 -0.028429 0.012122 +v 0.165302 -0.027895 0.015562 +v 0.165628 -0.028391 0.018883 +v 0.166030 -0.029804 0.021869 +v 0.166485 -0.031983 0.024372 +v 0.166969 -0.034767 0.026291 +v 0.167456 -0.037988 0.027555 +v 0.167914 -0.041464 0.028114 +v 0.168265 -0.038006 0.027422 +v 0.168534 -0.034828 0.026046 +v 0.168700 -0.032102 0.024051 +v 0.168747 -0.029982 0.021521 +v 0.168664 -0.028611 0.018555 +v 0.168451 -0.028126 0.015290 +v 0.168121 -0.028637 0.011923 +v 0.167698 -0.030182 0.008711 +v 0.167218 -0.032696 0.005938 +v 0.166718 -0.036006 0.003857 +v 0.166238 -0.039861 0.002662 +v 0.165819 -0.042646 0.002516 +v 0.165408 -0.042663 0.002566 +v 0.165033 -0.039867 0.002810 +v 0.164800 -0.036028 0.004090 +v 0.164717 -0.032740 0.006232 +v 0.164785 -0.030251 0.009038 +v 0.164992 -0.028724 0.012250 +v 0.165320 -0.028220 0.015591 +v 0.165744 -0.028697 0.018810 +v 0.166237 -0.030048 0.021720 +v 0.166773 -0.032144 0.024192 +v 0.167322 -0.034847 0.026129 +v 0.167852 -0.038010 0.027449 +v 0.168132 -0.034879 0.026002 +v 0.168340 -0.032228 0.023965 +v 0.168456 -0.030194 0.021435 +v 0.168466 -0.028894 0.018516 +v 0.168363 -0.028443 0.015329 +v 0.168148 
-0.028939 0.012044 +v 0.167838 -0.030427 0.008892 +v 0.167458 -0.032861 0.006137 +v 0.167040 -0.036091 0.004033 +v 0.166617 -0.039885 0.002777 +v 0.166228 -0.042713 0.002548 +v 0.163467 -0.042929 -0.020912 +v 0.163880 -0.042913 -0.020948 +v 0.165022 -0.042761 0.002694 +v 0.164699 -0.039894 0.003013 +v 0.164540 -0.036119 0.004335 +v 0.164546 -0.032912 0.006480 +v 0.164711 -0.030500 0.009242 +v 0.165019 -0.029026 0.012372 +v 0.165445 -0.028530 0.015610 +v 0.165958 -0.028968 0.018735 +v 0.166530 -0.030244 0.021588 +v 0.167128 -0.032254 0.024053 +v 0.167719 -0.034885 0.026030 +v 0.167939 -0.032298 0.023936 +v 0.168099 -0.030347 0.021387 +v 0.168181 -0.029129 0.018495 +v 0.168172 -0.028731 0.015374 +v 0.168068 -0.029234 0.012172 +v 0.167872 -0.030683 0.009091 +v 0.167603 -0.033046 0.006374 +v 0.167283 -0.036196 0.004265 +v 0.166940 -0.039919 0.002966 +v 0.166606 -0.042859 0.002658 +v 0.164283 -0.042980 -0.020983 +v 0.160519 -0.042929 -0.052921 +v 0.160897 -0.042913 -0.053335 +v 0.163074 -0.043029 -0.020877 +v 0.164688 -0.042934 0.002891 +v 0.164441 -0.039931 0.003274 +v 0.164372 -0.036229 0.004618 +v 0.164478 -0.033102 0.006742 +v 0.164746 -0.030756 0.009441 +v 0.165152 -0.029314 0.012478 +v 0.165667 -0.028805 0.015616 +v 0.166257 -0.029186 0.018664 +v 0.166888 -0.030379 0.021483 +v 0.167526 -0.032307 0.023966 +v 0.167700 -0.030432 0.021379 +v 0.167828 -0.029300 0.018494 +v 0.167894 -0.028969 0.015424 +v 0.167885 -0.029501 0.012299 +v 0.167799 -0.030933 0.009296 +v 0.167643 -0.033241 0.006633 +v 0.167431 -0.036315 0.004540 +v 0.167185 -0.039962 0.003217 +v 0.166928 -0.043073 0.002840 +v 0.164650 -0.043127 -0.021014 +v 0.161266 -0.042980 -0.053740 +v -0.005122 -0.042929 -0.052927 +v -0.005422 -0.042913 -0.053341 +v 0.160159 -0.043029 -0.052526 +v 0.162726 -0.043204 -0.020845 +v 0.164428 -0.043172 0.003144 +v 0.164274 -0.039976 0.003575 +v 0.164307 -0.036351 0.004918 +v 0.164518 -0.033297 0.007000 +v 0.164885 -0.031001 0.009622 +v 0.165381 -0.029570 0.012561 +v 0.165971 
-0.029026 0.015609 +v 0.166619 -0.029335 0.018599 +v 0.167287 -0.030443 0.021412 +v 0.167432 -0.029394 0.018511 +v 0.167546 -0.029142 0.015473 +v 0.167613 -0.029722 0.012415 +v 0.167623 -0.031159 0.009491 +v 0.167574 -0.033431 0.006895 +v 0.167475 -0.036440 0.004838 +v 0.167335 -0.040010 0.003511 +v 0.167172 -0.043341 0.003081 +v 0.164955 -0.043344 -0.021039 +v 0.161601 -0.043127 -0.054108 +v -0.005702 -0.042980 -0.053746 +v -0.021297 -0.055613 -0.002540 +v -0.020997 -0.055629 -0.002126 +v -0.004820 -0.043029 -0.052532 +v 0.159840 -0.043204 -0.052177 +v 0.162448 -0.043444 -0.020820 +v 0.164261 -0.043456 0.003435 +v 0.164212 -0.040025 0.003895 +v 0.164351 -0.036475 0.005216 +v 0.164663 -0.033483 0.007237 +v 0.165121 -0.031218 0.009771 +v 0.165692 -0.029775 0.012617 +v 0.166338 -0.029178 0.015589 +v 0.167020 -0.029406 0.018548 +v 0.167153 -0.029238 0.015519 +v 0.167270 -0.029883 0.012513 +v 0.167356 -0.031347 0.009665 +v 0.167403 -0.033603 0.007143 +v 0.167410 -0.036561 0.005138 +v 0.167381 -0.040061 0.003829 +v 0.167322 -0.043645 0.003365 +v 0.165178 -0.043615 -0.021057 +v 0.161881 -0.043344 -0.054414 +v -0.005940 -0.043127 -0.054114 +v -0.021577 -0.055680 -0.002945 +v -0.071760 -0.055629 -0.002126 +v -0.071459 -0.055613 -0.002540 +v -0.020695 -0.055729 -0.001732 +v -0.004537 -0.043204 -0.052183 +v 0.159586 -0.043444 -0.051898 +v 0.162259 -0.043731 -0.020801 +v 0.164198 -0.043769 0.003745 +v 0.164257 -0.040076 0.004213 +v 0.164499 -0.036594 0.005490 +v 0.164903 -0.033647 0.007436 +v 0.165436 -0.031392 0.009880 +v 0.166062 -0.029916 0.012640 +v 0.166741 -0.029250 0.015559 +v 0.166879 -0.029972 0.012587 +v 0.167017 -0.031483 0.009804 +v 0.167141 -0.033745 0.007360 +v 0.167243 -0.036671 0.005420 +v 0.167318 -0.040110 0.004149 +v 0.167366 -0.043964 0.003673 +v 0.165302 -0.043922 -0.021066 +v 0.162084 -0.043615 -0.054638 +v -0.006121 -0.043344 -0.054420 +v -0.021815 -0.055827 -0.003313 +v -0.071180 -0.055680 -0.002945 +v -0.087635 -0.042929 -0.052926 +v -0.087334 
-0.042913 -0.053340 +v -0.072062 -0.055729 -0.001732 +v -0.020412 -0.055904 -0.001382 +v -0.004293 -0.043444 -0.051904 +v 0.159412 -0.043731 -0.051708 +v 0.162170 -0.044047 -0.020792 +v 0.164242 -0.044088 0.004052 +v 0.164408 -0.040124 0.004507 +v 0.164743 -0.036700 0.005723 +v 0.165222 -0.033780 0.007585 +v 0.165810 -0.031511 0.009939 +v 0.166467 -0.029984 0.012630 +v 0.166628 -0.031559 0.009900 +v 0.166804 -0.033849 0.007531 +v 0.166982 -0.036763 0.005666 +v 0.167152 -0.040155 0.004450 +v 0.167302 -0.044277 0.003982 +v 0.165322 -0.044245 -0.021066 +v 0.162199 -0.043922 -0.054763 +v -0.006233 -0.043615 -0.054644 +v -0.021996 -0.056044 -0.003619 +v -0.070942 -0.055827 -0.003313 +v -0.087055 -0.042980 -0.053744 +v -0.163782 -0.042913 -0.053335 +v -0.163405 -0.042929 -0.052921 +v -0.087937 -0.043029 -0.052531 +v -0.072345 -0.055904 -0.001382 +v -0.020168 -0.056144 -0.001103 +v -0.004104 -0.043731 -0.051713 +v 0.159331 -0.044047 -0.051619 +v 0.162189 -0.044370 -0.020792 +v 0.164392 -0.044392 0.004336 +v 0.164652 -0.040167 0.004758 +v 0.165064 -0.036785 0.005898 +v 0.165598 -0.033871 0.007672 +v 0.166216 -0.031568 0.009947 +v 0.166417 -0.033906 0.007644 +v 0.166648 -0.036829 0.005858 +v 0.166893 -0.040192 0.004711 +v 0.167135 -0.044561 0.004274 +v 0.165233 -0.044561 -0.021057 +v 0.162216 -0.044245 -0.054782 +v -0.006268 -0.043922 -0.054769 +v -0.022108 -0.056315 -0.003843 +v -0.070760 -0.056044 -0.003619 +v -0.086817 -0.043127 -0.054112 +v -0.164151 -0.042980 -0.053740 +v -0.168926 -0.042913 0.002517 +v -0.168516 -0.042929 0.002570 +v -0.163044 -0.043029 -0.052526 +v -0.088220 -0.043204 -0.052182 +v -0.072589 -0.056144 -0.001103 +v -0.019979 -0.056431 -0.000913 +v -0.003983 -0.044047 -0.051625 +v 0.159348 -0.044370 -0.051638 +v 0.162314 -0.044677 -0.020801 +v 0.164636 -0.044660 0.004578 +v 0.164975 -0.040201 0.004947 +v 0.165442 -0.036843 0.006004 +v 0.166006 -0.033914 0.007692 +v 0.166262 -0.036866 0.005983 +v 0.166560 -0.040219 0.004914 +v 0.166876 -0.044798 0.004526 
+v 0.165044 -0.044849 -0.021038 +v 0.162135 -0.044561 -0.054693 +v -0.006224 -0.044245 -0.054788 +v -0.022143 -0.056623 -0.003968 +v -0.070649 -0.056315 -0.003843 +v -0.086636 -0.043344 -0.054419 +v -0.164487 -0.043127 -0.054108 +v -0.169335 -0.042980 0.002544 +v -0.168526 -0.040132 0.002682 +v -0.168936 -0.040129 0.002628 +v -0.168131 -0.043029 0.002702 +v -0.162726 -0.043204 -0.052177 +v -0.088464 -0.043444 -0.051902 +v -0.072778 -0.056431 -0.000913 +v -0.019858 -0.056747 -0.000824 +v -0.003939 -0.044370 -0.051643 +v 0.159462 -0.044677 -0.051763 +v 0.162537 -0.044948 -0.020819 +v 0.164958 -0.044874 0.004760 +v 0.165354 -0.040225 0.005062 +v 0.165851 -0.036870 0.006033 +v 0.166174 -0.040234 0.005045 +v 0.166542 -0.044972 0.004723 +v 0.164766 -0.045088 -0.021013 +v 0.161961 -0.044849 -0.054503 +v -0.006103 -0.044561 -0.054699 +v -0.022099 -0.056945 -0.003987 +v -0.070614 -0.056623 -0.003968 +v -0.086524 -0.043615 -0.054642 +v -0.164766 -0.043344 -0.054414 +v -0.169715 -0.043127 0.002651 +v -0.169346 -0.040140 0.002658 +v -0.168622 -0.036206 0.003776 +v -0.169033 -0.036199 0.003723 +v -0.168141 -0.040148 0.002817 +v -0.167799 -0.043204 0.002901 +v -0.162471 -0.043444 -0.051898 +v -0.088653 -0.043731 -0.051712 +v -0.072899 -0.056747 -0.000824 +v -0.019814 -0.057070 -0.000843 +v -0.003974 -0.044677 -0.051769 +v 0.159666 -0.044948 -0.051986 +v 0.162842 -0.045165 -0.020844 +v 0.165336 -0.045019 0.004870 +v 0.165763 -0.040236 0.005096 +v 0.166156 -0.045070 0.004851 +v 0.164418 -0.045264 -0.020981 +v 0.161707 -0.045088 -0.054223 +v -0.005914 -0.044849 -0.054509 +v -0.021978 -0.057261 -0.003898 +v -0.070658 -0.056945 -0.003987 +v -0.086489 -0.043922 -0.054767 +v -0.164970 -0.043615 -0.054638 +v -0.170039 -0.043344 0.002830 +v -0.169725 -0.040163 0.002770 +v -0.169442 -0.036225 0.003749 +v -0.168796 -0.032719 0.005734 +v -0.169207 -0.032708 0.005683 +v -0.168238 -0.036245 0.003904 +v -0.167810 -0.040176 0.003023 +v -0.167542 -0.043444 0.003156 +v -0.162297 -0.043731 
-0.051708 +v -0.088774 -0.044047 -0.051623 +v -0.072943 -0.057070 -0.000843 +v -0.019849 -0.057377 -0.000968 +v -0.004086 -0.044948 -0.051992 +v 0.159945 -0.045165 -0.052293 +v 0.163209 -0.045312 -0.020875 +v 0.165745 -0.045086 0.004901 +v 0.164024 -0.045363 -0.020946 +v 0.161388 -0.045264 -0.053874 +v -0.005670 -0.045088 -0.054229 +v -0.021789 -0.057549 -0.003708 +v -0.070779 -0.057261 -0.003898 +v -0.086533 -0.044245 -0.054786 +v -0.165084 -0.043922 -0.054763 +v -0.170285 -0.043615 0.003069 +v -0.170050 -0.040196 0.002957 +v -0.169821 -0.036281 0.003852 +v -0.169615 -0.032748 0.005701 +v -0.169035 -0.029908 0.008437 +v -0.169447 -0.029893 0.008390 +v -0.168410 -0.032780 0.005850 +v -0.167905 -0.036313 0.004099 +v -0.167553 -0.040213 0.003286 +v -0.167377 -0.043731 0.003449 +v -0.162216 -0.044047 -0.051619 +v -0.088818 -0.044370 -0.051642 +v -0.072908 -0.057377 -0.000968 +v -0.019960 -0.057648 -0.001191 +v -0.004267 -0.045165 -0.052298 +v 0.160281 -0.045312 -0.052661 +v 0.163612 -0.045380 -0.020910 +v 0.161028 -0.045363 -0.053479 +v -0.005387 -0.045264 -0.053880 +v -0.021545 -0.057788 -0.003428 +v -0.070968 -0.057549 -0.003708 +v -0.086654 -0.044561 -0.054697 +v -0.165102 -0.044245 -0.054782 +v -0.170437 -0.043922 0.003352 +v -0.170297 -0.040239 0.003205 +v -0.170144 -0.036364 0.004024 +v -0.169993 -0.032836 0.005786 +v -0.169854 -0.029946 0.008395 +v -0.169737 -0.027957 0.011670 +v -0.169326 -0.027974 0.011713 +v -0.168648 -0.029987 0.008535 +v -0.168075 -0.032886 0.006023 +v -0.167647 -0.036406 0.004347 +v -0.167390 -0.040259 0.003588 +v -0.167317 -0.044047 0.003759 +v -0.162233 -0.044370 -0.051638 +v -0.088783 -0.044677 -0.051767 +v -0.072796 -0.057648 -0.001191 +v -0.020142 -0.057865 -0.001498 +v -0.004505 -0.045312 -0.052666 +v 0.160650 -0.045380 -0.053065 +v -0.005085 -0.045363 -0.053485 +v -0.021262 -0.057964 -0.003079 +v -0.071212 -0.057788 -0.003428 +v -0.086843 -0.044849 -0.054507 +v -0.165021 -0.044561 -0.054693 +v -0.170484 -0.044245 0.003659 +v 
-0.170450 -0.040287 0.003497 +v -0.170390 -0.036468 0.004255 +v -0.170314 -0.032966 0.005932 +v -0.170229 -0.030062 0.008453 +v -0.170143 -0.028019 0.011659 +v -0.170059 -0.027049 0.015300 +v -0.169647 -0.027067 0.015338 +v -0.168936 -0.028068 0.011786 +v -0.168310 -0.030127 0.008675 +v -0.167815 -0.033031 0.006242 +v -0.167482 -0.036517 0.004630 +v -0.167330 -0.040308 0.003909 +v -0.167364 -0.044370 0.004066 +v -0.162348 -0.044677 -0.051763 +v -0.088671 -0.044948 -0.051991 +v -0.072615 -0.057865 -0.001498 +v -0.020380 -0.058012 -0.001866 +v -0.004784 -0.045380 -0.053071 +v -0.020960 -0.058063 -0.002684 +v -0.071495 -0.057964 -0.003079 +v -0.087087 -0.045088 -0.054227 +v -0.164847 -0.044849 -0.054503 +v -0.170424 -0.044561 0.003969 +v -0.170498 -0.040338 0.003815 +v -0.170541 -0.036587 0.004528 +v -0.170556 -0.033129 0.006130 +v -0.170546 -0.030232 0.008560 +v -0.170515 -0.028156 0.011682 +v -0.170463 -0.027115 0.015271 +v -0.170388 -0.027242 0.019019 +v -0.169976 -0.027260 0.019052 +v -0.169254 -0.027168 0.015383 +v -0.168595 -0.028233 0.011884 +v -0.168046 -0.030318 0.008850 +v -0.167647 -0.033205 0.006490 +v -0.167421 -0.036639 0.004931 +v -0.167378 -0.040359 0.004227 +v -0.167516 -0.044677 0.004348 +v -0.162552 -0.044948 -0.051986 +v -0.088490 -0.045165 -0.052297 +v -0.072377 -0.058012 -0.001866 +v -0.020659 -0.058080 -0.002270 +v -0.071797 -0.058063 -0.002684 +v -0.087370 -0.045264 -0.053878 +v -0.164592 -0.045088 -0.054223 +v -0.170259 -0.044849 0.004261 +v -0.170439 -0.040387 0.004136 +v -0.170588 -0.036711 0.004826 +v -0.170704 -0.033314 0.006366 +v -0.170785 -0.030447 0.008709 +v -0.170828 -0.028357 0.011737 +v -0.170831 -0.027262 0.015252 +v -0.170791 -0.027308 0.018970 +v -0.170702 -0.028522 0.022556 +v -0.170289 -0.028538 0.022584 +v -0.169581 -0.027360 0.019067 +v -0.168909 -0.027346 0.015431 +v -0.168325 -0.028458 0.012000 +v -0.167873 -0.030547 0.009046 +v -0.167582 -0.033395 0.006753 +v -0.167467 -0.036763 0.005228 +v -0.167531 -0.040407 0.004520 +v 
-0.167762 -0.044948 0.004587 +v -0.162831 -0.045165 -0.052293 +v -0.088252 -0.045312 -0.052665 +v -0.072098 -0.058080 -0.002270 +v -0.087672 -0.045363 -0.053483 +v -0.164274 -0.045264 -0.053874 +v -0.170002 -0.045088 0.004516 +v -0.170275 -0.040432 0.004438 +v -0.170526 -0.036833 0.005126 +v -0.170747 -0.033508 0.006624 +v -0.170928 -0.030690 0.008889 +v -0.171060 -0.028610 0.011820 +v -0.171139 -0.027479 0.015246 +v -0.171155 -0.027453 0.018909 +v -0.171102 -0.028581 0.022489 +v -0.170976 -0.030785 0.025658 +v -0.170563 -0.030798 0.025682 +v -0.169891 -0.028628 0.022571 +v -0.169231 -0.027535 0.019063 +v -0.168633 -0.027588 0.015478 +v -0.168146 -0.028728 0.012127 +v -0.167803 -0.030797 0.009250 +v -0.167625 -0.033589 0.007011 +v -0.167618 -0.036882 0.005502 +v -0.167778 -0.040449 0.004768 +v -0.168086 -0.045165 0.004766 +v -0.163167 -0.045312 -0.052661 +v -0.087972 -0.045380 -0.053069 +v -0.163913 -0.045363 -0.053479 +v -0.169670 -0.045264 0.004716 +v -0.170019 -0.040470 0.004701 +v -0.170361 -0.036944 0.005410 +v -0.170682 -0.033698 0.006887 +v -0.170965 -0.030946 0.009088 +v -0.171197 -0.028897 0.011926 +v -0.171364 -0.027751 0.015252 +v -0.171457 -0.027666 0.018839 +v -0.171463 -0.028712 0.022387 +v -0.171376 -0.030833 0.025576 +v -0.170781 -0.033870 0.028142 +v -0.171195 -0.033861 0.028121 +v -0.170164 -0.030872 0.025647 +v -0.169537 -0.028786 0.022519 +v -0.168949 -0.027773 0.019039 +v -0.168447 -0.027878 0.015523 +v -0.168069 -0.029024 0.012255 +v -0.167841 -0.031053 0.009449 +v -0.167773 -0.033774 0.007247 +v -0.167864 -0.036986 0.005733 +v -0.168103 -0.040483 0.004954 +v -0.168466 -0.045312 0.004873 +v -0.163536 -0.045380 -0.053065 +v -0.169286 -0.045363 0.004847 +v -0.169687 -0.040498 0.004907 +v -0.170103 -0.037037 0.005658 +v -0.170514 -0.033872 0.007135 +v -0.170895 -0.031196 0.009292 +v -0.171227 -0.029199 0.012047 +v -0.171493 -0.028060 0.015271 +v -0.171676 -0.027934 0.018765 +v -0.171760 -0.028904 0.022259 +v -0.171734 -0.030940 0.025443 +v 
-0.171593 -0.033896 0.028029 +v -0.170929 -0.037539 0.029810 +v -0.171343 -0.037533 0.029790 +v -0.170380 -0.033923 0.028091 +v -0.169806 -0.031001 0.025556 +v -0.169249 -0.029000 0.022430 +v -0.168755 -0.028059 0.018999 +v -0.168362 -0.028196 0.015561 +v -0.168100 -0.029325 0.012376 +v -0.167984 -0.031296 0.009629 +v -0.168016 -0.033937 0.007445 +v -0.168187 -0.037069 0.005905 +v -0.168483 -0.040506 0.005066 +v -0.168875 -0.045380 0.004901 +v -0.169303 -0.040514 0.005042 +v -0.169771 -0.037105 0.005852 +v -0.170254 -0.034017 0.007354 +v -0.170722 -0.031425 0.009488 +v -0.171151 -0.029494 0.012175 +v -0.171516 -0.028385 0.015300 +v -0.171796 -0.028238 0.018693 +v -0.171972 -0.029145 0.022112 +v -0.172026 -0.031097 0.025267 +v -0.171949 -0.033973 0.027872 +v -0.171740 -0.037554 0.029691 +v -0.170997 -0.041558 0.030584 +v -0.171411 -0.041556 0.030565 +v -0.170527 -0.037570 0.029748 +v -0.170020 -0.034017 0.027971 +v -0.169514 -0.031176 0.025413 +v -0.169048 -0.029258 0.022311 +v -0.168662 -0.028372 0.018944 +v -0.168385 -0.028521 0.015591 +v -0.168237 -0.029612 0.012481 +v -0.168222 -0.031511 0.009778 +v -0.168337 -0.034067 0.007591 +v -0.168566 -0.037126 0.006008 +v -0.168892 -0.040516 0.005096 +v -0.169386 -0.037144 0.005980 +v -0.169919 -0.034123 0.007527 +v -0.170458 -0.031616 0.009662 +v -0.170972 -0.029764 0.012301 +v -0.171431 -0.028703 0.015339 +v -0.171810 -0.028558 0.018627 +v -0.172085 -0.029419 0.021956 +v -0.172233 -0.031295 0.025059 +v -0.172238 -0.034087 0.027660 +v -0.172095 -0.037598 0.029519 +v -0.171809 -0.041561 0.030463 +v -0.171397 -0.045667 0.030400 +v -0.170983 -0.045664 0.030419 +v -0.170595 -0.041565 0.030518 +v -0.170165 -0.037624 0.029610 +v -0.169724 -0.034144 0.027790 +v -0.169307 -0.031387 0.025230 +v -0.168948 -0.029540 0.022170 +v -0.168676 -0.028692 0.018878 +v -0.168514 -0.028830 0.015609 +v -0.168469 -0.029865 0.012565 +v -0.168540 -0.031682 0.009884 +v -0.168714 -0.034155 0.007676 +v -0.168975 -0.037151 0.006034 +v -0.169533 
-0.034184 0.007643 +v -0.170121 -0.031756 0.009803 +v -0.170702 -0.029989 0.012418 +v -0.171245 -0.028993 0.015383 +v -0.171717 -0.028871 0.018572 +v -0.172092 -0.029707 0.021802 +v -0.172340 -0.031519 0.024835 +v -0.172441 -0.034230 0.027409 +v -0.172382 -0.037664 0.029286 +v -0.172163 -0.041573 0.030285 +v -0.171794 -0.045656 0.030299 +v -0.171300 -0.049597 0.029305 +v -0.170886 -0.049590 0.029325 +v -0.170581 -0.045648 0.030354 +v -0.170233 -0.041579 0.030372 +v -0.169867 -0.037698 0.029405 +v -0.169513 -0.034297 0.027560 +v -0.169200 -0.031618 0.025019 +v -0.168954 -0.029828 0.022016 +v -0.168797 -0.028996 0.018805 +v -0.168739 -0.029102 0.015616 +v -0.168782 -0.030067 0.012620 +v -0.168915 -0.031797 0.009943 +v -0.169122 -0.034195 0.007693 +v -0.169733 -0.031836 0.009901 +v -0.170361 -0.030154 0.012516 +v -0.170969 -0.029235 0.015431 +v -0.171523 -0.029157 0.018531 +v -0.171991 -0.029989 0.021661 +v -0.172340 -0.031755 0.024610 +v -0.172544 -0.034392 0.027136 +v -0.172583 -0.037747 0.029008 +v -0.172449 -0.041589 0.030042 +v -0.172148 -0.045634 0.030122 +v -0.171698 -0.049571 0.029208 +v -0.171126 -0.053088 0.027345 +v -0.170712 -0.053077 0.027367 +v -0.170484 -0.049551 0.029267 +v -0.170218 -0.045621 0.030210 +v -0.169934 -0.041597 0.030156 +v -0.169654 -0.037786 0.029146 +v -0.169402 -0.034464 0.027298 +v -0.169200 -0.031854 0.024794 +v -0.169067 -0.030102 0.021860 +v -0.169015 -0.029264 0.018732 +v -0.169047 -0.029319 0.015609 +v -0.169154 -0.030203 0.012643 +v -0.169322 -0.031850 0.009948 +v -0.169971 -0.030249 0.012589 +v -0.170624 -0.029413 0.015479 +v -0.171242 -0.029395 0.018508 +v -0.171790 -0.030246 0.021542 +v -0.172234 -0.031986 0.024399 +v -0.172540 -0.034563 0.026859 +v -0.172683 -0.037842 0.028704 +v -0.172649 -0.041609 0.029752 +v -0.172435 -0.045600 0.029882 +v -0.172053 -0.049515 0.029041 +v -0.171525 -0.053048 0.027256 +v -0.170886 -0.055903 0.024639 +v -0.170473 -0.055889 0.024664 +v -0.170312 -0.053017 0.027321 +v -0.170123 -0.049483 
0.029134 +v -0.169920 -0.045583 0.029996 +v -0.169719 -0.041619 0.029885 +v -0.169540 -0.037883 0.028852 +v -0.169398 -0.034634 0.027021 +v -0.169307 -0.032078 0.024570 +v -0.169280 -0.030343 0.021713 +v -0.169317 -0.029478 0.018662 +v -0.169415 -0.029465 0.015591 +v -0.169560 -0.030265 0.012632 +v -0.170231 -0.029514 0.015523 +v -0.170892 -0.029570 0.018504 +v -0.171503 -0.030461 0.021453 +v -0.172027 -0.032196 0.024216 +v -0.172428 -0.034730 0.026596 +v -0.172676 -0.037941 0.028394 +v -0.172748 -0.041633 0.029434 +v -0.172635 -0.045557 0.029594 +v -0.172341 -0.049432 0.028814 +v -0.171881 -0.052960 0.027106 +v -0.171286 -0.055850 0.024562 +v -0.170183 -0.057822 0.021389 +v -0.170596 -0.057839 0.021359 +v -0.170074 -0.055809 0.024637 +v -0.169952 -0.052910 0.027210 +v -0.169826 -0.049390 0.028935 +v -0.169705 -0.045538 0.029727 +v -0.169604 -0.041643 0.029576 +v -0.169532 -0.037982 0.028542 +v -0.169500 -0.034797 0.026747 +v -0.169514 -0.032275 0.024363 +v -0.169576 -0.030535 0.021585 +v -0.169682 -0.029622 0.018600 +v -0.169819 -0.029532 0.015562 +v -0.170497 -0.029670 0.018519 +v -0.171148 -0.030619 0.021401 +v -0.171735 -0.032372 0.024074 +v -0.172218 -0.034883 0.026367 +v -0.172561 -0.038037 0.028099 +v -0.172739 -0.041657 0.029110 +v -0.172734 -0.045509 0.029279 +v -0.172542 -0.049328 0.028543 +v -0.172172 -0.052830 0.026906 +v -0.171645 -0.055735 0.024439 +v -0.170997 -0.057777 0.021298 +v -0.169862 -0.058730 0.017764 +v -0.170274 -0.058747 0.017729 +v -0.169786 -0.057728 0.021385 +v -0.169717 -0.055669 0.024557 +v -0.169658 -0.052765 0.027040 +v -0.169613 -0.049279 0.028685 +v -0.169590 -0.045488 0.029422 +v -0.169595 -0.041667 0.029252 +v -0.169632 -0.038076 0.028238 +v -0.169703 -0.034940 0.026496 +v -0.169807 -0.032433 0.024186 +v -0.169937 -0.030666 0.021484 +v -0.170084 -0.029688 0.018551 +v -0.170751 -0.030709 0.021388 +v -0.171377 -0.032501 0.023982 +v -0.171922 -0.035010 0.026186 +v -0.172348 -0.038126 0.027841 +v -0.172624 -0.041681 0.028801 +v 
-0.172726 -0.045459 0.028957 +v -0.172643 -0.049209 0.028248 +v -0.172376 -0.052668 0.026668 +v -0.171939 -0.055564 0.024278 +v -0.171359 -0.057641 0.021210 +v -0.170677 -0.058681 0.017687 +v -0.169533 -0.058536 0.014049 +v -0.169945 -0.058554 0.014009 +v -0.169468 -0.058628 0.017789 +v -0.169433 -0.057563 0.021349 +v -0.169427 -0.055478 0.024432 +v -0.169448 -0.052592 0.026825 +v -0.169500 -0.049157 0.028400 +v -0.169582 -0.045437 0.029101 +v -0.169694 -0.041691 0.028934 +v -0.169833 -0.038159 0.027959 +v -0.169993 -0.035054 0.026285 +v -0.170165 -0.032539 0.024053 +v -0.170338 -0.030725 0.021417 +v -0.170978 -0.032575 0.023947 +v -0.171561 -0.035103 0.026065 +v -0.172050 -0.038200 0.027635 +v -0.172409 -0.041703 0.028530 +v -0.172610 -0.045409 0.028652 +v -0.172636 -0.049085 0.027947 +v -0.172480 -0.052483 0.026410 +v -0.172147 -0.055349 0.024090 +v -0.171657 -0.057439 0.021101 +v -0.171043 -0.058534 0.017640 +v -0.170349 -0.058488 0.013987 +v -0.169220 -0.057258 0.010518 +v -0.169631 -0.057274 0.010473 +v -0.169141 -0.058436 0.014104 +v -0.169119 -0.058450 0.017802 +v -0.169148 -0.057338 0.021282 +v -0.169222 -0.055249 0.024270 +v -0.169338 -0.052401 0.026578 +v -0.169493 -0.049033 0.028099 +v -0.169681 -0.045389 0.028786 +v -0.169894 -0.041711 0.028644 +v -0.170120 -0.038225 0.027726 +v -0.170349 -0.035131 0.026128 +v -0.170564 -0.032588 0.023971 +v -0.171161 -0.035157 0.026014 +v -0.171688 -0.038254 0.027497 +v -0.172110 -0.041721 0.028314 +v -0.172396 -0.045364 0.028383 +v -0.172523 -0.048963 0.027662 +v -0.172477 -0.052288 0.026148 +v -0.172256 -0.055106 0.023887 +v -0.171872 -0.057186 0.020978 +v -0.171346 -0.058317 0.017592 +v -0.170719 -0.058344 0.013983 +v -0.170037 -0.057215 0.010469 +v -0.168945 -0.054998 0.007419 +v -0.169356 -0.055011 0.007371 +v -0.168831 -0.057168 0.010600 +v -0.168797 -0.058261 0.014170 +v -0.168839 -0.058208 0.017804 +v -0.168949 -0.057068 0.021189 +v -0.169117 -0.054999 0.024081 +v -0.169335 -0.052207 0.026317 +v -0.169594 
-0.048914 0.027803 +v -0.169881 -0.045347 0.028498 +v -0.170180 -0.041727 0.028401 +v -0.170475 -0.038270 0.027555 +v -0.170747 -0.035166 0.026036 +v -0.171287 -0.038285 0.027436 +v -0.171748 -0.041735 0.028168 +v -0.172098 -0.045326 0.028169 +v -0.172310 -0.048852 0.027411 +v -0.172367 -0.052098 0.025902 +v -0.172258 -0.054850 0.023685 +v -0.171987 -0.056899 0.020850 +v -0.171568 -0.058045 0.017546 +v -0.171028 -0.058130 0.013999 +v -0.170411 -0.057085 0.010505 +v -0.169764 -0.054963 0.007381 +v -0.169147 -0.052056 0.005004 +v -0.168736 -0.052045 0.005054 +v -0.168558 -0.054925 0.007524 +v -0.168491 -0.057011 0.010714 +v -0.168524 -0.058023 0.014243 +v -0.168648 -0.057918 0.017793 +v -0.168851 -0.056773 0.021076 +v -0.169119 -0.054743 0.023878 +v -0.169439 -0.052022 0.026059 +v -0.169795 -0.048810 0.027533 +v -0.170167 -0.045313 0.028257 +v -0.170534 -0.041739 0.028223 +v -0.170873 -0.038290 0.027456 +v -0.171346 -0.041742 0.028102 +v -0.171735 -0.045298 0.028025 +v -0.172013 -0.048759 0.027213 +v -0.172157 -0.051924 0.025686 +v -0.172154 -0.054600 0.023496 +v -0.171996 -0.056597 0.020726 +v -0.171691 -0.057736 0.017505 +v -0.171256 -0.057862 0.014033 +v -0.170725 -0.056892 0.010579 +v -0.170140 -0.054856 0.007449 +v -0.169555 -0.052014 0.005020 +v -0.169932 -0.051923 0.005101 +v -0.168349 -0.051982 0.005167 +v -0.170252 -0.051788 0.005241 +v -0.168015 -0.051871 0.005335 +v -0.170494 -0.051619 0.005431 +v -0.167754 -0.051720 0.005547 +v -0.170641 -0.051426 0.005658 +v -0.167585 -0.051540 0.005787 +v -0.170683 -0.051224 0.005907 +v -0.167519 -0.051341 0.006040 +v -0.170618 -0.051026 0.006160 +v -0.167561 -0.051139 0.006289 +v -0.170449 -0.050845 0.006400 +v -0.167708 -0.050947 0.006516 +v -0.170188 -0.050695 0.006612 +v -0.167950 -0.050778 0.006706 +v -0.169853 -0.050584 0.006780 +v -0.168271 -0.050643 0.006847 +v -0.169467 -0.050521 0.006893 +v -0.168648 -0.050551 0.006927 +v -0.169056 -0.050510 0.006943 +v -0.168222 -0.054796 0.007677 +v -0.168224 -0.056796 
0.010852 +v -0.168340 -0.057737 0.014317 +v -0.168558 -0.057600 0.017770 +v -0.168860 -0.056471 0.020952 +v -0.169228 -0.054500 0.023676 +v -0.169643 -0.051859 0.025821 +v -0.170083 -0.048727 0.027306 +v -0.170522 -0.045290 0.028080 +v -0.170932 -0.041744 0.028121 +v -0.171333 -0.045282 0.027960 +v -0.171651 -0.048691 0.027080 +v -0.171863 -0.051779 0.025517 +v -0.171949 -0.054371 0.023333 +v -0.171898 -0.056302 0.020613 +v -0.171708 -0.057412 0.017472 +v -0.171388 -0.057558 0.014083 +v -0.170960 -0.056651 0.010687 +v -0.170459 -0.054699 0.007572 +v -0.168464 -0.053363 0.009025 +v -0.168840 -0.053257 0.009094 +v -0.168145 -0.053521 0.008903 +v -0.167905 -0.053718 0.008735 +v -0.167760 -0.053943 0.008534 +v -0.167720 -0.054178 0.008312 +v -0.167788 -0.054409 0.008085 +v -0.167959 -0.054620 0.007869 +v -0.170699 -0.054501 0.007739 +v -0.170844 -0.054277 0.007941 +v -0.170883 -0.054041 0.008162 +v -0.170815 -0.053810 0.008389 +v -0.170644 -0.053600 0.008606 +v -0.170381 -0.053424 0.008797 +v -0.170045 -0.053295 0.008950 +v -0.169658 -0.053221 0.009055 +v -0.169247 -0.053208 0.009104 +v -0.168047 -0.056538 0.011005 +v -0.168258 -0.057424 0.014387 +v -0.168575 -0.057275 0.017737 +v -0.168976 -0.056184 0.020824 +v -0.169437 -0.054285 0.023488 +v -0.169934 -0.051729 0.025620 +v -0.170438 -0.048671 0.027139 +v -0.170919 -0.045280 0.027979 +v -0.171250 -0.048652 0.027021 +v -0.171503 -0.051673 0.025405 +v -0.171658 -0.054181 0.023208 +v -0.171700 -0.056032 0.020520 +v -0.171618 -0.057093 0.017450 +v -0.171413 -0.057238 0.014145 +v -0.171099 -0.056377 0.010820 +v -0.168694 -0.055261 0.011627 +v -0.169067 -0.055131 0.011663 +v -0.168379 -0.055453 0.011552 +v -0.168145 -0.055694 0.011445 +v -0.168006 -0.055968 0.011311 +v -0.167973 -0.056256 0.011161 +v -0.171132 -0.056089 0.010970 +v -0.171058 -0.055807 0.011127 +v -0.170881 -0.055550 0.011279 +v -0.170614 -0.055335 0.011417 +v -0.170274 -0.055177 0.011531 +v -0.169885 -0.055087 0.011613 +v -0.169473 -0.055071 0.011658 +v 
-0.168284 -0.057104 0.014450 +v -0.168698 -0.056966 0.017696 +v -0.169190 -0.055931 0.020701 +v -0.169731 -0.054115 0.023327 +v -0.170290 -0.051641 0.025471 +v -0.170836 -0.048645 0.027041 +v -0.171103 -0.051612 0.025359 +v -0.171301 -0.054040 0.023129 +v -0.171414 -0.055807 0.020453 +v -0.171427 -0.056803 0.017438 +v -0.171331 -0.056925 0.014216 +v -0.168953 -0.056319 0.014549 +v -0.169323 -0.056174 0.014546 +v -0.168644 -0.056532 0.014534 +v -0.168415 -0.056800 0.014500 +v -0.171148 -0.056639 0.014290 +v -0.170874 -0.056401 0.014363 +v -0.170530 -0.056226 0.014429 +v -0.170139 -0.056126 0.014483 +v -0.169727 -0.056108 0.014523 +v -0.168920 -0.056694 0.017650 +v -0.169488 -0.055729 0.020592 +v -0.170090 -0.053999 0.023204 +v -0.170689 -0.051601 0.025381 +v -0.170903 -0.053961 0.023101 +v -0.171061 -0.055642 0.020417 +v -0.171147 -0.056561 0.017440 +v -0.169589 -0.056331 0.017555 +v -0.169223 -0.056478 0.017602 +v -0.170798 -0.056383 0.017453 +v -0.170405 -0.056282 0.017478 +v -0.169992 -0.056264 0.017513 +v -0.169850 -0.055593 0.020504 +v -0.170489 -0.053947 0.023127 +v -0.170664 -0.055548 0.020413 +v -0.170252 -0.055531 0.020443 +v 0.167884 0.031859 0.024626 +v 0.167166 0.029934 0.021382 +v 0.167578 0.029923 0.021349 +v 0.167471 0.031869 0.024657 +v 0.166827 0.029027 0.017758 +v 0.167239 0.029015 0.017720 +v 0.168283 0.031916 0.024544 +v 0.167978 0.029990 0.021284 +v 0.167724 0.034681 0.027360 +v 0.168137 0.034673 0.027331 +v 0.166768 0.030023 0.021383 +v 0.167072 0.031944 0.024635 +v 0.166480 0.029220 0.014044 +v 0.166892 0.029208 0.014003 +v 0.167641 0.029087 0.017676 +v 0.166432 0.029123 0.017785 +v 0.168639 0.032035 0.024416 +v 0.168338 0.030131 0.021192 +v 0.168534 0.034717 0.027236 +v 0.167907 0.038169 0.029318 +v 0.168320 0.038164 0.029290 +v 0.167323 0.034738 0.027320 +v 0.166712 0.032080 0.024561 +v 0.166412 0.030184 0.021350 +v 0.166150 0.030499 0.010513 +v 0.166562 0.030488 0.010469 +v 0.167296 0.029279 0.013980 +v 0.166087 0.029315 0.014100 +v 
0.168004 0.029239 0.017627 +v 0.166081 0.029296 0.017801 +v 0.168929 0.032209 0.024252 +v 0.168632 0.030337 0.021080 +v 0.168888 0.034807 0.027080 +v 0.168716 0.038192 0.029186 +v 0.168009 0.042096 0.030411 +v 0.168422 0.042094 0.030385 +v 0.167504 0.038205 0.029266 +v 0.166960 0.034842 0.027214 +v 0.166418 0.032268 0.024440 +v 0.166123 0.030406 0.021286 +v 0.165860 0.032759 0.007415 +v 0.166272 0.032750 0.007368 +v 0.166967 0.030552 0.010465 +v 0.165760 0.030584 0.010595 +v 0.167663 0.029429 0.013976 +v 0.165741 0.029485 0.014167 +v 0.168305 0.029460 0.017578 +v 0.165798 0.029534 0.017805 +v 0.168842 0.030592 0.020956 +v 0.169132 0.032426 0.024061 +v 0.169174 0.034940 0.026875 +v 0.169068 0.038250 0.029012 +v 0.168818 0.042105 0.030276 +v 0.168024 0.046203 0.030576 +v 0.168438 0.046204 0.030549 +v 0.167606 0.042110 0.030354 +v 0.167140 0.038272 0.029139 +v 0.166662 0.034984 0.027050 +v 0.165921 0.030673 0.021196 +v 0.166209 0.032495 0.024280 +v 0.166051 0.035706 0.005003 +v 0.165640 0.035713 0.005051 +v 0.166679 0.032803 0.007380 +v 0.165472 0.032829 0.007518 +v 0.167339 0.030687 0.010502 +v 0.165418 0.030737 0.010708 +v 0.167970 0.029647 0.013991 +v 0.165465 0.029720 0.014240 +v 0.168522 0.029735 0.017531 +v 0.165603 0.029822 0.017795 +v 0.168953 0.030881 0.020827 +v 0.169236 0.032671 0.023858 +v 0.169373 0.035104 0.026635 +v 0.169352 0.038335 0.028781 +v 0.169169 0.042129 0.030093 +v 0.168834 0.046198 0.030440 +v 0.168365 0.050226 0.029775 +v 0.167952 0.050223 0.029802 +v 0.167621 0.046195 0.030517 +v 0.167241 0.042137 0.030216 +v 0.166839 0.038363 0.028946 +v 0.166448 0.035156 0.026838 +v 0.165818 0.030968 0.021084 +v 0.166099 0.032744 0.024093 +v 0.166459 0.035751 0.005021 +v 0.166834 0.035845 0.005104 +v 0.165252 0.035773 0.005162 +v 0.167153 0.035983 0.005246 +v 0.164916 0.035881 0.005328 +v 0.167392 0.036154 0.005438 +v 0.164652 0.036029 0.005538 +v 0.167537 0.036347 0.005666 +v 0.164480 0.036208 0.005777 +v 0.167576 0.036550 0.005915 +v 0.164411 0.036405 
0.006030 +v 0.167507 0.036747 0.006168 +v 0.164450 0.036608 0.006279 +v 0.167335 0.036926 0.006407 +v 0.164594 0.036801 0.006507 +v 0.167071 0.037074 0.006617 +v 0.164834 0.036972 0.006699 +v 0.166734 0.037182 0.006783 +v 0.165152 0.037110 0.006841 +v 0.166347 0.037242 0.006894 +v 0.165528 0.037204 0.006924 +v 0.165936 0.037249 0.006942 +v 0.167054 0.032913 0.007451 +v 0.165134 0.032955 0.007670 +v 0.167651 0.030883 0.010577 +v 0.165148 0.030949 0.010846 +v 0.168195 0.029918 0.014025 +v 0.165278 0.030003 0.014315 +v 0.168641 0.030045 0.017489 +v 0.165508 0.030139 0.017773 +v 0.168957 0.031183 0.020703 +v 0.169233 0.032927 0.023655 +v 0.169471 0.035290 0.026375 +v 0.169548 0.038440 0.028507 +v 0.169451 0.042163 0.029847 +v 0.169184 0.046187 0.030255 +v 0.168761 0.050204 0.029669 +v 0.168209 0.053898 0.028107 +v 0.167796 0.053892 0.028135 +v 0.167549 0.050193 0.029747 +v 0.167256 0.046182 0.030378 +v 0.166938 0.042175 0.030008 +v 0.166622 0.038473 0.028700 +v 0.166332 0.035346 0.026593 +v 0.165822 0.031269 0.020960 +v 0.166096 0.033000 0.023890 +v 0.165356 0.034388 0.009020 +v 0.165730 0.034498 0.009090 +v 0.166138 0.034550 0.009102 +v 0.166549 0.034542 0.009055 +v 0.166937 0.034472 0.008953 +v 0.167275 0.034346 0.008801 +v 0.167540 0.034173 0.008611 +v 0.167715 0.033965 0.008395 +v 0.167786 0.033734 0.008168 +v 0.167749 0.033499 0.007946 +v 0.167608 0.033273 0.007743 +v 0.167371 0.033073 0.007574 +v 0.164869 0.033128 0.007860 +v 0.164695 0.033336 0.008076 +v 0.164624 0.033566 0.008303 +v 0.164660 0.033802 0.008525 +v 0.164802 0.034028 0.008727 +v 0.165039 0.034227 0.008896 +v 0.167883 0.031127 0.010685 +v 0.164968 0.031204 0.010999 +v 0.168322 0.030223 0.014075 +v 0.165192 0.030315 0.014386 +v 0.168654 0.030370 0.017457 +v 0.165521 0.030463 0.017740 +v 0.168854 0.031477 0.020591 +v 0.169123 0.033176 0.023468 +v 0.169463 0.035485 0.026114 +v 0.169643 0.038559 0.028210 +v 0.169646 0.042206 0.029556 +v 0.169466 0.046170 0.030007 +v 0.169113 0.050158 0.029491 +v 
0.168606 0.053861 0.028008 +v 0.167980 0.056974 0.025645 +v 0.167567 0.056965 0.025675 +v 0.167394 0.053842 0.028090 +v 0.167185 0.050141 0.029616 +v 0.166953 0.046164 0.030168 +v 0.166719 0.042219 0.029743 +v 0.166503 0.038595 0.028416 +v 0.166324 0.035541 0.026332 +v 0.165933 0.031558 0.020831 +v 0.166199 0.033245 0.023687 +v 0.165599 0.032488 0.011623 +v 0.165971 0.032623 0.011660 +v 0.166376 0.032687 0.011656 +v 0.166788 0.032676 0.011612 +v 0.167178 0.032591 0.011530 +v 0.167520 0.032438 0.011417 +v 0.167790 0.032226 0.011279 +v 0.167970 0.031971 0.011126 +v 0.168048 0.031690 0.010970 +v 0.168018 0.031402 0.010819 +v 0.164890 0.031485 0.011155 +v 0.164920 0.031773 0.011306 +v 0.165055 0.032048 0.011440 +v 0.165287 0.032292 0.011548 +v 0.168344 0.030543 0.014138 +v 0.165214 0.030635 0.014449 +v 0.168559 0.030687 0.017435 +v 0.165640 0.030774 0.017699 +v 0.168651 0.031745 0.020501 +v 0.168914 0.033403 0.023308 +v 0.169347 0.035675 0.025869 +v 0.169630 0.038684 0.027909 +v 0.169739 0.042254 0.029239 +v 0.169660 0.046149 0.029714 +v 0.169396 0.050091 0.029253 +v 0.168959 0.053781 0.027845 +v 0.168377 0.056921 0.025558 +v 0.167277 0.059225 0.022577 +v 0.167690 0.059236 0.022544 +v 0.167166 0.056895 0.025646 +v 0.167031 0.053751 0.027976 +v 0.166883 0.050068 0.029416 +v 0.166734 0.046143 0.029900 +v 0.166598 0.042269 0.029440 +v 0.166490 0.038719 0.028116 +v 0.166422 0.035727 0.026072 +v 0.166142 0.031814 0.020707 +v 0.166403 0.033462 0.023497 +v 0.165872 0.031429 0.014548 +v 0.166240 0.031579 0.014544 +v 0.166644 0.031650 0.014521 +v 0.167056 0.031638 0.014480 +v 0.167448 0.031543 0.014424 +v 0.167794 0.031373 0.014357 +v 0.168071 0.031138 0.014284 +v 0.168258 0.030855 0.014209 +v 0.165341 0.030941 0.014499 +v 0.165566 0.031212 0.014533 +v 0.168364 0.030975 0.017425 +v 0.165857 0.031049 0.017652 +v 0.168619 0.033591 0.023187 +v 0.168362 0.031966 0.020437 +v 0.169133 0.035847 0.025657 +v 0.169511 0.038805 0.027626 +v 0.169725 0.042305 0.028918 +v 0.169753 0.046126 
0.029394 +v 0.169591 0.050007 0.028971 +v 0.169244 0.053665 0.027629 +v 0.168732 0.056811 0.025419 +v 0.168089 0.059172 0.022473 +v 0.166947 0.060504 0.019046 +v 0.167360 0.060516 0.019010 +v 0.166879 0.059140 0.022569 +v 0.166806 0.056770 0.025559 +v 0.166732 0.053625 0.027800 +v 0.166665 0.049980 0.029161 +v 0.166613 0.046119 0.029594 +v 0.166584 0.042320 0.029119 +v 0.166585 0.038838 0.027819 +v 0.166621 0.035891 0.025832 +v 0.166693 0.033636 0.023332 +v 0.166437 0.032019 0.020595 +v 0.166521 0.031422 0.017554 +v 0.166158 0.031270 0.017603 +v 0.166923 0.031494 0.017510 +v 0.167335 0.031482 0.017472 +v 0.167730 0.031386 0.017445 +v 0.168081 0.031213 0.017429 +v 0.168260 0.033727 0.023113 +v 0.168007 0.032127 0.020404 +v 0.168835 0.035989 0.025493 +v 0.169294 0.038915 0.027379 +v 0.169604 0.042354 0.028614 +v 0.169739 0.046101 0.029070 +v 0.169685 0.049912 0.028665 +v 0.169442 0.053520 0.027375 +v 0.169021 0.056651 0.025239 +v 0.168447 0.059038 0.022368 +v 0.167761 0.060445 0.018958 +v 0.166600 0.060697 0.015333 +v 0.167012 0.060710 0.015293 +v 0.166551 0.060409 0.019064 +v 0.166522 0.058987 0.022521 +v 0.166510 0.056597 0.025422 +v 0.166517 0.053474 0.027574 +v 0.166545 0.049884 0.028869 +v 0.166598 0.046094 0.029270 +v 0.166677 0.042368 0.028802 +v 0.166781 0.038944 0.027545 +v 0.166907 0.036024 0.025627 +v 0.167049 0.033755 0.023204 +v 0.166797 0.032160 0.020503 +v 0.167197 0.032227 0.020438 +v 0.167609 0.032216 0.020405 +v 0.167860 0.033803 0.023091 +v 0.168473 0.036093 0.025387 +v 0.168993 0.039007 0.027186 +v 0.169385 0.042399 0.028350 +v 0.169618 0.046078 0.028764 +v 0.169672 0.049813 0.028355 +v 0.169539 0.053357 0.027100 +v 0.169223 0.056451 0.025029 +v 0.168740 0.058842 0.022236 +v 0.168123 0.060295 0.018894 +v 0.167415 0.060637 0.015262 +v 0.166262 0.059790 0.011708 +v 0.166674 0.059801 0.011665 +v 0.166207 0.060601 0.015379 +v 0.166199 0.060239 0.019063 +v 0.166231 0.058775 0.022435 +v 0.166299 0.056388 0.025242 +v 0.166400 0.053308 0.027313 +v 
0.166532 0.049785 0.028559 +v 0.166691 0.046071 0.028950 +v 0.166872 0.042411 0.028510 +v 0.167065 0.039029 0.027313 +v 0.167261 0.036114 0.025471 +v 0.167447 0.033812 0.023122 +v 0.168071 0.036150 0.025347 +v 0.168629 0.039073 0.027059 +v 0.169082 0.042436 0.028141 +v 0.169398 0.046056 0.028496 +v 0.169552 0.049716 0.028063 +v 0.169529 0.053186 0.026823 +v 0.169325 0.056226 0.024804 +v 0.168948 0.058598 0.022087 +v 0.168421 0.060078 0.018822 +v 0.167782 0.060486 0.015243 +v 0.167078 0.059734 0.011654 +v 0.165956 0.057856 0.008433 +v 0.166367 0.057865 0.008387 +v 0.165871 0.059701 0.011781 +v 0.165859 0.060428 0.015428 +v 0.165914 0.060004 0.019042 +v 0.166026 0.058521 0.022319 +v 0.166187 0.056158 0.025032 +v 0.166390 0.053137 0.027036 +v 0.166626 0.049690 0.028253 +v 0.166885 0.046050 0.028656 +v 0.167154 0.042445 0.028265 +v 0.167417 0.039087 0.027139 +v 0.167658 0.036158 0.025376 +v 0.168226 0.039110 0.027008 +v 0.168717 0.042463 0.028004 +v 0.169096 0.046038 0.028286 +v 0.169334 0.049629 0.027808 +v 0.169412 0.053019 0.026562 +v 0.169319 0.055990 0.024579 +v 0.169056 0.058322 0.021930 +v 0.168636 0.059807 0.018747 +v 0.168086 0.060265 0.015236 +v 0.167449 0.059593 0.011678 +v 0.166774 0.057808 0.008394 +v 0.166114 0.055051 0.005682 +v 0.165703 0.055044 0.005730 +v 0.165567 0.057780 0.008529 +v 0.165528 0.059540 0.011879 +v 0.165581 0.060190 0.015477 +v 0.165716 0.059721 0.019003 +v 0.165920 0.058239 0.022179 +v 0.166181 0.055922 0.024807 +v 0.166487 0.052974 0.026761 +v 0.166821 0.049606 0.027971 +v 0.167167 0.046034 0.028408 +v 0.167505 0.042469 0.028081 +v 0.167813 0.039114 0.027035 +v 0.168314 0.042478 0.027946 +v 0.168730 0.046025 0.028146 +v 0.169032 0.049556 0.027608 +v 0.169197 0.052868 0.026336 +v 0.169207 0.055760 0.024369 +v 0.169057 0.058034 0.021777 +v 0.168752 0.059501 0.018675 +v 0.168308 0.059989 0.015242 +v 0.167759 0.059388 0.011733 +v 0.167147 0.057689 0.008453 +v 0.166522 0.055008 0.005702 +v 0.165932 0.051560 0.003723 +v 0.165520 0.051556 
0.003773 +v 0.165316 0.054986 0.005844 +v 0.165228 0.057644 0.008668 +v 0.165255 0.059318 0.011995 +v 0.165391 0.059903 0.015523 +v 0.165618 0.059409 0.018949 +v 0.165921 0.057951 0.022026 +v 0.166283 0.055697 0.024582 +v 0.166684 0.052829 0.026507 +v 0.167104 0.049539 0.027733 +v 0.167518 0.046022 0.028224 +v 0.167900 0.042480 0.027972 +v 0.168327 0.046018 0.028088 +v 0.168668 0.049503 0.027476 +v 0.168897 0.052743 0.026160 +v 0.168996 0.055551 0.024189 +v 0.168952 0.057753 0.021637 +v 0.168761 0.059181 0.018609 +v 0.168433 0.059679 0.015260 +v 0.167988 0.059132 0.011816 +v 0.167463 0.057515 0.008562 +v 0.166899 0.054917 0.005789 +v 0.166340 0.051533 0.003752 +v 0.165829 0.047631 0.002628 +v 0.165418 0.047629 0.002679 +v 0.165134 0.051519 0.003898 +v 0.164980 0.054883 0.006015 +v 0.164961 0.057456 0.008842 +v 0.165073 0.059051 0.012122 +v 0.165302 0.059586 0.015562 +v 0.165628 0.059089 0.018883 +v 0.166030 0.057676 0.021869 +v 0.166485 0.055497 0.024372 +v 0.166969 0.052713 0.026291 +v 0.167456 0.049492 0.027555 +v 0.167914 0.046017 0.028114 +v 0.168265 0.049474 0.027422 +v 0.168534 0.052652 0.026046 +v 0.168700 0.055378 0.024051 +v 0.168747 0.057498 0.021521 +v 0.168664 0.058869 0.018555 +v 0.168451 0.059354 0.015290 +v 0.168121 0.058843 0.011923 +v 0.167698 0.057298 0.008711 +v 0.167218 0.054785 0.005938 +v 0.166718 0.051474 0.003857 +v 0.166238 0.047619 0.002662 +v 0.165819 0.044834 0.002516 +v 0.165408 0.044818 0.002566 +v 0.165033 0.047614 0.002810 +v 0.164800 0.051453 0.004090 +v 0.164717 0.054740 0.006232 +v 0.164785 0.057230 0.009038 +v 0.164992 0.058757 0.012250 +v 0.165320 0.059261 0.015591 +v 0.165744 0.058783 0.018810 +v 0.166237 0.057432 0.021720 +v 0.166773 0.055336 0.024192 +v 0.167322 0.052633 0.026129 +v 0.167852 0.049470 0.027449 +v 0.168132 0.052602 0.026002 +v 0.168340 0.055252 0.023965 +v 0.168456 0.057287 0.021435 +v 0.168466 0.058586 0.018516 +v 0.168363 0.059037 0.015329 +v 0.168148 0.058541 0.012044 +v 0.167838 0.057053 0.008892 +v 
0.167458 0.054620 0.006137 +v 0.167040 0.051390 0.004033 +v 0.166617 0.047596 0.002777 +v 0.166228 0.044767 0.002548 +v 0.163467 0.044551 -0.020912 +v 0.163880 0.044568 -0.020948 +v 0.165022 0.044720 0.002694 +v 0.164699 0.047587 0.003013 +v 0.164540 0.051361 0.004335 +v 0.164546 0.054568 0.006480 +v 0.164711 0.056980 0.009242 +v 0.165019 0.058455 0.012372 +v 0.165445 0.058950 0.015610 +v 0.165958 0.058512 0.018735 +v 0.166530 0.057236 0.021588 +v 0.167128 0.055226 0.024053 +v 0.167719 0.052595 0.026030 +v 0.167939 0.055183 0.023936 +v 0.168099 0.057133 0.021387 +v 0.168181 0.058351 0.018495 +v 0.168172 0.058750 0.015374 +v 0.168068 0.058247 0.012172 +v 0.167872 0.056797 0.009091 +v 0.167603 0.054434 0.006374 +v 0.167283 0.051284 0.004265 +v 0.166940 0.047561 0.002966 +v 0.166606 0.044622 0.002658 +v 0.164283 0.044500 -0.020983 +v 0.160519 0.044551 -0.052921 +v 0.160897 0.044568 -0.053335 +v 0.163074 0.044452 -0.020877 +v 0.164688 0.044546 0.002891 +v 0.164441 0.047550 0.003274 +v 0.164372 0.051251 0.004618 +v 0.164478 0.054378 0.006742 +v 0.164746 0.056724 0.009441 +v 0.165152 0.058166 0.012478 +v 0.165667 0.058675 0.015616 +v 0.166257 0.058295 0.018664 +v 0.166888 0.057101 0.021483 +v 0.167526 0.055174 0.023966 +v 0.167700 0.057048 0.021379 +v 0.167828 0.058181 0.018494 +v 0.167894 0.058511 0.015424 +v 0.167885 0.057980 0.012299 +v 0.167799 0.056548 0.009296 +v 0.167643 0.054239 0.006633 +v 0.167431 0.051165 0.004540 +v 0.167185 0.047518 0.003217 +v 0.166928 0.044408 0.002840 +v 0.164650 0.044353 -0.021014 +v 0.161266 0.044500 -0.053740 +v -0.005122 0.044551 -0.052927 +v -0.005422 0.044568 -0.053341 +v 0.160159 0.044452 -0.052526 +v 0.162726 0.044276 -0.020845 +v 0.164428 0.044309 0.003144 +v 0.164274 0.047505 0.003575 +v 0.164307 0.051129 0.004918 +v 0.164518 0.054184 0.007000 +v 0.164885 0.056479 0.009622 +v 0.165381 0.057910 0.012561 +v 0.165971 0.058454 0.015609 +v 0.166619 0.058145 0.018599 +v 0.167287 0.057037 0.021412 +v 0.167432 0.058086 0.018511 +v 
0.167546 0.058338 0.015473 +v 0.167613 0.057758 0.012415 +v 0.167623 0.056321 0.009491 +v 0.167574 0.054050 0.006895 +v 0.167475 0.051041 0.004838 +v 0.167335 0.047470 0.003511 +v 0.167172 0.044139 0.003081 +v 0.164955 0.044136 -0.021039 +v 0.161601 0.044353 -0.054108 +v -0.005702 0.044500 -0.053746 +v -0.021297 0.031868 -0.002540 +v -0.020997 0.031851 -0.002126 +v -0.004820 0.044452 -0.052532 +v 0.159840 0.044276 -0.052177 +v 0.162448 0.044036 -0.020820 +v 0.164261 0.044024 0.003435 +v 0.164212 0.047455 0.003895 +v 0.164351 0.051005 0.005216 +v 0.164663 0.053998 0.007237 +v 0.165121 0.056263 0.009771 +v 0.165692 0.057705 0.012617 +v 0.166338 0.058302 0.015589 +v 0.167020 0.058074 0.018548 +v 0.167153 0.058242 0.015519 +v 0.167270 0.057597 0.012513 +v 0.167356 0.056134 0.009665 +v 0.167403 0.053878 0.007143 +v 0.167410 0.050919 0.005138 +v 0.167381 0.047419 0.003829 +v 0.167322 0.043835 0.003365 +v 0.165178 0.043865 -0.021057 +v 0.161881 0.044136 -0.054414 +v -0.005940 0.044353 -0.054114 +v -0.021577 0.031800 -0.002945 +v -0.071760 0.031851 -0.002126 +v -0.071459 0.031868 -0.002540 +v -0.020695 0.031752 -0.001732 +v -0.004537 0.044276 -0.052183 +v 0.159586 0.044036 -0.051898 +v 0.162259 0.043749 -0.020801 +v 0.164198 0.043712 0.003745 +v 0.164257 0.047405 0.004213 +v 0.164499 0.050886 0.005490 +v 0.164903 0.053833 0.007436 +v 0.165436 0.056088 0.009880 +v 0.166062 0.057564 0.012640 +v 0.166741 0.058230 0.015559 +v 0.166879 0.057508 0.012587 +v 0.167017 0.055997 0.009804 +v 0.167141 0.053735 0.007360 +v 0.167243 0.050809 0.005420 +v 0.167318 0.047370 0.004149 +v 0.167366 0.043516 0.003673 +v 0.165302 0.043558 -0.021066 +v 0.162084 0.043865 -0.054638 +v -0.006121 0.044136 -0.054420 +v -0.021815 0.031653 -0.003313 +v -0.071180 0.031800 -0.002945 +v -0.087635 0.044551 -0.052926 +v -0.087334 0.044568 -0.053340 +v -0.072062 0.031752 -0.001732 +v -0.020412 0.031576 -0.001382 +v -0.004293 0.044036 -0.051904 +v 0.159412 0.043749 -0.051708 +v 0.162170 0.043433 -0.020792 +v 
0.164242 0.043393 0.004052 +v 0.164408 0.047356 0.004507 +v 0.164743 0.050780 0.005723 +v 0.165222 0.053701 0.007585 +v 0.165810 0.055969 0.009939 +v 0.166467 0.057497 0.012630 +v 0.166628 0.055922 0.009900 +v 0.166804 0.053631 0.007531 +v 0.166982 0.050718 0.005666 +v 0.167152 0.047325 0.004450 +v 0.167302 0.043204 0.003982 +v 0.165322 0.043235 -0.021066 +v 0.162199 0.043558 -0.054763 +v -0.006233 0.043865 -0.054644 +v -0.021996 0.031436 -0.003619 +v -0.070942 0.031653 -0.003313 +v -0.087055 0.044500 -0.053744 +v -0.163782 0.044568 -0.053335 +v -0.163405 0.044551 -0.052921 +v -0.087937 0.044452 -0.052531 +v -0.072345 0.031576 -0.001382 +v -0.020168 0.031337 -0.001103 +v -0.004104 0.043749 -0.051713 +v 0.159331 0.043433 -0.051619 +v 0.162189 0.043110 -0.020792 +v 0.164392 0.043089 0.004336 +v 0.164652 0.047313 0.004758 +v 0.165064 0.050696 0.005898 +v 0.165598 0.053610 0.007672 +v 0.166216 0.055912 0.009947 +v 0.166417 0.053574 0.007644 +v 0.166648 0.050651 0.005858 +v 0.166893 0.047288 0.004711 +v 0.167135 0.042919 0.004274 +v 0.165233 0.042919 -0.021057 +v 0.162216 0.043235 -0.054782 +v -0.006268 0.043558 -0.054769 +v -0.022108 0.031165 -0.003843 +v -0.070760 0.031436 -0.003619 +v -0.086817 0.044353 -0.054112 +v -0.164151 0.044500 -0.053740 +v -0.168926 0.044568 0.002517 +v -0.168516 0.044551 0.002570 +v -0.163044 0.044452 -0.052526 +v -0.088220 0.044276 -0.052182 +v -0.072589 0.031337 -0.001103 +v -0.019979 0.031049 -0.000913 +v -0.003983 0.043433 -0.051625 +v 0.159348 0.043110 -0.051638 +v 0.162314 0.042803 -0.020801 +v 0.164636 0.042820 0.004578 +v 0.164975 0.047279 0.004947 +v 0.165442 0.050638 0.006004 +v 0.166006 0.053567 0.007692 +v 0.166262 0.050615 0.005983 +v 0.166560 0.047261 0.004914 +v 0.166876 0.042682 0.004526 +v 0.165044 0.042632 -0.021038 +v 0.162135 0.042919 -0.054693 +v -0.006224 0.043235 -0.054788 +v -0.022143 0.030858 -0.003968 +v -0.070649 0.031165 -0.003843 +v -0.086636 0.044136 -0.054419 +v -0.164487 0.044353 -0.054108 +v -0.169335 
0.044500 0.002544 +v -0.168526 0.047348 0.002682 +v -0.168936 0.047351 0.002628 +v -0.168131 0.044452 0.002702 +v -0.162726 0.044276 -0.052177 +v -0.088464 0.044036 -0.051902 +v -0.072778 0.031049 -0.000913 +v -0.019858 0.030733 -0.000824 +v -0.003939 0.043110 -0.051643 +v 0.159462 0.042803 -0.051763 +v 0.162537 0.042532 -0.020819 +v 0.164958 0.042606 0.004760 +v 0.165354 0.047255 0.005062 +v 0.165851 0.050610 0.006033 +v 0.166174 0.047246 0.005045 +v 0.166542 0.042509 0.004723 +v 0.164766 0.042392 -0.021013 +v 0.161961 0.042632 -0.054503 +v -0.006103 0.042919 -0.054699 +v -0.022099 0.030535 -0.003987 +v -0.070614 0.030858 -0.003968 +v -0.086524 0.043865 -0.054642 +v -0.164766 0.044136 -0.054414 +v -0.169715 0.044353 0.002651 +v -0.169346 0.047341 0.002658 +v -0.168622 0.051274 0.003776 +v -0.169033 0.051281 0.003723 +v -0.168141 0.047332 0.002817 +v -0.167799 0.044276 0.002901 +v -0.162471 0.044036 -0.051898 +v -0.088653 0.043749 -0.051712 +v -0.072899 0.030733 -0.000824 +v -0.019814 0.030410 -0.000843 +v -0.003974 0.042803 -0.051769 +v 0.159666 0.042532 -0.051986 +v 0.162842 0.042315 -0.020844 +v 0.165336 0.042461 0.004870 +v 0.165763 0.047244 0.005096 +v 0.166156 0.042410 0.004851 +v 0.164418 0.042217 -0.020981 +v 0.161707 0.042392 -0.054223 +v -0.005914 0.042632 -0.054509 +v -0.021978 0.030219 -0.003898 +v -0.070658 0.030535 -0.003987 +v -0.086489 0.043558 -0.054767 +v -0.164970 0.043865 -0.054638 +v -0.170039 0.044136 0.002830 +v -0.169725 0.047318 0.002770 +v -0.169442 0.051256 0.003749 +v -0.168796 0.054761 0.005734 +v -0.169207 0.054772 0.005683 +v -0.168238 0.051235 0.003904 +v -0.167810 0.047305 0.003023 +v -0.167542 0.044036 0.003156 +v -0.162297 0.043749 -0.051708 +v -0.088774 0.043433 -0.051623 +v -0.072943 0.030410 -0.000843 +v -0.019849 0.030103 -0.000968 +v -0.004086 0.042532 -0.051992 +v 0.159945 0.042315 -0.052293 +v 0.163209 0.042168 -0.020875 +v 0.165745 0.042394 0.004901 +v 0.164024 0.042117 -0.020946 +v 0.161388 0.042217 -0.053874 +v -0.005670 
0.042392 -0.054229 +v -0.021789 0.029932 -0.003708 +v -0.070779 0.030219 -0.003898 +v -0.086533 0.043235 -0.054786 +v -0.165084 0.043558 -0.054763 +v -0.170285 0.043865 0.003069 +v -0.170050 0.047284 0.002957 +v -0.169821 0.051199 0.003852 +v -0.169615 0.054732 0.005701 +v -0.169035 0.057573 0.008437 +v -0.169447 0.057587 0.008390 +v -0.168410 0.054701 0.005850 +v -0.167905 0.051167 0.004099 +v -0.167553 0.047267 0.003286 +v -0.167377 0.043749 0.003449 +v -0.162216 0.043433 -0.051619 +v -0.088818 0.043110 -0.051642 +v -0.072908 0.030103 -0.000968 +v -0.019960 0.029832 -0.001191 +v -0.004267 0.042315 -0.052298 +v 0.160281 0.042168 -0.052661 +v 0.163612 0.042101 -0.020910 +v 0.161028 0.042117 -0.053479 +v -0.005387 0.042217 -0.053880 +v -0.021545 0.029692 -0.003428 +v -0.070968 0.029932 -0.003708 +v -0.086654 0.042919 -0.054697 +v -0.165102 0.043235 -0.054782 +v -0.170437 0.043558 0.003352 +v -0.170297 0.047241 0.003205 +v -0.170144 0.051116 0.004024 +v -0.169993 0.054644 0.005786 +v -0.169854 0.057534 0.008395 +v -0.169737 0.059523 0.011670 +v -0.169326 0.059506 0.011713 +v -0.168648 0.057493 0.008535 +v -0.168075 0.054594 0.006023 +v -0.167647 0.051074 0.004347 +v -0.167390 0.047222 0.003588 +v -0.167317 0.043433 0.003759 +v -0.162233 0.043110 -0.051638 +v -0.088783 0.042803 -0.051767 +v -0.072796 0.029832 -0.001191 +v -0.020142 0.029615 -0.001498 +v -0.004505 0.042168 -0.052666 +v 0.160650 0.042101 -0.053065 +v -0.005085 0.042117 -0.053485 +v -0.021262 0.029517 -0.003079 +v -0.071212 0.029692 -0.003428 +v -0.086843 0.042632 -0.054507 +v -0.165021 0.042919 -0.054693 +v -0.170484 0.043235 0.003659 +v -0.170450 0.047193 0.003497 +v -0.170390 0.051012 0.004255 +v -0.170314 0.054515 0.005932 +v -0.170229 0.057419 0.008453 +v -0.170143 0.059461 0.011659 +v -0.170059 0.060432 0.015300 +v -0.169647 0.060414 0.015338 +v -0.168936 0.059412 0.011786 +v -0.168310 0.057353 0.008675 +v -0.167815 0.054449 0.006242 +v -0.167482 0.050963 0.004630 +v -0.167330 0.047172 0.003909 +v 
-0.167364 0.043110 0.004066 +v -0.162348 0.042803 -0.051763 +v -0.088671 0.042532 -0.051991 +v -0.072615 0.029615 -0.001498 +v -0.020380 0.029468 -0.001866 +v -0.004784 0.042101 -0.053071 +v -0.020960 0.029417 -0.002684 +v -0.071495 0.029517 -0.003079 +v -0.087087 0.042392 -0.054227 +v -0.164847 0.042632 -0.054503 +v -0.170424 0.042919 0.003969 +v -0.170498 0.047143 0.003815 +v -0.170541 0.050894 0.004528 +v -0.170556 0.054352 0.006130 +v -0.170546 0.057248 0.008560 +v -0.170515 0.059325 0.011682 +v -0.170463 0.060365 0.015271 +v -0.170388 0.060238 0.019019 +v -0.169976 0.060220 0.019052 +v -0.169254 0.060312 0.015383 +v -0.168595 0.059247 0.011884 +v -0.168046 0.057162 0.008850 +v -0.167647 0.054276 0.006490 +v -0.167421 0.050841 0.004931 +v -0.167378 0.047122 0.004227 +v -0.167516 0.042803 0.004348 +v -0.162552 0.042532 -0.051986 +v -0.088490 0.042315 -0.052297 +v -0.072377 0.029468 -0.001866 +v -0.020659 0.029401 -0.002270 +v -0.071797 0.029417 -0.002684 +v -0.087370 0.042217 -0.053878 +v -0.164592 0.042392 -0.054223 +v -0.170259 0.042632 0.004261 +v -0.170439 0.047093 0.004136 +v -0.170588 0.050769 0.004826 +v -0.170704 0.054167 0.006366 +v -0.170785 0.057034 0.008709 +v -0.170828 0.059123 0.011737 +v -0.170831 0.060218 0.015252 +v -0.170791 0.060172 0.018970 +v -0.170702 0.058958 0.022556 +v -0.170289 0.058942 0.022584 +v -0.169581 0.060120 0.019067 +v -0.168909 0.060135 0.015431 +v -0.168325 0.059022 0.012000 +v -0.167873 0.056934 0.009046 +v -0.167582 0.054085 0.006753 +v -0.167467 0.050717 0.005228 +v -0.167531 0.047073 0.004520 +v -0.167762 0.042532 0.004587 +v -0.162831 0.042315 -0.052293 +v -0.088252 0.042168 -0.052665 +v -0.072098 0.029401 -0.002270 +v -0.087672 0.042117 -0.053483 +v -0.164274 0.042217 -0.053874 +v -0.170002 0.042392 0.004516 +v -0.170275 0.047048 0.004438 +v -0.170526 0.050647 0.005126 +v -0.170747 0.053972 0.006624 +v -0.170928 0.056790 0.008889 +v -0.171060 0.058870 0.011820 +v -0.171139 0.060001 0.015246 +v -0.171155 0.060028 
0.018909 +v -0.171102 0.058899 0.022489 +v -0.170976 0.056695 0.025658 +v -0.170563 0.056682 0.025682 +v -0.169891 0.058852 0.022571 +v -0.169231 0.059945 0.019063 +v -0.168633 0.059892 0.015478 +v -0.168146 0.058752 0.012127 +v -0.167803 0.056683 0.009250 +v -0.167625 0.053891 0.007011 +v -0.167618 0.050598 0.005502 +v -0.167778 0.047031 0.004768 +v -0.168086 0.042315 0.004766 +v -0.163167 0.042168 -0.052661 +v -0.087972 0.042101 -0.053069 +v -0.163913 0.042117 -0.053479 +v -0.169670 0.042217 0.004716 +v -0.170019 0.047010 0.004701 +v -0.170361 0.050536 0.005410 +v -0.170682 0.053782 0.006887 +v -0.170965 0.056535 0.009088 +v -0.171197 0.058583 0.011926 +v -0.171364 0.059729 0.015252 +v -0.171457 0.059814 0.018839 +v -0.171463 0.058769 0.022387 +v -0.171376 0.056647 0.025576 +v -0.170781 0.053610 0.028142 +v -0.171195 0.053620 0.028121 +v -0.170164 0.056609 0.025647 +v -0.169537 0.058695 0.022519 +v -0.168949 0.059707 0.019039 +v -0.168447 0.059602 0.015523 +v -0.168069 0.058457 0.012255 +v -0.167841 0.056427 0.009449 +v -0.167773 0.053706 0.007247 +v -0.167864 0.050494 0.005733 +v -0.168103 0.046997 0.004954 +v -0.168466 0.042168 0.004873 +v -0.163536 0.042101 -0.053065 +v -0.169286 0.042117 0.004847 +v -0.169687 0.046982 0.004907 +v -0.170103 0.050443 0.005658 +v -0.170514 0.053608 0.007135 +v -0.170895 0.056284 0.009292 +v -0.171227 0.058282 0.012047 +v -0.171493 0.059420 0.015271 +v -0.171676 0.059546 0.018765 +v -0.171760 0.058577 0.022259 +v -0.171734 0.056540 0.025443 +v -0.171593 0.053585 0.028029 +v -0.170929 0.049942 0.029810 +v -0.171343 0.049947 0.029790 +v -0.170380 0.053557 0.028091 +v -0.169806 0.056480 0.025556 +v -0.169249 0.058480 0.022430 +v -0.168755 0.059421 0.018999 +v -0.168362 0.059284 0.015561 +v -0.168100 0.058155 0.012376 +v -0.167984 0.056184 0.009629 +v -0.168016 0.053543 0.007445 +v -0.168187 0.050411 0.005905 +v -0.168483 0.046974 0.005066 +v -0.168875 0.042101 0.004901 +v -0.169303 0.046967 0.005042 +v -0.169771 0.050375 0.005852 +v 
-0.170254 0.053463 0.007354 +v -0.170722 0.056055 0.009488 +v -0.171151 0.057986 0.012175 +v -0.171516 0.059096 0.015300 +v -0.171796 0.059242 0.018693 +v -0.171972 0.058335 0.022112 +v -0.172026 0.056383 0.025267 +v -0.171949 0.053507 0.027872 +v -0.171740 0.049927 0.029691 +v -0.170997 0.045923 0.030584 +v -0.171411 0.045924 0.030565 +v -0.170527 0.049911 0.029748 +v -0.170020 0.053463 0.027971 +v -0.169514 0.056304 0.025413 +v -0.169048 0.058223 0.022311 +v -0.168662 0.059108 0.018944 +v -0.168385 0.058960 0.015591 +v -0.168237 0.057868 0.012481 +v -0.168222 0.055970 0.009778 +v -0.168337 0.053413 0.007591 +v -0.168566 0.050355 0.006008 +v -0.168892 0.046964 0.005096 +v -0.169386 0.050336 0.005980 +v -0.169919 0.053357 0.007527 +v -0.170458 0.055865 0.009662 +v -0.170972 0.057716 0.012301 +v -0.171431 0.058778 0.015339 +v -0.171810 0.058922 0.018627 +v -0.172085 0.058061 0.021956 +v -0.172233 0.056185 0.025059 +v -0.172238 0.053393 0.027660 +v -0.172095 0.049882 0.029519 +v -0.171809 0.045919 0.030463 +v -0.171397 0.041813 0.030400 +v -0.170983 0.041816 0.030419 +v -0.170595 0.045915 0.030518 +v -0.170165 0.049856 0.029610 +v -0.169724 0.053336 0.027790 +v -0.169307 0.056093 0.025230 +v -0.168948 0.057940 0.022170 +v -0.168676 0.058788 0.018878 +v -0.168514 0.058651 0.015609 +v -0.168469 0.057615 0.012565 +v -0.168540 0.055799 0.009884 +v -0.168714 0.053325 0.007676 +v -0.168975 0.050329 0.006034 +v -0.169533 0.053296 0.007643 +v -0.170121 0.055725 0.009803 +v -0.170702 0.057491 0.012418 +v -0.171245 0.058487 0.015383 +v -0.171717 0.058609 0.018572 +v -0.172092 0.057773 0.021802 +v -0.172340 0.055961 0.024835 +v -0.172441 0.053250 0.027409 +v -0.172382 0.049816 0.029286 +v -0.172163 0.045908 0.030285 +v -0.171794 0.041824 0.030299 +v -0.171300 0.037883 0.029305 +v -0.170886 0.037890 0.029325 +v -0.170581 0.041832 0.030354 +v -0.170233 0.045901 0.030372 +v -0.169867 0.049783 0.029405 +v -0.169513 0.053184 0.027560 +v -0.169200 0.055862 0.025019 +v -0.168954 
0.057653 0.022016 +v -0.168797 0.058484 0.018805 +v -0.168739 0.058379 0.015616 +v -0.168782 0.057414 0.012620 +v -0.168915 0.055683 0.009943 +v -0.169122 0.053286 0.007693 +v -0.169733 0.055645 0.009901 +v -0.170361 0.057326 0.012516 +v -0.170969 0.058245 0.015431 +v -0.171523 0.058323 0.018531 +v -0.171991 0.057491 0.021661 +v -0.172340 0.055726 0.024610 +v -0.172544 0.053088 0.027136 +v -0.172583 0.049733 0.029008 +v -0.172449 0.045891 0.030042 +v -0.172148 0.041847 0.030122 +v -0.171698 0.037909 0.029208 +v -0.171126 0.034392 0.027345 +v -0.170712 0.034403 0.027367 +v -0.170484 0.037929 0.029267 +v -0.170218 0.041860 0.030210 +v -0.169934 0.045883 0.030156 +v -0.169654 0.049694 0.029146 +v -0.169402 0.053016 0.027298 +v -0.169200 0.055627 0.024794 +v -0.169067 0.057379 0.021860 +v -0.169015 0.058216 0.018732 +v -0.169047 0.058162 0.015609 +v -0.169154 0.057277 0.012643 +v -0.169322 0.055631 0.009948 +v -0.169971 0.057232 0.012589 +v -0.170624 0.058067 0.015479 +v -0.171242 0.058085 0.018508 +v -0.171790 0.057234 0.021542 +v -0.172234 0.055495 0.024399 +v -0.172540 0.052917 0.026859 +v -0.172683 0.049639 0.028704 +v -0.172649 0.045871 0.029752 +v -0.172435 0.041881 0.029882 +v -0.172053 0.037965 0.029041 +v -0.171525 0.034432 0.027256 +v -0.170886 0.031578 0.024639 +v -0.170473 0.031592 0.024664 +v -0.170312 0.034464 0.027321 +v -0.170123 0.037997 0.029134 +v -0.169920 0.041898 0.029996 +v -0.169719 0.045861 0.029885 +v -0.169540 0.049597 0.028852 +v -0.169398 0.052846 0.027021 +v -0.169307 0.055402 0.024570 +v -0.169280 0.057137 0.021713 +v -0.169317 0.058003 0.018662 +v -0.169415 0.058015 0.015591 +v -0.169560 0.057215 0.012632 +v -0.170231 0.057966 0.015523 +v -0.170892 0.057910 0.018504 +v -0.171503 0.057019 0.021453 +v -0.172027 0.055284 0.024216 +v -0.172428 0.052750 0.026596 +v -0.172676 0.049540 0.028394 +v -0.172748 0.045848 0.029434 +v -0.172635 0.041923 0.029594 +v -0.172341 0.038048 0.028814 +v -0.171881 0.034520 0.027106 +v -0.171286 0.031630 
0.024562 +v -0.170183 0.029658 0.021389 +v -0.170596 0.029641 0.021359 +v -0.170074 0.031671 0.024637 +v -0.169952 0.034570 0.027210 +v -0.169826 0.038090 0.028935 +v -0.169705 0.041943 0.029727 +v -0.169604 0.045837 0.029576 +v -0.169532 0.049498 0.028542 +v -0.169500 0.052683 0.026747 +v -0.169514 0.055205 0.024363 +v -0.169576 0.056945 0.021585 +v -0.169682 0.057858 0.018600 +v -0.169819 0.057948 0.015562 +v -0.170497 0.057810 0.018519 +v -0.171148 0.056861 0.021401 +v -0.171735 0.055108 0.024074 +v -0.172218 0.052598 0.026367 +v -0.172561 0.049443 0.028099 +v -0.172739 0.045823 0.029110 +v -0.172734 0.041971 0.029279 +v -0.172542 0.038153 0.028543 +v -0.172172 0.034650 0.026906 +v -0.171645 0.031746 0.024439 +v -0.170997 0.029703 0.021298 +v -0.169862 0.028751 0.017764 +v -0.170274 0.028733 0.017729 +v -0.169786 0.029752 0.021385 +v -0.169717 0.031812 0.024557 +v -0.169658 0.034715 0.027040 +v -0.169613 0.038201 0.028685 +v -0.169590 0.041992 0.029422 +v -0.169595 0.045813 0.029252 +v -0.169632 0.049404 0.028238 +v -0.169703 0.052540 0.026496 +v -0.169807 0.055047 0.024186 +v -0.169937 0.056815 0.021484 +v -0.170084 0.057792 0.018551 +v -0.170751 0.056772 0.021388 +v -0.171377 0.054979 0.023982 +v -0.171922 0.052470 0.026186 +v -0.172348 0.049354 0.027841 +v -0.172624 0.045799 0.028801 +v -0.172726 0.042022 0.028957 +v -0.172643 0.038271 0.028248 +v -0.172376 0.034813 0.026668 +v -0.171939 0.031917 0.024278 +v -0.171359 0.029840 0.021210 +v -0.170677 0.028800 0.017687 +v -0.169533 0.028944 0.014049 +v -0.169945 0.028927 0.014009 +v -0.169468 0.028852 0.017789 +v -0.169433 0.029917 0.021349 +v -0.169427 0.032002 0.024432 +v -0.169448 0.034889 0.026825 +v -0.169500 0.038323 0.028400 +v -0.169582 0.042043 0.029101 +v -0.169694 0.045790 0.028934 +v -0.169833 0.049321 0.027959 +v -0.169993 0.052426 0.026285 +v -0.170165 0.054941 0.024053 +v -0.170338 0.056756 0.021417 +v -0.170978 0.054906 0.023947 +v -0.171561 0.052377 0.026065 +v -0.172050 0.049281 0.027635 +v 
-0.172409 0.045777 0.028530 +v -0.172610 0.042071 0.028652 +v -0.172636 0.038395 0.027947 +v -0.172480 0.034998 0.026410 +v -0.172147 0.032131 0.024090 +v -0.171657 0.030041 0.021101 +v -0.171043 0.028946 0.017640 +v -0.170349 0.028992 0.013987 +v -0.169220 0.030222 0.010518 +v -0.169631 0.030206 0.010473 +v -0.169141 0.029044 0.014104 +v -0.169119 0.029030 0.017802 +v -0.169148 0.030143 0.021282 +v -0.169222 0.032231 0.024270 +v -0.169338 0.035079 0.026578 +v -0.169493 0.038448 0.028099 +v -0.169681 0.042091 0.028786 +v -0.169894 0.045769 0.028644 +v -0.170120 0.049255 0.027726 +v -0.170349 0.052349 0.026128 +v -0.170564 0.054893 0.023971 +v -0.171161 0.052324 0.026014 +v -0.171688 0.049226 0.027497 +v -0.172110 0.045759 0.028314 +v -0.172396 0.042117 0.028383 +v -0.172523 0.038517 0.027662 +v -0.172477 0.035192 0.026148 +v -0.172256 0.032374 0.023887 +v -0.171872 0.030294 0.020978 +v -0.171346 0.029163 0.017592 +v -0.170719 0.029137 0.013983 +v -0.170037 0.030265 0.010469 +v -0.168945 0.032482 0.007419 +v -0.169356 0.032469 0.007371 +v -0.168831 0.030312 0.010600 +v -0.168797 0.029219 0.014170 +v -0.168839 0.029272 0.017804 +v -0.168949 0.030412 0.021189 +v -0.169117 0.032481 0.024081 +v -0.169335 0.035274 0.026317 +v -0.169594 0.038566 0.027803 +v -0.169881 0.042134 0.028498 +v -0.170180 0.045753 0.028401 +v -0.170475 0.049210 0.027555 +v -0.170747 0.052314 0.026036 +v -0.171287 0.049195 0.027436 +v -0.171748 0.045746 0.028168 +v -0.172098 0.042154 0.028169 +v -0.172310 0.038629 0.027411 +v -0.172367 0.035382 0.025902 +v -0.172258 0.032630 0.023685 +v -0.171987 0.030581 0.020850 +v -0.171568 0.029435 0.017546 +v -0.171028 0.029350 0.013999 +v -0.170411 0.030396 0.010505 +v -0.169764 0.032518 0.007381 +v -0.169147 0.035425 0.005004 +v -0.168736 0.035436 0.005054 +v -0.168558 0.032556 0.007524 +v -0.168491 0.030470 0.010714 +v -0.168524 0.029458 0.014243 +v -0.168648 0.029562 0.017793 +v -0.168851 0.030708 0.021076 +v -0.169119 0.032737 0.023878 +v -0.169439 
0.035458 0.026059 +v -0.169795 0.038670 0.027533 +v -0.170167 0.042167 0.028257 +v -0.170534 0.045742 0.028223 +v -0.170873 0.049190 0.027456 +v -0.171346 0.045738 0.028102 +v -0.171735 0.042182 0.028025 +v -0.172013 0.038721 0.027213 +v -0.172157 0.035556 0.025686 +v -0.172154 0.032880 0.023496 +v -0.171996 0.030883 0.020726 +v -0.171691 0.029744 0.017505 +v -0.171256 0.029618 0.014033 +v -0.170725 0.030588 0.010579 +v -0.170140 0.032624 0.007449 +v -0.169555 0.035466 0.005020 +v -0.169932 0.035557 0.005101 +v -0.168349 0.035499 0.005167 +v -0.170252 0.035692 0.005241 +v -0.168015 0.035609 0.005335 +v -0.170494 0.035862 0.005431 +v -0.167754 0.035760 0.005547 +v -0.170641 0.036054 0.005658 +v -0.167585 0.035941 0.005787 +v -0.170683 0.036256 0.005907 +v -0.167519 0.036139 0.006040 +v -0.170618 0.036454 0.006160 +v -0.167561 0.036341 0.006289 +v -0.170449 0.036635 0.006400 +v -0.167708 0.036533 0.006516 +v -0.170188 0.036786 0.006612 +v -0.167950 0.036703 0.006706 +v -0.169853 0.036896 0.006780 +v -0.168271 0.036838 0.006847 +v -0.169467 0.036959 0.006893 +v -0.168648 0.036929 0.006927 +v -0.169056 0.036971 0.006943 +v -0.168222 0.032685 0.007677 +v -0.168224 0.030685 0.010852 +v -0.168340 0.029743 0.014317 +v -0.168558 0.029880 0.017770 +v -0.168860 0.031009 0.020952 +v -0.169228 0.032980 0.023676 +v -0.169643 0.035621 0.025821 +v -0.170083 0.038753 0.027306 +v -0.170522 0.042190 0.028080 +v -0.170932 0.045737 0.028121 +v -0.171333 0.042198 0.027960 +v -0.171651 0.038790 0.027080 +v -0.171863 0.035701 0.025517 +v -0.171949 0.033109 0.023333 +v -0.171898 0.031179 0.020613 +v -0.171708 0.030069 0.017472 +v -0.171388 0.029922 0.014083 +v -0.170960 0.030829 0.010687 +v -0.170459 0.032781 0.007572 +v -0.168464 0.034117 0.009025 +v -0.168840 0.034224 0.009094 +v -0.168145 0.033960 0.008903 +v -0.167905 0.033762 0.008735 +v -0.167760 0.033538 0.008534 +v -0.167720 0.033302 0.008312 +v -0.167788 0.033071 0.008085 +v -0.167959 0.032861 0.007869 +v -0.170699 0.032979 
0.007739 +v -0.170844 0.033203 0.007941 +v -0.170883 0.033439 0.008162 +v -0.170815 0.033670 0.008389 +v -0.170644 0.033880 0.008606 +v -0.170381 0.034056 0.008797 +v -0.170045 0.034185 0.008950 +v -0.169658 0.034259 0.009055 +v -0.169247 0.034272 0.009104 +v -0.168047 0.030942 0.011005 +v -0.168258 0.030057 0.014387 +v -0.168575 0.030205 0.017737 +v -0.168976 0.031297 0.020824 +v -0.169437 0.033195 0.023488 +v -0.169934 0.035751 0.025620 +v -0.170438 0.038810 0.027139 +v -0.170919 0.042201 0.027979 +v -0.171250 0.038828 0.027021 +v -0.171503 0.035808 0.025405 +v -0.171658 0.033300 0.023208 +v -0.171700 0.031448 0.020520 +v -0.171618 0.030387 0.017450 +v -0.171413 0.030242 0.014145 +v -0.171099 0.031103 0.010820 +v -0.168694 0.032220 0.011627 +v -0.169067 0.032350 0.011663 +v -0.168379 0.032027 0.011552 +v -0.168145 0.031786 0.011445 +v -0.168006 0.031512 0.011311 +v -0.167973 0.031224 0.011161 +v -0.171132 0.031391 0.010970 +v -0.171058 0.031673 0.011127 +v -0.170881 0.031931 0.011279 +v -0.170614 0.032145 0.011417 +v -0.170274 0.032303 0.011531 +v -0.169885 0.032393 0.011613 +v -0.169473 0.032409 0.011658 +v -0.168284 0.030376 0.014450 +v -0.168698 0.030514 0.017696 +v -0.169190 0.031549 0.020701 +v -0.169731 0.033366 0.023327 +v -0.170290 0.035839 0.025471 +v -0.170836 0.038835 0.027041 +v -0.171103 0.035868 0.025359 +v -0.171301 0.033440 0.023129 +v -0.171414 0.031673 0.020453 +v -0.171427 0.030677 0.017438 +v -0.171331 0.030555 0.014216 +v -0.168953 0.031162 0.014549 +v -0.169323 0.031306 0.014546 +v -0.168644 0.030948 0.014534 +v -0.168415 0.030680 0.014500 +v -0.171148 0.030841 0.014290 +v -0.170874 0.031079 0.014363 +v -0.170530 0.031255 0.014429 +v -0.170139 0.031354 0.014483 +v -0.169727 0.031372 0.014523 +v -0.168920 0.030786 0.017650 +v -0.169488 0.031751 0.020592 +v -0.170090 0.033481 0.023204 +v -0.170689 0.035879 0.025381 +v -0.170903 0.033520 0.023101 +v -0.171061 0.031839 0.020417 +v -0.171147 0.030919 0.017440 +v -0.169589 0.031150 0.017555 +v 
-0.169223 0.031003 0.017602 +v -0.170798 0.031097 0.017453 +v -0.170405 0.031198 0.017478 +v -0.169992 0.031216 0.017513 +v -0.169850 0.031887 0.020504 +v -0.170489 0.033534 0.023127 +v -0.170664 0.031933 0.020413 +v -0.170252 0.031949 0.020443 +v 0.167884 0.116804 0.024626 +v 0.167166 0.114880 0.021382 +v 0.167578 0.114869 0.021349 +v 0.167471 0.116814 0.024657 +v 0.166827 0.113972 0.017758 +v 0.167239 0.113960 0.017720 +v 0.168283 0.116861 0.024544 +v 0.167978 0.114936 0.021284 +v 0.167724 0.119626 0.027360 +v 0.168137 0.119619 0.027331 +v 0.166768 0.114969 0.021383 +v 0.167072 0.116890 0.024635 +v 0.166480 0.114166 0.014044 +v 0.166892 0.114154 0.014003 +v 0.167641 0.114032 0.017676 +v 0.166432 0.114068 0.017785 +v 0.168639 0.116981 0.024416 +v 0.168338 0.115077 0.021192 +v 0.168534 0.119662 0.027236 +v 0.167907 0.123114 0.029318 +v 0.168320 0.123109 0.029290 +v 0.167323 0.119684 0.027320 +v 0.166712 0.117026 0.024561 +v 0.166412 0.115130 0.021350 +v 0.166150 0.115444 0.010513 +v 0.166562 0.115433 0.010469 +v 0.167296 0.114225 0.013980 +v 0.166087 0.114260 0.014100 +v 0.168004 0.114184 0.017627 +v 0.166081 0.114241 0.017801 +v 0.168929 0.117155 0.024252 +v 0.168632 0.115282 0.021080 +v 0.168888 0.119753 0.027080 +v 0.168716 0.123137 0.029186 +v 0.168009 0.127041 0.030411 +v 0.168422 0.127039 0.030385 +v 0.167504 0.123151 0.029266 +v 0.166960 0.119787 0.027214 +v 0.166418 0.117214 0.024440 +v 0.166123 0.115351 0.021286 +v 0.165860 0.117705 0.007415 +v 0.166272 0.117696 0.007368 +v 0.166967 0.115498 0.010465 +v 0.165760 0.115529 0.010595 +v 0.167663 0.114375 0.013976 +v 0.165741 0.114431 0.014167 +v 0.168305 0.114405 0.017578 +v 0.165798 0.114480 0.017805 +v 0.168842 0.115538 0.020956 +v 0.169132 0.117372 0.024061 +v 0.169174 0.119885 0.026875 +v 0.169068 0.123195 0.029012 +v 0.168818 0.127050 0.030276 +v 0.168024 0.131148 0.030576 +v 0.168438 0.131149 0.030549 +v 0.167606 0.127056 0.030354 +v 0.167140 0.123217 0.029139 +v 0.166662 0.119930 0.027050 +v 0.165921 
0.115619 0.021196 +v 0.166209 0.117440 0.024280 +v 0.166051 0.120651 0.005003 +v 0.165640 0.120659 0.005051 +v 0.166679 0.117748 0.007380 +v 0.165472 0.117774 0.007518 +v 0.167339 0.115632 0.010502 +v 0.165418 0.115683 0.010708 +v 0.167970 0.114592 0.013991 +v 0.165465 0.114666 0.014240 +v 0.168522 0.114680 0.017531 +v 0.165603 0.114767 0.017795 +v 0.168953 0.115827 0.020827 +v 0.169236 0.117616 0.023858 +v 0.169373 0.120050 0.026635 +v 0.169352 0.123280 0.028781 +v 0.169169 0.127074 0.030093 +v 0.168834 0.131144 0.030440 +v 0.168365 0.135172 0.029775 +v 0.167952 0.135168 0.029802 +v 0.167621 0.131141 0.030517 +v 0.167241 0.127083 0.030216 +v 0.166839 0.123309 0.028946 +v 0.166448 0.120102 0.026838 +v 0.165818 0.115913 0.021084 +v 0.166099 0.117690 0.024093 +v 0.166459 0.120696 0.005021 +v 0.166834 0.120791 0.005104 +v 0.165252 0.120718 0.005162 +v 0.167153 0.120928 0.005246 +v 0.164916 0.120826 0.005328 +v 0.167392 0.121099 0.005438 +v 0.164652 0.120975 0.005538 +v 0.167537 0.121293 0.005666 +v 0.164480 0.121154 0.005777 +v 0.167576 0.121495 0.005915 +v 0.164411 0.121351 0.006030 +v 0.167507 0.121692 0.006168 +v 0.164450 0.121553 0.006279 +v 0.167335 0.121871 0.006407 +v 0.164594 0.121746 0.006507 +v 0.167071 0.122020 0.006617 +v 0.164834 0.121918 0.006699 +v 0.166734 0.122127 0.006783 +v 0.165152 0.122055 0.006841 +v 0.166347 0.122187 0.006894 +v 0.165528 0.122150 0.006924 +v 0.165936 0.122195 0.006942 +v 0.167054 0.117859 0.007451 +v 0.165134 0.117900 0.007670 +v 0.167651 0.115828 0.010577 +v 0.165148 0.115894 0.010846 +v 0.168195 0.114863 0.014025 +v 0.165278 0.114949 0.014315 +v 0.168641 0.114991 0.017489 +v 0.165508 0.115084 0.017773 +v 0.168957 0.116128 0.020703 +v 0.169233 0.117872 0.023655 +v 0.169471 0.120236 0.026375 +v 0.169548 0.123386 0.028507 +v 0.169451 0.127109 0.029847 +v 0.169184 0.131132 0.030255 +v 0.168761 0.135150 0.029669 +v 0.168209 0.138844 0.028107 +v 0.167796 0.138838 0.028135 +v 0.167549 0.135139 0.029747 +v 0.167256 0.131128 0.030378 
+v 0.166938 0.127120 0.030008 +v 0.166622 0.123419 0.028700 +v 0.166332 0.120292 0.026593 +v 0.165822 0.116215 0.020960 +v 0.166096 0.117946 0.023890 +v 0.165356 0.119333 0.009020 +v 0.165730 0.119444 0.009090 +v 0.166138 0.119496 0.009102 +v 0.166549 0.119487 0.009055 +v 0.166937 0.119417 0.008953 +v 0.167275 0.119292 0.008801 +v 0.167540 0.119119 0.008611 +v 0.167715 0.118910 0.008395 +v 0.167786 0.118680 0.008168 +v 0.167749 0.118444 0.007946 +v 0.167608 0.118219 0.007743 +v 0.167371 0.118019 0.007574 +v 0.164869 0.118073 0.007860 +v 0.164695 0.118282 0.008076 +v 0.164624 0.118512 0.008303 +v 0.164660 0.118748 0.008525 +v 0.164802 0.118973 0.008727 +v 0.165039 0.119173 0.008896 +v 0.167883 0.116072 0.010685 +v 0.164968 0.116149 0.010999 +v 0.168322 0.115169 0.014075 +v 0.165192 0.115261 0.014386 +v 0.168654 0.115316 0.017457 +v 0.165521 0.115409 0.017740 +v 0.168854 0.116423 0.020591 +v 0.169123 0.118122 0.023468 +v 0.169463 0.120430 0.026114 +v 0.169643 0.123505 0.028210 +v 0.169646 0.127151 0.029556 +v 0.169466 0.131116 0.030007 +v 0.169113 0.135104 0.029491 +v 0.168606 0.138806 0.028008 +v 0.167980 0.141919 0.025645 +v 0.167567 0.141910 0.025675 +v 0.167394 0.138787 0.028090 +v 0.167185 0.135086 0.029616 +v 0.166953 0.131110 0.030168 +v 0.166719 0.127165 0.029743 +v 0.166503 0.123540 0.028416 +v 0.166324 0.120486 0.026332 +v 0.165933 0.116504 0.020831 +v 0.166199 0.118190 0.023687 +v 0.165599 0.117434 0.011623 +v 0.165971 0.117569 0.011660 +v 0.166376 0.117633 0.011656 +v 0.166788 0.117622 0.011612 +v 0.167178 0.117537 0.011530 +v 0.167520 0.117383 0.011417 +v 0.167790 0.117172 0.011279 +v 0.167970 0.116917 0.011126 +v 0.168048 0.116636 0.010970 +v 0.168018 0.116348 0.010819 +v 0.164890 0.116430 0.011155 +v 0.164920 0.116718 0.011306 +v 0.165055 0.116994 0.011440 +v 0.165287 0.117238 0.011548 +v 0.168344 0.115489 0.014138 +v 0.165214 0.115581 0.014449 +v 0.168559 0.115633 0.017435 +v 0.165640 0.115720 0.017699 +v 0.168651 0.116690 0.020501 +v 0.168914 
0.118348 0.023308 +v 0.169347 0.120620 0.025869 +v 0.169630 0.123629 0.027909 +v 0.169739 0.127200 0.029239 +v 0.169660 0.131095 0.029714 +v 0.169396 0.135036 0.029253 +v 0.168959 0.138726 0.027845 +v 0.168377 0.141867 0.025558 +v 0.167277 0.144171 0.022577 +v 0.167690 0.144182 0.022544 +v 0.167166 0.141841 0.025646 +v 0.167031 0.138696 0.027976 +v 0.166883 0.135014 0.029416 +v 0.166734 0.131088 0.029900 +v 0.166598 0.127214 0.029440 +v 0.166490 0.123665 0.028116 +v 0.166422 0.120672 0.026072 +v 0.166142 0.116759 0.020707 +v 0.166403 0.118407 0.023497 +v 0.165872 0.116375 0.014548 +v 0.166240 0.116524 0.014544 +v 0.166644 0.116596 0.014521 +v 0.167056 0.116583 0.014480 +v 0.167448 0.116489 0.014424 +v 0.167794 0.116319 0.014357 +v 0.168071 0.116084 0.014284 +v 0.168258 0.115801 0.014209 +v 0.165341 0.115886 0.014499 +v 0.165566 0.116157 0.014533 +v 0.168364 0.115920 0.017425 +v 0.165857 0.115995 0.017652 +v 0.168619 0.118536 0.023187 +v 0.168362 0.116912 0.020437 +v 0.169133 0.120792 0.025657 +v 0.169511 0.123751 0.027626 +v 0.169725 0.127251 0.028918 +v 0.169753 0.131071 0.029394 +v 0.169591 0.134952 0.028971 +v 0.169244 0.138610 0.027629 +v 0.168732 0.141757 0.025419 +v 0.168089 0.144118 0.022473 +v 0.166947 0.145449 0.019046 +v 0.167360 0.145461 0.019010 +v 0.166879 0.144086 0.022569 +v 0.166806 0.141715 0.025559 +v 0.166732 0.138571 0.027800 +v 0.166665 0.134926 0.029161 +v 0.166613 0.131064 0.029594 +v 0.166584 0.127265 0.029119 +v 0.166585 0.123784 0.027819 +v 0.166621 0.120837 0.025832 +v 0.166693 0.118581 0.023332 +v 0.166437 0.116965 0.020595 +v 0.166521 0.116368 0.017554 +v 0.166158 0.116216 0.017603 +v 0.166923 0.116440 0.017510 +v 0.167335 0.116428 0.017472 +v 0.167730 0.116332 0.017445 +v 0.168081 0.116159 0.017429 +v 0.168260 0.118673 0.023113 +v 0.168007 0.117072 0.020404 +v 0.168835 0.120935 0.025493 +v 0.169294 0.123861 0.027379 +v 0.169604 0.127300 0.028614 +v 0.169739 0.131047 0.029070 +v 0.169685 0.134858 0.028665 +v 0.169442 0.138465 0.027375 
+v 0.169021 0.141596 0.025239 +v 0.168447 0.143983 0.022368 +v 0.167761 0.145390 0.018958 +v 0.166600 0.145643 0.015333 +v 0.167012 0.145655 0.015293 +v 0.166551 0.145355 0.019064 +v 0.166522 0.143932 0.022521 +v 0.166510 0.141542 0.025422 +v 0.166517 0.138420 0.027574 +v 0.166545 0.134829 0.028869 +v 0.166598 0.131040 0.029270 +v 0.166677 0.127313 0.028802 +v 0.166781 0.123889 0.027545 +v 0.166907 0.120969 0.025627 +v 0.167049 0.118701 0.023204 +v 0.166797 0.117106 0.020503 +v 0.167197 0.117173 0.020438 +v 0.167609 0.117162 0.020405 +v 0.167860 0.118748 0.023091 +v 0.168473 0.121038 0.025387 +v 0.168993 0.123952 0.027186 +v 0.169385 0.127345 0.028350 +v 0.169618 0.131023 0.028764 +v 0.169672 0.134759 0.028355 +v 0.169539 0.138302 0.027100 +v 0.169223 0.141397 0.025029 +v 0.168740 0.143787 0.022236 +v 0.168123 0.145241 0.018894 +v 0.167415 0.145583 0.015262 +v 0.166262 0.144735 0.011708 +v 0.166674 0.144747 0.011665 +v 0.166207 0.145547 0.015379 +v 0.166199 0.145185 0.019063 +v 0.166231 0.143721 0.022435 +v 0.166299 0.141334 0.025242 +v 0.166400 0.138253 0.027313 +v 0.166532 0.134730 0.028559 +v 0.166691 0.131017 0.028950 +v 0.166872 0.127356 0.028510 +v 0.167065 0.123974 0.027313 +v 0.167261 0.121060 0.025471 +v 0.167447 0.118758 0.023122 +v 0.168071 0.121096 0.025347 +v 0.168629 0.124018 0.027059 +v 0.169082 0.127382 0.028141 +v 0.169398 0.131001 0.028496 +v 0.169552 0.134662 0.028063 +v 0.169529 0.138131 0.026823 +v 0.169325 0.141171 0.024804 +v 0.168948 0.143543 0.022087 +v 0.168421 0.145023 0.018822 +v 0.167782 0.145431 0.015243 +v 0.167078 0.144680 0.011654 +v 0.165956 0.142801 0.008433 +v 0.166367 0.142811 0.008387 +v 0.165871 0.144646 0.011781 +v 0.165859 0.145374 0.015428 +v 0.165914 0.144950 0.019042 +v 0.166026 0.143466 0.022319 +v 0.166187 0.141103 0.025032 +v 0.166390 0.138083 0.027036 +v 0.166626 0.134636 0.028253 +v 0.166885 0.130996 0.028656 +v 0.167154 0.127391 0.028265 +v 0.167417 0.124032 0.027139 +v 0.167658 0.121103 0.025376 +v 0.168226 
0.124055 0.027008 +v 0.168717 0.127409 0.028004 +v 0.169096 0.130984 0.028286 +v 0.169334 0.134574 0.027808 +v 0.169412 0.137965 0.026562 +v 0.169319 0.140935 0.024579 +v 0.169056 0.143268 0.021930 +v 0.168636 0.144752 0.018747 +v 0.168086 0.145210 0.015236 +v 0.167449 0.144539 0.011678 +v 0.166774 0.142754 0.008394 +v 0.166114 0.139997 0.005682 +v 0.165703 0.139989 0.005730 +v 0.165567 0.142726 0.008529 +v 0.165528 0.144485 0.011879 +v 0.165581 0.145136 0.015477 +v 0.165716 0.144667 0.019003 +v 0.165920 0.143185 0.022179 +v 0.166181 0.140868 0.024807 +v 0.166487 0.137919 0.026761 +v 0.166821 0.134552 0.027971 +v 0.167167 0.130979 0.028408 +v 0.167505 0.127415 0.028081 +v 0.167813 0.124060 0.027035 +v 0.168314 0.127424 0.027946 +v 0.168730 0.130970 0.028146 +v 0.169032 0.134502 0.027608 +v 0.169197 0.137814 0.026336 +v 0.169207 0.140705 0.024369 +v 0.169057 0.142980 0.021777 +v 0.168752 0.144446 0.018675 +v 0.168308 0.144935 0.015242 +v 0.167759 0.144333 0.011733 +v 0.167147 0.142634 0.008453 +v 0.166522 0.139953 0.005702 +v 0.165932 0.136506 0.003723 +v 0.165520 0.136501 0.003773 +v 0.165316 0.139932 0.005844 +v 0.165228 0.142589 0.008668 +v 0.165255 0.144264 0.011995 +v 0.165391 0.144848 0.015523 +v 0.165618 0.144355 0.018949 +v 0.165921 0.142897 0.022026 +v 0.166283 0.140642 0.024582 +v 0.166684 0.137775 0.026507 +v 0.167104 0.134484 0.027733 +v 0.167518 0.130968 0.028224 +v 0.167900 0.127426 0.027972 +v 0.168327 0.130963 0.028088 +v 0.168668 0.134449 0.027476 +v 0.168897 0.137689 0.026160 +v 0.168996 0.140497 0.024189 +v 0.168952 0.142699 0.021637 +v 0.168761 0.144127 0.018609 +v 0.168433 0.144624 0.015260 +v 0.167988 0.144077 0.011816 +v 0.167463 0.142460 0.008562 +v 0.166899 0.139862 0.005789 +v 0.166340 0.136478 0.003752 +v 0.165829 0.132576 0.002628 +v 0.165418 0.132574 0.002679 +v 0.165134 0.136464 0.003898 +v 0.164980 0.139828 0.006015 +v 0.164961 0.142402 0.008842 +v 0.165073 0.143997 0.012122 +v 0.165302 0.144531 0.015562 +v 0.165628 0.144035 0.018883 
+v 0.166030 0.142621 0.021869 +v 0.166485 0.140442 0.024372 +v 0.166969 0.137658 0.026291 +v 0.167456 0.134438 0.027555 +v 0.167914 0.130962 0.028114 +v 0.168265 0.134420 0.027422 +v 0.168534 0.137598 0.026046 +v 0.168700 0.140324 0.024051 +v 0.168747 0.142444 0.021521 +v 0.168664 0.143814 0.018555 +v 0.168451 0.144299 0.015290 +v 0.168121 0.143789 0.011923 +v 0.167698 0.142244 0.008711 +v 0.167218 0.139730 0.005938 +v 0.166718 0.136420 0.003857 +v 0.166238 0.132565 0.002662 +v 0.165819 0.129780 0.002516 +v 0.165408 0.129763 0.002566 +v 0.165033 0.132559 0.002810 +v 0.164800 0.136398 0.004090 +v 0.164717 0.139685 0.006232 +v 0.164785 0.142175 0.009038 +v 0.164992 0.143702 0.012250 +v 0.165320 0.144206 0.015591 +v 0.165744 0.143729 0.018810 +v 0.166237 0.142377 0.021720 +v 0.166773 0.140282 0.024192 +v 0.167322 0.137579 0.026129 +v 0.167852 0.134416 0.027449 +v 0.168132 0.137547 0.026002 +v 0.168340 0.140198 0.023965 +v 0.168456 0.142232 0.021435 +v 0.168466 0.143531 0.018516 +v 0.168363 0.143983 0.015329 +v 0.168148 0.143487 0.012044 +v 0.167838 0.141999 0.008892 +v 0.167458 0.139565 0.006137 +v 0.167040 0.136335 0.004033 +v 0.166617 0.132541 0.002777 +v 0.166228 0.129713 0.002548 +v 0.163467 0.129497 -0.020912 +v 0.163880 0.129513 -0.020948 +v 0.165022 0.129665 0.002694 +v 0.164699 0.132532 0.003013 +v 0.164540 0.136307 0.004335 +v 0.164546 0.139513 0.006480 +v 0.164711 0.141925 0.009242 +v 0.165019 0.143400 0.012372 +v 0.165445 0.143896 0.015610 +v 0.165958 0.143458 0.018735 +v 0.166530 0.142181 0.021588 +v 0.167128 0.140172 0.024053 +v 0.167719 0.137541 0.026030 +v 0.167939 0.140128 0.023936 +v 0.168099 0.142079 0.021387 +v 0.168181 0.143297 0.018495 +v 0.168172 0.143695 0.015374 +v 0.168068 0.143192 0.012172 +v 0.167872 0.141743 0.009091 +v 0.167603 0.139379 0.006374 +v 0.167283 0.136230 0.004265 +v 0.166940 0.132507 0.002966 +v 0.166606 0.129567 0.002658 +v 0.164283 0.129445 -0.020983 +v 0.160519 0.129497 -0.052921 +v 0.160897 0.129513 -0.053335 +v 0.163074 
0.129397 -0.020877 +v 0.164688 0.129491 0.002891 +v 0.164441 0.132495 0.003274 +v 0.164372 0.136197 0.004618 +v 0.164478 0.139324 0.006742 +v 0.164746 0.141670 0.009441 +v 0.165152 0.143112 0.012478 +v 0.165667 0.143621 0.015616 +v 0.166257 0.143240 0.018664 +v 0.166888 0.142047 0.021483 +v 0.167526 0.140119 0.023966 +v 0.167700 0.141994 0.021379 +v 0.167828 0.143126 0.018494 +v 0.167894 0.143457 0.015424 +v 0.167885 0.142925 0.012299 +v 0.167799 0.141493 0.009296 +v 0.167643 0.139185 0.006633 +v 0.167431 0.136111 0.004540 +v 0.167185 0.132464 0.003217 +v 0.166928 0.129353 0.002840 +v 0.164650 0.129298 -0.021014 +v 0.161266 0.129445 -0.053740 +v -0.005122 0.129497 -0.052927 +v -0.005422 0.129513 -0.053341 +v 0.160159 0.129397 -0.052526 +v 0.162726 0.129222 -0.020845 +v 0.164428 0.129254 0.003144 +v 0.164274 0.132450 0.003575 +v 0.164307 0.136075 0.004918 +v 0.164518 0.139129 0.007000 +v 0.164885 0.141425 0.009622 +v 0.165381 0.142856 0.012561 +v 0.165971 0.143400 0.015609 +v 0.166619 0.143091 0.018599 +v 0.167287 0.141983 0.021412 +v 0.167432 0.143032 0.018511 +v 0.167546 0.143284 0.015473 +v 0.167613 0.142704 0.012415 +v 0.167623 0.141267 0.009491 +v 0.167574 0.138995 0.006895 +v 0.167475 0.135986 0.004838 +v 0.167335 0.132415 0.003511 +v 0.167172 0.129085 0.003081 +v 0.164955 0.129082 -0.021039 +v 0.161601 0.129298 -0.054108 +v -0.005702 0.129445 -0.053746 +v -0.021297 0.116813 -0.002540 +v -0.020997 0.116797 -0.002126 +v -0.004820 0.129397 -0.052532 +v 0.159840 0.129222 -0.052177 +v 0.162448 0.128982 -0.020820 +v 0.164261 0.128970 0.003435 +v 0.164212 0.132401 0.003895 +v 0.164351 0.135951 0.005216 +v 0.164663 0.138943 0.007237 +v 0.165121 0.141208 0.009771 +v 0.165692 0.142650 0.012617 +v 0.166338 0.143248 0.015589 +v 0.167020 0.143020 0.018548 +v 0.167153 0.143188 0.015519 +v 0.167270 0.142543 0.012513 +v 0.167356 0.141079 0.009665 +v 0.167403 0.138823 0.007143 +v 0.167410 0.135865 0.005138 +v 0.167381 0.132365 0.003829 +v 0.167322 0.128781 0.003365 +v 
0.165178 0.128811 -0.021057 +v 0.161881 0.129082 -0.054414 +v -0.005940 0.129298 -0.054114 +v -0.021577 0.116745 -0.002945 +v -0.071760 0.116797 -0.002126 +v -0.071459 0.116813 -0.002540 +v -0.020695 0.116697 -0.001732 +v -0.004537 0.129222 -0.052183 +v 0.159586 0.128982 -0.051898 +v 0.162259 0.128694 -0.020801 +v 0.164198 0.128657 0.003745 +v 0.164257 0.132350 0.004213 +v 0.164499 0.135831 0.005490 +v 0.164903 0.138779 0.007436 +v 0.165436 0.141034 0.009880 +v 0.166062 0.142509 0.012640 +v 0.166741 0.143176 0.015559 +v 0.166879 0.142454 0.012587 +v 0.167017 0.140943 0.009804 +v 0.167141 0.138680 0.007360 +v 0.167243 0.135755 0.005420 +v 0.167318 0.132315 0.004149 +v 0.167366 0.128462 0.003673 +v 0.165302 0.128503 -0.021066 +v 0.162084 0.128811 -0.054638 +v -0.006121 0.129082 -0.054420 +v -0.021815 0.116598 -0.003313 +v -0.071180 0.116745 -0.002945 +v -0.087635 0.129497 -0.052926 +v -0.087334 0.129513 -0.053340 +v -0.072062 0.116697 -0.001732 +v -0.020412 0.116522 -0.001382 +v -0.004293 0.128982 -0.051904 +v 0.159412 0.128694 -0.051708 +v 0.162170 0.128379 -0.020792 +v 0.164242 0.128338 0.004052 +v 0.164408 0.132302 0.004507 +v 0.164743 0.135726 0.005723 +v 0.165222 0.138646 0.007585 +v 0.165810 0.140915 0.009939 +v 0.166467 0.142442 0.012630 +v 0.166628 0.140867 0.009900 +v 0.166804 0.138577 0.007531 +v 0.166982 0.135663 0.005666 +v 0.167152 0.132271 0.004450 +v 0.167302 0.128149 0.003982 +v 0.165322 0.128181 -0.021066 +v 0.162199 0.128503 -0.054763 +v -0.006233 0.128811 -0.054644 +v -0.021996 0.116382 -0.003619 +v -0.070942 0.116598 -0.003313 +v -0.087055 0.129445 -0.053744 +v -0.163782 0.129513 -0.053335 +v -0.163405 0.129497 -0.052921 +v -0.087937 0.129397 -0.052531 +v -0.072345 0.116522 -0.001382 +v -0.020168 0.116282 -0.001103 +v -0.004104 0.128694 -0.051713 +v 0.159331 0.128379 -0.051619 +v 0.162189 0.128056 -0.020792 +v 0.164392 0.128034 0.004336 +v 0.164652 0.132259 0.004758 +v 0.165064 0.135641 0.005898 +v 0.165598 0.138555 0.007672 +v 0.166216 0.140858 
0.009947 +v 0.166417 0.138519 0.007644 +v 0.166648 0.135597 0.005858 +v 0.166893 0.132233 0.004711 +v 0.167135 0.127865 0.004274 +v 0.165233 0.127865 -0.021057 +v 0.162216 0.128181 -0.054782 +v -0.006268 0.128503 -0.054769 +v -0.022108 0.116111 -0.003843 +v -0.070760 0.116382 -0.003619 +v -0.086817 0.129298 -0.054112 +v -0.164151 0.129445 -0.053740 +v -0.168926 0.129513 0.002517 +v -0.168516 0.129497 0.002570 +v -0.163044 0.129397 -0.052526 +v -0.088220 0.129222 -0.052182 +v -0.072589 0.116282 -0.001103 +v -0.019979 0.115994 -0.000913 +v -0.003983 0.128379 -0.051625 +v 0.159348 0.128056 -0.051638 +v 0.162314 0.127749 -0.020801 +v 0.164636 0.127766 0.004578 +v 0.164975 0.132224 0.004947 +v 0.165442 0.135583 0.006004 +v 0.166006 0.138512 0.007692 +v 0.166262 0.135560 0.005983 +v 0.166560 0.132206 0.004914 +v 0.166876 0.127628 0.004526 +v 0.165044 0.127577 -0.021038 +v 0.162135 0.127865 -0.054693 +v -0.006224 0.128181 -0.054788 +v -0.022143 0.115803 -0.003968 +v -0.070649 0.116111 -0.003843 +v -0.086636 0.129082 -0.054419 +v -0.164487 0.129298 -0.054108 +v -0.169335 0.129445 0.002544 +v -0.168526 0.132294 0.002682 +v -0.168936 0.132297 0.002628 +v -0.168131 0.129397 0.002702 +v -0.162726 0.129222 -0.052177 +v -0.088464 0.128982 -0.051902 +v -0.072778 0.115994 -0.000913 +v -0.019858 0.115679 -0.000824 +v -0.003939 0.128056 -0.051643 +v 0.159462 0.127749 -0.051763 +v 0.162537 0.127478 -0.020819 +v 0.164958 0.127552 0.004760 +v 0.165354 0.132201 0.005062 +v 0.165851 0.135555 0.006033 +v 0.166174 0.132191 0.005045 +v 0.166542 0.127454 0.004723 +v 0.164766 0.127338 -0.021013 +v 0.161961 0.127577 -0.054503 +v -0.006103 0.127865 -0.054699 +v -0.022099 0.115481 -0.003987 +v -0.070614 0.115803 -0.003968 +v -0.086524 0.128811 -0.054642 +v -0.164766 0.129082 -0.054414 +v -0.169715 0.129298 0.002651 +v -0.169346 0.132286 0.002658 +v -0.168622 0.136220 0.003776 +v -0.169033 0.136227 0.003723 +v -0.168141 0.132278 0.002817 +v -0.167799 0.129222 0.002901 +v -0.162471 0.128982 
-0.051898 +v -0.088653 0.128694 -0.051712 +v -0.072899 0.115679 -0.000824 +v -0.019814 0.115356 -0.000843 +v -0.003974 0.127749 -0.051769 +v 0.159666 0.127478 -0.051986 +v 0.162842 0.127261 -0.020844 +v 0.165336 0.127406 0.004870 +v 0.165763 0.132190 0.005096 +v 0.166156 0.127356 0.004851 +v 0.164418 0.127162 -0.020981 +v 0.161707 0.127338 -0.054223 +v -0.005914 0.127577 -0.054509 +v -0.021978 0.115165 -0.003898 +v -0.070658 0.115481 -0.003987 +v -0.086489 0.128503 -0.054767 +v -0.164970 0.128811 -0.054638 +v -0.170039 0.129082 0.002830 +v -0.169725 0.132263 0.002770 +v -0.169442 0.136201 0.003749 +v -0.168796 0.139707 0.005734 +v -0.169207 0.139718 0.005683 +v -0.168238 0.136181 0.003904 +v -0.167810 0.132250 0.003023 +v -0.167542 0.128982 0.003156 +v -0.162297 0.128694 -0.051708 +v -0.088774 0.128379 -0.051623 +v -0.072943 0.115356 -0.000843 +v -0.019849 0.115049 -0.000968 +v -0.004086 0.127478 -0.051992 +v 0.159945 0.127261 -0.052293 +v 0.163209 0.127114 -0.020875 +v 0.165745 0.127340 0.004901 +v 0.164024 0.127063 -0.020946 +v 0.161388 0.127162 -0.053874 +v -0.005670 0.127338 -0.054229 +v -0.021789 0.114877 -0.003708 +v -0.070779 0.115165 -0.003898 +v -0.086533 0.128181 -0.054786 +v -0.165084 0.128503 -0.054763 +v -0.170285 0.128811 0.003069 +v -0.170050 0.132229 0.002957 +v -0.169821 0.136145 0.003852 +v -0.169615 0.139678 0.005701 +v -0.169035 0.142518 0.008437 +v -0.169447 0.142532 0.008390 +v -0.168410 0.139646 0.005850 +v -0.167905 0.136113 0.004099 +v -0.167553 0.132213 0.003286 +v -0.167377 0.128694 0.003449 +v -0.162216 0.128379 -0.051619 +v -0.088818 0.128056 -0.051642 +v -0.072908 0.115049 -0.000968 +v -0.019960 0.114778 -0.001191 +v -0.004267 0.127261 -0.052298 +v 0.160281 0.127114 -0.052661 +v 0.163612 0.127046 -0.020910 +v 0.161028 0.127063 -0.053479 +v -0.005387 0.127162 -0.053880 +v -0.021545 0.114638 -0.003428 +v -0.070968 0.114877 -0.003708 +v -0.086654 0.127865 -0.054697 +v -0.165102 0.128181 -0.054782 +v -0.170437 0.128503 0.003352 +v 
-0.170297 0.132187 0.003205 +v -0.170144 0.136062 0.004024 +v -0.169993 0.139590 0.005786 +v -0.169854 0.142480 0.008395 +v -0.169737 0.144469 0.011670 +v -0.169326 0.144452 0.011713 +v -0.168648 0.142439 0.008535 +v -0.168075 0.139540 0.006023 +v -0.167647 0.136020 0.004347 +v -0.167390 0.132167 0.003588 +v -0.167317 0.128379 0.003759 +v -0.162233 0.128056 -0.051638 +v -0.088783 0.127749 -0.051767 +v -0.072796 0.114778 -0.001191 +v -0.020142 0.114561 -0.001498 +v -0.004505 0.127114 -0.052666 +v 0.160650 0.127046 -0.053065 +v -0.005085 0.127063 -0.053485 +v -0.021262 0.114462 -0.003079 +v -0.071212 0.114638 -0.003428 +v -0.086843 0.127577 -0.054507 +v -0.165021 0.127865 -0.054693 +v -0.170484 0.128181 0.003659 +v -0.170450 0.132139 0.003497 +v -0.170390 0.135957 0.004255 +v -0.170314 0.139460 0.005932 +v -0.170229 0.142364 0.008453 +v -0.170143 0.144407 0.011659 +v -0.170059 0.145377 0.015300 +v -0.169647 0.145359 0.015338 +v -0.168936 0.144358 0.011786 +v -0.168310 0.142298 0.008675 +v -0.167815 0.139395 0.006242 +v -0.167482 0.135909 0.004630 +v -0.167330 0.132118 0.003909 +v -0.167364 0.128056 0.004066 +v -0.162348 0.127749 -0.051763 +v -0.088671 0.127478 -0.051991 +v -0.072615 0.114561 -0.001498 +v -0.020380 0.114414 -0.001866 +v -0.004784 0.127046 -0.053071 +v -0.020960 0.114363 -0.002684 +v -0.071495 0.114462 -0.003079 +v -0.087087 0.127338 -0.054227 +v -0.164847 0.127577 -0.054503 +v -0.170424 0.127865 0.003969 +v -0.170498 0.132088 0.003815 +v -0.170541 0.135839 0.004528 +v -0.170556 0.139297 0.006130 +v -0.170546 0.142194 0.008560 +v -0.170515 0.144270 0.011682 +v -0.170463 0.145310 0.015271 +v -0.170388 0.145183 0.019019 +v -0.169976 0.145166 0.019052 +v -0.169254 0.145258 0.015383 +v -0.168595 0.144193 0.011884 +v -0.168046 0.142108 0.008850 +v -0.167647 0.139221 0.006490 +v -0.167421 0.135787 0.004931 +v -0.167378 0.132067 0.004227 +v -0.167516 0.127749 0.004348 +v -0.162552 0.127478 -0.051986 +v -0.088490 0.127261 -0.052297 +v -0.072377 0.114414 
-0.001866 +v -0.020659 0.114346 -0.002270 +v -0.071797 0.114363 -0.002684 +v -0.087370 0.127162 -0.053878 +v -0.164592 0.127338 -0.054223 +v -0.170259 0.127577 0.004261 +v -0.170439 0.132039 0.004136 +v -0.170588 0.135715 0.004826 +v -0.170704 0.139112 0.006366 +v -0.170785 0.141979 0.008709 +v -0.170828 0.144069 0.011737 +v -0.170831 0.145164 0.015252 +v -0.170791 0.145118 0.018970 +v -0.170702 0.143904 0.022556 +v -0.170289 0.143888 0.022584 +v -0.169581 0.145066 0.019067 +v -0.168909 0.145080 0.015431 +v -0.168325 0.143967 0.012000 +v -0.167873 0.141879 0.009046 +v -0.167582 0.139031 0.006753 +v -0.167467 0.135662 0.005228 +v -0.167531 0.132019 0.004520 +v -0.167762 0.127478 0.004587 +v -0.162831 0.127261 -0.052293 +v -0.088252 0.127114 -0.052665 +v -0.072098 0.114346 -0.002270 +v -0.087672 0.127063 -0.053483 +v -0.164274 0.127162 -0.053874 +v -0.170002 0.127338 0.004516 +v -0.170275 0.131993 0.004438 +v -0.170526 0.135593 0.005126 +v -0.170747 0.138918 0.006624 +v -0.170928 0.141736 0.008889 +v -0.171060 0.143816 0.011820 +v -0.171139 0.144947 0.015246 +v -0.171155 0.144973 0.018909 +v -0.171102 0.143845 0.022489 +v -0.170976 0.141641 0.025658 +v -0.170563 0.141628 0.025682 +v -0.169891 0.143798 0.022571 +v -0.169231 0.144891 0.019063 +v -0.168633 0.144838 0.015478 +v -0.168146 0.143698 0.012127 +v -0.167803 0.141629 0.009250 +v -0.167625 0.138837 0.007011 +v -0.167618 0.135544 0.005502 +v -0.167778 0.131976 0.004768 +v -0.168086 0.127261 0.004766 +v -0.163167 0.127114 -0.052661 +v -0.087972 0.127046 -0.053069 +v -0.163913 0.127063 -0.053479 +v -0.169670 0.127162 0.004716 +v -0.170019 0.131956 0.004701 +v -0.170361 0.135481 0.005410 +v -0.170682 0.138728 0.006887 +v -0.170965 0.141480 0.009088 +v -0.171197 0.143529 0.011926 +v -0.171364 0.144675 0.015252 +v -0.171457 0.144760 0.018839 +v -0.171463 0.143714 0.022387 +v -0.171376 0.141592 0.025576 +v -0.170781 0.138556 0.028142 +v -0.171195 0.138565 0.028121 +v -0.170164 0.141554 0.025647 +v -0.169537 0.143640 
0.022519 +v -0.168949 0.144652 0.019039 +v -0.168447 0.144548 0.015523 +v -0.168069 0.143402 0.012255 +v -0.167841 0.141373 0.009449 +v -0.167773 0.138652 0.007247 +v -0.167864 0.135440 0.005733 +v -0.168103 0.131943 0.004954 +v -0.168466 0.127114 0.004873 +v -0.163536 0.127046 -0.053065 +v -0.169286 0.127063 0.004847 +v -0.169687 0.131928 0.004907 +v -0.170103 0.135389 0.005658 +v -0.170514 0.138554 0.007135 +v -0.170895 0.141230 0.009292 +v -0.171227 0.143227 0.012047 +v -0.171493 0.144366 0.015271 +v -0.171676 0.144492 0.018765 +v -0.171760 0.143522 0.022259 +v -0.171734 0.141486 0.025443 +v -0.171593 0.138530 0.028029 +v -0.170929 0.134887 0.029810 +v -0.171343 0.134893 0.029790 +v -0.170380 0.138502 0.028091 +v -0.169806 0.141425 0.025556 +v -0.169249 0.143425 0.022430 +v -0.168755 0.144367 0.018999 +v -0.168362 0.144230 0.015561 +v -0.168100 0.143101 0.012376 +v -0.167984 0.141130 0.009629 +v -0.168016 0.138489 0.007445 +v -0.168187 0.135357 0.005905 +v -0.168483 0.131920 0.005066 +v -0.168875 0.127046 0.004901 +v -0.169303 0.131912 0.005042 +v -0.169771 0.135320 0.005852 +v -0.170254 0.138409 0.007354 +v -0.170722 0.141001 0.009488 +v -0.171151 0.142931 0.012175 +v -0.171516 0.144041 0.015300 +v -0.171796 0.144188 0.018693 +v -0.171972 0.143281 0.022112 +v -0.172026 0.141329 0.025267 +v -0.171949 0.138453 0.027872 +v -0.171740 0.134872 0.029691 +v -0.170997 0.130868 0.030584 +v -0.171411 0.130869 0.030565 +v -0.170527 0.134856 0.029748 +v -0.170020 0.138409 0.027971 +v -0.169514 0.141249 0.025413 +v -0.169048 0.143168 0.022311 +v -0.168662 0.144054 0.018944 +v -0.168385 0.143905 0.015591 +v -0.168237 0.142813 0.012481 +v -0.168222 0.140915 0.009778 +v -0.168337 0.138359 0.007591 +v -0.168566 0.135300 0.006008 +v -0.168892 0.131909 0.005096 +v -0.169386 0.135282 0.005980 +v -0.169919 0.138302 0.007527 +v -0.170458 0.140810 0.009662 +v -0.170972 0.142662 0.012301 +v -0.171431 0.143723 0.015339 +v -0.171810 0.143868 0.018627 +v -0.172085 0.143007 0.021956 +v 
-0.172233 0.141131 0.025059 +v -0.172238 0.138339 0.027660 +v -0.172095 0.134828 0.029519 +v -0.171809 0.130864 0.030463 +v -0.171397 0.126759 0.030400 +v -0.170983 0.126762 0.030419 +v -0.170595 0.130860 0.030518 +v -0.170165 0.134802 0.029610 +v -0.169724 0.138282 0.027790 +v -0.169307 0.141039 0.025230 +v -0.168948 0.142886 0.022170 +v -0.168676 0.143734 0.018878 +v -0.168514 0.143596 0.015609 +v -0.168469 0.142561 0.012565 +v -0.168540 0.140744 0.009884 +v -0.168714 0.138271 0.007676 +v -0.168975 0.135275 0.006034 +v -0.169533 0.138242 0.007643 +v -0.170121 0.140670 0.009803 +v -0.170702 0.142437 0.012418 +v -0.171245 0.143433 0.015383 +v -0.171717 0.143555 0.018572 +v -0.172092 0.142719 0.021802 +v -0.172340 0.140907 0.024835 +v -0.172441 0.138196 0.027409 +v -0.172382 0.134761 0.029286 +v -0.172163 0.130853 0.030285 +v -0.171794 0.126769 0.030299 +v -0.171300 0.122829 0.029305 +v -0.170886 0.122836 0.029325 +v -0.170581 0.126778 0.030354 +v -0.170233 0.130847 0.030372 +v -0.169867 0.134728 0.029405 +v -0.169513 0.138129 0.027560 +v -0.169200 0.140808 0.025019 +v -0.168954 0.142598 0.022016 +v -0.168797 0.143430 0.018805 +v -0.168739 0.143324 0.015616 +v -0.168782 0.142359 0.012620 +v -0.168915 0.140629 0.009943 +v -0.169122 0.138231 0.007693 +v -0.169733 0.140590 0.009901 +v -0.170361 0.142272 0.012516 +v -0.170969 0.143191 0.015431 +v -0.171523 0.143269 0.018531 +v -0.171991 0.142437 0.021661 +v -0.172340 0.140671 0.024610 +v -0.172544 0.138034 0.027136 +v -0.172583 0.134678 0.029008 +v -0.172449 0.130837 0.030042 +v -0.172148 0.126792 0.030122 +v -0.171698 0.122854 0.029208 +v -0.171126 0.119338 0.027345 +v -0.170712 0.119349 0.027367 +v -0.170484 0.122875 0.029267 +v -0.170218 0.126805 0.030210 +v -0.169934 0.130829 0.030156 +v -0.169654 0.134640 0.029146 +v -0.169402 0.137962 0.027298 +v -0.169200 0.140572 0.024794 +v -0.169067 0.142324 0.021860 +v -0.169015 0.143162 0.018732 +v -0.169047 0.143107 0.015609 +v -0.169154 0.142223 0.012643 +v -0.169322 
0.140576 0.009948 +v -0.169971 0.142177 0.012589 +v -0.170624 0.143013 0.015479 +v -0.171242 0.143031 0.018508 +v -0.171790 0.142179 0.021542 +v -0.172234 0.140440 0.024399 +v -0.172540 0.137863 0.026859 +v -0.172683 0.134584 0.028704 +v -0.172649 0.130817 0.029752 +v -0.172435 0.126826 0.029882 +v -0.172053 0.122911 0.029041 +v -0.171525 0.119378 0.027256 +v -0.170886 0.116523 0.024639 +v -0.170473 0.116537 0.024664 +v -0.170312 0.119409 0.027321 +v -0.170123 0.122943 0.029134 +v -0.169920 0.126843 0.029996 +v -0.169719 0.130807 0.029885 +v -0.169540 0.134543 0.028852 +v -0.169398 0.137791 0.027021 +v -0.169307 0.140348 0.024570 +v -0.169280 0.142083 0.021713 +v -0.169317 0.142948 0.018662 +v -0.169415 0.142960 0.015591 +v -0.169560 0.142161 0.012632 +v -0.170231 0.142912 0.015523 +v -0.170892 0.142855 0.018504 +v -0.171503 0.141965 0.021453 +v -0.172027 0.140230 0.024216 +v -0.172428 0.137696 0.026596 +v -0.172676 0.134485 0.028394 +v -0.172748 0.130793 0.029434 +v -0.172635 0.126869 0.029594 +v -0.172341 0.122994 0.028814 +v -0.171881 0.119466 0.027106 +v -0.171286 0.116576 0.024562 +v -0.170183 0.114604 0.021389 +v -0.170596 0.114587 0.021359 +v -0.170074 0.116617 0.024637 +v -0.169952 0.119516 0.027210 +v -0.169826 0.123036 0.028935 +v -0.169705 0.126888 0.029727 +v -0.169604 0.130783 0.029576 +v -0.169532 0.134444 0.028542 +v -0.169500 0.137629 0.026747 +v -0.169514 0.140150 0.024363 +v -0.169576 0.141891 0.021585 +v -0.169682 0.142804 0.018600 +v -0.169819 0.142894 0.015562 +v -0.170497 0.142756 0.018519 +v -0.171148 0.141807 0.021401 +v -0.171735 0.140054 0.024074 +v -0.172218 0.137543 0.026367 +v -0.172561 0.134388 0.028099 +v -0.172739 0.130769 0.029110 +v -0.172734 0.126917 0.029279 +v -0.172542 0.123098 0.028543 +v -0.172172 0.119595 0.026906 +v -0.171645 0.116691 0.024439 +v -0.170997 0.114649 0.021298 +v -0.169862 0.113696 0.017764 +v -0.170274 0.113678 0.017729 +v -0.169786 0.114698 0.021385 +v -0.169717 0.116757 0.024557 +v -0.169658 0.119661 
0.027040 +v -0.169613 0.123147 0.028685 +v -0.169590 0.126938 0.029422 +v -0.169595 0.130759 0.029252 +v -0.169632 0.134350 0.028238 +v -0.169703 0.137486 0.026496 +v -0.169807 0.139993 0.024186 +v -0.169937 0.141760 0.021484 +v -0.170084 0.142738 0.018551 +v -0.170751 0.141717 0.021388 +v -0.171377 0.139925 0.023982 +v -0.171922 0.137416 0.026186 +v -0.172348 0.134300 0.027841 +v -0.172624 0.130745 0.028801 +v -0.172726 0.126967 0.028957 +v -0.172643 0.123217 0.028248 +v -0.172376 0.119758 0.026668 +v -0.171939 0.116862 0.024278 +v -0.171359 0.114785 0.021210 +v -0.170677 0.113745 0.017687 +v -0.169533 0.113890 0.014049 +v -0.169945 0.113872 0.014009 +v -0.169468 0.113798 0.017789 +v -0.169433 0.114863 0.021349 +v -0.169427 0.116948 0.024432 +v -0.169448 0.119834 0.026825 +v -0.169500 0.123269 0.028400 +v -0.169582 0.126989 0.029101 +v -0.169694 0.130735 0.028934 +v -0.169833 0.134267 0.027959 +v -0.169993 0.137372 0.026285 +v -0.170165 0.139887 0.024053 +v -0.170338 0.141701 0.021417 +v -0.170978 0.139851 0.023947 +v -0.171561 0.137322 0.026065 +v -0.172050 0.134226 0.027635 +v -0.172409 0.130723 0.028530 +v -0.172610 0.127017 0.028652 +v -0.172636 0.123341 0.027947 +v -0.172480 0.119943 0.026410 +v -0.172147 0.117076 0.024090 +v -0.171657 0.114987 0.021101 +v -0.171043 0.113892 0.017640 +v -0.170349 0.113938 0.013987 +v -0.169220 0.115168 0.010518 +v -0.169631 0.115152 0.010473 +v -0.169141 0.113990 0.014104 +v -0.169119 0.113976 0.017802 +v -0.169148 0.115088 0.021282 +v -0.169222 0.117176 0.024270 +v -0.169338 0.120025 0.026578 +v -0.169493 0.123393 0.028099 +v -0.169681 0.127037 0.028786 +v -0.169894 0.130715 0.028644 +v -0.170120 0.134201 0.027726 +v -0.170349 0.137295 0.026128 +v -0.170564 0.139838 0.023971 +v -0.171161 0.137269 0.026014 +v -0.171688 0.134172 0.027497 +v -0.172110 0.130705 0.028314 +v -0.172396 0.127062 0.028383 +v -0.172523 0.123463 0.027662 +v -0.172477 0.120138 0.026148 +v -0.172256 0.117320 0.023887 +v -0.171872 0.115240 0.020978 +v 
-0.171346 0.114109 0.017592 +v -0.170719 0.114082 0.013983 +v -0.170037 0.115211 0.010469 +v -0.168945 0.117428 0.007419 +v -0.169356 0.117415 0.007371 +v -0.168831 0.115258 0.010600 +v -0.168797 0.114165 0.014170 +v -0.168839 0.114218 0.017804 +v -0.168949 0.115358 0.021189 +v -0.169117 0.117427 0.024081 +v -0.169335 0.120219 0.026317 +v -0.169594 0.123512 0.027803 +v -0.169881 0.127079 0.028498 +v -0.170180 0.130698 0.028401 +v -0.170475 0.134156 0.027555 +v -0.170747 0.137260 0.026036 +v -0.171287 0.134141 0.027436 +v -0.171748 0.130691 0.028168 +v -0.172098 0.127100 0.028169 +v -0.172310 0.123574 0.027411 +v -0.172367 0.120328 0.025902 +v -0.172258 0.117575 0.023685 +v -0.171987 0.115527 0.020850 +v -0.171568 0.114381 0.017546 +v -0.171028 0.114296 0.013999 +v -0.170411 0.115341 0.010505 +v -0.169764 0.117463 0.007381 +v -0.169147 0.120370 0.005004 +v -0.168736 0.120381 0.005054 +v -0.168558 0.117501 0.007524 +v -0.168491 0.115415 0.010714 +v -0.168524 0.114403 0.014243 +v -0.168648 0.114508 0.017793 +v -0.168851 0.115653 0.021076 +v -0.169119 0.117683 0.023878 +v -0.169439 0.120404 0.026059 +v -0.169795 0.123616 0.027533 +v -0.170167 0.127113 0.028257 +v -0.170534 0.130687 0.028223 +v -0.170873 0.134136 0.027456 +v -0.171346 0.130684 0.028102 +v -0.171735 0.127128 0.028025 +v -0.172013 0.123667 0.027213 +v -0.172157 0.120502 0.025686 +v -0.172154 0.117826 0.023496 +v -0.171996 0.115828 0.020726 +v -0.171691 0.114690 0.017505 +v -0.171256 0.114564 0.014033 +v -0.170725 0.115534 0.010579 +v -0.170140 0.117570 0.007449 +v -0.169555 0.120412 0.005020 +v -0.169932 0.120503 0.005101 +v -0.168349 0.120444 0.005167 +v -0.170252 0.120638 0.005241 +v -0.168015 0.120555 0.005335 +v -0.170494 0.120807 0.005431 +v -0.167754 0.120706 0.005547 +v -0.170641 0.121000 0.005658 +v -0.167585 0.120886 0.005787 +v -0.170683 0.121202 0.005907 +v -0.167519 0.121084 0.006040 +v -0.170618 0.121400 0.006160 +v -0.167561 0.121287 0.006289 +v -0.170449 0.121580 0.006400 +v -0.167708 
0.121479 0.006516 +v -0.170188 0.121731 0.006612 +v -0.167950 0.121648 0.006706 +v -0.169853 0.121842 0.006780 +v -0.168271 0.121783 0.006847 +v -0.169467 0.121905 0.006893 +v -0.168648 0.121875 0.006927 +v -0.169056 0.121916 0.006943 +v -0.168222 0.117630 0.007677 +v -0.168224 0.115630 0.010852 +v -0.168340 0.114689 0.014317 +v -0.168558 0.114826 0.017770 +v -0.168860 0.115955 0.020952 +v -0.169228 0.117926 0.023676 +v -0.169643 0.120567 0.025821 +v -0.170083 0.123699 0.027306 +v -0.170522 0.127136 0.028080 +v -0.170932 0.130682 0.028121 +v -0.171333 0.127143 0.027960 +v -0.171651 0.123735 0.027080 +v -0.171863 0.120647 0.025517 +v -0.171949 0.118055 0.023333 +v -0.171898 0.116124 0.020613 +v -0.171708 0.115014 0.017472 +v -0.171388 0.114868 0.014083 +v -0.170960 0.115775 0.010687 +v -0.170459 0.117727 0.007572 +v -0.168464 0.119063 0.009025 +v -0.168840 0.119169 0.009094 +v -0.168145 0.118905 0.008903 +v -0.167905 0.118708 0.008735 +v -0.167760 0.118483 0.008534 +v -0.167720 0.118248 0.008312 +v -0.167788 0.118017 0.008085 +v -0.167959 0.117806 0.007869 +v -0.170699 0.117925 0.007739 +v -0.170844 0.118149 0.007941 +v -0.170883 0.118384 0.008162 +v -0.170815 0.118615 0.008389 +v -0.170644 0.118826 0.008606 +v -0.170381 0.119002 0.008797 +v -0.170045 0.119131 0.008950 +v -0.169658 0.119204 0.009055 +v -0.169247 0.119217 0.009104 +v -0.168047 0.115887 0.011005 +v -0.168258 0.115002 0.014387 +v -0.168575 0.115150 0.017737 +v -0.168976 0.116242 0.020824 +v -0.169437 0.118140 0.023488 +v -0.169934 0.120697 0.025620 +v -0.170438 0.123755 0.027139 +v -0.170919 0.127146 0.027979 +v -0.171250 0.123774 0.027021 +v -0.171503 0.120753 0.025405 +v -0.171658 0.118245 0.023208 +v -0.171700 0.116394 0.020520 +v -0.171618 0.115332 0.017450 +v -0.171413 0.115188 0.014145 +v -0.171099 0.116049 0.010820 +v -0.168694 0.117165 0.011627 +v -0.169067 0.117295 0.011663 +v -0.168379 0.116973 0.011552 +v -0.168145 0.116731 0.011445 +v -0.168006 0.116457 0.011311 +v -0.167973 0.116170 
0.011161 +v -0.171132 0.116337 0.010970 +v -0.171058 0.116619 0.011127 +v -0.170881 0.116876 0.011279 +v -0.170614 0.117091 0.011417 +v -0.170274 0.117249 0.011531 +v -0.169885 0.117338 0.011613 +v -0.169473 0.117354 0.011658 +v -0.168284 0.115322 0.014450 +v -0.168698 0.115459 0.017696 +v -0.169190 0.116495 0.020701 +v -0.169731 0.118311 0.023327 +v -0.170290 0.120785 0.025471 +v -0.170836 0.123781 0.027041 +v -0.171103 0.120814 0.025359 +v -0.171301 0.118385 0.023129 +v -0.171414 0.116619 0.020453 +v -0.171427 0.115623 0.017438 +v -0.171331 0.115501 0.014216 +v -0.168953 0.116107 0.014549 +v -0.169323 0.116252 0.014546 +v -0.168644 0.115894 0.014534 +v -0.168415 0.115626 0.014500 +v -0.171148 0.115787 0.014290 +v -0.170874 0.116025 0.014363 +v -0.170530 0.116200 0.014429 +v -0.170139 0.116300 0.014483 +v -0.169727 0.116318 0.014523 +v -0.168920 0.115732 0.017650 +v -0.169488 0.116697 0.020592 +v -0.170090 0.118427 0.023204 +v -0.170689 0.120824 0.025381 +v -0.170903 0.118465 0.023101 +v -0.171061 0.116784 0.020417 +v -0.171147 0.115865 0.017440 +v -0.169589 0.116095 0.017555 +v -0.169223 0.115948 0.017602 +v -0.170798 0.116043 0.017453 +v -0.170405 0.116144 0.017478 +v -0.169992 0.116162 0.017513 +v -0.169850 0.116833 0.020504 +v -0.170489 0.118479 0.023127 +v -0.170664 0.116878 0.020413 +v -0.170252 0.116895 0.020443 +vn 0.7927 -0.0000 0.6096 +vn 0.5613 -0.5613 0.6082 +vn 0.5614 -0.5614 0.6080 +vn 0.5616 -0.5616 0.6077 +vn 0.5610 -0.5610 0.6087 +vn 0.6093 -0.0000 0.7930 +vn 0.5610 0.5610 0.6087 +vn 0.5610 0.5610 0.6088 +vn 0.9244 -0.0000 0.3814 +vn -0.0000 -0.7939 0.6081 +vn 0.4304 -0.4305 0.7934 +vn 0.4302 -0.4303 0.7936 +vn 0.4301 -0.4301 0.7937 +vn 0.4307 -0.4307 0.7930 +vn 0.6534 -0.6534 0.3822 +vn 0.6534 -0.6534 0.3821 +vn 0.3834 -0.0000 0.9236 +vn 0.4307 0.4307 0.7931 +vn -0.0000 0.7927 0.6096 +vn 0.6534 0.6534 0.3822 +vn 0.6534 0.6534 0.3821 +vn 0.9914 -0.0000 0.1305 +vn 0.9914 -0.0000 0.1309 +vn 0.9914 -0.0000 0.1307 +vn -0.5616 -0.5616 0.6077 +vn 
-0.5616 -0.5616 0.6076 +vn -0.0000 -0.6080 0.7939 +vn -0.0000 -0.6081 0.7939 +vn -0.0000 -0.9244 0.3814 +vn -0.0000 -0.9244 0.3813 +vn 0.2710 -0.2710 0.9237 +vn 0.7010 -0.7009 0.1316 +vn 0.7009 -0.7010 0.1317 +vn 0.1303 -0.0000 0.9915 +vn 0.2710 0.2710 0.9237 +vn -0.0000 0.6093 0.7930 +vn -0.0000 0.6092 0.7930 +vn -0.5613 0.5613 0.6083 +vn -0.5614 0.5614 0.6081 +vn -0.5616 0.5616 0.6077 +vn -0.5610 0.5610 0.6087 +vn -0.0000 0.9244 0.3814 +vn -0.0000 0.9244 0.3813 +vn 0.7008 0.7009 0.1327 +vn 0.7009 0.7009 0.1322 +vn 0.7009 0.7010 0.1316 +vn 0.7008 0.7008 0.1333 +vn 0.9915 -0.0000 -0.1302 +vn 0.9915 -0.0000 -0.1303 +vn -0.7938 -0.0000 0.6082 +vn -0.7939 -0.0000 0.6081 +vn -0.4301 -0.4301 0.7937 +vn -0.6534 -0.6534 0.3821 +vn -0.6534 -0.6534 0.3822 +vn -0.0000 -0.3834 0.9236 +vn -0.0000 -0.9914 0.1306 +vn 0.0921 -0.0921 0.9915 +vn 0.7010 -0.7010 -0.1313 +vn -0.1302 -0.0000 0.9915 +vn -0.1303 -0.0000 0.9915 +vn 0.0921 0.0921 0.9915 +vn -0.0000 0.3834 0.9236 +vn -0.4305 0.4305 0.7933 +vn -0.4303 0.4303 0.7935 +vn -0.4301 0.4302 0.7937 +vn -0.4307 0.4307 0.7931 +vn -0.6534 0.6534 0.3822 +vn -0.6534 0.6534 0.3821 +vn -0.0000 0.9911 0.1330 +vn -0.0000 0.9938 0.1110 +vn -0.0000 -0.0000 1.0000 +vn -0.0001 0.0001 1.0000 +vn 0.0014 0.9911 0.1330 +vn 0.7009 0.7009 -0.1318 +vn 0.7009 0.7009 -0.1325 +vn 0.7010 0.7010 -0.1313 +vn 0.7008 0.7008 -0.1330 +vn 0.9244 -0.0000 -0.3813 +vn 0.9244 -0.0000 -0.3814 +vn -0.6081 -0.0000 0.7938 +vn -0.6081 -0.0000 0.7939 +vn -0.9244 -0.0000 0.3813 +vn -0.9244 -0.0000 0.3814 +vn -0.2710 -0.2710 0.9237 +vn -0.7010 -0.7009 0.1317 +vn -0.7010 -0.7010 0.1316 +vn -0.0000 -0.1303 0.9915 +vn -0.0000 -0.9915 -0.1303 +vn -0.0921 0.0921 0.9915 +vn 0.6534 -0.6534 -0.3822 +vn 0.6534 -0.6534 -0.3821 +vn -0.3834 -0.0000 0.9236 +vn -0.0921 -0.0922 0.9915 +vn -0.0922 -0.0921 0.9915 +vn -0.0000 0.1303 0.9915 +vn -0.2710 0.2710 0.9237 +vn -0.7008 0.7008 0.1328 +vn -0.7009 0.7009 0.1322 +vn -0.7010 0.7009 0.1316 +vn -0.7008 0.7008 0.1334 +vn -0.0000 0.9938 
-0.1112 +vn -0.0000 0.9912 -0.1326 +vn 0.0162 0.9937 -0.1112 +vn 0.0013 0.9935 -0.1134 +vn 0.6534 0.6534 -0.3821 +vn 0.6534 0.6534 -0.3822 +vn 0.7927 -0.0000 -0.6096 +vn -0.9915 -0.0000 0.1305 +vn -0.9914 -0.0000 0.1305 +vn -0.9914 -0.0000 0.1308 +vn -0.9914 -0.0000 0.1306 +vn -0.0921 -0.0921 0.9915 +vn -0.7010 -0.7010 -0.1313 +vn -0.7010 -0.7010 -0.1314 +vn -0.0000 -0.9244 -0.3814 +vn -0.0000 -0.9244 -0.3813 +vn -0.2710 0.2709 0.9237 +vn 0.5614 -0.5614 -0.6080 +vn 0.5612 -0.5612 -0.6084 +vn 0.5616 -0.5616 -0.6077 +vn 0.5610 -0.5610 -0.6087 +vn -0.6093 -0.0000 0.7930 +vn -0.6092 -0.0000 0.7930 +vn -0.2710 -0.2709 0.9237 +vn -0.7009 0.7009 -0.1325 +vn -0.7009 0.7009 -0.1319 +vn -0.7010 0.7010 -0.1313 +vn -0.7008 0.7008 -0.1330 +vn -0.0000 0.9230 -0.3849 +vn -0.0000 0.9225 -0.3859 +vn -0.0000 0.9244 -0.3813 +vn -0.0000 0.9226 -0.3857 +vn 0.5610 0.5610 -0.6088 +vn 0.5610 0.5610 -0.6087 +vn 0.6093 -0.0000 -0.7930 +vn -0.9915 -0.0000 -0.1302 +vn 0.0922 0.0921 0.9915 +vn 0.0921 0.0922 0.9915 +vn -0.6534 -0.6534 -0.3821 +vn -0.6534 -0.6534 -0.3822 +vn -0.0000 -0.7938 -0.6081 +vn -0.0000 -0.7939 -0.6081 +vn -0.4303 0.4304 0.7935 +vn -0.4301 0.4301 0.7937 +vn 0.4303 -0.4303 -0.7935 +vn 0.4305 -0.4305 -0.7933 +vn 0.4301 -0.4301 -0.7937 +vn 0.4307 -0.4307 -0.7931 +vn -0.7928 -0.0000 0.6095 +vn -0.7927 -0.0000 0.6096 +vn -0.4307 -0.4307 0.7931 +vn -0.6534 0.6534 -0.3822 +vn -0.6534 0.6534 -0.3821 +vn -0.0000 0.7927 -0.6096 +vn 0.4307 0.4307 -0.7931 +vn 0.4307 0.4307 -0.7930 +vn 0.3834 -0.0000 -0.9236 +vn 0.1302 -0.0000 0.9915 +vn -0.9244 -0.0000 -0.3814 +vn -0.9244 -0.0000 -0.3813 +vn 0.2710 0.2709 0.9237 +vn -0.5616 -0.5616 -0.6076 +vn -0.5616 -0.5615 -0.6077 +vn -0.0000 0.6081 0.7939 +vn -0.0000 -0.6081 -0.7939 +vn -0.5612 0.5612 0.6084 +vn 0.2710 -0.2710 -0.9237 +vn -0.5610 -0.5610 0.6088 +vn -0.0000 -0.6092 0.7930 +vn -0.0000 -0.6093 0.7930 +vn 0.2710 -0.2709 0.9237 +vn -0.5614 0.5614 -0.6080 +vn -0.5612 0.5612 -0.6084 +vn -0.5616 0.5616 -0.6077 +vn -0.5610 0.5610 -0.6088 
+vn -0.0000 0.6092 -0.7930 +vn 0.2710 0.2710 -0.9237 +vn 0.1303 -0.0000 -0.9915 +vn -0.7939 -0.0000 -0.6080 +vn -0.7938 -0.0000 -0.6081 +vn 0.4301 0.4301 0.7937 +vn -0.4301 -0.4302 -0.7937 +vn -0.4301 -0.4301 -0.7937 +vn -0.0000 0.7939 0.6081 +vn -0.0000 -0.3834 -0.9236 +vn 0.0922 -0.0921 -0.9915 +vn -0.0000 -0.7927 0.6096 +vn 0.4305 -0.4306 0.7932 +vn 0.4303 -0.4303 0.7935 +vn 0.4307 -0.4307 0.7931 +vn -0.4303 0.4303 -0.7935 +vn -0.4301 0.4301 -0.7937 +vn -0.4307 0.4307 -0.7931 +vn -0.0000 0.3834 -0.9236 +vn 0.0922 0.0921 -0.9915 +vn -0.1303 -0.0000 -0.9915 +vn 0.6081 -0.0000 0.7938 +vn 0.6080 -0.0000 0.7939 +vn -0.6081 -0.0000 -0.7938 +vn -0.6081 -0.0000 -0.7939 +vn 0.5616 0.5616 0.6077 +vn -0.2710 -0.2710 -0.9237 +vn -0.0000 -0.1303 -0.9915 +vn -0.7010 0.7010 0.1316 +vn -0.7009 0.7010 0.1317 +vn -0.0922 0.0922 -0.9915 +vn -0.0922 0.0921 -0.9915 +vn -0.9915 -0.0000 -0.1303 +vn -0.7008 -0.7009 0.1327 +vn -0.7009 -0.7009 0.1322 +vn -0.7008 -0.7008 0.1333 +vn 0.5612 -0.5612 0.6084 +vn 0.5610 -0.5610 0.6088 +vn -0.2710 0.2710 -0.9237 +vn -0.0000 0.1303 -0.9915 +vn -0.0922 -0.0921 -0.9915 +vn -0.3834 -0.0000 -0.9236 +vn 0.7939 -0.0000 0.6081 +vn 0.7939 -0.0000 0.6080 +vn -0.0000 0.9914 0.1306 +vn -0.7010 0.7010 -0.1314 +vn -0.7009 -0.7009 -0.1325 +vn -0.7009 -0.7009 -0.1319 +vn -0.7008 -0.7008 -0.1330 +vn -0.0000 -0.9911 0.1330 +vn -0.6093 -0.0000 -0.7930 +vn -0.6092 -0.0000 -0.7930 +vn 0.9244 -0.0000 0.3813 +vn 0.7010 0.7009 0.1317 +vn -0.0000 0.9915 -0.1303 +vn -0.0000 0.3835 -0.9236 +vn -0.4304 0.4303 -0.7935 +vn -0.4305 0.4304 -0.7933 +vn -0.7927 -0.0000 -0.6096 +vn -0.7928 -0.0000 -0.6095 +vn -0.0000 -0.9912 -0.1326 +vn 0.7009 -0.7009 0.1321 +vn 0.7008 -0.7008 0.1328 +vn 0.7008 -0.7008 0.1333 +vn 0.0922 -0.0922 -0.9915 +vn -0.0000 -0.3835 -0.9236 +vn -0.4307 -0.4307 -0.7931 +vn 0.9914 -0.0000 0.1306 +vn 0.7010 0.7010 -0.1314 +vn -0.0000 0.9244 -0.3814 +vn -0.0000 0.6081 -0.7939 +vn -0.5614 0.5613 -0.6081 +vn -0.5610 -0.5610 -0.6088 +vn 0.7009 -0.7009 -0.1319 +vn 
0.7009 -0.7009 -0.1325 +vn 0.7008 -0.7008 -0.1330 +vn 0.7010 -0.7010 -0.1314 +vn -0.0000 -0.6092 -0.7930 +vn 0.4301 0.4301 -0.7937 +vn -0.0000 0.7939 -0.6081 +vn -0.0000 -0.7927 -0.6096 +vn 0.4302 -0.4303 -0.7936 +vn 0.6081 -0.0000 -0.7938 +vn 0.6080 -0.0000 -0.7939 +vn 0.5616 0.5616 -0.6077 +vn 0.5613 -0.5614 -0.6081 +vn 0.5610 -0.5610 -0.6088 +vn 0.7939 -0.0000 -0.6080 +vn 0.7939 -0.0000 -0.6081 +vn -0.0000 -0.0000 -1.0000 +vn -0.9895 -0.0000 -0.1448 +vn -0.0000 0.9815 -0.1914 +vn 0.9895 -0.0000 -0.1446 +vn -0.0000 -0.9815 -0.1914 +vn -0.0000 -0.9815 -0.1915 +vn 0.1294 0.9874 -0.0910 +vn 0.1289 0.9875 -0.0910 +vn 0.1297 0.9874 -0.0910 +vn 0.1287 0.9875 -0.0910 +vn 0.3825 0.9201 -0.0848 +vn 0.3833 0.9197 -0.0847 +vn 0.3820 0.9202 -0.0848 +vn 0.3838 0.9195 -0.0847 +vn 0.0058 -0.1015 -0.9948 +vn 0.0038 -0.1015 -0.9948 +vn 0.0092 -0.1014 -0.9948 +vn -0.0026 -0.1013 -0.9948 +vn -0.0027 -0.1013 -0.9949 +vn 0.0001 -0.1011 -0.9949 +vn -0.0008 -0.1011 -0.9949 +vn 0.0009 -0.1012 -0.9949 +vn 0.0009 -0.1015 -0.9948 +vn -0.0002 -0.1015 -0.9948 +vn -0.0006 -0.1016 -0.9948 +vn 0.0007 -0.1018 -0.9948 +vn -0.0000 -0.1018 -0.9948 +vn -0.0007 -0.1018 -0.9948 +vn 0.0005 -0.1015 -0.9948 +vn 0.0001 -0.1016 -0.9948 +vn 0.0010 -0.1015 -0.9948 +vn 0.0011 -0.1012 -0.9949 +vn -0.0009 -0.1011 -0.9949 +vn -0.0002 -0.1011 -0.9949 +vn 0.0015 -0.1012 -0.9949 +vn 0.0034 -0.1013 -0.9948 +vn -0.0043 -0.1016 -0.9948 +vn -0.0052 -0.1015 -0.9948 +vn -0.0089 -0.1014 -0.9948 +vn -0.1290 0.9875 -0.0910 +vn -0.1294 0.9874 -0.0910 +vn -0.1297 0.9874 -0.0910 +vn -0.1286 0.9875 -0.0910 +vn 0.1305 -0.9873 -0.0910 +vn 0.1300 -0.9873 -0.0910 +vn 0.1297 -0.9874 -0.0910 +vn 0.1308 -0.9872 -0.0910 +vn 0.3822 -0.0000 -0.9241 +vn 0.6070 0.7913 -0.0729 +vn 0.6069 0.7914 -0.0729 +vn 0.6066 0.7916 -0.0730 +vn -0.7917 0.6083 -0.0561 +vn -0.7918 0.6082 -0.0561 +vn -0.7920 0.6079 -0.0560 +vn -0.6069 0.7914 -0.0729 +vn -0.6068 0.7915 -0.0730 +vn -0.6066 0.7916 -0.0730 +vn -0.6070 0.7913 -0.0729 +vn -0.3824 0.9201 -0.0848 
+vn -0.3816 0.9204 -0.0849 +vn -0.3812 0.9206 -0.0849 +vn -0.3830 0.9199 -0.0848 +vn 0.7920 0.6080 -0.0560 +vn 0.7917 0.6083 -0.0561 +vn 0.9232 0.3827 -0.0353 +vn 0.9230 0.3832 -0.0353 +vn 0.9231 0.3830 -0.0353 +vn 0.9232 0.3828 -0.0353 +vn 0.9914 0.1304 -0.0120 +vn 0.9914 0.1303 -0.0120 +vn 0.9914 -0.1304 0.0120 +vn 0.9914 -0.1303 0.0120 +vn 0.9231 -0.3831 0.0353 +vn 0.9230 -0.3831 0.0354 +vn 0.9228 -0.3837 0.0355 +vn 0.7927 -0.6070 0.0559 +vn 0.7927 -0.6070 0.0560 +vn 0.6067 -0.7916 0.0730 +vn 0.6069 -0.7914 0.0729 +vn 0.6066 -0.7916 0.0730 +vn 0.6071 -0.7913 0.0729 +vn 0.3821 -0.9202 0.0847 +vn 0.3820 -0.9203 0.0848 +vn 0.3818 -0.9203 0.0848 +vn -0.0000 -0.9958 0.0918 +vn 0.1303 -0.9873 0.0909 +vn -0.0000 -0.9957 0.0921 +vn 0.1283 -0.9875 0.0910 +vn -0.0000 -0.7397 -0.6729 +vn -0.1112 -0.9896 0.0912 +vn -0.1287 -0.9875 0.0910 +vn -0.3812 -0.9206 0.0849 +vn -0.3810 -0.9207 0.0849 +vn -0.6069 -0.7915 0.0730 +vn -0.6071 -0.7913 0.0729 +vn -0.6066 -0.7917 0.0730 +vn -0.7927 -0.6070 0.0560 +vn -0.7927 -0.6070 0.0559 +vn -0.9229 -0.3835 0.0353 +vn -0.9228 -0.3836 0.0353 +vn -0.9230 -0.3831 0.0353 +vn -0.9227 -0.3839 0.0352 +vn -0.9914 -0.1303 0.0120 +vn -0.9914 -0.1304 0.0120 +vn -0.9914 0.1303 -0.0120 +vn -0.9914 0.1304 -0.0120 +vn -0.9232 0.3828 -0.0353 +vn -0.9230 0.3831 -0.0353 +vn -0.0070 0.0916 -0.9958 +vn -0.0060 0.0915 -0.9958 +vn -0.0101 0.0916 -0.9957 +vn 0.0003 0.0917 -0.9958 +vn 0.0001 0.0917 -0.9958 +vn -0.0003 0.0917 -0.9958 +vn -0.0000 0.0917 -0.9958 +vn 0.0008 0.0917 -0.9958 +vn 0.0009 0.0917 -0.9958 +vn 0.0006 0.0917 -0.9958 +vn -0.0010 0.0919 -0.9958 +vn -0.0010 0.0920 -0.9958 +vn -0.0000 0.0919 -0.9958 +vn 0.0009 0.0918 -0.9958 +vn 0.0020 0.0916 -0.9958 +vn -0.0011 0.0917 -0.9958 +vn -0.0008 0.0917 -0.9958 +vn -0.0001 0.0917 -0.9958 +vn 0.0058 0.0916 -0.9958 +vn 0.0072 0.0916 -0.9958 +vn 0.0101 0.0916 -0.9957 +vn 0.3815 -0.9205 -0.0848 +vn 0.3820 -0.9203 -0.0847 +vn 0.3819 -0.9203 -0.0848 +vn 0.3817 -0.9204 -0.0848 +vn -0.1301 -0.9873 -0.0910 +vn 
-0.1305 -0.9873 -0.0910 +vn -0.1297 -0.9874 -0.0910 +vn -0.1308 -0.9872 -0.0910 +vn 0.6066 -0.0000 -0.7950 +vn -0.3814 -0.0000 -0.9244 +vn -0.7930 -0.0000 -0.6092 +vn -0.7930 -0.0000 -0.6093 +vn -0.6066 -0.0000 -0.7950 +vn -0.6065 -0.0000 -0.7951 +vn 0.7930 -0.0000 -0.6092 +vn 0.7929 -0.0000 -0.6093 +vn 0.9229 -0.0000 -0.3851 +vn 0.9914 -0.0000 -0.1309 +vn 0.9914 -0.0000 -0.1308 +vn 0.9232 -0.0000 0.3842 +vn 0.9232 -0.0000 0.3843 +vn 0.9231 -0.0000 0.3845 +vn 0.7921 -0.0000 0.6104 +vn 0.6066 -0.0000 0.7950 +vn 0.3822 -0.0000 0.9241 +vn -0.1307 -0.0000 0.9914 +vn -0.3814 -0.0000 0.9244 +vn -0.6066 -0.0000 0.7950 +vn -0.6065 -0.0000 0.7951 +vn -0.7921 -0.0000 0.6104 +vn -0.9232 -0.0000 0.3842 +vn -0.9232 -0.0000 0.3844 +vn -0.9914 -0.0000 0.1309 +vn -0.9914 -0.0000 -0.1309 +vn -0.9228 -0.0000 -0.3852 +vn -0.9229 -0.0000 -0.3851 +vn -0.3806 0.9208 0.0849 +vn -0.3811 0.9206 0.0849 +vn -0.0000 0.9958 0.0918 +vn -0.1308 0.9872 0.0909 +vn -0.0000 0.9957 0.0922 +vn -0.1317 0.9871 0.0910 +vn -0.0000 0.7395 -0.6731 +vn 0.1114 0.9896 0.0912 +vn 0.1309 0.9872 0.0910 +vn 0.3815 0.9205 0.0848 +vn 0.3818 0.9203 0.0848 +vn 0.3819 0.9203 0.0848 +vn 0.6066 0.7916 0.0730 +vn 0.6067 0.7916 0.0730 +vn 0.7922 0.6076 0.0560 +vn 0.7920 0.6080 0.0561 +vn 0.7924 0.6074 0.0560 +vn 0.7919 0.6081 0.0561 +vn 0.9231 0.3830 0.0353 +vn 0.9914 0.1304 0.0120 +vn 0.9914 -0.1304 -0.0120 +vn 0.9914 -0.1301 -0.0120 +vn 0.9229 -0.3835 -0.0354 +vn 0.9229 -0.3834 -0.0353 +vn 0.9227 -0.3838 -0.0354 +vn 0.9230 -0.3831 -0.0353 +vn 0.7925 -0.6073 -0.0560 +vn 0.7927 -0.6070 -0.0559 +vn 0.7924 -0.6074 -0.0560 +vn 0.7928 -0.6069 -0.0559 +vn 0.6077 -0.7908 -0.0729 +vn 0.6072 -0.7912 -0.0730 +vn 0.6067 -0.7916 -0.0730 +vn 0.6082 -0.7905 -0.0729 +vn -0.3808 -0.9207 -0.0849 +vn -0.3811 -0.9206 -0.0849 +vn -0.6072 -0.7912 -0.0730 +vn -0.6077 -0.7908 -0.0729 +vn -0.6067 -0.7916 -0.0730 +vn -0.6082 -0.7905 -0.0729 +vn -0.7925 -0.6074 -0.0560 +vn -0.7925 -0.6073 -0.0560 +vn -0.7928 -0.6069 -0.0559 +vn -0.9230 -0.3832 
-0.0353 +vn -0.9228 -0.3836 -0.0354 +vn -0.9227 -0.3839 -0.0354 +vn -0.9230 -0.3831 -0.0353 +vn -0.9914 -0.1301 -0.0120 +vn -0.9914 -0.1304 -0.0120 +vn -0.9914 0.1304 0.0120 +vn -0.9231 0.3830 0.0353 +vn -0.9233 0.3826 0.0353 +vn -0.7921 0.6078 0.0561 +vn -0.7923 0.6075 0.0561 +vn -0.7924 0.6074 0.0560 +vn -0.7919 0.6081 0.0560 +vn -0.6067 0.7916 0.0730 +vn -0.6066 0.7916 0.0730 +vn 0.1302 0.9873 -0.0910 +vn 0.1304 0.9873 -0.0910 +vn 0.1298 0.9874 -0.0910 +vn 0.1310 0.9872 -0.0910 +vn 0.3821 0.9202 -0.0848 +vn 0.3820 0.9203 -0.0848 +vn 0.3818 0.9204 -0.0848 +vn 0.0061 -0.1014 -0.9948 +vn 0.0062 -0.1009 -0.9949 +vn 0.0093 -0.1015 -0.9948 +vn 0.0010 -0.1018 -0.9948 +vn 0.0005 -0.1017 -0.9948 +vn -0.0014 -0.1015 -0.9948 +vn -0.0013 -0.1015 -0.9948 +vn -0.0005 -0.1015 -0.9948 +vn 0.0002 -0.1015 -0.9948 +vn 0.0016 -0.1015 -0.9948 +vn 0.0017 -0.1015 -0.9948 +vn -0.0000 -0.1014 -0.9948 +vn -0.0017 -0.1014 -0.9948 +vn -0.0007 -0.1015 -0.9948 +vn 0.0004 -0.1015 -0.9948 +vn 0.0012 -0.1015 -0.9948 +vn -0.0003 -0.1014 -0.9948 +vn -0.0000 -0.1017 -0.9948 +vn -0.0008 -0.1016 -0.9948 +vn -0.0053 -0.1014 -0.9948 +vn -0.0073 -0.1014 -0.9948 +vn -0.0093 -0.1014 -0.9948 +vn -0.1306 0.9872 -0.0910 +vn -0.1302 0.9873 -0.0910 +vn -0.1298 0.9874 -0.0910 +vn -0.1310 0.9872 -0.0910 +vn 0.1301 -0.9873 -0.0910 +vn 0.1307 -0.9872 -0.0910 +vn 0.6069 0.7914 -0.0730 +vn 0.6067 0.7916 -0.0730 +vn -0.7926 0.6071 -0.0560 +vn -0.7928 0.6069 -0.0559 +vn -0.6069 0.7914 -0.0730 +vn -0.3811 0.9206 -0.0849 +vn -0.3809 0.9207 -0.0849 +vn 0.7928 0.6069 -0.0559 +vn 0.7927 0.6071 -0.0560 +vn 0.9227 0.3838 -0.0354 +vn 0.9229 0.3835 -0.0354 +vn 0.9914 0.1301 -0.0120 +vn 0.9227 -0.3840 0.0354 +vn 0.9229 -0.3835 0.0353 +vn 0.7928 -0.6069 0.0559 +vn 0.6070 -0.7913 0.0729 +vn 0.3819 -0.9203 0.0848 +vn 0.1304 -0.9873 0.0909 +vn 0.1307 -0.9872 0.0910 +vn -0.0000 -0.7388 -0.6739 +vn -0.1113 -0.9896 0.0912 +vn -0.0000 -0.9957 0.0922 +vn -0.1314 -0.9871 0.0910 +vn -0.3811 -0.9206 0.0849 +vn -0.6066 -0.7916 0.0730 +vn 
-0.7927 -0.6071 0.0560 +vn -0.7928 -0.6069 0.0559 +vn -0.9228 -0.3838 0.0354 +vn -0.9229 -0.3835 0.0354 +vn -0.9227 -0.3839 0.0354 +vn -0.9914 0.1301 -0.0120 +vn -0.9229 0.3835 -0.0354 +vn -0.9227 0.3838 -0.0354 +vn -0.0002 0.0928 -0.9957 +vn -0.0012 0.0929 -0.9957 +vn 0.0009 0.0928 -0.9957 +vn 0.0001 0.0934 -0.9956 +vn -0.0003 0.0925 -0.9957 +vn 0.0002 0.0922 -0.9957 +vn 0.0002 0.0921 -0.9958 +vn 0.0001 0.0920 -0.9958 +vn -0.0004 0.0920 -0.9958 +vn 0.0007 0.0918 -0.9958 +vn 0.0004 0.0917 -0.9958 +vn 0.0002 0.0916 -0.9958 +vn -0.0000 0.0916 -0.9958 +vn -0.0002 0.0917 -0.9958 +vn 0.0005 0.0918 -0.9958 +vn -0.0005 0.0920 -0.9958 +vn -0.0001 0.0921 -0.9958 +vn -0.0003 0.0926 -0.9957 +vn 0.0005 0.0928 -0.9957 +vn -0.0010 0.0927 -0.9957 +vn -0.0012 0.0928 -0.9957 +vn -0.0009 0.0928 -0.9957 +vn 0.3814 -0.9205 -0.0848 +vn -0.1299 -0.9873 -0.0910 +vn -0.1304 -0.9873 -0.0910 +vn -0.1307 -0.9872 -0.0910 +vn 0.6065 -0.0000 -0.7951 +vn -0.3813 -0.0000 -0.9244 +vn -0.7921 -0.0000 -0.6104 +vn 0.7921 -0.0000 -0.6104 +vn 0.9232 -0.0000 -0.3843 +vn 0.9232 -0.0000 -0.3842 +vn 0.9229 -0.0000 0.3852 +vn 0.9229 -0.0000 0.3851 +vn 0.7930 -0.0000 0.6093 +vn 0.7930 -0.0000 0.6092 +vn 0.6065 -0.0000 0.7951 +vn -0.3813 -0.0000 0.9244 +vn -0.7930 -0.0000 0.6092 +vn -0.9229 -0.0000 0.3852 +vn -0.9229 -0.0000 0.3851 +vn -0.9232 -0.0000 -0.3842 +vn -0.1307 0.9872 0.0909 +vn -0.1287 0.9875 0.0910 +vn -0.0000 0.7389 -0.6738 +vn 0.1112 0.9896 0.0912 +vn -0.0000 0.9957 0.0921 +vn 0.1280 0.9876 0.0910 +vn 0.3814 0.9205 0.0848 +vn 0.6077 0.7908 0.0729 +vn 0.6071 0.7913 0.0730 +vn 0.6082 0.7905 0.0729 +vn 0.7926 0.6072 0.0560 +vn 0.7925 0.6073 0.0560 +vn 0.7928 0.6069 0.0559 +vn 0.9227 0.3839 0.0354 +vn 0.9227 0.3838 0.0354 +vn 0.9914 0.1300 0.0120 +vn 0.9914 0.1301 0.0120 +vn 0.9228 -0.3836 -0.0354 +vn 0.9231 -0.3830 -0.0353 +vn 0.9227 -0.3839 -0.0354 +vn 0.6066 -0.7916 -0.0730 +vn -0.3807 -0.9208 -0.0849 +vn -0.3806 -0.9208 -0.0849 +vn -0.6066 -0.7916 -0.0730 +vn -0.7927 -0.6070 -0.0559 +vn -0.7923 
-0.6075 -0.0560 +vn -0.9229 -0.3835 -0.0354 +vn -0.9230 -0.3833 -0.0353 +vn -0.9231 -0.3830 -0.0353 +vn -0.9914 0.1301 0.0120 +vn -0.9227 0.3838 0.0354 +vn -0.9227 0.3839 0.0354 +vn -0.7928 0.6069 0.0559 +vn -0.7926 0.6072 0.0560 +vn -0.7925 0.6073 0.0560 +vn -0.6071 0.7913 0.0730 +vn -0.6077 0.7908 0.0729 +vn -0.6082 0.7905 0.0729 +vn 0.1301 0.9873 -0.0910 +vn 0.1306 0.9872 -0.0910 +vn 0.3818 0.9203 -0.0848 +vn 0.0060 -0.1014 -0.9948 +vn 0.0060 -0.1010 -0.9949 +vn 0.0093 -0.1014 -0.9948 +vn 0.0008 -0.1017 -0.9948 +vn 0.0005 -0.1016 -0.9948 +vn -0.0012 -0.1015 -0.9948 +vn 0.0014 -0.1015 -0.9948 +vn -0.0000 -0.1015 -0.9948 +vn -0.0002 -0.1014 -0.9948 +vn -0.0001 -0.1016 -0.9948 +vn 0.0009 -0.1017 -0.9948 +vn -0.0006 -0.1015 -0.9948 +vn -0.0051 -0.1015 -0.9948 +vn -0.0070 -0.1015 -0.9948 +vn -0.1301 0.9873 -0.0910 +vn -0.1309 0.9872 -0.0910 +vn 0.1304 -0.9873 -0.0910 +vn 0.1299 -0.9873 -0.0910 +vn 0.6070 0.7914 -0.0729 +vn -0.7924 0.6074 -0.0560 +vn -0.7921 0.6078 -0.0560 +vn -0.7928 0.6070 -0.0559 +vn -0.3810 0.9207 -0.0849 +vn 0.7921 0.6078 -0.0560 +vn 0.7924 0.6074 -0.0560 +vn 0.7928 0.6070 -0.0559 +vn 0.7918 0.6082 -0.0561 +vn 0.9232 0.3826 -0.0353 +vn 0.9229 -0.3835 0.0354 +vn 0.9227 -0.3839 0.0355 +vn 0.9227 -0.3839 0.0354 +vn 0.7927 -0.6071 0.0560 +vn 0.1305 -0.9873 0.0909 +vn 0.1280 -0.9876 0.0910 +vn -0.0000 -0.7384 -0.6744 +vn -0.1114 -0.9896 0.0912 +vn -0.1288 -0.9875 0.0910 +vn -0.6067 -0.7916 0.0730 +vn -0.6068 -0.7915 0.0730 +vn -0.6070 -0.7913 0.0729 +vn -0.9228 0.3835 -0.0354 +vn -0.9231 0.3830 -0.0353 +vn -0.9232 0.3827 -0.0353 +vn -0.0003 0.0928 -0.9957 +vn -0.0010 0.0928 -0.9957 +vn 0.0009 0.0929 -0.9957 +vn 0.0005 0.0931 -0.9957 +vn -0.0004 0.0925 -0.9957 +vn 0.0003 0.0920 -0.9958 +vn -0.0001 0.0920 -0.9958 +vn 0.0008 0.0918 -0.9958 +vn 0.0003 0.0916 -0.9958 +vn -0.0001 0.0915 -0.9958 +vn -0.0006 0.0917 -0.9958 +vn -0.0010 0.0918 -0.9958 +vn -0.0000 0.0921 -0.9958 +vn 0.0005 0.0933 -0.9956 +vn -0.0014 0.0928 -0.9957 +vn -0.0006 0.0930 -0.9957 +vn 
0.3820 -0.9203 -0.0848 +vn 0.6065 -0.0000 -0.7950 +vn -0.6065 -0.0000 -0.7950 +vn 0.6065 -0.0000 0.7950 +vn -0.6065 -0.0000 0.7950 +vn -0.9228 -0.0000 0.3852 +vn -0.9232 -0.0000 -0.3843 +vn -0.3808 0.9207 0.0849 +vn -0.0161 0.9956 0.0921 +vn -0.1288 0.9875 0.0910 +vn -0.0000 0.7379 -0.6749 +vn 0.1113 0.9896 0.0912 +vn 0.3816 0.9204 0.0849 +vn 0.3820 0.9203 0.0848 +vn 0.7925 0.6072 0.0560 +vn 0.7927 0.6070 0.0559 +vn 0.9230 0.3833 0.0353 +vn 0.9228 0.3836 0.0354 +vn 0.9914 -0.1300 -0.0120 +vn 0.9229 -0.3834 -0.0354 +vn 0.6066 -0.7917 -0.0730 +vn -0.6066 -0.7917 -0.0730 +vn -0.7926 -0.6071 -0.0559 +vn -0.7924 -0.6074 -0.0560 +vn -0.7928 -0.6070 -0.0559 +vn -0.9914 -0.1300 -0.0120 +vn -0.9228 0.3837 0.0354 +vn -0.9229 0.3834 0.0353 +vn -0.7927 0.6070 0.0559 +vn 0.3809 0.9207 -0.0849 +vn 0.3812 0.9206 -0.0849 +vn -0.0007 -0.1016 -0.9948 +vn 0.0001 -0.1015 -0.9948 +vn 0.0018 -0.1014 -0.9948 +vn -0.0018 -0.1012 -0.9949 +vn -0.1308 0.9872 -0.0910 +vn -0.1304 0.9873 -0.0910 +vn -0.1313 0.9872 -0.0910 +vn 0.3814 -0.0000 -0.9244 +vn -0.1306 -0.0000 -0.9914 +vn -0.6067 0.7916 -0.0730 +vn -0.3813 0.9205 -0.0849 +vn 0.9228 -0.3837 0.0354 +vn 0.9227 -0.3838 0.0354 +vn 0.3810 -0.9207 0.0849 +vn 0.3811 -0.9206 0.0849 +vn 0.3807 -0.9208 0.0849 +vn 0.1307 -0.9872 0.0909 +vn 0.1311 -0.9872 0.0910 +vn -0.0000 -0.7398 -0.6728 +vn -0.1116 -0.9896 0.0912 +vn -0.9227 -0.3840 0.0354 +vn -0.9914 0.1300 -0.0120 +vn -0.0002 0.0925 -0.9957 +vn -0.0000 0.0923 -0.9957 +vn -0.0000 0.0922 -0.9957 +vn 0.0007 0.0917 -0.9958 +vn -0.0003 0.0921 -0.9958 +vn -0.0002 0.0922 -0.9957 +vn 0.0001 0.0923 -0.9957 +vn -0.0013 0.0928 -0.9957 +vn 0.3811 -0.9206 -0.0849 +vn 0.3806 -0.9208 -0.0849 +vn -0.1300 -0.9873 -0.0910 +vn -0.1311 -0.9872 -0.0910 +vn 0.3814 -0.0000 0.9244 +vn 0.1307 -0.0000 0.9914 +vn -0.7930 -0.0000 0.6093 +vn -0.0000 0.7376 -0.6752 +vn 0.1284 0.9875 0.0910 +vn 0.3811 0.9206 0.0849 +vn 0.3809 0.9207 0.0849 +vn 0.3806 0.9208 0.0849 +vn 0.6078 0.7908 0.0729 +vn 0.6072 0.7912 0.0730 +vn 0.9230 
-0.3832 -0.0353 +vn -0.7924 -0.6074 -0.0559 +vn -0.7927 -0.6070 -0.0560 +vn -0.9229 -0.3834 -0.0354 +vn -0.9229 -0.3834 -0.0353 +vn -0.9914 -0.1303 -0.0120 +vn -0.9914 0.1300 0.0120 +vn -0.7928 0.6070 0.0559 +vn -0.6072 0.7912 0.0730 +vn 0.1305 0.9873 -0.0910 +vn -0.0010 -0.1015 -0.9948 +vn 0.0013 -0.1015 -0.9948 +vn -0.0003 -0.1015 -0.9948 +vn 0.0011 -0.1015 -0.9948 +vn -0.0000 -0.1016 -0.9948 +vn -0.0051 -0.1014 -0.9948 +vn -0.0071 -0.1014 -0.9948 +vn 0.7921 0.6079 -0.0560 +vn 0.9229 0.3834 -0.0354 +vn 0.9914 0.1299 -0.0120 +vn 0.7930 -0.6067 0.0559 +vn 0.3817 -0.9204 0.0848 +vn -0.0000 -0.7398 -0.6729 +vn 0.0005 0.0927 -0.9957 +vn 0.0003 0.0922 -0.9957 +vn -0.0001 0.0916 -0.9958 +vn -0.0010 0.0917 -0.9958 +vn -0.0001 0.0921 -0.9957 +vn 0.0001 0.0922 -0.9957 +vn -0.0003 0.0924 -0.9957 +vn 0.0006 0.0933 -0.9956 +vn -0.0014 0.0927 -0.9957 +vn 0.3816 -0.9204 -0.0848 +vn -0.0158 0.9956 0.0921 +vn -0.0000 0.7400 -0.6726 +vn 0.3820 0.9203 0.0847 +vn -0.7926 -0.6072 -0.0559 +vn -0.9229 -0.3833 -0.0353 +vn -0.9230 0.3833 0.0353 +vn -0.7928 0.6068 0.0559 +vn 0.0001 -0.6077 0.7941 +vn -0.0000 -0.6077 0.7941 +vn 0.0001 -0.3813 0.9244 +vn 0.0001 -0.3814 0.9244 +vn -0.0000 -0.7909 0.6119 +vn 0.7900 -0.6088 0.0727 +vn 0.7904 -0.6082 0.0728 +vn 0.7908 -0.6078 0.0728 +vn 0.7896 -0.6094 0.0727 +vn -0.0000 -0.3813 0.9244 +vn -0.0000 -0.7909 0.6120 +vn 0.0001 -0.1303 0.9915 +vn -0.8780 -0.4504 0.1618 +vn -0.8781 -0.4504 0.1618 +vn -0.0000 -0.9241 0.3822 +vn 0.0929 -0.0000 -0.9957 +vn 0.0933 -0.0000 -0.9956 +vn 0.0921 -0.0000 -0.9957 +vn 0.0946 -0.0000 -0.9955 +vn 0.0938 -0.0000 -0.9956 +vn 0.0916 -0.0000 -0.9958 +vn 0.0919 -0.0000 -0.9958 +vn 0.0917 -0.0000 -0.9958 +vn 0.0903 -0.0000 -0.9959 +vn 0.0925 -0.0000 -0.9957 +vn 0.0920 -0.0000 -0.9958 +vn 0.0899 -0.0000 -0.9960 +vn 0.0895 -0.0000 -0.9960 +vn 0.0906 -0.0000 -0.9959 +vn 0.0910 -0.0000 -0.9958 +vn 0.9207 -0.3808 0.0848 +vn 0.9208 -0.3807 0.0848 +vn 0.9206 -0.3812 0.0848 +vn 0.6088 -0.7913 0.0561 +vn 0.6090 -0.7912 0.0561 +vn 
0.6085 -0.7916 0.0560 +vn -0.0000 -0.1302 0.9915 +vn 0.0001 0.1303 0.9915 +vn -0.9470 -0.2093 0.2437 +vn -0.9470 -0.2093 0.2436 +vn -0.0000 -0.6077 0.7942 +vn -0.7514 -0.6560 0.0709 +vn -0.7515 -0.6559 0.0709 +vn -0.0000 -0.9913 0.1317 +vn -0.0000 -0.9914 0.1307 +vn -0.0000 -0.9914 0.1309 +vn -0.0000 -0.9914 0.1312 +vn -0.0000 -0.9913 0.1318 +vn -0.0000 -1.0000 -0.0000 +vn 0.9206 0.3811 0.0848 +vn 0.9208 0.3808 0.0848 +vn 0.9873 0.1303 0.0909 +vn 0.9873 0.1300 0.0909 +vn 0.9874 0.1297 0.0910 +vn 0.9872 0.1308 0.0909 +vn 0.9873 -0.1302 0.0909 +vn 0.9873 -0.1301 0.0909 +vn 0.9874 -0.1297 0.0910 +vn 0.9872 -0.1308 0.0909 +vn 0.3816 -0.9237 0.0352 +vn 0.3812 -0.9238 0.0351 +vn 0.3810 -0.9239 0.0351 +vn 0.3818 -0.9236 0.0352 +vn 0.1303 -0.9914 0.0120 +vn 0.1304 -0.9914 0.0120 +vn 0.1301 -0.9914 0.0120 +vn -0.1303 -0.9914 -0.0120 +vn -0.1304 -0.9914 -0.0120 +vn -0.3810 -0.9239 -0.0351 +vn -0.6092 -0.7910 -0.0561 +vn -0.6093 -0.7910 -0.0561 +vn -0.6096 -0.7907 -0.0561 +vn -0.6089 -0.7913 -0.0561 +vn -0.7908 -0.6078 -0.0728 +vn -0.7907 -0.6079 -0.0728 +vn -0.9207 -0.3809 -0.0848 +vn -0.9208 -0.3808 -0.0848 +vn -0.9208 -0.3806 -0.0848 +vn -0.9206 -0.3811 -0.0848 +vn -0.9871 -0.1316 -0.0909 +vn -0.9872 -0.1312 -0.0909 +vn -0.9872 -0.1308 -0.0909 +vn -0.9871 -0.1320 -0.0909 +vn -0.9871 0.1316 -0.0909 +vn -0.9872 0.1312 -0.0909 +vn -0.9872 0.1308 -0.0909 +vn -0.9871 0.1320 -0.0909 +vn -0.9207 0.3809 -0.0848 +vn -0.9208 0.3808 -0.0848 +vn -0.9209 0.3806 -0.0848 +vn -0.9206 0.3811 -0.0848 +vn -0.7916 0.6066 -0.0729 +vn -0.7916 0.6067 -0.0729 +vn -0.6079 0.7921 -0.0560 +vn -0.6077 0.7922 -0.0559 +vn -0.6080 0.7919 -0.0560 +vn -0.6074 0.7924 -0.0559 +vn -0.3829 0.9231 -0.0353 +vn -0.3832 0.9230 -0.0353 +vn -0.1303 0.9914 -0.0120 +vn -0.1305 0.9914 -0.0120 +vn 0.1302 0.9914 0.0120 +vn 0.1301 0.9914 0.0120 +vn 0.1304 0.9914 0.0120 +vn 0.3833 0.9229 0.0353 +vn 0.3836 0.9228 0.0354 +vn 0.3831 0.9230 0.0353 +vn 0.3838 0.9227 0.0354 +vn 0.6073 0.7925 0.0559 +vn 0.6070 0.7927 0.0559 +vn 
0.7909 0.6076 0.0728 +vn 0.7913 0.6071 0.0729 +vn 0.7916 0.6066 0.0729 +vn 0.7905 0.6081 0.0728 +vn 0.0001 0.3814 0.9244 +vn -0.9508 0.0406 0.3073 +vn -0.9508 0.0406 0.3072 +vn -0.0000 -0.3814 0.9244 +vn 0.8780 -0.4504 0.1618 +vn 0.8781 -0.4504 0.1618 +vn -0.5698 -0.8213 -0.0273 +vn -0.3514 -0.9282 -0.1223 +vn -0.0000 -0.9913 -0.1318 +vn -0.0000 -0.9914 -0.1307 +vn -0.0000 -0.9914 -0.1309 +vn -0.0000 -0.9914 -0.1310 +vn -0.0000 -0.9912 -0.1320 +vn -0.0000 -0.9913 -0.1313 +vn -0.0000 0.3814 0.9244 +vn -0.0000 -0.9237 -0.3831 +vn -0.0000 -0.7918 -0.6108 +vn -0.0001 -0.6077 -0.7941 +vn -0.0000 -0.6077 -0.7941 +vn -0.0001 -0.3814 -0.9244 +vn -0.0000 -0.3814 -0.9244 +vn -0.0001 -0.1303 -0.9915 +vn -0.0001 0.1303 -0.9915 +vn -0.0000 0.3814 -0.9244 +vn -0.0001 0.3814 -0.9244 +vn -0.0000 0.6065 -0.7951 +vn -0.0001 0.6066 -0.7950 +vn -0.0000 0.7930 -0.6092 +vn -0.0000 0.9229 -0.3852 +vn -0.0000 0.9913 -0.1314 +vn -0.0000 0.9914 -0.1311 +vn -0.0000 0.9914 -0.1309 +vn -0.0000 0.9913 -0.1317 +vn -0.0000 1.0000 0.0059 +vn -0.0000 0.9914 -0.1307 +vn -0.0000 0.9913 0.1315 +vn -0.0000 0.9914 0.1311 +vn -0.0000 0.9914 0.1309 +vn -0.0000 0.9912 0.1322 +vn -0.0000 0.9913 0.1316 +vn -0.0000 1.0000 -0.0000 +vn -0.0000 0.9914 0.1307 +vn -0.0000 0.9232 0.3843 +vn -0.0000 0.7921 0.6104 +vn -0.0000 0.6066 0.7950 +vn 0.0001 0.6065 0.7951 +vn 0.0001 0.6066 0.7950 +vn -0.0000 0.3813 0.9244 +vn -0.8909 0.2889 0.3506 +vn -0.8909 0.2888 0.3506 +vn 0.9467 -0.2115 0.2430 +vn 0.7515 -0.6560 0.0709 +vn 0.7515 -0.6559 0.0709 +vn -0.1086 -0.9719 -0.2090 +vn -0.0001 -0.7918 -0.6108 +vn -0.0000 -0.7914 -0.6113 +vn -0.0001 -0.6086 -0.7935 +vn -0.0001 -0.1289 -0.9917 +vn -0.0001 0.1288 -0.9917 +vn -0.0001 0.6065 -0.7950 +vn -0.0001 0.6074 -0.7944 +vn -0.0001 0.7930 -0.6092 +vn -0.0000 0.7926 -0.6098 +vn -0.0000 0.9229 -0.3851 +vn 0.3513 0.9282 0.1223 +vn 0.1085 0.9719 0.2090 +vn 0.1086 0.9719 0.2090 +vn -0.7693 0.5204 0.3705 +vn 0.9508 0.0406 0.3072 +vn 0.9508 0.0406 0.3073 +vn 0.5699 -0.8213 -0.0273 +vn 
0.5698 -0.8213 -0.0272 +vn 0.3521 -0.9280 -0.1220 +vn 0.1412 -0.9491 -0.2814 +vn 0.1411 -0.9492 -0.2814 +vn -0.0001 -0.7915 -0.6111 +vn -0.0000 -0.7909 -0.6119 +vn -0.0001 -0.6081 -0.7939 +vn -0.0000 -0.6093 -0.7930 +vn -0.0000 -0.1296 -0.9916 +vn -0.0002 -0.1278 -0.9918 +vn -0.0000 0.1297 -0.9916 +vn -0.0002 0.1279 -0.9918 +vn -0.0001 0.6069 -0.7948 +vn -0.0001 0.7927 -0.6096 +vn -0.0000 0.7921 -0.6104 +vn 0.5724 0.8196 0.0260 +vn 0.5725 0.8195 0.0260 +vn -0.1427 0.9488 0.2818 +vn -0.1426 0.9488 0.2818 +vn -0.5971 0.7142 0.3652 +vn -0.5972 0.7142 0.3652 +vn 0.8901 0.2908 0.3509 +vn -0.7904 -0.6083 0.0728 +vn -0.7899 -0.6089 0.0727 +vn -0.7896 -0.6094 0.0727 +vn -0.7908 -0.6078 0.0728 +vn -0.0000 -0.9913 0.1313 +vn -0.0000 -0.9913 0.1315 +vn -0.0000 -0.9992 -0.0403 +vn 0.1083 -0.9719 -0.2091 +vn 0.1086 -0.9719 -0.2090 +vn 0.3841 -0.8603 -0.3351 +vn 0.3844 -0.8602 -0.3352 +vn 0.3837 -0.8606 -0.3350 +vn 0.5955 -0.7157 -0.3650 +vn 0.5956 -0.7156 -0.3650 +vn 0.5952 -0.7159 -0.3650 +vn 0.5959 -0.7153 -0.3650 +vn 0.7704 -0.5189 -0.3705 +vn 0.8902 -0.2905 -0.3509 +vn 0.8903 -0.2904 -0.3508 +vn 0.8901 -0.2908 -0.3509 +vn 0.8904 -0.2900 -0.3507 +vn 0.9508 -0.0407 -0.3073 +vn 0.9507 -0.0411 -0.3074 +vn 0.9508 -0.0406 -0.3073 +vn 0.9507 -0.0413 -0.3074 +vn 0.9467 0.2115 -0.2430 +vn 0.8791 0.4480 -0.1627 +vn 0.8793 0.4476 -0.1629 +vn 0.7495 0.6584 -0.0696 +vn 0.7495 0.6583 -0.0696 +vn 0.7493 0.6586 -0.0696 +vn -0.3521 0.9280 0.1220 +vn -0.1083 0.9719 0.2091 +vn -0.1084 0.9719 0.2091 +vn -0.3826 0.8611 0.3348 +vn -0.0000 0.6065 0.7950 +vn 0.7693 0.5204 0.3705 +vn 0.7693 0.5205 0.3705 +vn -0.9208 -0.3808 0.0848 +vn -0.9206 -0.3811 0.0848 +vn -0.0906 -0.0000 -0.9959 +vn -0.0910 -0.0000 -0.9958 +vn -0.0921 -0.0000 -0.9957 +vn -0.0894 -0.0000 -0.9960 +vn -0.0899 -0.0000 -0.9960 +vn -0.0916 -0.0000 -0.9958 +vn -0.0917 -0.0000 -0.9958 +vn -0.0920 -0.0000 -0.9958 +vn -0.0925 -0.0000 -0.9957 +vn -0.0929 -0.0000 -0.9957 +vn -0.0904 -0.0000 -0.9959 +vn -0.0919 -0.0000 -0.9958 +vn -0.0938 
-0.0000 -0.9956 +vn -0.0946 -0.0000 -0.9955 +vn -0.0933 -0.0000 -0.9956 +vn -0.6087 -0.7914 0.0561 +vn -0.6084 -0.7916 0.0560 +vn -0.6090 -0.7912 0.0561 +vn -0.1301 -0.9914 0.0120 +vn -0.1300 -0.9914 0.0120 +vn -0.0000 -0.9980 0.0634 +vn -0.1414 -0.9491 -0.2815 +vn -0.1412 -0.9492 -0.2814 +vn -0.0000 -0.7918 -0.6107 +vn -0.0000 -0.6077 -0.7942 +vn -0.0000 0.6065 -0.7950 +vn -0.5713 0.8203 0.0265 +vn -0.0000 0.9918 0.1274 +vn -0.0000 0.9919 0.1268 +vn -0.0000 0.9916 0.1295 +vn -0.0000 0.9911 -0.1330 +vn -0.0000 0.9913 -0.1313 +vn -0.0000 0.9926 0.1217 +vn -0.0000 0.9922 0.1243 +vn -0.0000 0.9930 0.1179 +vn 0.1426 0.9488 0.2818 +vn 0.1428 0.9488 0.2818 +vn 0.5983 0.7132 0.3652 +vn 0.5982 0.7132 0.3653 +vn -0.9873 -0.1302 0.0909 +vn -0.9873 -0.1303 0.0909 +vn -0.9872 -0.1308 0.0909 +vn -0.9874 -0.1297 0.0909 +vn -0.9206 0.3811 0.0848 +vn -0.9208 0.3808 0.0848 +vn -0.7913 0.6071 0.0729 +vn -0.7907 0.6078 0.0728 +vn -0.7905 0.6081 0.0728 +vn -0.7916 0.6066 0.0729 +vn -0.6074 0.7924 0.0559 +vn -0.6073 0.7925 0.0559 +vn -0.6070 0.7927 0.0559 +vn -0.6077 0.7922 0.0558 +vn -0.3839 0.9227 0.0354 +vn -0.3838 0.9227 0.0354 +vn -0.1301 0.9914 0.0120 +vn -0.1302 0.9914 0.0120 +vn 0.1305 0.9914 -0.0120 +vn 0.1304 0.9914 -0.0120 +vn 0.1303 0.9914 -0.0120 +vn 0.3832 0.9230 -0.0353 +vn 0.3829 0.9231 -0.0353 +vn 0.6077 0.7922 -0.0559 +vn 0.6079 0.7921 -0.0560 +vn 0.6081 0.7919 -0.0560 +vn 0.6074 0.7924 -0.0559 +vn 0.7913 0.6071 -0.0729 +vn 0.7908 0.6077 -0.0728 +vn 0.7905 0.6081 -0.0728 +vn 0.7916 0.6067 -0.0729 +vn 0.9208 0.3807 -0.0848 +vn 0.9207 0.3809 -0.0848 +vn 0.9206 0.3811 -0.0848 +vn 0.9209 0.3806 -0.0848 +vn 0.9873 0.1305 -0.0909 +vn 0.9873 0.1303 -0.0909 +vn 0.9874 0.1297 -0.0909 +vn 0.9872 0.1308 -0.0909 +vn 0.9873 -0.1303 -0.0909 +vn 0.9873 -0.1302 -0.0909 +vn 0.9874 -0.1297 -0.0909 +vn 0.9872 -0.1308 -0.0909 +vn 0.9208 -0.3807 -0.0848 +vn 0.9209 -0.3806 -0.0848 +vn 0.9206 -0.3812 -0.0848 +vn 0.7904 -0.6083 -0.0728 +vn 0.7899 -0.6089 -0.0727 +vn 0.7896 -0.6093 -0.0727 
+vn 0.7907 -0.6079 -0.0728 +vn 0.6094 -0.7909 -0.0561 +vn 0.6092 -0.7910 -0.0561 +vn 0.6096 -0.7907 -0.0561 +vn 0.6089 -0.7913 -0.0561 +vn 0.3810 -0.9239 -0.0351 +vn 0.1304 -0.9914 -0.0120 +vn -0.3818 -0.9236 0.0352 +vn -0.9873 0.1301 0.0910 +vn -0.9873 0.1303 0.0909 +vn -0.9872 0.1308 0.0909 +vn -0.9874 0.1297 0.0910 +vn -0.0000 -0.9237 -0.3830 +vn -0.3837 -0.8606 -0.3351 +vn -0.3837 -0.8605 -0.3350 +vn -0.5958 -0.7153 -0.3650 +vn -0.5959 -0.7153 -0.3650 +vn -0.7693 -0.5204 -0.3705 +vn -0.7693 -0.5205 -0.3705 +vn -0.8901 -0.2909 -0.3509 +vn -0.8901 -0.2908 -0.3508 +vn -0.9508 -0.0406 -0.3072 +vn -0.9467 0.2115 -0.2430 +vn -0.8785 0.4494 -0.1621 +vn -0.7505 0.6572 -0.0702 +vn -0.7504 0.6572 -0.0702 +vn 0.3825 0.8611 0.3348 +vn 0.3826 0.8611 0.3348 +vn -0.0000 0.6065 0.7951 +vn -0.0000 0.6066 -0.7950 +vn -0.0000 -0.1302 -0.9915 +vn -0.0000 -0.6078 -0.7941 +vn -0.0000 0.1302 -0.9915 +vn 0.0001 -0.6077 0.7942 +vn 0.7903 -0.6083 0.0728 +vn 0.7907 -0.6078 0.0728 +vn 0.7896 -0.6093 0.0727 +vn 0.0922 -0.0000 -0.9957 +vn 0.0945 -0.0000 -0.9955 +vn 0.0912 -0.0000 -0.9958 +vn 0.0918 -0.0000 -0.9958 +vn 0.0894 -0.0000 -0.9960 +vn 0.0910 -0.0000 -0.9959 +vn 0.9206 -0.3811 0.0848 +vn 0.9208 -0.3808 0.0848 +vn 0.6086 -0.7915 0.0560 +vn 0.6089 -0.7912 0.0561 +vn -0.7515 -0.6559 0.0708 +vn -0.0000 -0.9243 0.3818 +vn -0.0000 -1.0000 0.0058 +vn 0.9873 0.1301 0.0909 +vn 0.9874 0.1297 0.0909 +vn 0.9873 -0.1305 0.0909 +vn 0.9873 -0.1301 0.0910 +vn 0.3815 -0.9237 0.0352 +vn 0.3809 -0.9239 0.0351 +vn 0.3819 -0.9236 0.0352 +vn -0.3809 -0.9239 -0.0351 +vn -0.6090 -0.7912 -0.0561 +vn -0.7908 -0.6077 -0.0728 +vn -0.9207 -0.3810 -0.0848 +vn -0.9208 -0.3807 -0.0848 +vn -0.9209 -0.3805 -0.0848 +vn -0.9206 -0.3812 -0.0848 +vn -0.9871 0.1317 -0.0909 +vn -0.9872 0.1313 -0.0909 +vn -0.9208 0.3807 -0.0848 +vn -0.6078 0.7921 -0.0560 +vn -0.6076 0.7923 -0.0559 +vn -0.6081 0.7919 -0.0560 +vn -0.3830 0.9231 -0.0353 +vn -0.3831 0.9230 -0.0353 +vn -0.1304 0.9914 -0.0120 +vn 0.3833 0.9230 0.0353 +vn 0.3837 
0.9228 0.0354 +vn 0.6074 0.7924 0.0559 +vn 0.6070 0.7928 0.0559 +vn 0.7908 0.6077 0.0728 +vn 0.0001 0.3813 0.9244 +vn -0.5699 -0.8213 -0.0273 +vn -0.3513 -0.9282 -0.1223 +vn -0.0000 -0.9914 -0.1311 +vn -0.0001 -0.6077 -0.7942 +vn -0.0000 0.9914 -0.1310 +vn -0.0000 0.9913 0.1318 +vn 0.9467 -0.2114 0.2430 +vn 0.7515 -0.6559 0.0708 +vn 0.7514 -0.6560 0.0709 +vn -0.0001 -0.7918 -0.6107 +vn -0.0000 -0.7913 -0.6115 +vn -0.0001 -0.6085 -0.7935 +vn -0.0001 0.1289 -0.9917 +vn -0.0002 0.6065 -0.7950 +vn -0.0001 0.7926 -0.6097 +vn 0.3514 0.9282 0.1223 +vn 0.5698 -0.8213 -0.0273 +vn 0.5699 -0.8213 -0.0272 +vn 0.1410 -0.9492 -0.2814 +vn -0.0002 -0.6081 -0.7939 +vn -0.0001 0.7927 -0.6097 +vn 0.5724 0.8195 0.0260 +vn -0.5972 0.7142 0.3651 +vn -0.0000 -0.9990 0.0436 +vn 0.3839 -0.8604 -0.3351 +vn 0.3836 -0.8606 -0.3351 +vn 0.5958 -0.7153 -0.3651 +vn 0.8902 -0.2906 -0.3509 +vn 0.8903 -0.2902 -0.3509 +vn 0.8901 -0.2908 -0.3508 +vn 0.8904 -0.2900 -0.3508 +vn 0.9507 -0.0409 -0.3073 +vn 0.9507 -0.0406 -0.3073 +vn 0.7494 0.6585 -0.0696 +vn -0.9208 -0.3807 0.0848 +vn -0.0910 -0.0000 -0.9959 +vn -0.0918 -0.0000 -0.9958 +vn -0.0912 -0.0000 -0.9958 +vn -0.0945 -0.0000 -0.9955 +vn -0.0930 -0.0000 -0.9957 +vn -0.0922 -0.0000 -0.9957 +vn -0.6085 -0.7915 0.0560 +vn -0.6085 -0.7916 0.0560 +vn -0.6089 -0.7912 0.0561 +vn -0.0000 -0.9913 -0.1314 +vn -0.0000 -0.9913 -0.1316 +vn -0.0000 -0.9948 -0.1018 +vn -0.1411 -0.9492 -0.2814 +vn -0.0000 0.7930 -0.6093 +vn -0.5712 0.8204 0.0265 +vn -0.0000 0.9912 -0.1322 +vn -0.0000 0.9912 -0.1325 +vn -0.0000 0.9922 0.1244 +vn -0.0000 0.9913 0.1313 +vn 0.1427 0.9488 0.2818 +vn 0.5982 0.7133 0.3653 +vn -0.9208 0.3807 0.0848 +vn -0.7908 0.6077 0.0728 +vn -0.7905 0.6082 0.0728 +vn -0.6074 0.7925 0.0559 +vn -0.6070 0.7927 0.0558 +vn -0.6070 0.7928 0.0559 +vn -0.3840 0.9227 0.0354 +vn -0.3838 0.9228 0.0354 +vn 0.3831 0.9230 -0.0353 +vn 0.3830 0.9231 -0.0353 +vn 0.6078 0.7921 -0.0560 +vn 0.7905 0.6082 -0.0728 +vn 0.7916 0.6066 -0.0729 +vn 0.9208 0.3808 -0.0848 +vn 
0.9208 0.3806 -0.0848 +vn 0.9873 0.1304 -0.0909 +vn 0.9873 0.1302 -0.0909 +vn 0.9873 -0.1304 -0.0909 +vn 0.9874 -0.1296 -0.0909 +vn 0.9208 -0.3808 -0.0848 +vn 0.9207 -0.3809 -0.0848 +vn 0.9206 -0.3811 -0.0848 +vn 0.9208 -0.3806 -0.0848 +vn 0.7908 -0.6078 -0.0728 +vn 0.6096 -0.7908 -0.0561 +vn 0.3809 -0.9239 -0.0351 +vn 0.1303 -0.9914 -0.0120 +vn -0.3819 -0.9236 0.0352 +vn -0.9874 0.1297 0.0909 +vn -0.3838 -0.8605 -0.3350 +vn -0.3837 -0.8606 -0.3350 +vn -0.8901 -0.2908 -0.3509 +vn -0.9507 -0.0406 -0.3073 +vn -0.8785 0.4495 -0.1621 +vn -0.8785 0.4495 -0.1622 +vn 0.0001 -0.6066 0.7950 +vn 0.0001 -0.6065 0.7951 +vn -0.0000 -0.6066 0.7950 +vn -0.0000 -0.7921 0.6104 +vn 0.7908 -0.6077 0.0728 +vn 0.7913 -0.6070 0.0729 +vn 0.7916 -0.6066 0.0729 +vn 0.7905 -0.6081 0.0728 +vn -0.8785 -0.4495 0.1622 +vn 0.0937 -0.0000 -0.9956 +vn 0.0902 -0.0000 -0.9959 +vn 0.0904 -0.0000 -0.9959 +vn 0.0928 -0.0000 -0.9957 +vn 0.0898 -0.0000 -0.9960 +vn 0.0907 -0.0000 -0.9959 +vn 0.6072 -0.7925 0.0559 +vn 0.6070 -0.7927 0.0559 +vn 0.6070 -0.7928 0.0559 +vn 0.6074 -0.7924 0.0559 +vn -0.0000 -0.6065 0.7950 +vn -0.7506 -0.6571 0.0703 +vn -0.7505 -0.6571 0.0703 +vn -0.0000 -0.9910 0.1342 +vn -0.0000 -0.9911 0.1333 +vn -0.0000 -0.9911 0.1334 +vn -0.0000 -0.9910 0.1337 +vn 0.9873 0.1304 0.0909 +vn 0.9873 -0.1303 0.0909 +vn 0.9874 -0.1296 0.0909 +vn 0.3814 -0.9238 0.0352 +vn 0.3811 -0.9239 0.0351 +vn 0.3817 -0.9236 0.0352 +vn 0.1327 -0.9911 0.0122 +vn 0.1325 -0.9911 0.0122 +vn -0.1328 -0.9911 -0.0122 +vn -0.1327 -0.9911 -0.0122 +vn -0.6076 -0.7923 -0.0559 +vn -0.6078 -0.7922 -0.0560 +vn -0.6081 -0.7919 -0.0560 +vn -0.6074 -0.7924 -0.0559 +vn -0.7916 -0.6067 -0.0729 +vn -0.7917 -0.6066 -0.0729 +vn -0.9209 -0.3806 -0.0848 +vn -0.9871 -0.1315 -0.0909 +vn -0.9872 -0.1313 -0.0909 +vn -0.9871 0.1315 -0.0909 +vn -0.9871 0.1314 -0.0909 +vn -0.9207 0.3808 -0.0848 +vn -0.7907 0.6079 -0.0728 +vn -0.7908 0.6078 -0.0728 +vn -0.6092 0.7910 -0.0561 +vn -0.6091 0.7911 -0.0561 +vn -0.6096 0.7907 -0.0561 +vn -0.6089 
0.7912 -0.0561 +vn -0.3809 0.9239 -0.0351 +vn -0.3810 0.9239 -0.0351 +vn 0.1306 0.9914 0.0120 +vn 0.1303 0.9914 0.0120 +vn 0.3812 0.9238 0.0351 +vn 0.3815 0.9237 0.0352 +vn 0.3810 0.9239 0.0351 +vn 0.3818 0.9236 0.0352 +vn 0.6087 0.7914 0.0560 +vn 0.6089 0.7913 0.0561 +vn 0.6085 0.7916 0.0560 +vn 0.6090 0.7912 0.0561 +vn 0.7900 0.6088 0.0727 +vn 0.7904 0.6083 0.0728 +vn 0.7907 0.6078 0.0728 +vn 0.7896 0.6094 0.0727 +vn -0.9507 0.0406 0.3073 +vn 0.8784 -0.4495 0.1622 +vn 0.8785 -0.4494 0.1621 +vn -0.3533 -0.9276 -0.1215 +vn -0.0000 -0.9910 -0.1342 +vn -0.0000 -0.9911 -0.1330 +vn -0.0000 -0.9911 -0.1333 +vn -0.0000 -0.9911 -0.1335 +vn -0.0000 -0.9910 -0.1337 +vn -0.0000 -1.0000 0.0059 +vn -0.0000 -0.7930 -0.6093 +vn -0.0000 -0.7930 -0.6092 +vn -0.0001 -0.6066 -0.7950 +vn -0.0000 -0.6065 -0.7951 +vn -0.0000 0.6077 -0.7941 +vn -0.0001 0.6077 -0.7941 +vn -0.0000 0.7918 -0.6108 +vn -0.0000 0.9237 -0.3831 +vn -0.0000 0.9914 0.1312 +vn -0.0000 0.9913 0.1317 +vn -0.0000 1.0000 0.0058 +vn -0.0000 0.9241 0.3822 +vn -0.0000 0.7909 0.6119 +vn -0.0000 0.6077 0.7941 +vn 0.0001 0.6077 0.7941 +vn -0.0000 -0.6065 0.7951 +vn 0.7506 -0.6570 0.0703 +vn 0.7505 -0.6571 0.0703 +vn -0.1064 -0.9719 -0.2098 +vn -0.1064 -0.9720 -0.2098 +vn -0.0001 -0.7930 -0.6093 +vn -0.0000 -0.7925 -0.6098 +vn -0.0001 -0.6065 -0.7950 +vn -0.0001 -0.6075 -0.7943 +vn -0.0002 0.6077 -0.7941 +vn -0.0001 0.6087 -0.7934 +vn -0.0001 0.7918 -0.6108 +vn -0.0000 0.7914 -0.6113 +vn -0.0000 0.7909 0.6120 +vn 0.3540 -0.9273 -0.1212 +vn 0.3541 -0.9273 -0.1212 +vn 0.1412 -0.9492 -0.2814 +vn -0.0001 -0.7928 -0.6095 +vn -0.0000 -0.7921 -0.6104 +vn -0.0001 -0.6070 -0.7947 +vn -0.0001 0.6081 -0.7938 +vn -0.0000 0.6093 -0.7930 +vn -0.0001 0.7915 -0.6111 +vn -0.0000 0.7909 -0.6119 +vn 0.5708 0.8206 0.0268 +vn 0.5709 0.8206 0.0268 +vn -0.1404 0.9493 0.2812 +vn -0.1405 0.9493 0.2812 +vn -0.5958 0.7154 0.3651 +vn -0.5958 0.7153 0.3651 +vn 0.8901 0.2908 0.3508 +vn -0.7913 -0.6071 0.0729 +vn -0.7909 -0.6076 0.0728 +vn -0.7905 -0.6081 
0.0728 +vn -0.7916 -0.6066 0.0729 +vn -0.0000 -0.9910 0.1338 +vn -0.0000 -0.9910 0.1341 +vn -0.0000 -0.9953 0.0969 +vn -0.0000 -0.9909 0.1344 +vn 0.1061 -0.9720 -0.2098 +vn 0.1064 -0.9719 -0.2097 +vn 0.3821 -0.8613 -0.3347 +vn 0.3823 -0.8613 -0.3348 +vn 0.3826 -0.8611 -0.3348 +vn 0.3818 -0.8615 -0.3347 +vn 0.5967 -0.7146 -0.3651 +vn 0.5969 -0.7144 -0.3652 +vn 0.5965 -0.7148 -0.3651 +vn 0.5972 -0.7142 -0.3651 +vn 0.9507 -0.0408 -0.3073 +vn 0.9508 -0.0403 -0.3073 +vn 0.8787 0.4489 -0.1624 +vn 0.8789 0.4486 -0.1625 +vn 0.7504 0.6572 -0.0702 +vn 0.7503 0.6574 -0.0701 +vn -0.3845 0.8601 0.3352 +vn -0.3844 0.8602 0.3352 +vn -0.0000 0.6077 0.7942 +vn -0.9206 -0.3812 0.0848 +vn -0.0907 -0.0000 -0.9959 +vn -0.0893 -0.0000 -0.9960 +vn -0.0898 -0.0000 -0.9960 +vn -0.0903 -0.0000 -0.9959 +vn -0.6070 -0.7927 0.0559 +vn -0.6074 -0.7924 0.0559 +vn -0.1325 -0.9911 0.0122 +vn -0.1324 -0.9911 0.0122 +vn -0.0000 -0.9910 -0.1339 +vn -0.0000 -0.9909 -0.1347 +vn -0.0000 -0.9998 0.0176 +vn -0.0000 -0.9908 -0.1350 +vn -0.1415 -0.9491 -0.2815 +vn -0.0000 -0.6065 -0.7950 +vn -0.0000 0.6077 -0.7942 +vn -0.0000 0.7918 -0.6107 +vn -0.5698 0.8213 0.0273 +vn -0.5698 0.8214 0.0273 +vn -0.0000 0.9919 -0.1268 +vn -0.0000 0.9918 -0.1279 +vn -0.0000 0.9921 -0.1251 +vn -0.0000 0.9913 -0.1318 +vn -0.0000 0.9930 -0.1179 +vn -0.0000 0.9912 0.1325 +vn -0.0000 0.9911 0.1328 +vn 0.1405 0.9493 0.2812 +vn 0.1404 0.9493 0.2812 +vn 0.5970 0.7144 0.3651 +vn 0.5969 0.7144 0.3651 +vn -0.9873 -0.1300 0.0909 +vn -0.9873 -0.1304 0.0909 +vn -0.7899 0.6089 0.0727 +vn -0.7896 0.6094 0.0727 +vn -0.6085 0.7916 0.0560 +vn -0.6086 0.7915 0.0560 +vn -0.6089 0.7912 0.0561 +vn -0.3818 0.9236 0.0352 +vn -0.1299 0.9915 0.0120 +vn 0.3810 0.9239 -0.0351 +vn 0.3809 0.9239 -0.0351 +vn 0.6092 0.7910 -0.0561 +vn 0.6093 0.7909 -0.0561 +vn 0.6096 0.7907 -0.0561 +vn 0.6089 0.7912 -0.0561 +vn 0.7904 0.6082 -0.0728 +vn 0.7899 0.6090 -0.0727 +vn 0.7896 0.6093 -0.0727 +vn 0.7908 0.6078 -0.0728 +vn 0.9207 0.3808 -0.0848 +vn 0.9873 0.1301 
-0.0909 +vn 0.7913 -0.6071 -0.0729 +vn 0.7908 -0.6077 -0.0728 +vn 0.7905 -0.6082 -0.0728 +vn 0.7916 -0.6066 -0.0729 +vn 0.6078 -0.7921 -0.0560 +vn 0.6076 -0.7923 -0.0559 +vn 0.6081 -0.7919 -0.0560 +vn 0.6074 -0.7924 -0.0559 +vn 0.1327 -0.9911 -0.0122 +vn 0.1328 -0.9911 -0.0122 +vn -0.3817 -0.9236 0.0352 +vn -0.3819 -0.9235 0.0352 +vn -0.9873 0.1302 0.0910 +vn -0.3819 -0.8615 -0.3347 +vn -0.5971 -0.7142 -0.3652 +vn -0.5972 -0.7142 -0.3651 +vn -0.9508 -0.0406 -0.3073 +vn -0.8781 0.4504 -0.1618 +vn -0.8780 0.4504 -0.1618 +vn -0.7514 0.6561 -0.0708 +vn -0.7514 0.6560 -0.0708 +vn -0.0000 0.9237 -0.3830 +vn 0.3844 0.8602 0.3352 +vn 0.3845 0.8601 0.3352 +vn -0.0000 0.6078 -0.7941 +vn -0.0000 -0.6066 -0.7950 +vn 0.7913 -0.6071 0.0729 +vn 0.7917 -0.6066 0.0729 +vn 0.6070 -0.7927 0.0558 +vn 0.6074 -0.7925 0.0559 +vn -0.0000 -0.9910 0.1335 +vn -0.0000 -0.9909 0.1343 +vn 0.9873 0.1302 0.0909 +vn 0.3813 -0.9238 0.0351 +vn 0.1328 -0.9911 0.0122 +vn -0.3812 -0.9238 -0.0351 +vn -0.3808 -0.9240 -0.0351 +vn -0.6075 -0.7923 -0.0559 +vn -0.6079 -0.7921 -0.0560 +vn -0.6075 -0.7924 -0.0559 +vn -0.7916 -0.6066 -0.0729 +vn -0.9208 0.3806 -0.0848 +vn -0.6093 0.7910 -0.0561 +vn -0.6096 0.7908 -0.0561 +vn 0.3814 0.9238 0.0352 +vn 0.3809 0.9239 0.0351 +vn 0.7899 0.6089 0.0727 +vn 0.7908 0.6078 0.0728 +vn 0.8785 -0.4495 0.1622 +vn -0.5697 -0.8214 -0.0273 +vn -0.3534 -0.9276 -0.1215 +vn -0.0000 -0.9909 -0.1343 +vn -0.0001 0.6077 -0.7942 +vn -0.0000 0.9913 -0.1316 +vn 0.0001 0.6077 0.7942 +vn -0.0000 -0.9910 0.1336 +vn -0.0000 -0.7925 -0.6099 +vn -0.0001 -0.6074 -0.7944 +vn -0.0002 0.6077 -0.7942 +vn -0.0001 0.6085 -0.7935 +vn 0.3542 -0.9273 -0.1212 +vn -0.0001 -0.7927 -0.6096 +vn -0.0001 0.6081 -0.7939 +vn -0.5959 0.7153 0.3650 +vn 0.8901 0.2909 0.3509 +vn -0.7912 -0.6071 0.0729 +vn -0.7908 -0.6077 0.0728 +vn -0.0000 -0.9968 0.0803 +vn 0.1064 -0.9720 -0.2097 +vn 0.3819 -0.8615 -0.3347 +vn 0.5968 -0.7145 -0.3651 +vn 0.5970 -0.7143 -0.3652 +vn 0.5965 -0.7147 -0.3651 +vn 0.5971 -0.7142 -0.3652 +vn 
0.8903 -0.2904 -0.3509 +vn 0.8904 -0.2901 -0.3508 +vn 0.9507 -0.0412 -0.3074 +vn 0.8789 0.4485 -0.1626 +vn 0.8787 0.4490 -0.1624 +vn 0.8789 0.4485 -0.1625 +vn -0.6070 -0.7927 0.0558 +vn -0.0000 -0.9905 -0.1374 +vn -0.0000 -0.9908 -0.1357 +vn -0.0000 -0.9975 0.0704 +vn -0.0000 -0.9907 -0.1363 +vn -0.5697 0.8214 0.0273 +vn -0.0000 0.9914 -0.1312 +vn -0.0000 0.9912 0.1320 +vn -0.0000 0.9912 0.1323 +vn -0.9873 -0.1301 0.0909 +vn -0.9874 -0.1297 0.0910 +vn -0.9206 0.3812 0.0848 +vn -0.7896 0.6093 0.0727 +vn -0.6089 0.7913 0.0561 +vn -0.1300 0.9914 0.0120 +vn 0.1302 0.9914 -0.0120 +vn 0.7899 0.6089 -0.0727 +vn 0.7916 -0.6067 -0.0729 +vn 0.6077 -0.7922 -0.0559 +vn 0.3811 -0.9239 -0.0351 +vn 0.3808 -0.9240 -0.0351 +vn -0.3814 -0.9237 0.0353 +vn -0.3818 -0.8615 -0.3347 +vn 0.3843 0.8602 0.3352 +vn 0.0001 -0.6065 0.7950 +vn 0.7916 -0.6067 0.0729 +vn 0.7905 -0.6082 0.0728 +vn -0.0000 -0.9232 0.3843 +vn 0.0905 -0.0000 -0.9959 +vn 0.0930 -0.0000 -0.9957 +vn 0.0899 -0.0000 -0.9959 +vn 0.0893 -0.0000 -0.9960 +vn 0.0908 -0.0000 -0.9959 +vn -0.0000 -0.9914 0.1311 +vn -0.0000 -0.9913 0.1319 +vn 0.9203 0.3820 0.0848 +vn 0.9204 0.3816 0.0848 +vn 0.3836 -0.9228 0.0354 +vn 0.3832 -0.9230 0.0353 +vn 0.3830 -0.9231 0.0353 +vn 0.3839 -0.9227 0.0354 +vn 0.1306 -0.9914 0.0121 +vn -0.3831 -0.9230 -0.0353 +vn -0.3830 -0.9231 -0.0353 +vn -0.6078 -0.7921 -0.0560 +vn -0.9203 0.3818 -0.0848 +vn -0.9204 0.3816 -0.0848 +vn -0.9205 0.3814 -0.0848 +vn -0.9203 0.3820 -0.0848 +vn -0.6079 0.7920 -0.0560 +vn 0.1305 0.9914 0.0120 +vn 0.3832 0.9230 0.0353 +vn 0.3830 0.9231 0.0353 +vn 0.6070 0.7927 0.0560 +vn 0.6072 0.7925 0.0560 +vn 0.7904 0.6082 0.0728 +vn 0.0001 0.3822 0.9241 +vn 0.8785 -0.4495 0.1621 +vn -0.5714 -0.8203 -0.0265 +vn -0.0000 -0.9913 -0.1319 +vn -0.0000 -1.0000 0.0030 +vn -0.0000 0.3822 0.9241 +vn -0.0000 -0.9229 -0.3851 +vn -0.0000 0.3822 -0.9241 +vn -0.0001 0.3822 -0.9241 +vn -0.0000 1.0000 -0.0058 +vn 0.0001 0.6065 0.7950 +vn -0.0001 -0.7930 -0.6092 +vn -0.0002 0.6066 -0.7950 +vn -0.0001 
0.6075 -0.7943 +vn -0.0000 0.7925 -0.6098 +vn -0.7686 0.5215 0.3706 +vn 0.5714 -0.8203 -0.0265 +vn 0.1433 -0.9487 -0.2820 +vn -0.0002 -0.6069 -0.7948 +vn -0.0001 0.6070 -0.7947 +vn -0.7904 -0.6082 0.0728 +vn -0.0000 -1.0000 0.0034 +vn -0.0000 -0.9912 0.1321 +vn 0.5966 -0.7146 -0.3651 +vn 0.5964 -0.7148 -0.3651 +vn 0.8903 -0.2903 -0.3509 +vn 0.9466 0.2120 -0.2428 +vn 0.7493 0.6585 -0.0695 +vn 0.7686 0.5215 0.3706 +vn 0.7686 0.5215 0.3705 +vn -0.0899 -0.0000 -0.9959 +vn -0.0905 -0.0000 -0.9959 +vn -0.6072 -0.7926 0.0559 +vn -0.6073 -0.7925 0.0559 +vn -0.6070 -0.7928 0.0559 +vn -0.0000 -0.9914 -0.1312 +vn -0.0000 -0.9999 -0.0141 +vn -0.1433 -0.9487 -0.2819 +vn -0.1436 -0.9486 -0.2820 +vn -0.0000 0.9919 0.1273 +vn -0.0000 0.9915 0.1297 +vn -0.0000 0.9912 -0.1321 +vn -0.0000 0.9913 -0.1315 +vn -0.9204 0.3816 0.0848 +vn -0.9203 0.3820 0.0848 +vn -0.7912 0.6071 0.0729 +vn 0.6076 0.7923 -0.0559 +vn 0.9873 0.1306 -0.0909 +vn 0.9873 0.1300 -0.0909 +vn 0.9872 0.1311 -0.0909 +vn 0.9873 -0.1301 -0.0909 +vn 0.9873 -0.1300 -0.0908 +vn 0.7913 -0.6070 -0.0729 +vn 0.6075 -0.7923 -0.0559 +vn 0.3830 -0.9231 -0.0353 +vn 0.3831 -0.9230 -0.0353 +vn -0.3839 -0.9227 0.0354 +vn -0.9873 0.1300 0.0909 +vn -0.9873 0.1302 0.0909 +vn -0.9873 0.1305 0.0909 +vn -0.9874 0.1296 0.0909 +vn -0.9507 -0.0407 -0.3073 +vn -0.9466 0.2120 -0.2428 +vn -0.9467 0.2118 -0.2429 +vn -0.0000 0.1306 -0.9914 +vn -0.8784 -0.4495 0.1622 +vn 0.9873 -0.1303 0.0910 +vn 0.3835 -0.9228 0.0354 +vn 0.3838 -0.9227 0.0354 +vn 0.1300 -0.9914 0.0120 +vn -0.9207 -0.3808 -0.0848 +vn -0.9871 -0.1317 -0.0909 +vn -0.9872 -0.1311 -0.0909 +vn -0.9204 0.3817 -0.0848 +vn -0.3833 0.9230 -0.0353 +vn 0.7905 0.6082 0.0728 +vn -0.5713 -0.8203 -0.0265 +vn -0.0000 -0.9913 -0.1317 +vn -0.0000 0.9913 0.1314 +vn -0.0000 1.0000 -0.0029 +vn 0.7506 -0.6571 0.0703 +vn 0.5713 -0.8203 -0.0265 +vn 0.1433 -0.9487 -0.2819 +vn -0.0001 0.6069 -0.7947 +vn -0.7917 -0.6066 0.0729 +vn -0.0000 -0.9997 -0.0235 +vn 0.3822 -0.8613 -0.3348 +vn -0.3520 0.9280 0.1220 
+vn -0.0939 -0.0000 -0.9956 +vn -0.6074 -0.7925 0.0559 +vn -0.0000 -1.0000 -0.0053 +vn -0.0000 0.9913 -0.1319 +vn -0.0000 0.9912 -0.1323 +vn 0.5982 0.7132 0.3652 +vn 0.7913 0.6070 -0.0729 +vn 0.9872 0.1307 -0.0909 +vn -0.3838 -0.9227 0.0354 +vn -0.9467 0.2116 -0.2429 +vn 0.6071 -0.7926 0.0559 +vn -0.0000 -1.0000 0.0029 +vn 0.3814 -0.9237 0.0352 +vn -0.3811 -0.9239 -0.0351 +vn -0.6079 -0.7920 -0.0560 +vn -0.9207 0.3810 -0.0848 +vn -0.7907 0.6078 -0.0728 +vn -0.6092 0.7911 -0.0561 +vn -0.6095 0.7908 -0.0561 +vn 0.6086 0.7915 0.0560 +vn 0.6089 0.7912 0.0561 +vn 0.7896 0.6093 0.0727 +vn -0.5698 -0.8213 -0.0272 +vn -0.1063 -0.9720 -0.2098 +vn -0.0000 -0.7924 -0.6099 +vn -0.0001 0.6086 -0.7935 +vn -0.0000 0.7912 -0.6116 +vn -0.7693 0.5205 0.3705 +vn -0.0002 0.6081 -0.7939 +vn -0.0001 0.7916 -0.6110 +vn -0.5958 0.7153 0.3650 +vn -0.7912 -0.6072 0.0729 +vn -0.7905 -0.6082 0.0728 +vn -0.0000 -0.9992 0.0403 +vn 0.1063 -0.9720 -0.2097 +vn 0.3822 -0.8613 -0.3347 +vn 0.5969 -0.7145 -0.3651 +vn 0.9508 -0.0406 -0.3072 +vn 0.8788 0.4486 -0.1625 +vn -0.0000 0.9242 0.3819 +vn -0.0934 -0.0000 -0.9956 +vn -0.0000 -1.0000 0.0071 +vn -0.0000 0.9918 -0.1280 +vn -0.0000 0.9912 0.1324 +vn -0.7904 0.6082 0.0728 +vn -0.7908 0.6078 0.0728 +vn -0.6088 0.7914 0.0561 +vn -0.6086 0.7915 0.0561 +vn -0.3815 0.9237 0.0353 +vn -0.3814 0.9237 0.0353 +vn 0.6092 0.7911 -0.0561 +vn 0.6094 0.7908 -0.0561 +vn 0.6095 0.7908 -0.0561 +vn 0.6089 0.7913 -0.0561 +vn 0.9873 0.1301 -0.0908 +vn 0.6079 -0.7921 -0.0560 +vn 0.6080 -0.7919 -0.0560 +vn -0.9873 0.1304 0.0909 +vn -0.9873 0.1304 0.0908 +vn -0.0000 0.9238 -0.3828 +vn 0.0931 -0.0000 -0.9957 +vn 0.6073 -0.7925 0.0559 +vn -0.0000 -1.0000 -0.0029 +vn 0.9206 0.3812 0.0848 +vn 0.9874 0.1296 0.0909 +vn 0.3811 -0.9238 0.0351 +vn -0.6093 0.7909 -0.0561 +vn 0.6088 0.7914 0.0561 +vn -0.0000 -0.9909 -0.1344 +vn -0.0000 -1.0000 -0.0030 +vn -0.0000 1.0000 0.0029 +vn -0.0000 0.7913 -0.6114 +vn -0.0001 -0.6069 -0.7948 +vn -0.0000 -0.9999 0.0134 +vn -0.0000 -0.9909 0.1345 
+vn 0.3823 -0.8612 -0.3348 +vn -0.6071 -0.7926 0.0559 +vn -0.0000 -1.0000 -0.0018 +vn -0.1411 -0.9491 -0.2814 +vn -0.0000 0.9980 -0.0629 +vn -0.0000 0.9913 -0.1320 +vn -0.0000 0.9912 0.1326 +vn -0.0000 0.9911 0.1329 +vn -0.7904 0.6083 0.0728 +vn -0.7895 0.6094 0.0727 +vn 0.3811 0.9239 -0.0351 +vn 0.9207 0.3810 -0.0848 +vn 0.9209 -0.3805 -0.0848 +vn 0.7904 -0.6082 -0.0728 +vn -0.9873 0.1301 0.0909 +vn 0.7912 -0.6071 0.0729 +vn 0.0001 -0.1306 0.9914 +vn 0.0932 -0.0000 -0.9956 +vn -0.0000 -0.1306 0.9914 +vn 0.9872 -0.1307 0.0909 +vn 0.9873 -0.1304 0.0909 +vn 0.9873 -0.1300 0.0909 +vn 0.9872 -0.1311 0.0909 +vn -0.6077 -0.7922 -0.0559 +vn -0.6080 -0.7919 -0.0560 +vn -0.9870 -0.1323 -0.0909 +vn 0.3834 0.9229 0.0353 +vn 0.3835 0.9229 0.0354 +vn -0.9507 0.0407 0.3073 +vn -0.0001 -0.1306 -0.9914 +vn -0.0000 -0.1306 -0.9914 +vn -0.0000 1.0000 0.0007 +vn -0.0000 -0.7926 -0.6098 +vn -0.0001 -0.1291 -0.9916 +vn -0.0000 0.7924 -0.6100 +vn 0.9507 0.0407 0.3073 +vn 0.9508 0.0407 0.3073 +vn -0.0000 -0.1299 -0.9915 +vn -0.0002 -0.1281 -0.9918 +vn -0.0000 -0.9913 0.1314 +vn -0.0000 -1.0000 -0.0013 +vn -0.0000 -0.9912 0.1322 +vn 0.8899 -0.2913 -0.3510 +vn 0.8899 -0.2915 -0.3509 +vn 0.8902 -0.2907 -0.3509 +vn 0.9507 -0.0410 -0.3073 +vn 0.9507 -0.0414 -0.3074 +vn 0.7494 0.6585 -0.0695 +vn -0.0908 -0.0000 -0.9959 +vn -0.0000 -0.9913 -0.1320 +vn -0.0000 -1.0000 -0.0049 +vn -0.0000 -0.9912 -0.1323 +vn -0.0000 0.9999 0.0158 +vn -0.0000 0.9912 0.1321 +vn -0.9872 -0.1307 0.0909 +vn -0.9872 -0.1311 0.0909 +vn -0.6071 0.7926 0.0559 +vn 0.9873 -0.1305 -0.0909 +vn 0.9873 -0.1300 -0.0909 +vn 0.9872 -0.1311 -0.0909 +vn -0.5972 -0.7142 -0.3652 +vn -0.8899 -0.2915 -0.3509 +vn -0.8899 -0.2915 -0.3510 +vn 0.0911 -0.0000 -0.9958 +vn 0.3835 0.9228 0.0354 +vn 0.6071 0.7926 0.0559 +vn 0.6074 0.7925 0.0559 +vn 0.7912 0.6071 0.0729 +vn -0.0000 -0.1300 -0.9915 +vn -0.0002 0.6069 -0.7947 +vn -0.0000 -1.0000 0.0013 +vn 0.8901 -0.2907 -0.3509 +vn -0.0911 -0.0000 -0.9958 +vn -0.0000 -0.9912 -0.1321 +vn -0.0000 
-1.0000 -0.0066 +vn -0.0000 -0.9912 -0.1324 +vn -0.0000 1.0000 -0.0074 +vn -0.0000 0.9913 0.1320 +vn 0.6080 0.7919 -0.0560 +vn 0.0172 -0.8615 0.5074 +vn 0.0177 -0.8616 0.5074 +vn -0.0063 -0.9699 0.2434 +vn -0.0064 -0.9699 0.2433 +vn -0.0056 -0.9700 0.2431 +vn -0.0064 -0.9699 0.2434 +vn 0.2204 -0.8487 0.4808 +vn 0.2202 -0.8487 0.4808 +vn 0.2203 -0.8487 0.4808 +vn 0.2201 -0.8487 0.4809 +vn 0.0372 -0.6942 0.7188 +vn 0.0370 -0.6943 0.7188 +vn -0.1875 -0.8379 0.5126 +vn -0.1864 -0.8382 0.5125 +vn -0.1882 -0.8378 0.5125 +vn -0.1858 -0.8384 0.5124 +vn -0.0355 -0.9982 -0.0486 +vn -0.0343 -0.9982 -0.0488 +vn -0.0361 -0.9982 -0.0483 +vn -0.0336 -0.9982 -0.0490 +vn 0.1954 -0.9555 0.2209 +vn 0.1957 -0.9555 0.2209 +vn 0.1953 -0.9555 0.2209 +vn 0.1959 -0.9554 0.2208 +vn -0.2120 -0.9432 0.2559 +vn -0.2123 -0.9431 0.2560 +vn -0.2118 -0.9432 0.2559 +vn -0.2126 -0.9430 0.2560 +vn 0.4228 -0.7970 0.4314 +vn 0.4232 -0.7969 0.4312 +vn 0.4225 -0.7970 0.4315 +vn 0.4235 -0.7967 0.4311 +vn 0.2402 -0.6838 0.6890 +vn 0.2397 -0.6839 0.6891 +vn 0.2394 -0.6839 0.6891 +vn 0.2406 -0.6837 0.6889 +vn 0.0526 -0.4908 0.8697 +vn 0.0520 -0.4908 0.8697 +vn 0.0516 -0.4909 0.8697 +vn 0.0530 -0.4908 0.8697 +vn -0.1674 -0.6753 0.7183 +vn -0.1669 -0.6754 0.7183 +vn -0.1665 -0.6754 0.7184 +vn -0.1677 -0.6753 0.7182 +vn -0.3955 -0.7742 0.4942 +vn -0.3947 -0.7745 0.4943 +vn -0.3940 -0.7748 0.4945 +vn -0.3962 -0.7740 0.4940 +vn -0.0614 -0.9403 -0.3347 +vn -0.0623 -0.9403 -0.3346 +vn -0.0609 -0.9403 -0.3349 +vn -0.0629 -0.9403 -0.3345 +vn 0.1706 -0.9830 -0.0672 +vn 0.1695 -0.9832 -0.0669 +vn 0.1714 -0.9829 -0.0673 +vn 0.1688 -0.9834 -0.0668 +vn -0.2368 -0.9711 -0.0286 +vn -0.2375 -0.9710 -0.0284 +vn -0.2363 -0.9713 -0.0286 +vn -0.2380 -0.9709 -0.0283 +vn 0.4005 -0.8970 0.1870 +vn 0.3997 -0.8973 0.1872 +vn 0.4008 -0.8969 0.1869 +vn 0.3994 -0.8974 0.1873 +vn -0.4173 -0.8716 0.2572 +vn -0.4164 -0.8721 0.2571 +vn -0.4177 -0.8714 0.2572 +vn -0.4159 -0.8723 0.2570 +vn 0.6225 -0.6980 0.3540 +vn 0.6213 -0.6987 0.3547 +vn 
0.6233 -0.6975 0.3535 +vn 0.6204 -0.6992 0.3552 +vn 0.4417 -0.6421 0.6266 +vn 0.4413 -0.6421 0.6269 +vn 0.4417 -0.6420 0.6267 +vn 0.4413 -0.6420 0.6269 +vn 0.2540 -0.4834 0.8377 +vn 0.2542 -0.4834 0.8377 +vn 0.0599 -0.2693 0.9612 +vn 0.0612 -0.2692 0.9611 +vn 0.0619 -0.2691 0.9611 +vn 0.0592 -0.2694 0.9612 +vn -0.1550 -0.4774 0.8649 +vn -0.1545 -0.4774 0.8650 +vn -0.1541 -0.4774 0.8651 +vn -0.1554 -0.4773 0.8649 +vn -0.3780 -0.6236 0.6843 +vn -0.3784 -0.6235 0.6842 +vn -0.6043 -0.6601 0.4463 +vn -0.6049 -0.6597 0.4460 +vn -0.6053 -0.6594 0.4459 +vn -0.6038 -0.6604 0.4465 +vn -0.0826 -0.8087 -0.5824 +vn -0.0834 -0.8087 -0.5823 +vn -0.0822 -0.8087 -0.5824 +vn -0.0838 -0.8087 -0.5822 +vn 0.1435 -0.9262 -0.3487 +vn 0.1445 -0.9260 -0.3488 +vn 0.1429 -0.9263 -0.3486 +vn 0.1451 -0.9259 -0.3489 +vn -0.2634 -0.9147 -0.3065 +vn -0.2629 -0.9148 -0.3066 +vn -0.2638 -0.9146 -0.3064 +vn -0.2625 -0.9149 -0.3067 +vn 0.3744 -0.9235 -0.0830 +vn 0.3749 -0.9233 -0.0831 +vn 0.3741 -0.9237 -0.0830 +vn 0.3753 -0.9231 -0.0832 +vn -0.4422 -0.8969 -0.0053 +vn -0.4418 -0.8971 -0.0054 +vn -0.4423 -0.8969 -0.0053 +vn 0.6014 -0.7865 0.1407 +vn 0.6027 -0.7856 0.1402 +vn 0.6007 -0.7869 0.1410 +vn 0.6034 -0.7851 0.1399 +vn -0.6226 -0.7434 0.2442 +vn -0.6236 -0.7426 0.2442 +vn -0.6221 -0.7439 0.2442 +vn -0.6241 -0.7422 0.2442 +vn 0.8046 -0.5412 0.2444 +vn 0.8039 -0.5420 0.2448 +vn 0.8035 -0.5425 0.2451 +vn 0.8050 -0.5406 0.2442 +vn 0.6369 -0.5631 0.5266 +vn 0.6365 -0.5633 0.5267 +vn 0.6361 -0.5636 0.5270 +vn 0.6372 -0.5630 0.5264 +vn 0.4540 -0.4540 0.7667 +vn 0.4539 -0.4541 0.7667 +vn 0.4544 -0.4539 0.7664 +vn 0.2628 -0.2653 0.9277 +vn 0.2624 -0.2653 0.9278 +vn 0.2627 -0.2652 0.9277 +vn 0.0644 -0.0401 0.9971 +vn 0.0633 -0.0402 0.9972 +vn 0.0626 -0.0403 0.9972 +vn 0.0651 -0.0400 0.9971 +vn -0.1453 -0.2618 0.9541 +vn -0.1464 -0.2619 0.9539 +vn -0.1471 -0.2619 0.9538 +vn -0.1447 -0.2619 0.9542 +vn -0.3660 -0.4408 0.8196 +vn -0.3659 -0.4408 0.8196 +vn -0.3657 -0.4408 0.8197 +vn -0.5888 -0.5319 0.6086 
+vn -0.5886 -0.5321 0.6087 +vn -0.5885 -0.5321 0.6087 +vn -0.7987 -0.4825 0.3596 +vn -0.7987 -0.4824 0.3596 +vn -0.7984 -0.4828 0.3597 +vn -0.7989 -0.4822 0.3594 +vn -0.1009 -0.6262 -0.7731 +vn -0.1004 -0.6263 -0.7731 +vn -0.1003 -0.6263 -0.7731 +vn 0.1197 -0.7966 -0.5925 +vn 0.1195 -0.7967 -0.5925 +vn 0.1193 -0.7967 -0.5925 +vn 0.1203 -0.7965 -0.5925 +vn -0.2865 -0.7866 -0.5470 +vn -0.2865 -0.7865 -0.5470 +vn -0.2870 -0.7865 -0.5469 +vn -0.2864 -0.7866 -0.5471 +vn 0.3497 -0.8700 -0.3477 +vn 0.3500 -0.8699 -0.3476 +vn 0.3502 -0.8698 -0.3475 +vn 0.3495 -0.8701 -0.3477 +vn -0.4653 -0.8453 -0.2626 +vn -0.4658 -0.8451 -0.2625 +vn -0.4650 -0.8454 -0.2627 +vn -0.4660 -0.8450 -0.2624 +vn 0.5794 -0.8093 -0.0962 +vn 0.5793 -0.8094 -0.0962 +vn 0.5789 -0.8097 -0.0963 +vn -0.6445 -0.7643 0.0205 +vn -0.6438 -0.7649 0.0203 +vn -0.6450 -0.7639 0.0206 +vn -0.6433 -0.7653 0.0202 +vn 0.7884 -0.6101 0.0791 +vn 0.7888 -0.6096 0.0789 +vn 0.7891 -0.6092 0.0789 +vn 0.7880 -0.6105 0.0791 +vn -0.8124 -0.5432 0.2119 +vn -0.8127 -0.5429 0.2118 +vn 0.9429 -0.3177 0.0996 +vn 0.9429 -0.3178 0.0996 +vn 0.8171 -0.4359 0.3772 +vn 0.8174 -0.4356 0.3770 +vn 0.8177 -0.4353 0.3767 +vn 0.8169 -0.4362 0.3775 +vn 0.6478 -0.3984 0.6493 +vn 0.6483 -0.3982 0.6489 +vn 0.6486 -0.3980 0.6487 +vn 0.6475 -0.3986 0.6495 +vn 0.4615 -0.2492 0.8514 +vn 0.4619 -0.2492 0.8512 +vn 0.4611 -0.2493 0.8516 +vn 0.2650 -0.0396 0.9634 +vn 0.2658 -0.0395 0.9632 +vn 0.2662 -0.0395 0.9631 +vn 0.2646 -0.0396 0.9635 +vn 0.0628 0.1898 0.9798 +vn 0.0632 0.1897 0.9798 +vn 0.0627 0.1898 0.9798 +vn 0.0634 0.1897 0.9798 +vn -0.1434 -0.0392 0.9889 +vn -0.1421 -0.0391 0.9891 +vn -0.1414 -0.0390 0.9892 +vn -0.1441 -0.0392 0.9888 +vn -0.3585 -0.2418 0.9017 +vn -0.3587 -0.2418 0.9016 +vn -0.5778 -0.3761 0.7243 +vn -0.5780 -0.3761 0.7242 +vn -0.5781 -0.3761 0.7242 +vn -0.7877 -0.3886 0.4780 +vn -0.7878 -0.3885 0.4779 +vn -0.9424 -0.2419 0.2309 +vn -0.9423 -0.2423 0.2310 +vn -0.9426 -0.2415 0.2307 +vn -0.9422 -0.2426 0.2312 +vn -0.0589 0.7818 
-0.6207 +vn -0.0590 0.7827 -0.6196 +vn -0.0590 0.7829 -0.6194 +vn -0.0587 0.7808 -0.6220 +vn -0.0582 0.7783 -0.6252 +vn -0.0584 0.7787 -0.6247 +vn -0.0583 0.7798 -0.6233 +vn -0.0583 0.7801 -0.6229 +vn -0.0583 0.7799 -0.6232 +vn -0.0582 0.7788 -0.6245 +vn -0.0581 0.7785 -0.6250 +vn -0.0584 0.7799 -0.6232 +vn -0.0582 0.7785 -0.6249 +vn -0.0582 0.7788 -0.6246 +vn -0.0583 0.7803 -0.6226 +vn -0.0583 0.7797 -0.6234 +vn -0.0583 0.7785 -0.6249 +vn -0.0584 0.7785 -0.6249 +vn -0.0586 0.7811 -0.6217 +vn -0.0589 0.7822 -0.6203 +vn -0.0590 0.7828 -0.6195 +vn 0.1019 -0.6170 -0.7804 +vn 0.1023 -0.6169 -0.7803 +vn 0.1025 -0.6169 -0.7804 +vn 0.1018 -0.6169 -0.7804 +vn -0.3041 -0.6089 -0.7326 +vn -0.3040 -0.6090 -0.7326 +vn -0.3038 -0.6090 -0.7327 +vn -0.3043 -0.6089 -0.7325 +vn 0.3286 -0.7481 -0.5765 +vn 0.3288 -0.7481 -0.5765 +vn -0.4875 -0.7264 -0.4845 +vn -0.4864 -0.7267 -0.4850 +vn -0.4884 -0.7261 -0.4840 +vn -0.4856 -0.7270 -0.4854 +vn 0.5577 -0.7624 -0.3281 +vn 0.5576 -0.7625 -0.3282 +vn 0.5578 -0.7624 -0.3281 +vn 0.5575 -0.7625 -0.3282 +vn -0.6657 -0.7194 -0.1983 +vn -0.6656 -0.7195 -0.1983 +vn -0.6660 -0.7192 -0.1981 +vn -0.6654 -0.7197 -0.1984 +vn 0.7720 -0.6270 -0.1048 +vn 0.7716 -0.6274 -0.1047 +vn 0.7719 -0.6271 -0.1048 +vn 0.7716 -0.6275 -0.1047 +vn -0.8276 -0.5593 0.0482 +vn -0.8277 -0.5590 0.0482 +vn -0.8279 -0.5588 0.0482 +vn -0.8274 -0.5595 0.0482 +vn 0.9339 -0.3576 0.0022 +vn 0.9340 -0.3572 0.0021 +vn 0.9340 -0.3571 0.0021 +vn 0.9338 -0.3577 0.0022 +vn -0.9491 -0.2731 0.1571 +vn -0.9493 -0.2724 0.1571 +vn -0.9490 -0.2735 0.1572 +vn -0.9494 -0.2719 0.1570 +vn 0.9968 -0.0422 -0.0682 +vn 0.9968 -0.0415 -0.0687 +vn 0.9957 -0.0450 -0.0815 +vn 0.9978 -0.0351 -0.0570 +vn 0.9505 -0.2555 0.1771 +vn 0.9503 -0.2560 0.1775 +vn 0.9502 -0.2562 0.1777 +vn 0.9506 -0.2551 0.1768 +vn 0.8256 -0.3085 0.4725 +vn 0.8256 -0.3085 0.4724 +vn 0.6558 -0.2183 0.7227 +vn 0.6549 -0.2186 0.7235 +vn 0.6543 -0.2188 0.7239 +vn 0.6564 -0.2181 0.7222 +vn 0.4655 -0.0371 0.8843 +vn 0.4646 -0.0372 
0.8848 +vn 0.4640 -0.0373 0.8851 +vn 0.4661 -0.0370 0.8840 +vn 0.2637 0.1870 0.9463 +vn 0.2634 0.1870 0.9464 +vn 0.2637 0.1869 0.9463 +vn 0.2633 0.1870 0.9464 +vn 0.0550 0.4149 0.9082 +vn 0.0559 0.4149 0.9082 +vn 0.0545 0.4150 0.9082 +vn 0.0564 0.4148 0.9082 +vn -0.1460 0.1845 0.9719 +vn -0.1457 0.1845 0.9720 +vn -0.1464 0.1846 0.9719 +vn -0.1455 0.1845 0.9720 +vn -0.3550 -0.0360 0.9342 +vn -0.3551 -0.0361 0.9341 +vn -0.3558 -0.0361 0.9339 +vn -0.3545 -0.0360 0.9344 +vn -0.5710 -0.2065 0.7946 +vn -0.5713 -0.2064 0.7944 +vn -0.5716 -0.2064 0.7942 +vn -0.5708 -0.2065 0.7947 +vn -0.7793 -0.2751 0.5630 +vn -0.7790 -0.2753 0.5633 +vn -0.7797 -0.2749 0.5626 +vn -0.7786 -0.2756 0.5637 +vn -0.9365 -0.1957 0.2910 +vn -0.9362 -0.1962 0.2915 +vn -0.9366 -0.1954 0.2907 +vn -0.9361 -0.1964 0.2917 +vn -0.9968 0.0417 0.0686 +vn -0.9968 0.0421 0.0683 +vn -0.9968 0.0414 0.0687 +vn -0.9978 0.0352 0.0568 +vn -0.3114 0.5796 0.7531 +vn -0.3112 0.5796 0.7531 +vn -0.3112 0.5796 0.7532 +vn -0.1027 0.6170 0.7803 +vn -0.1029 0.6170 0.7802 +vn -0.1025 0.6169 0.7803 +vn -0.1031 0.6169 0.7802 +vn 0.1013 0.6264 0.7729 +vn 0.1019 0.6263 0.7729 +vn 0.1010 0.6264 0.7730 +vn 0.1021 0.6262 0.7729 +vn 0.3040 0.6091 0.7325 +vn 0.3031 0.6093 0.7327 +vn 0.3045 0.6090 0.7324 +vn 0.3026 0.6095 0.7328 +vn 0.5036 0.5627 0.6556 +vn 0.5040 0.5625 0.6554 +vn 0.5033 0.5628 0.6557 +vn 0.5043 0.5624 0.6553 +vn 0.6957 0.4798 0.5345 +vn 0.6952 0.4802 0.5349 +vn 0.6961 0.4796 0.5343 +vn 0.6950 0.4803 0.5351 +vn 0.8640 0.3525 0.3596 +vn 0.8643 0.3520 0.3592 +vn 0.8637 0.3528 0.3600 +vn 0.8646 0.3517 0.3589 +vn 0.9761 0.1756 0.1282 +vn 0.9760 0.1758 0.1285 +vn 0.9762 0.1752 0.1279 +vn 0.9759 0.1762 0.1290 +vn 0.9909 -0.0313 -0.1313 +vn 0.9910 -0.0306 -0.1305 +vn 0.9918 -0.0398 -0.1218 +vn 0.9910 -0.0304 -0.1305 +vn 0.8996 -0.2299 -0.3714 +vn 0.8984 -0.2317 -0.3731 +vn 0.9002 -0.2289 -0.3705 +vn 0.8978 -0.2326 -0.3741 +vn 0.7297 -0.3931 -0.5596 +vn 0.7299 -0.3928 -0.5594 +vn 0.7294 -0.3933 -0.5597 +vn 0.7302 -0.3927 
-0.5592 +vn 0.5236 -0.5080 -0.6839 +vn 0.5239 -0.5079 -0.6838 +vn 0.5240 -0.5079 -0.6837 +vn 0.5234 -0.5081 -0.6840 +vn 0.3125 -0.5792 -0.7529 +vn 0.3120 -0.5793 -0.7530 +vn 0.3119 -0.5794 -0.7530 +vn -0.5045 -0.5622 -0.6554 +vn -0.5044 -0.5622 -0.6554 +vn -0.5045 -0.5621 -0.6554 +vn -0.6953 -0.4801 -0.5348 +vn -0.6956 -0.4799 -0.5346 +vn -0.6962 -0.4795 -0.5343 +vn -0.6950 -0.4803 -0.5351 +vn -0.8647 -0.3517 -0.3585 +vn -0.8648 -0.3516 -0.3584 +vn -0.8649 -0.3515 -0.3583 +vn -0.8646 -0.3519 -0.3588 +vn -0.9759 -0.1761 -0.1286 +vn -0.9761 -0.1757 -0.1281 +vn -0.9762 -0.1753 -0.1277 +vn -0.9759 -0.1764 -0.1288 +vn -0.9910 0.0302 0.1302 +vn -0.9911 0.0296 0.1298 +vn -0.9918 0.0391 0.1214 +vn -0.9910 0.0303 0.1306 +vn -0.8988 0.2310 0.3727 +vn -0.8986 0.2313 0.3729 +vn -0.8989 0.2307 0.3725 +vn -0.7290 0.3937 0.5600 +vn -0.7291 0.3936 0.5599 +vn -0.7294 0.3934 0.5597 +vn -0.7288 0.3939 0.5601 +vn -0.5250 0.5076 0.6832 +vn -0.5252 0.5075 0.6831 +vn 0.5384 -0.6560 -0.5289 +vn 0.5387 -0.6559 -0.5288 +vn 0.5390 -0.6557 -0.5287 +vn -0.6830 -0.6189 -0.3879 +vn -0.6833 -0.6186 -0.3878 +vn -0.6837 -0.6184 -0.3876 +vn -0.6825 -0.6193 -0.3881 +vn 0.7547 -0.5912 -0.2845 +vn 0.7545 -0.5914 -0.2845 +vn 0.7548 -0.5910 -0.2845 +vn 0.7545 -0.5914 -0.2846 +vn -0.8424 -0.5270 -0.1122 +vn -0.8423 -0.5272 -0.1122 +vn -0.8425 -0.5270 -0.1122 +vn 0.9239 -0.3678 -0.1054 +vn 0.9239 -0.3677 -0.1054 +vn 0.9240 -0.3675 -0.1054 +vn -0.9568 -0.2809 0.0747 +vn -0.9568 -0.2811 0.0747 +vn -0.9566 -0.2815 0.0747 +vn -0.9569 -0.2808 0.0747 +vn 0.9956 -0.0479 -0.0811 +vn 0.9956 -0.0465 -0.0813 +vn -0.9956 0.0477 0.0813 +vn -0.9956 0.0471 0.0814 +vn -0.9955 0.0480 0.0812 +vn -0.9956 0.0466 0.0814 +vn 0.9426 0.2415 -0.2308 +vn 0.9425 0.2416 -0.2308 +vn 0.9424 0.2419 -0.2309 +vn 0.9977 -0.0340 -0.0578 +vn 0.9983 -0.0233 -0.0525 +vn 0.9556 -0.1809 0.2327 +vn 0.9557 -0.1806 0.2323 +vn 0.9559 -0.1803 0.2319 +vn 0.9554 -0.1811 0.2331 +vn 0.8304 -0.1693 0.5308 +vn 0.8308 -0.1691 0.5302 +vn 0.8311 -0.1690 
0.5298 +vn 0.8301 -0.1696 0.5312 +vn 0.6592 -0.0324 0.7513 +vn 0.6590 -0.0325 0.7515 +vn 0.4629 0.1756 0.8688 +vn 0.4639 0.1754 0.8684 +vn 0.4624 0.1757 0.8691 +vn 0.4644 0.1753 0.8681 +vn 0.2571 0.4086 0.8757 +vn 0.2575 0.4086 0.8756 +vn 0.0425 0.6263 0.7784 +vn 0.0429 0.6263 0.7784 +vn 0.0421 0.6264 0.7784 +vn -0.1517 0.4034 0.9024 +vn -0.1525 0.4034 0.9022 +vn -0.1512 0.4034 0.9024 +vn -0.1531 0.4033 0.9022 +vn -0.3553 0.1706 0.9190 +vn -0.3557 0.1706 0.9189 +vn -0.3551 0.1706 0.9191 +vn -0.3560 0.1706 0.9188 +vn -0.5682 -0.0308 0.8223 +vn -0.7740 -0.1512 0.6148 +vn -0.7743 -0.1511 0.6145 +vn -0.7738 -0.1512 0.6150 +vn -0.7745 -0.1510 0.6143 +vn -0.9325 -0.1383 0.3337 +vn -0.9329 -0.1376 0.3327 +vn -0.9322 -0.1388 0.3342 +vn -0.9332 -0.1372 0.3320 +vn -0.9978 0.0344 0.0574 +vn -0.9984 0.0233 0.0523 +vn -0.9430 0.3176 -0.0995 +vn -0.9435 0.3162 -0.0988 +vn -0.9426 0.3186 -0.0999 +vn -0.9439 0.3153 -0.0983 +vn -0.3283 0.7481 0.5766 +vn -0.3285 0.7481 0.5766 +vn -0.3280 0.7482 0.5768 +vn -0.3288 0.7480 0.5765 +vn -0.1205 0.7966 0.5924 +vn -0.1204 0.7966 0.5925 +vn -0.1209 0.7965 0.5924 +vn -0.1200 0.7965 0.5926 +vn 0.0840 0.8086 0.5823 +vn 0.0838 0.8086 0.5823 +vn 0.0843 0.8086 0.5822 +vn 0.0837 0.8086 0.5823 +vn 0.2864 0.7865 0.5472 +vn 0.2859 0.7865 0.5473 +vn 0.2859 0.7865 0.5474 +vn 0.4864 0.7269 0.4849 +vn 0.4877 0.7264 0.4843 +vn 0.4883 0.7262 0.4840 +vn 0.4856 0.7271 0.4853 +vn 0.6832 0.6189 0.3875 +vn 0.6828 0.6193 0.3877 +vn 0.6837 0.6185 0.3873 +vn 0.6825 0.6195 0.3879 +vn 0.8548 0.4539 0.2515 +vn 0.8540 0.4550 0.2521 +vn 0.8554 0.4532 0.2510 +vn 0.8535 0.4558 0.2526 +vn 0.9709 0.2273 0.0752 +vn 0.9711 0.2266 0.0748 +vn 0.9708 0.2276 0.0754 +vn 0.9712 0.2263 0.0745 +vn 0.9917 -0.0407 -0.1222 +vn 0.9929 -0.0466 -0.1098 +vn 0.9060 -0.2971 -0.3015 +vn 0.9065 -0.2962 -0.3010 +vn 0.9057 -0.2976 -0.3018 +vn 0.9067 -0.2956 -0.3007 +vn 0.7401 -0.5086 -0.4400 +vn 0.7402 -0.5085 -0.4400 +vn 0.7399 -0.5087 -0.4401 +vn 0.7404 -0.5082 -0.4399 +vn -0.8550 -0.4537 
-0.2511 +vn -0.8552 -0.4535 -0.2510 +vn -0.8553 -0.4533 -0.2509 +vn -0.8548 -0.4541 -0.2514 +vn -0.9711 -0.2266 -0.0747 +vn -0.9709 -0.2273 -0.0752 +vn -0.9708 -0.2276 -0.0753 +vn -0.9712 -0.2263 -0.0745 +vn -0.9920 0.0381 0.1204 +vn -0.9929 0.0466 0.1098 +vn -0.9054 0.2982 0.3021 +vn -0.9055 0.2980 0.3020 +vn -0.9057 0.2976 0.3018 +vn -0.9053 0.2985 0.3023 +vn -0.7403 0.5083 0.4400 +vn -0.7400 0.5085 0.4401 +vn -0.7399 0.5086 0.4403 +vn -0.7404 0.5081 0.4400 +vn -0.5394 0.6554 0.5287 +vn -0.5392 0.6555 0.5288 +vn -0.5396 0.6553 0.5286 +vn -0.5390 0.6556 0.5289 +vn 0.9142 -0.3461 -0.2107 +vn -0.9644 -0.2646 -0.0057 +vn -0.9644 -0.2645 -0.0057 +vn 0.9942 -0.0492 -0.0955 +vn 0.9942 -0.0493 -0.0955 +vn 0.9942 -0.0496 -0.0954 +vn -0.9942 0.0494 0.0953 +vn -0.9942 0.0496 0.0953 +vn 0.9493 0.2724 -0.1570 +vn 0.9491 0.2731 -0.1571 +vn 0.9490 0.2735 -0.1571 +vn 0.9494 0.2719 -0.1569 +vn -0.9335 0.3587 -0.0025 +vn -0.9335 0.3586 -0.0025 +vn -0.9337 0.3581 -0.0024 +vn -0.9334 0.3587 -0.0025 +vn 0.7978 0.4834 -0.3602 +vn 0.7973 0.4841 -0.3605 +vn 0.7970 0.4844 -0.3607 +vn 0.7981 0.4831 -0.3601 +vn 0.9365 0.1958 -0.2910 +vn 0.9367 0.1953 -0.2906 +vn 0.9368 0.1950 -0.2905 +vn 0.9363 0.1960 -0.2913 +vn 0.9985 -0.0238 -0.0497 +vn 0.9988 -0.0137 -0.0460 +vn 0.9588 -0.0991 0.2664 +vn 0.9587 -0.0992 0.2666 +vn 0.9586 -0.0993 0.2668 +vn 0.9588 -0.0991 0.2663 +vn 0.8325 -0.0251 0.5534 +vn 0.8323 -0.0252 0.5537 +vn 0.8322 -0.0252 0.5539 +vn 0.8327 -0.0251 0.5532 +vn 0.6575 0.1536 0.7377 +vn 0.6575 0.1536 0.7376 +vn 0.6570 0.1537 0.7380 +vn 0.6578 0.1536 0.7374 +vn 0.4571 0.3839 0.8023 +vn 0.4565 0.3840 0.8026 +vn 0.4574 0.3837 0.8022 +vn 0.4561 0.3841 0.8028 +vn 0.2450 0.6170 0.7479 +vn 0.2453 0.6169 0.7478 +vn 0.2447 0.6170 0.7479 +vn 0.2455 0.6169 0.7478 +vn 0.0257 0.8087 0.5877 +vn 0.0267 0.8088 0.5875 +vn 0.0274 0.8088 0.5874 +vn 0.0251 0.8086 0.5878 +vn -0.1627 0.6092 0.7762 +vn -0.1625 0.6092 0.7762 +vn -0.3616 0.3728 0.8546 +vn -0.3614 0.3728 0.8546 +vn -0.3618 0.3728 0.8545 +vn 
-0.3609 0.3728 0.8548 +vn -0.5701 0.1456 0.8086 +vn -0.5698 0.1456 0.8088 +vn -0.5705 0.1455 0.8083 +vn -0.5694 0.1455 0.8091 +vn -0.7728 -0.0225 0.6343 +vn -0.7729 -0.0224 0.6341 +vn -0.7725 -0.0225 0.6346 +vn -0.7733 -0.0224 0.6337 +vn -0.9306 -0.0756 0.3581 +vn -0.9304 -0.0758 0.3587 +vn -0.9308 -0.0754 0.3577 +vn -0.9302 -0.0760 0.3591 +vn -0.9985 0.0238 0.0496 +vn -0.9988 0.0135 0.0462 +vn -0.9510 0.2543 -0.1758 +vn -0.9509 0.2545 -0.1760 +vn -0.9510 0.2542 -0.1758 +vn -0.9508 0.2547 -0.1761 +vn -0.8053 0.5403 -0.2440 +vn -0.8046 0.5412 -0.2444 +vn -0.8057 0.5397 -0.2438 +vn -0.8041 0.5417 -0.2447 +vn -0.3498 0.8700 0.3474 +vn -0.3490 0.8703 0.3475 +vn -0.3502 0.8699 0.3473 +vn -0.3486 0.8704 0.3476 +vn -0.1429 0.9263 0.3486 +vn -0.1430 0.9263 0.3487 +vn 0.0608 0.9404 0.3347 +vn 0.0607 0.9404 0.3347 +vn 0.2634 0.9147 0.3064 +vn 0.2632 0.9148 0.3065 +vn 0.2637 0.9147 0.3063 +vn 0.4672 0.8445 0.2620 +vn 0.4657 0.8451 0.2626 +vn 0.4650 0.8454 0.2629 +vn 0.4679 0.8442 0.2617 +vn 0.6646 0.7203 0.1989 +vn 0.6654 0.7196 0.1984 +vn 0.6660 0.7192 0.1981 +vn 0.6640 0.7207 0.1992 +vn 0.8425 0.5269 0.1119 +vn 0.8422 0.5274 0.1120 +vn 0.8425 0.5270 0.1121 +vn 0.8423 0.5273 0.1121 +vn 0.9644 0.2645 0.0056 +vn 0.9929 -0.0462 -0.1095 +vn 0.9929 -0.0467 -0.1096 +vn -0.9929 0.0462 0.1096 +vn -0.9929 0.0467 0.1097 +vn -0.9144 0.3458 0.2106 +vn -0.9145 0.3455 0.2106 +vn -0.9146 0.3452 0.2105 +vn -0.9142 0.3461 0.2107 +vn -0.7548 0.5911 0.2845 +vn -0.7545 0.5914 0.2845 +vn -0.5582 0.7622 0.3279 +vn -0.5584 0.7620 0.3278 +vn -0.5578 0.7624 0.3280 +vn -0.5588 0.7618 0.3277 +vn 0.9567 0.2814 -0.0747 +vn 0.9568 0.2809 -0.0747 +vn 0.9567 0.2815 -0.0747 +vn 0.9569 0.2808 -0.0747 +vn -0.9242 0.3670 0.1054 +vn -0.9239 0.3680 0.1053 +vn -0.9237 0.3685 0.1053 +vn -0.9244 0.3664 0.1055 +vn 0.8115 0.5445 -0.2121 +vn 0.8122 0.5435 -0.2121 +vn 0.8126 0.5428 -0.2120 +vn 0.8111 0.5451 -0.2121 +vn -0.7894 0.6088 -0.0787 +vn -0.7902 0.6078 -0.0785 +vn -0.7891 0.6092 -0.0787 +vn -0.7905 0.6074 
-0.0783 +vn 0.6039 0.6602 -0.4465 +vn 0.6038 0.6603 -0.4466 +vn 0.7872 0.3892 -0.4783 +vn 0.7870 0.3894 -0.4785 +vn 0.9326 0.1383 -0.3334 +vn 0.9325 0.1385 -0.3336 +vn 0.9324 0.1387 -0.3339 +vn 0.9327 0.1381 -0.3332 +vn 0.9988 -0.0126 -0.0480 +vn 0.9991 -0.0021 -0.0430 +vn 0.9600 -0.0147 0.2797 +vn 0.9600 -0.0146 0.2795 +vn 0.9601 -0.0146 0.2793 +vn 0.9599 -0.0147 0.2800 +vn 0.8321 0.1192 0.5417 +vn 0.8326 0.1189 0.5410 +vn 0.8318 0.1193 0.5421 +vn 0.8329 0.1188 0.5405 +vn 0.6518 0.3365 0.6797 +vn 0.6517 0.3366 0.6797 +vn 0.6520 0.3364 0.6795 +vn 0.6515 0.3367 0.6799 +vn 0.4474 0.5791 0.6815 +vn 0.4466 0.5793 0.6818 +vn 0.4478 0.5789 0.6814 +vn 0.4462 0.5795 0.6820 +vn 0.2274 0.7967 0.5599 +vn 0.2271 0.7968 0.5600 +vn 0.0038 0.9404 0.3401 +vn 0.0029 0.9403 0.3402 +vn 0.0023 0.9403 0.3404 +vn 0.0044 0.9404 0.3400 +vn -0.1813 0.7863 0.5906 +vn -0.1805 0.7864 0.5907 +vn -0.1818 0.7862 0.5906 +vn -0.1800 0.7865 0.5908 +vn -0.3734 0.5627 0.7376 +vn -0.3730 0.5627 0.7377 +vn -0.3736 0.5626 0.7375 +vn -0.3728 0.5628 0.7378 +vn -0.5764 0.3176 0.7529 +vn -0.5760 0.3176 0.7532 +vn -0.5768 0.3175 0.7526 +vn -0.5757 0.3177 0.7534 +vn -0.7738 0.1064 0.6244 +vn -0.7740 0.1064 0.6242 +vn -0.7742 0.1063 0.6239 +vn -0.7736 0.1065 0.6247 +vn -0.9293 -0.0112 0.3691 +vn -0.9295 -0.0111 0.3687 +vn -0.9293 -0.0112 0.3693 +vn -0.9296 -0.0111 0.3685 +vn -0.9988 0.0123 0.0482 +vn -0.9989 0.0021 0.0459 +vn -0.9559 0.1803 -0.2321 +vn -0.9556 0.1806 -0.2326 +vn -0.9560 0.1799 -0.2316 +vn -0.9555 0.1810 -0.2331 +vn -0.8162 0.4368 -0.3782 +vn -0.8161 0.4369 -0.3782 +vn -0.8164 0.4365 -0.3780 +vn -0.8162 0.4369 -0.3782 +vn -0.6211 0.6989 -0.3548 +vn -0.6207 0.6991 -0.3549 +vn -0.6206 0.6991 -0.3550 +vn -0.6211 0.6988 -0.3547 +vn -0.3748 0.9234 0.0834 +vn -0.3737 0.9238 0.0831 +vn -0.3732 0.9240 0.0830 +vn -0.3753 0.9231 0.0835 +vn -0.1689 0.9833 0.0671 +vn -0.1688 0.9834 0.0671 +vn 0.0338 0.9982 0.0489 +vn 0.0335 0.9982 0.0490 +vn 0.0339 0.9982 0.0489 +vn 0.2377 0.9709 0.0285 +vn 0.2373 0.9710 
0.0286 +vn 0.2369 0.9711 0.0287 +vn 0.2379 0.9709 0.0284 +vn 0.4427 0.8966 0.0055 +vn 0.4434 0.8963 0.0053 +vn 0.4439 0.8961 0.0052 +vn 0.4422 0.8969 0.0056 +vn 0.6437 0.7650 -0.0203 +vn 0.6434 0.7653 -0.0203 +vn 0.6433 0.7653 -0.0202 +vn 0.8277 0.5591 -0.0481 +vn 0.8276 0.5593 -0.0481 +vn 0.8279 0.5588 -0.0481 +vn 0.8274 0.5595 -0.0482 +vn -0.7719 0.6271 0.1048 +vn -0.7716 0.6274 0.1048 +vn -0.7719 0.6270 0.1048 +vn -0.7716 0.6275 0.1048 +vn -0.5802 0.8088 0.0965 +vn -0.5793 0.8094 0.0965 +vn -0.5807 0.8084 0.0964 +vn -0.5788 0.8097 0.0966 +vn 0.6225 0.7435 -0.2443 +vn 0.6226 0.7435 -0.2443 +vn 0.6221 0.7439 -0.2442 +vn 0.6226 0.7434 -0.2443 +vn -0.6007 0.7869 -0.1410 +vn -0.6005 0.7871 -0.1411 +vn 0.3963 0.7738 -0.4941 +vn 0.3957 0.7740 -0.4943 +vn 0.3968 0.7736 -0.4940 +vn 0.3952 0.7742 -0.4944 +vn 0.5885 0.5322 -0.6086 +vn 0.5887 0.5321 -0.6085 +vn 0.7796 0.2749 -0.5628 +vn 0.7793 0.2751 -0.5630 +vn 0.7791 0.2753 -0.5632 +vn 0.7797 0.2749 -0.5625 +vn 0.9306 0.0754 -0.3582 +vn 0.9305 0.0755 -0.3585 +vn 0.9304 0.0756 -0.3588 +vn 0.9307 0.0753 -0.3580 +vn 0.9988 0.0030 -0.0492 +vn 0.9990 0.0038 -0.0435 +vn 0.9594 0.0698 0.2731 +vn 0.9594 0.0698 0.2733 +vn 0.9592 0.0699 0.2738 +vn 0.9595 0.0698 0.2731 +vn 0.8284 0.2605 0.4959 +vn 0.8281 0.2607 0.4962 +vn 0.8287 0.2602 0.4956 +vn 0.8279 0.2608 0.4965 +vn 0.6415 0.5080 0.5748 +vn 0.6407 0.5085 0.5753 +vn 0.6421 0.5077 0.5744 +vn 0.6402 0.5088 0.5756 +vn 0.4307 0.7478 0.5053 +vn 0.4294 0.7481 0.5059 +vn 0.4286 0.7484 0.5062 +vn 0.4315 0.7476 0.5049 +vn 0.2050 0.9262 0.3163 +vn 0.2049 0.9263 0.3163 +vn 0.2046 0.9264 0.3162 +vn -0.0248 0.9982 0.0543 +vn -0.0257 0.9982 0.0544 +vn -0.0262 0.9982 0.0545 +vn -0.0243 0.9982 0.0542 +vn -0.2042 0.9142 0.3501 +vn -0.2038 0.9143 0.3500 +vn -0.2035 0.9144 0.3500 +vn -0.2046 0.9141 0.3501 +vn -0.3882 0.7268 0.5667 +vn -0.3887 0.7266 0.5666 +vn -0.3878 0.7270 0.5667 +vn -0.3891 0.7265 0.5664 +vn -0.5845 0.4800 0.6542 +vn -0.5856 0.4795 0.6536 +vn -0.5837 0.4802 0.6547 +vn -0.5863 
0.4792 0.6531 +vn -0.7774 0.2325 0.5844 +vn -0.7775 0.2325 0.5843 +vn -0.9298 0.0534 0.3642 +vn -0.9299 0.0533 0.3638 +vn -0.9300 0.0532 0.3636 +vn -0.9297 0.0535 0.3645 +vn -0.9992 -0.0039 0.0406 +vn -0.9989 -0.0035 0.0465 +vn -0.9587 0.0993 -0.2666 +vn -0.9588 0.0992 -0.2663 +vn -0.9586 0.0993 -0.2668 +vn -0.9588 0.0991 -0.2663 +vn -0.8254 0.3086 -0.4727 +vn -0.8259 0.3082 -0.4722 +vn -0.8251 0.3089 -0.4731 +vn -0.8262 0.3080 -0.4718 +vn -0.6376 0.5628 -0.5261 +vn -0.6380 0.5626 -0.5258 +vn -0.6372 0.5630 -0.5262 +vn -0.6383 0.5624 -0.5256 +vn -0.4220 0.7973 -0.4315 +vn -0.4223 0.7972 -0.4314 +vn -0.4225 0.7972 -0.4313 +vn -0.4217 0.7974 -0.4316 +vn -0.3998 0.8973 -0.1873 +vn -0.4004 0.8970 -0.1871 +vn -0.4008 0.8969 -0.1869 +vn -0.3994 0.8974 -0.1874 +vn -0.1974 0.9552 -0.2207 +vn -0.1961 0.9554 -0.2210 +vn -0.1953 0.9555 -0.2212 +vn -0.1982 0.9551 -0.2205 +vn 0.0079 0.9698 -0.2437 +vn 0.0066 0.9699 -0.2434 +vn 0.0058 0.9699 -0.2433 +vn 0.0087 0.9698 -0.2439 +vn 0.2108 0.9435 -0.2558 +vn 0.2115 0.9433 -0.2560 +vn 0.2118 0.9432 -0.2560 +vn 0.2104 0.9436 -0.2558 +vn 0.4184 0.8710 -0.2573 +vn 0.4181 0.8712 -0.2572 +vn 0.4178 0.8714 -0.2572 +vn 0.4188 0.8709 -0.2573 +vn 0.1872 0.8380 -0.5126 +vn 0.1864 0.8382 -0.5125 +vn 0.1862 0.8383 -0.5125 +vn 0.1874 0.8379 -0.5126 +vn 0.3780 0.6236 -0.6843 +vn 0.3788 0.6234 -0.6840 +vn 0.3774 0.6238 -0.6844 +vn 0.3793 0.6234 -0.6838 +vn 0.5781 0.3760 -0.7242 +vn 0.5784 0.3759 -0.7240 +vn 0.5778 0.3761 -0.7243 +vn 0.5789 0.3758 -0.7237 +vn 0.7736 0.1512 -0.6154 +vn 0.7745 0.1508 -0.6144 +vn 0.7749 0.1505 -0.6139 +vn 0.7730 0.1515 -0.6160 +vn 0.9297 0.0113 -0.3682 +vn 0.9296 0.0113 -0.3685 +vn 0.9296 0.0114 -0.3685 +vn 0.9990 0.0098 -0.0438 +vn 0.9986 0.0203 -0.0484 +vn 0.9569 0.1528 0.2469 +vn 0.9570 0.1526 0.2467 +vn 0.8206 0.3933 0.4148 +vn 0.8204 0.3934 0.4150 +vn 0.6264 0.6564 0.4205 +vn 0.6270 0.6560 0.4201 +vn 0.6276 0.6557 0.4196 +vn 0.6258 0.6567 0.4209 +vn 0.4077 0.8701 0.2770 +vn 0.4086 0.8697 0.2767 +vn 0.4092 0.8695 
0.2765 +vn 0.4071 0.8703 0.2772 +vn 0.1792 0.9832 0.0344 +vn 0.1801 0.9831 0.0343 +vn 0.1807 0.9829 0.0341 +vn 0.1785 0.9833 0.0346 +vn -0.0529 0.9699 -0.2379 +vn -0.0516 0.9699 -0.2381 +vn -0.0508 0.9699 -0.2383 +vn -0.0537 0.9699 -0.2377 +vn -0.2293 0.9707 0.0719 +vn -0.2290 0.9708 0.0719 +vn -0.2287 0.9708 0.0718 +vn -0.2295 0.9706 0.0720 +vn -0.4090 0.8451 0.3442 +vn -0.4086 0.8453 0.3443 +vn -0.4091 0.8451 0.3442 +vn -0.5986 0.6195 0.5079 +vn -0.5980 0.6198 0.5082 +vn -0.5990 0.6192 0.5077 +vn -0.5977 0.6200 0.5083 +vn -0.7843 0.3508 0.5117 +vn -0.7842 0.3509 0.5118 +vn -0.9318 0.1167 0.3438 +vn -0.9316 0.1169 0.3441 +vn -0.9315 0.1170 0.3443 +vn -0.9318 0.1166 0.3436 +vn -0.9989 -0.0091 0.0468 +vn -0.9986 -0.0202 0.0485 +vn -0.9600 0.0147 -0.2797 +vn -0.9600 0.0147 -0.2795 +vn -0.9599 0.0148 -0.2800 +vn -0.8312 0.1692 -0.5297 +vn -0.8311 0.1692 -0.5297 +vn -0.8314 0.1691 -0.5293 +vn -0.8310 0.1693 -0.5299 +vn -0.6489 0.3980 -0.6484 +vn -0.6491 0.3979 -0.6484 +vn -0.6495 0.3977 -0.6481 +vn -0.6489 0.3980 -0.6485 +vn -0.4410 0.6422 -0.6270 +vn -0.4406 0.6422 -0.6272 +vn -0.4403 0.6422 -0.6274 +vn -0.4413 0.6421 -0.6269 +vn -0.2211 0.8487 -0.4805 +vn -0.2218 0.8486 -0.4802 +vn -0.2223 0.8486 -0.4801 +vn -0.2205 0.8487 -0.4806 +vn -0.0183 0.8618 -0.5070 +vn -0.0165 0.8616 -0.5073 +vn -0.0156 0.8615 -0.5075 +vn -0.0193 0.8618 -0.5068 +vn 0.1672 0.6753 -0.7183 +vn 0.1678 0.6753 -0.7182 +vn 0.1672 0.6754 -0.7183 +vn 0.3665 0.4406 -0.8195 +vn 0.3661 0.4407 -0.8196 +vn 0.3666 0.4406 -0.8195 +vn 0.3659 0.4407 -0.8197 +vn 0.5722 0.2062 -0.7938 +vn 0.5721 0.2062 -0.7938 +vn 0.5721 0.2062 -0.7939 +vn 0.5725 0.2062 -0.7936 +vn 0.7717 0.0226 -0.6356 +vn 0.7715 0.0227 -0.6359 +vn 0.7713 0.0227 -0.6361 +vn 0.7718 0.0226 -0.6355 +vn 0.9300 -0.0532 -0.3637 +vn 0.9298 -0.0534 -0.3642 +vn 0.9300 -0.0532 -0.3636 +vn 0.9297 -0.0534 -0.3645 +vn 0.9986 0.0202 -0.0485 +vn 0.9980 0.0289 -0.0568 +vn 0.9523 0.2307 0.1995 +vn 0.9522 0.2311 0.1999 +vn 0.9524 0.2306 0.1993 +vn 0.9521 0.2314 
0.2001 +vn 0.8092 0.5081 0.2950 +vn 0.8088 0.5086 0.2953 +vn 0.8094 0.5078 0.2949 +vn 0.8086 0.5088 0.2956 +vn 0.6084 0.7627 0.2193 +vn 0.6087 0.7625 0.2191 +vn 0.6088 0.7625 0.2191 +vn 0.3836 0.9234 0.0122 +vn 0.3820 0.9241 0.0125 +vn 0.3810 0.9245 0.0128 +vn 0.3845 0.9230 0.0119 +vn 0.1530 0.9552 -0.2535 +vn 0.1521 0.9553 -0.2534 +vn 0.1515 0.9555 -0.2533 +vn 0.1535 0.9551 -0.2536 +vn -0.0751 0.8617 -0.5019 +vn -0.0753 0.8617 -0.5018 +vn -0.0750 0.8617 -0.5019 +vn -0.0754 0.8617 -0.5018 +vn -0.2549 0.9434 -0.2122 +vn -0.2547 0.9434 -0.2123 +vn -0.2544 0.9435 -0.2123 +vn -0.2552 0.9433 -0.2121 +vn -0.4336 0.8969 0.0872 +vn -0.4335 0.8969 0.0872 +vn -0.6168 0.7200 0.3182 +vn -0.6169 0.7198 0.3182 +vn -0.6170 0.7198 0.3182 +vn -0.6163 0.7203 0.3184 +vn -0.7939 0.4535 0.4050 +vn -0.7940 0.4534 0.4050 +vn -0.7942 0.4531 0.4049 +vn -0.7937 0.4538 0.4052 +vn -0.9354 0.1755 0.3068 +vn -0.9352 0.1759 0.3072 +vn -0.9352 0.1760 0.3073 +vn -0.9355 0.1755 0.3067 +vn -0.9986 -0.0203 0.0485 +vn -0.9980 -0.0291 0.0569 +vn -0.9593 -0.0699 -0.2735 +vn -0.9594 -0.0699 -0.2734 +vn -0.9593 -0.0699 -0.2736 +vn -0.9595 -0.0698 -0.2730 +vn -0.8332 0.0252 -0.5525 +vn -0.8334 0.0251 -0.5521 +vn -0.8331 0.0252 -0.5526 +vn -0.8336 0.0251 -0.5518 +vn -0.6556 0.2185 -0.7228 +vn -0.6551 0.2187 -0.7232 +vn -0.6557 0.2184 -0.7227 +vn -0.6549 0.2187 -0.7234 +vn -0.4544 0.4539 -0.7665 +vn -0.4541 0.4540 -0.7666 +vn -0.4538 0.4541 -0.7667 +vn -0.2396 0.6837 -0.6893 +vn -0.2392 0.6838 -0.6894 +vn -0.2399 0.6836 -0.6893 +vn -0.2390 0.6838 -0.6894 +vn -0.0382 0.6941 -0.7188 +vn -0.0375 0.6942 -0.7188 +vn -0.0388 0.6941 -0.7188 +vn -0.0370 0.6943 -0.7188 +vn 0.1537 0.4774 -0.8651 +vn 0.1538 0.4774 -0.8651 +vn 0.1533 0.4774 -0.8652 +vn 0.3580 0.2420 -0.9018 +vn 0.3571 0.2421 -0.9022 +vn 0.3585 0.2420 -0.9016 +vn 0.3566 0.2421 -0.9023 +vn 0.5698 0.0308 -0.8212 +vn 0.5696 0.0308 -0.8214 +vn 0.5700 0.0308 -0.8211 +vn 0.5694 0.0308 -0.8215 +vn 0.7730 -0.1065 -0.6254 +vn 0.7733 -0.1064 -0.6250 +vn 0.7728 
-0.1066 -0.6256 +vn 0.7736 -0.1063 -0.6246 +vn 0.9316 -0.1169 -0.3441 +vn 0.9319 -0.1165 -0.3436 +vn 0.9315 -0.1170 -0.3443 +vn 0.9320 -0.1164 -0.3433 +vn 0.9980 0.0307 -0.0554 +vn 0.9971 0.0380 -0.0654 +vn 0.9456 0.2985 0.1292 +vn 0.9459 0.2977 0.1287 +vn 0.9454 0.2990 0.1296 +vn 0.9461 0.2972 0.1284 +vn 0.7942 0.5914 0.1399 +vn 0.7941 0.5914 0.1399 +vn 0.7941 0.5915 0.1399 +vn 0.5870 0.8095 -0.0125 +vn 0.5874 0.8092 -0.0126 +vn 0.5878 0.8089 -0.0127 +vn 0.5867 0.8097 -0.0124 +vn 0.3573 0.8976 -0.2580 +vn 0.3566 0.8979 -0.2581 +vn 0.3578 0.8974 -0.2580 +vn 0.3561 0.8980 -0.2582 +vn 0.1259 0.8490 -0.5132 +vn 0.1264 0.8489 -0.5131 +vn 0.1256 0.8490 -0.5132 +vn 0.1268 0.8489 -0.5131 +vn -0.0950 0.6943 -0.7134 +vn -0.0951 0.6943 -0.7134 +vn -0.0952 0.6943 -0.7134 +vn -0.2785 0.8381 -0.4690 +vn -0.2790 0.8381 -0.4688 +vn -0.2794 0.8380 -0.4687 +vn -0.2783 0.8382 -0.4691 +vn -0.4583 0.8713 -0.1752 +vn -0.4586 0.8712 -0.1751 +vn -0.4589 0.8711 -0.1750 +vn -0.4580 0.8715 -0.1754 +vn -0.6373 0.7642 0.0994 +vn -0.6370 0.7644 0.0993 +vn -0.6370 0.7645 0.0993 +vn -0.6374 0.7641 0.0994 +vn -0.8067 0.5275 0.2663 +vn -0.8066 0.5276 0.2663 +vn -0.9401 0.2275 0.2539 +vn -0.9403 0.2269 0.2536 +vn -0.9405 0.2266 0.2534 +vn -0.9399 0.2279 0.2541 +vn -0.9980 -0.0309 0.0555 +vn -0.9971 -0.0381 0.0653 +vn -0.9570 -0.1526 -0.2468 +vn -0.9569 -0.1527 -0.2470 +vn -0.8326 -0.1192 -0.5409 +vn -0.8329 -0.1191 -0.5405 +vn -0.6581 0.0324 -0.7522 +vn -0.6586 0.0324 -0.7518 +vn -0.6577 0.0325 -0.7526 +vn -0.6590 0.0323 -0.7514 +vn -0.4621 0.2492 -0.8511 +vn -0.4626 0.2491 -0.8509 +vn -0.4618 0.2492 -0.8512 +vn -0.4630 0.2490 -0.8507 +vn -0.2533 0.4835 -0.8379 +vn -0.2537 0.4834 -0.8378 +vn -0.2529 0.4836 -0.8380 +vn -0.2542 0.4833 -0.8377 +vn -0.0513 0.4907 -0.8698 +vn -0.0511 0.4908 -0.8698 +vn -0.0516 0.4907 -0.8698 +vn -0.0509 0.4908 -0.8698 +vn 0.1454 0.2620 -0.9540 +vn 0.1464 0.2621 -0.9539 +vn 0.1448 0.2619 -0.9542 +vn 0.1470 0.2621 -0.9538 +vn 0.3542 0.0361 -0.9345 +vn 0.3553 0.0362 
-0.9341 +vn 0.3535 0.0360 -0.9348 +vn 0.3559 0.0362 -0.9338 +vn 0.5705 -0.1454 -0.8083 +vn 0.5707 -0.1454 -0.8082 +vn 0.7775 -0.2326 -0.5843 +vn 0.7775 -0.2327 -0.5843 +vn 0.9354 -0.1757 -0.3070 +vn 0.9354 -0.1756 -0.3069 +vn 0.9354 -0.1756 -0.3070 +vn 0.9355 -0.1754 -0.3068 +vn 0.9971 0.0374 -0.0660 +vn 0.9961 0.0439 -0.0772 +vn 0.9375 0.3459 0.0377 +vn 0.9374 0.3461 0.0377 +vn 0.9377 0.3455 0.0376 +vn 0.9373 0.3464 0.0378 +vn 0.7778 0.6272 -0.0402 +vn 0.7786 0.6262 -0.0401 +vn 0.7774 0.6278 -0.0402 +vn 0.7790 0.6257 -0.0401 +vn 0.5649 0.7865 -0.2498 +vn 0.5653 0.7862 -0.2496 +vn 0.5645 0.7867 -0.2498 +vn 0.5657 0.7860 -0.2495 +vn 0.3358 0.7970 -0.5020 +vn 0.3358 0.7971 -0.5020 +vn 0.3355 0.7972 -0.5020 +vn 0.1071 0.6840 -0.7216 +vn 0.1069 0.6840 -0.7216 +vn 0.1075 0.6840 -0.7216 +vn 0.1065 0.6840 -0.7217 +vn -0.1093 0.4908 -0.8644 +vn -0.1096 0.4908 -0.8644 +vn -0.1097 0.4907 -0.8644 +vn -0.2988 0.6751 -0.6745 +vn -0.2993 0.6750 -0.6744 +vn -0.2997 0.6750 -0.6742 +vn -0.2984 0.6751 -0.6746 +vn -0.4807 0.7740 -0.4121 +vn -0.4801 0.7742 -0.4125 +vn -0.4798 0.7743 -0.4126 +vn -0.4810 0.7739 -0.4120 +vn -0.6578 0.7428 -0.1245 +vn -0.6575 0.7431 -0.1246 +vn -0.6573 0.7432 -0.1247 +vn -0.6578 0.7428 -0.1244 +vn -0.8213 0.5606 0.1060 +vn -0.8214 0.5604 0.1060 +vn -0.8216 0.5602 0.1060 +vn -0.8212 0.5607 0.1060 +vn -0.9464 0.2651 0.1844 +vn -0.9463 0.2653 0.1845 +vn -0.9971 -0.0376 0.0658 +vn -0.9961 -0.0463 0.0757 +vn -0.9521 -0.2314 -0.1998 +vn -0.9523 -0.2310 -0.1993 +vn -0.9524 -0.2307 -0.1991 +vn -0.9520 -0.2316 -0.2001 +vn -0.8290 -0.2600 -0.4950 +vn -0.8289 -0.2601 -0.4952 +vn -0.8287 -0.2603 -0.4955 +vn -0.8293 -0.2599 -0.4947 +vn -0.6571 -0.1540 -0.7379 +vn -0.6574 -0.1539 -0.7376 +vn -0.6570 -0.1540 -0.7380 +vn -0.6575 -0.1539 -0.7376 +vn -0.4651 0.0370 -0.8845 +vn -0.4638 0.0372 -0.8851 +vn -0.4660 0.0369 -0.8840 +vn -0.4629 0.0373 -0.8856 +vn -0.2626 0.2652 -0.9277 +vn -0.2632 0.2651 -0.9276 +vn -0.2624 0.2653 -0.9278 +vn -0.2634 0.2651 -0.9276 +vn -0.0591 
0.2693 -0.9612 +vn -0.0595 0.2692 -0.9612 +vn -0.0596 0.2692 -0.9612 +vn -0.0591 0.2693 -0.9613 +vn 0.1432 0.0390 -0.9889 +vn 0.1423 0.0389 -0.9891 +vn 0.1438 0.0391 -0.9888 +vn 0.1417 0.0389 -0.9891 +vn 0.3573 -0.1705 -0.9183 +vn 0.5755 -0.3181 -0.7534 +vn 0.5751 -0.3181 -0.7537 +vn 0.5749 -0.3182 -0.7539 +vn 0.5757 -0.3180 -0.7533 +vn 0.7842 -0.3508 -0.5118 +vn 0.7843 -0.3508 -0.5118 +vn 0.9405 -0.2266 -0.2532 +vn 0.9407 -0.2260 -0.2529 +vn 0.9405 -0.2268 -0.2532 +vn 0.9409 -0.2257 -0.2526 +vn 0.9960 0.0442 -0.0770 +vn 0.9948 0.0468 -0.0904 +vn 0.9273 0.3681 -0.0675 +vn 0.9269 0.3691 -0.0675 +vn 0.9276 0.3675 -0.0675 +vn 0.9267 0.3697 -0.0675 +vn 0.7614 0.6086 -0.2234 +vn 0.7619 0.6080 -0.2234 +vn 0.5445 0.6989 -0.4637 +vn 0.5447 0.6988 -0.4636 +vn 0.5445 0.6990 -0.4637 +vn 0.3165 0.6424 -0.6979 +vn 0.3167 0.6424 -0.6979 +vn 0.3161 0.6425 -0.6981 +vn 0.3170 0.6423 -0.6978 +vn 0.0937 0.4835 -0.8703 +vn 0.0938 0.4835 -0.8703 +vn 0.0939 0.4835 -0.8703 +vn 0.0936 0.4835 -0.8703 +vn -0.1194 0.2694 -0.9556 +vn -0.1191 0.2694 -0.9556 +vn -0.1199 0.2693 -0.9556 +vn -0.1186 0.2695 -0.9557 +vn -0.3122 0.4774 -0.8214 +vn -0.3129 0.4772 -0.8212 +vn -0.3118 0.4775 -0.8215 +vn -0.3133 0.4771 -0.8211 +vn -0.4978 0.6238 -0.6026 +vn -0.4974 0.6239 -0.6028 +vn -0.6768 0.6599 -0.3265 +vn -0.6763 0.6602 -0.3267 +vn -0.6771 0.6596 -0.3264 +vn -0.6760 0.6605 -0.3268 +vn -0.8372 0.5438 -0.0578 +vn -0.8369 0.5442 -0.0579 +vn -0.8375 0.5434 -0.0578 +vn -0.8367 0.5446 -0.0581 +vn -0.9541 0.2811 0.1038 +vn -0.9540 0.2814 0.1037 +vn -0.9539 0.2817 0.1037 +vn -0.9542 0.2807 0.1038 +vn -0.9960 -0.0451 0.0766 +vn -0.9960 -0.0468 0.0762 +vn -0.9454 -0.2988 -0.1298 +vn -0.9454 -0.2990 -0.1299 +vn -0.9452 -0.2993 -0.1302 +vn -0.8209 -0.3931 -0.4144 +vn -0.8213 -0.3926 -0.4138 +vn -0.8217 -0.3922 -0.4135 +vn -0.8205 -0.3933 -0.4147 +vn -0.6516 -0.3366 -0.6798 +vn -0.6515 -0.3366 -0.6799 +vn -0.6520 -0.3364 -0.6796 +vn -0.4620 -0.1758 -0.8693 +vn -0.4621 -0.1757 -0.8692 +vn -0.4624 -0.1757 -0.8691 
+vn -0.4618 -0.1758 -0.8694 +vn -0.2669 0.0397 -0.9629 +vn -0.2668 0.0397 -0.9629 +vn -0.2670 0.0397 -0.9629 +vn -0.0625 0.0403 -0.9972 +vn -0.0629 0.0402 -0.9972 +vn -0.0624 0.0403 -0.9972 +vn 0.1435 -0.1847 -0.9723 +vn 0.1437 -0.1848 -0.9722 +vn 0.1440 -0.1848 -0.9722 +vn 0.1432 -0.1847 -0.9723 +vn 0.3625 -0.3727 -0.8542 +vn 0.3623 -0.3727 -0.8543 +vn 0.3618 -0.3728 -0.8545 +vn 0.3630 -0.3727 -0.8540 +vn 0.5843 -0.4799 -0.6544 +vn 0.5849 -0.4797 -0.6540 +vn 0.5851 -0.4796 -0.6539 +vn 0.5841 -0.4800 -0.6545 +vn 0.7940 -0.4535 -0.4049 +vn 0.7939 -0.4536 -0.4050 +vn 0.7942 -0.4532 -0.4048 +vn 0.7937 -0.4538 -0.4052 +vn 0.9472 -0.2627 -0.1836 +vn 0.9473 -0.2625 -0.1836 +vn 0.9937 0.0529 -0.0990 +vn 0.9942 0.0461 -0.0974 +vn 0.9168 0.3588 -0.1755 +vn 0.9172 0.3579 -0.1754 +vn 0.9174 0.3573 -0.1753 +vn 0.9165 0.3594 -0.1755 +vn 0.7457 0.5409 -0.3891 +vn 0.7459 0.5406 -0.3890 +vn 0.5290 0.5629 -0.6350 +vn 0.5294 0.5628 -0.6349 +vn 0.3025 0.4543 -0.8379 +vn 0.3024 0.4543 -0.8379 +vn 0.3030 0.4543 -0.8377 +vn 0.0872 0.2653 -0.9602 +vn 0.0863 0.2652 -0.9603 +vn 0.0876 0.2653 -0.9602 +vn 0.0858 0.2652 -0.9604 +vn -0.1227 0.0402 -0.9916 +vn -0.1229 0.0402 -0.9916 +vn -0.1222 0.0403 -0.9917 +vn -0.1232 0.0401 -0.9916 +vn -0.3200 0.2620 -0.9105 +vn -0.5108 0.4410 -0.7380 +vn -0.5112 0.4409 -0.7378 +vn -0.6920 0.5315 -0.4884 +vn -0.6921 0.5315 -0.4884 +vn -0.6923 0.5313 -0.4883 +vn -0.8510 0.4832 -0.2059 +vn -0.8511 0.4830 -0.2058 +vn -0.8506 0.4837 -0.2062 +vn -0.8514 0.4825 -0.2055 +vn -0.9615 0.2738 0.0214 +vn -0.9617 0.2731 0.0216 +vn -0.9614 0.2742 0.0214 +vn -0.9618 0.2727 0.0216 +vn -0.9948 -0.0477 0.0904 +vn -0.9947 -0.0490 0.0905 +vn -0.9947 -0.0497 0.0905 +vn -0.9948 -0.0470 0.0904 +vn -0.9369 -0.3475 -0.0383 +vn -0.9368 -0.3477 -0.0384 +vn -0.9367 -0.3480 -0.0385 +vn -0.9370 -0.3472 -0.0383 +vn -0.8097 -0.5075 -0.2947 +vn -0.8096 -0.5076 -0.2948 +vn -0.8094 -0.5079 -0.2949 +vn -0.8099 -0.5073 -0.2945 +vn -0.6417 -0.5078 -0.5748 +vn -0.6418 -0.5077 -0.5747 +vn -0.6420 
-0.5076 -0.5746 +vn -0.6415 -0.5079 -0.5749 +vn -0.4556 -0.3842 -0.8030 +vn -0.4558 -0.3841 -0.8029 +vn -0.4561 -0.3840 -0.8028 +vn -0.2647 -0.1870 -0.9460 +vn -0.2641 -0.1871 -0.9462 +vn -0.2637 -0.1871 -0.9463 +vn -0.2651 -0.1869 -0.9459 +vn -0.0617 -0.1899 -0.9799 +vn -0.0624 -0.1898 -0.9798 +vn -0.0627 -0.1898 -0.9798 +vn -0.0612 -0.1899 -0.9799 +vn 0.1504 -0.4035 -0.9025 +vn 0.1496 -0.4035 -0.9027 +vn 0.1490 -0.4035 -0.9028 +vn 0.1510 -0.4035 -0.9024 +vn 0.3728 -0.5627 -0.7378 +vn 0.3731 -0.5626 -0.7377 +vn 0.3728 -0.5627 -0.7379 +vn 0.5990 -0.6192 -0.5078 +vn 0.5987 -0.6194 -0.5078 +vn 0.5990 -0.6192 -0.5077 +vn 0.5987 -0.6194 -0.5079 +vn 0.8066 -0.5276 -0.2664 +vn 0.8068 -0.5274 -0.2664 +vn 0.9548 -0.2785 -0.1036 +vn 0.9549 -0.2782 -0.1037 +vn 0.9547 -0.2789 -0.1036 +vn 0.9550 -0.2778 -0.1037 +vn 0.9935 0.0456 -0.1042 +vn 0.9923 0.0413 -0.1171 +vn 0.9084 0.3173 -0.2723 +vn 0.9084 0.3173 -0.2722 +vn 0.9082 0.3177 -0.2724 +vn 0.7333 0.4358 -0.5219 +vn 0.7330 0.4361 -0.5221 +vn 0.7329 0.4362 -0.5222 +vn 0.5183 0.3977 -0.7571 +vn 0.5186 0.3976 -0.7570 +vn 0.5186 0.3975 -0.7570 +vn 0.5182 0.3977 -0.7571 +vn 0.2945 0.2492 -0.9226 +vn 0.2950 0.2492 -0.9224 +vn 0.0843 0.0396 -0.9957 +vn 0.0844 0.0396 -0.9956 +vn 0.0839 0.0395 -0.9957 +vn -0.0399 0.9992 -0.0079 +vn -0.0413 0.9991 -0.0080 +vn -0.0408 0.9992 0.0038 +vn -0.0399 0.9992 -0.0081 +vn -0.3233 0.0392 -0.9455 +vn -0.3234 0.0391 -0.9454 +vn -0.3232 0.0392 -0.9455 +vn -0.5184 0.2420 -0.8202 +vn -0.5189 0.2419 -0.8199 +vn -0.5182 0.2421 -0.8203 +vn -0.5191 0.2418 -0.8198 +vn -0.7028 0.3759 -0.6040 +vn -0.7025 0.3760 -0.6043 +vn -0.7024 0.3760 -0.6043 +vn -0.8619 0.3895 -0.3248 +vn -0.8621 0.3892 -0.3245 +vn -0.9684 0.2436 -0.0534 +vn -0.9684 0.2436 -0.0533 +vn -0.9685 0.2432 -0.0532 +vn -0.9934 -0.0462 0.1045 +vn -0.9935 -0.0457 0.1044 +vn -0.9921 -0.0435 0.1178 +vn -0.9271 -0.3688 0.0675 +vn -0.9272 -0.3685 0.0675 +vn -0.7955 -0.5898 -0.1392 +vn -0.7956 -0.5897 -0.1392 +vn -0.7955 -0.5898 -0.1393 +vn -0.6273 
-0.6559 -0.4199 +vn -0.6272 -0.6559 -0.4199 +vn -0.6277 -0.6557 -0.4196 +vn -0.6270 -0.6561 -0.4200 +vn -0.4456 -0.5795 -0.6824 +vn -0.4470 -0.5790 -0.6819 +vn -0.4479 -0.5787 -0.6816 +vn -0.4447 -0.5798 -0.6827 +vn -0.2575 -0.4087 -0.8756 +vn -0.2571 -0.4087 -0.8757 +vn -0.0559 -0.4148 -0.9082 +vn -0.0550 -0.4149 -0.9082 +vn -0.0545 -0.4149 -0.9082 +vn -0.0564 -0.4147 -0.9082 +vn 0.1610 -0.6094 -0.7763 +vn 0.1608 -0.6094 -0.7764 +vn 0.3895 -0.7264 -0.5663 +vn 0.3895 -0.7263 -0.5663 +vn 0.3894 -0.7264 -0.5663 +vn 0.6169 -0.7199 -0.3182 +vn 0.6166 -0.7200 -0.3183 +vn 0.6169 -0.7198 -0.3182 +vn 0.6163 -0.7202 -0.3184 +vn 0.8212 -0.5607 -0.1060 +vn 0.8205 -0.5618 -0.1059 +vn 0.8216 -0.5601 -0.1061 +vn 0.8201 -0.5624 -0.1058 +vn 0.9625 -0.2703 -0.0222 +vn 0.9623 -0.2709 -0.0221 +vn 0.9622 -0.2714 -0.0220 +vn 0.9626 -0.2699 -0.0223 +vn 0.9922 0.0418 -0.1175 +vn 0.9912 0.0340 -0.1276 +vn 0.9010 0.2560 -0.3503 +vn 0.9009 0.2560 -0.3503 +vn 0.7238 0.3084 -0.6172 +vn 0.5107 0.2183 -0.8316 +vn 0.5114 0.2182 -0.8312 +vn 0.5104 0.2183 -0.8318 +vn 0.5118 0.2182 -0.8310 +vn 0.2923 0.0372 -0.9556 +vn 0.2922 0.0372 -0.9556 +vn 0.1635 0.9862 -0.0247 +vn 0.1640 0.9861 -0.0248 +vn 0.1650 0.9862 -0.0152 +vn -0.2432 0.9699 0.0091 +vn -0.2423 0.9699 0.0223 +vn -0.5216 0.0361 -0.8524 +vn -0.5213 0.0362 -0.8526 +vn -0.5212 0.0362 -0.8527 +vn -0.7099 0.2061 -0.6735 +vn -0.7092 0.2063 -0.6741 +vn -0.7104 0.2058 -0.6730 +vn -0.7087 0.2064 -0.6746 +vn -0.8700 0.2752 -0.4090 +vn -0.8700 0.2753 -0.4090 +vn -0.8698 0.2754 -0.4094 +vn -0.9744 0.1949 -0.1121 +vn -0.9742 0.1956 -0.1126 +vn -0.9745 0.1946 -0.1118 +vn -0.9741 0.1961 -0.1130 +vn -0.9923 -0.0415 0.1171 +vn -0.9912 -0.0341 0.1280 +vn -0.9171 -0.3579 0.1754 +vn -0.9172 -0.3578 0.1754 +vn -0.7781 -0.6268 0.0401 +vn -0.7785 -0.6264 0.0401 +vn -0.7787 -0.6261 0.0401 +vn -0.7779 -0.6271 0.0402 +vn -0.6085 -0.7626 -0.2194 +vn -0.6087 -0.7625 -0.2193 +vn -0.6082 -0.7628 -0.2195 +vn -0.6088 -0.7624 -0.2192 +vn -0.4307 -0.7478 -0.5053 +vn 
-0.4312 -0.7477 -0.5051 +vn -0.4303 -0.7479 -0.5055 +vn -0.4315 -0.7476 -0.5049 +vn -0.2451 -0.6169 -0.7479 +vn -0.2445 -0.6170 -0.7480 +vn -0.2442 -0.6172 -0.7480 +vn -0.2455 -0.6168 -0.7478 +vn -0.0425 -0.6263 -0.7785 +vn -0.0427 -0.6262 -0.7785 +vn -0.0429 -0.6262 -0.7785 +vn -0.0422 -0.6263 -0.7785 +vn 0.1784 -0.7869 -0.5907 +vn 0.1792 -0.7868 -0.5906 +vn 0.1795 -0.7868 -0.5906 +vn 0.1781 -0.7869 -0.5908 +vn 0.4098 -0.8447 -0.3442 +vn 0.4088 -0.8452 -0.3443 +vn 0.4081 -0.8455 -0.3445 +vn 0.4105 -0.8445 -0.3440 +vn 0.6379 -0.7636 -0.0995 +vn 0.6377 -0.7638 -0.0995 +vn 0.6383 -0.7633 -0.0996 +vn 0.6374 -0.7641 -0.0994 +vn 0.8355 -0.5463 0.0588 +vn 0.8359 -0.5457 0.0586 +vn 0.8362 -0.5453 0.0585 +vn 0.8353 -0.5467 0.0588 +vn 0.9692 -0.2408 0.0516 +vn 0.9691 -0.2411 0.0517 +vn 0.9692 -0.2407 0.0516 +vn 0.9914 0.0332 -0.1270 +vn 0.9904 0.0243 -0.1360 +vn 0.8959 0.1806 -0.4058 +vn 0.8964 0.1801 -0.4051 +vn 0.8966 0.1799 -0.4047 +vn 0.8957 0.1809 -0.4062 +vn 0.7187 0.1691 -0.6745 +vn 0.7192 0.1689 -0.6740 +vn 0.7196 0.1687 -0.6735 +vn 0.7183 0.1693 -0.6748 +vn 0.5077 0.0325 -0.8609 +vn 0.5080 0.0325 -0.8607 +vn 0.5081 0.0326 -0.8607 +vn 0.5074 0.0325 -0.8611 +vn 0.3681 0.9289 -0.0410 +vn 0.3682 0.9288 -0.0410 +vn 0.3699 0.9284 -0.0341 +vn 0.1651 0.9862 -0.0152 +vn -0.0000 0.9992 0.0410 +vn -0.2422 0.9700 0.0223 +vn -0.4492 0.8930 0.0270 +vn -0.4494 0.8929 0.0270 +vn -0.7118 0.0309 -0.7017 +vn -0.7126 0.0308 -0.7009 +vn -0.7114 0.0310 -0.7021 +vn -0.7130 0.0307 -0.7004 +vn -0.8740 0.1514 -0.4618 +vn -0.8744 0.1511 -0.4610 +vn -0.8738 0.1515 -0.4621 +vn -0.8747 0.1509 -0.4605 +vn -0.9781 0.1386 -0.1553 +vn -0.9783 0.1381 -0.1545 +vn -0.9780 0.1390 -0.1558 +vn -0.9784 0.1378 -0.1540 +vn -0.9910 -0.0349 0.1291 +vn -0.9904 -0.0243 0.1360 +vn -0.9087 -0.3168 0.2719 +vn -0.9083 -0.3175 0.2722 +vn -0.9089 -0.3163 0.2717 +vn -0.9081 -0.3180 0.2723 +vn -0.7612 -0.6087 0.2237 +vn -0.7608 -0.6092 0.2237 +vn -0.7613 -0.6085 0.2237 +vn -0.7608 -0.6093 0.2237 +vn -0.5877 -0.8090 
0.0128 +vn -0.5873 -0.8092 0.0127 +vn -0.5878 -0.8089 0.0129 +vn -0.5871 -0.8094 0.0127 +vn -0.4095 -0.8694 -0.2765 +vn -0.4095 -0.8694 -0.2766 +vn -0.4098 -0.8693 -0.2764 +vn -0.4091 -0.8695 -0.2767 +vn -0.2270 -0.7967 -0.5600 +vn -0.2269 -0.7968 -0.5601 +vn -0.2274 -0.7968 -0.5599 +vn -0.2265 -0.7968 -0.5602 +vn -0.0254 -0.8089 -0.5875 +vn -0.0249 -0.8088 -0.5875 +vn -0.0254 -0.8088 -0.5875 +vn -0.0250 -0.8088 -0.5876 +vn 0.2037 -0.9143 -0.3502 +vn 0.2025 -0.9146 -0.3500 +vn 0.2043 -0.9141 -0.3503 +vn 0.2019 -0.9148 -0.3499 +vn 0.4333 -0.8970 -0.0872 +vn 0.4327 -0.8973 -0.0871 +vn 0.4336 -0.8969 -0.0872 +vn 0.4324 -0.8975 -0.0870 +vn 0.6589 -0.7419 0.1242 +vn 0.6594 -0.7415 0.1240 +vn 0.6587 -0.7421 0.1244 +vn 0.6596 -0.7413 0.1239 +vn 0.8504 -0.4840 0.2064 +vn 0.8508 -0.4833 0.2061 +vn 0.8511 -0.4830 0.2060 +vn 0.8501 -0.4844 0.2066 +vn 0.9745 -0.1946 0.1116 +vn 0.9741 -0.1960 0.1128 +vn 0.9738 -0.1969 0.1137 +vn 0.9748 -0.1937 0.1108 +vn 0.9905 0.0238 -0.1352 +vn 0.9899 0.0137 -0.1410 +vn 0.8934 0.0988 -0.4384 +vn 0.8930 0.0991 -0.4391 +vn 0.8929 0.0992 -0.4393 +vn 0.8935 0.0987 -0.4382 +vn 0.7172 0.0252 -0.6964 +vn 0.7168 0.0253 -0.6968 +vn 0.7165 0.0253 -0.6971 +vn 0.7175 0.0251 -0.6961 +vn 0.5749 0.8163 -0.0567 +vn 0.5754 0.8159 -0.0569 +vn 0.5751 0.8164 -0.0532 +vn 0.5746 0.8164 -0.0568 +vn 0.3694 0.9286 -0.0340 +vn 0.3700 0.9284 -0.0341 +vn -0.0000 0.9862 -0.1656 +vn -0.1942 0.9642 0.1804 +vn -0.1943 0.9642 0.1804 +vn 0.0007 0.9700 0.2430 +vn -0.0000 0.9700 0.2431 +vn 0.0003 0.9700 0.2429 +vn -0.4478 0.8932 0.0412 +vn -0.4479 0.8931 0.0413 +vn -0.6489 0.7595 0.0450 +vn -0.6487 0.7597 0.0450 +vn -0.8763 0.0227 -0.4813 +vn -0.8759 0.0228 -0.4820 +vn -0.8765 0.0226 -0.4809 +vn -0.8757 0.0229 -0.4824 +vn -0.9809 0.0753 -0.1793 +vn -0.9806 0.0759 -0.1807 +vn -0.9811 0.0750 -0.1784 +vn -0.9804 0.0762 -0.1817 +vn -0.9905 -0.0239 0.1354 +vn -0.9903 -0.0129 0.1382 +vn -0.9012 -0.2557 0.3500 +vn -0.9016 -0.2550 0.3495 +vn -0.9010 -0.2560 0.3503 +vn -0.9018 -0.2547 
0.3492 +vn -0.7448 -0.5419 0.3893 +vn -0.7454 -0.5413 0.3891 +vn -0.7445 -0.5423 0.3895 +vn -0.7457 -0.5409 0.3891 +vn -0.5652 -0.7863 0.2496 +vn -0.5644 -0.7868 0.2498 +vn -0.5639 -0.7871 0.2499 +vn -0.5656 -0.7860 0.2495 +vn -0.3835 -0.9234 -0.0123 +vn -0.3843 -0.9231 -0.0121 +vn -0.3832 -0.9236 -0.0124 +vn -0.3846 -0.9230 -0.0120 +vn -0.2048 -0.9263 -0.3162 +vn -0.2046 -0.9263 -0.3163 +vn -0.2042 -0.9264 -0.3164 +vn -0.2049 -0.9263 -0.3162 +vn -0.0028 -0.9404 -0.3402 +vn -0.0024 -0.9403 -0.3403 +vn -0.0032 -0.9404 -0.3401 +vn -0.0022 -0.9403 -0.3403 +vn 0.2290 -0.9708 -0.0718 +vn 0.2294 -0.9707 -0.0719 +vn 0.2287 -0.9708 -0.0718 +vn 0.2298 -0.9706 -0.0720 +vn 0.4586 -0.8712 0.1752 +vn 0.4584 -0.8713 0.1753 +vn 0.4589 -0.8711 0.1751 +vn 0.4580 -0.8715 0.1755 +vn 0.6769 -0.6597 0.3265 +vn 0.6762 -0.6603 0.3269 +vn 0.6758 -0.6606 0.3270 +vn 0.6773 -0.6594 0.3263 +vn 0.8621 -0.3894 0.3244 +vn 0.8620 -0.3895 0.3245 +vn 0.8621 -0.3894 0.3243 +vn 0.8619 -0.3896 0.3245 +vn 0.9779 -0.1389 0.1561 +vn 0.9780 -0.1387 0.1559 +vn 0.9781 -0.1385 0.1556 +vn 0.9778 -0.1392 0.1566 +vn 0.9900 0.0135 -0.1406 +vn 0.9902 0.0022 -0.1400 +vn 0.8914 0.0149 -0.4530 +vn 0.8909 0.0150 -0.4539 +vn 0.8907 0.0151 -0.4544 +vn 0.8916 0.0148 -0.4526 +vn 0.7689 0.6355 -0.0708 +vn 0.7684 0.6360 -0.0707 +vn 0.7687 0.6357 -0.0708 +vn 0.7696 0.6346 -0.0708 +vn 0.5757 0.8160 -0.0531 +vn 0.5750 0.8165 -0.0530 +vn -0.0000 0.9286 -0.3710 +vn -0.0000 0.9287 -0.3709 +vn -0.3820 0.9176 0.1100 +vn -0.0000 0.9700 0.2430 +vn 0.0006 0.9700 0.2433 +vn -0.0000 0.8929 0.4503 +vn -0.6481 0.7592 0.0597 +vn -0.6482 0.7591 0.0597 +vn -0.6485 0.7589 0.0597 +vn -0.6477 0.7596 0.0596 +vn -0.8324 0.5507 0.0625 +vn -0.8322 0.5509 0.0625 +vn -0.8325 0.5505 0.0625 +vn -0.8320 0.5513 0.0625 +vn -0.9820 0.0112 -0.1886 +vn -0.9820 0.0112 -0.1883 +vn -0.9820 0.0113 -0.1886 +vn -0.9900 -0.0129 0.1402 +vn -0.9902 -0.0020 0.1400 +vn -0.8960 -0.1806 0.4058 +vn -0.8958 -0.1808 0.4060 +vn -0.8961 -0.1805 0.4056 +vn -0.8957 -0.1808 
0.4062 +vn -0.7326 -0.4363 0.5225 +vn -0.7322 -0.4367 0.5227 +vn -0.7329 -0.4361 0.5222 +vn -0.7319 -0.4370 0.5229 +vn -0.5448 -0.6989 0.4635 +vn -0.5454 -0.6985 0.4633 +vn -0.5460 -0.6982 0.4630 +vn -0.5443 -0.6991 0.4637 +vn -0.3580 -0.8974 0.2580 +vn -0.3577 -0.8975 0.2581 +vn -0.3576 -0.8975 0.2581 +vn -0.1785 -0.9833 -0.0346 +vn -0.1783 -0.9834 -0.0347 +vn 0.0239 -0.9982 -0.0544 +vn 0.0241 -0.9982 -0.0544 +vn 0.0238 -0.9982 -0.0543 +vn 0.0242 -0.9982 -0.0544 +vn 0.2552 -0.9433 0.2123 +vn 0.2550 -0.9433 0.2124 +vn 0.2553 -0.9433 0.2123 +vn 0.4810 -0.7738 0.4121 +vn 0.4812 -0.7737 0.4120 +vn 0.4815 -0.7737 0.4118 +vn 0.4810 -0.7738 0.4122 +vn 0.6919 -0.5317 0.4885 +vn 0.6928 -0.5310 0.4879 +vn 0.6934 -0.5306 0.4875 +vn 0.6912 -0.5321 0.4889 +vn 0.8702 -0.2750 0.4089 +vn 0.8704 -0.2748 0.4086 +vn 0.8705 -0.2746 0.4084 +vn 0.8700 -0.2751 0.4091 +vn 0.9805 -0.0760 0.1814 +vn 0.9804 -0.0760 0.1815 +vn 0.9899 0.0021 -0.1419 +vn 0.9902 0.0018 -0.1396 +vn 0.9222 0.3782 -0.0811 +vn 0.9229 0.3765 -0.0805 +vn 0.9219 0.3782 -0.0841 +vn 0.9217 0.3796 -0.0804 +vn 0.7683 0.6361 -0.0708 +vn 0.7693 0.6349 -0.0709 +vn -0.0000 0.8157 -0.5785 +vn -0.0000 0.8157 -0.5784 +vn -0.5603 0.8277 0.0318 +vn 0.1938 0.9643 0.1805 +vn -0.0002 0.9699 0.2435 +vn -0.0006 0.9701 0.2428 +vn 0.2103 0.9310 0.2985 +vn 0.2107 0.9308 0.2986 +vn 0.2100 0.9311 0.2984 +vn -0.0000 0.7594 0.6506 +vn -0.0000 0.7595 0.6505 +vn -0.8306 0.5516 0.0765 +vn -0.8309 0.5511 0.0766 +vn -0.8313 0.5506 0.0766 +vn -0.8303 0.5521 0.0765 +vn -0.9594 0.2716 0.0762 +vn -0.9604 0.2663 0.0823 +vn -0.9587 0.2724 0.0822 +vn -0.9600 0.2696 0.0762 +vn -0.9902 -0.0019 0.1400 +vn -0.8921 -0.0997 0.4408 +vn -0.8925 -0.0993 0.4399 +vn -0.8917 -0.0999 0.4414 +vn -0.8929 -0.0991 0.4393 +vn -0.7233 -0.3087 0.6177 +vn -0.7235 -0.3086 0.6175 +vn -0.7230 -0.3089 0.6179 +vn -0.7238 -0.3084 0.6172 +vn -0.5300 -0.5624 0.6347 +vn -0.5296 -0.5625 0.6348 +vn -0.5294 -0.5627 0.6349 +vn -0.5302 -0.5623 0.6346 +vn -0.3350 -0.7973 0.5021 +vn -0.3350 
-0.7973 0.5020 +vn -0.3351 -0.7973 0.5020 +vn -0.1518 -0.9554 0.2533 +vn 0.0513 -0.9699 0.2379 +vn 0.0512 -0.9699 0.2380 +vn 0.0507 -0.9699 0.2381 +vn 0.0514 -0.9699 0.2379 +vn 0.2780 -0.8383 0.4690 +vn 0.2788 -0.8382 0.4687 +vn 0.2774 -0.8384 0.4692 +vn 0.2795 -0.8381 0.4686 +vn 0.4983 -0.6235 0.6024 +vn 0.4986 -0.6235 0.6022 +vn 0.4982 -0.6235 0.6025 +vn 0.4988 -0.6234 0.6021 +vn 0.7034 -0.3756 0.6035 +vn 0.7022 -0.3762 0.6044 +vn 0.7015 -0.3767 0.6049 +vn 0.7042 -0.3752 0.6028 +vn 0.8752 -0.1508 0.4597 +vn 0.8752 -0.1507 0.4596 +vn 0.8750 -0.1509 0.4600 +vn 0.9814 -0.0115 0.1917 +vn 0.9814 -0.0114 0.1915 +vn 0.9948 0.0580 -0.0837 +vn 0.9947 0.0603 -0.0829 +vn 0.9942 0.0582 -0.0908 +vn 0.9949 0.0572 -0.0829 +vn 0.9225 0.3766 -0.0849 +vn 0.9212 0.3796 -0.0848 +vn -0.0000 0.6371 -0.7708 +vn -0.7266 0.6848 -0.0558 +vn -0.7266 0.6848 -0.0559 +vn -0.0000 0.9287 -0.3710 +vn 0.3826 0.9174 0.1098 +vn 0.3828 0.9173 0.1097 +vn -0.0008 0.9700 0.2430 +vn -0.0003 0.9700 0.2429 +vn -0.0000 0.8934 0.4492 +vn 0.4276 0.8365 0.3427 +vn 0.4275 0.8365 0.3427 +vn -0.0000 0.5513 0.8343 +vn -0.0000 0.5521 0.8338 +vn -0.0000 0.5527 0.8334 +vn -0.0000 0.5507 0.8347 +vn -0.9583 0.2718 0.0882 +vn -0.9586 0.2706 0.0883 +vn -0.9949 -0.0583 0.0829 +vn -0.9947 -0.0543 0.0875 +vn -0.9946 -0.0546 0.0885 +vn -0.9949 -0.0575 0.0829 +vn -0.8905 -0.0149 0.4548 +vn -0.8906 -0.0149 0.4545 +vn -0.7199 -0.1686 0.6733 +vn -0.7186 -0.1692 0.6745 +vn -0.7208 -0.1683 0.6724 +vn -0.7178 -0.1696 0.6753 +vn -0.5181 -0.3978 0.7572 +vn -0.5182 -0.3978 0.7571 +vn -0.3164 -0.6424 0.6980 +vn -0.3169 -0.6423 0.6979 +vn -0.3162 -0.6424 0.6981 +vn -0.1281 -0.8487 0.5132 +vn -0.1289 -0.8486 0.5131 +vn -0.1293 -0.8485 0.5132 +vn -0.1277 -0.8487 0.5133 +vn 0.0773 -0.8617 0.5015 +vn 0.0762 -0.8616 0.5018 +vn 0.0781 -0.8617 0.5013 +vn 0.0754 -0.8616 0.5020 +vn 0.2968 -0.6754 0.6751 +vn 0.2967 -0.6754 0.6752 +vn 0.5111 -0.4412 0.7377 +vn 0.5117 -0.4409 0.7374 +vn 0.5122 -0.4406 0.7372 +vn 0.5106 -0.4412 0.7380 +vn 0.7086 
-0.2064 0.6748 +vn 0.7088 -0.2063 0.6746 +vn 0.7090 -0.2062 0.6744 +vn 0.7083 -0.2065 0.6751 +vn 0.8771 -0.0225 0.4797 +vn 0.8769 -0.0226 0.4802 +vn 0.9592 -0.2721 -0.0763 +vn 0.9591 -0.2727 -0.0761 +vn 0.9585 -0.2712 -0.0880 +vn 0.9592 -0.2722 -0.0762 +vn 0.9940 0.0604 -0.0915 +vn 0.9941 0.0577 -0.0916 +vn -0.0000 0.3771 -0.9262 +vn -0.8639 0.4808 -0.1498 +vn 0.5603 0.8277 0.0318 +vn -0.0000 0.9861 -0.1661 +vn -0.0000 0.9862 -0.1657 +vn -0.0000 0.9861 -0.1660 +vn 0.0409 0.9992 0.0038 +vn -0.2103 0.9310 0.2985 +vn -0.2108 0.9308 0.2986 +vn -0.2100 0.9311 0.2984 +vn 0.6461 0.6682 0.3689 +vn 0.6462 0.6680 0.3689 +vn 0.6458 0.6685 0.3689 +vn 0.6465 0.6677 0.3690 +vn -0.0000 0.2703 0.9628 +vn -0.0000 0.2691 0.9631 +vn -0.0000 0.2683 0.9633 +vn -0.0000 0.2711 0.9625 +vn -0.9941 -0.0585 0.0916 +vn -0.9941 -0.0579 0.0916 +vn -0.9224 -0.3776 0.0806 +vn -0.9216 -0.3793 0.0828 +vn -0.9226 -0.3768 0.0828 +vn -0.9216 -0.3796 0.0805 +vn -0.7182 -0.0252 0.6954 +vn -0.7185 -0.0252 0.6951 +vn -0.7178 -0.0253 0.6958 +vn -0.7189 -0.0251 0.6946 +vn -0.5109 -0.2181 0.8315 +vn -0.5107 -0.2181 0.8317 +vn -0.5104 -0.2181 0.8318 +vn -0.5110 -0.2181 0.8315 +vn -0.3040 -0.4541 0.8375 +vn -0.3037 -0.4541 0.8376 +vn -0.3045 -0.4541 0.8373 +vn -0.1095 -0.6838 0.7214 +vn -0.1082 -0.6839 0.7216 +vn -0.1074 -0.6838 0.7217 +vn -0.1102 -0.6838 0.7213 +vn 0.0974 -0.6941 0.7132 +vn 0.0959 -0.6943 0.7133 +vn 0.0951 -0.6944 0.7133 +vn 0.0982 -0.6941 0.7132 +vn 0.3107 -0.4776 0.8218 +vn 0.3104 -0.4777 0.8219 +vn 0.5193 -0.2417 0.8197 +vn 0.5195 -0.2417 0.8196 +vn 0.5196 -0.2416 0.8195 +vn 0.7116 -0.0308 0.7019 +vn 0.7117 -0.0308 0.7018 +vn 0.7114 -0.0308 0.7021 +vn 0.8321 -0.5511 -0.0625 +vn 0.8322 -0.5509 -0.0625 +vn 0.8325 -0.5505 -0.0625 +vn 0.8319 -0.5514 -0.0625 +vn 0.9581 -0.2726 -0.0882 +vn 0.9584 -0.2714 -0.0883 +vn -0.0000 0.0587 -0.9983 +vn -0.9473 0.2074 -0.2442 +vn 0.7257 0.6858 -0.0553 +vn 0.7262 0.6853 -0.0556 +vn 0.7254 0.6862 -0.0551 +vn 0.7265 0.6849 -0.0558 +vn -0.1650 0.9862 -0.0152 
+vn -0.1651 0.9862 -0.0152 +vn 0.1301 0.0401 -0.9907 +vn 0.1305 0.0400 -0.9906 +vn 0.1308 0.0400 -0.9906 +vn 0.1297 0.0401 -0.9907 +vn 0.2418 0.9701 0.0223 +vn 0.2416 0.9701 0.0223 +vn -0.4273 0.8367 0.3427 +vn -0.4270 0.8368 0.3426 +vn -0.4267 0.8370 0.3426 +vn -0.4275 0.8365 0.3427 +vn 0.8307 0.4206 0.3648 +vn 0.8309 0.4201 0.3647 +vn 0.8306 0.4208 0.3648 +vn 0.8311 0.4198 0.3647 +vn -0.0000 -0.0579 0.9983 +vn -0.0000 -0.0567 0.9984 +vn -0.0000 -0.0558 0.9984 +vn -0.0000 -0.0589 0.9983 +vn -0.9225 -0.3765 0.0850 +vn -0.9227 -0.3760 0.0850 +vn -0.7682 -0.6363 0.0707 +vn -0.7682 -0.6363 0.0708 +vn -0.7679 -0.6366 0.0707 +vn -0.7686 -0.6358 0.0708 +vn -0.5077 -0.0326 0.8609 +vn -0.5078 -0.0327 0.8608 +vn -0.5083 -0.0327 0.8606 +vn -0.5074 -0.0326 0.8611 +vn -0.2964 -0.2490 0.9220 +vn -0.2961 -0.2490 0.9221 +vn -0.2956 -0.2490 0.9223 +vn -0.2967 -0.2490 0.9219 +vn -0.0939 -0.4834 0.8704 +vn -0.0945 -0.4834 0.8703 +vn -0.0948 -0.4834 0.8702 +vn -0.0936 -0.4834 0.8704 +vn 0.1098 -0.4909 0.8643 +vn 0.1106 -0.4908 0.8642 +vn 0.1111 -0.4907 0.8642 +vn 0.1093 -0.4909 0.8643 +vn 0.3193 -0.2619 0.9108 +vn 0.3194 -0.2619 0.9107 +vn 0.5221 -0.0362 0.8521 +vn 0.5219 -0.0362 0.8523 +vn 0.5215 -0.0362 0.8525 +vn 0.5223 -0.0361 0.8520 +vn 0.6501 -0.7585 -0.0451 +vn 0.6505 -0.7582 -0.0451 +vn 0.8309 -0.5511 -0.0766 +vn 0.8306 -0.5516 -0.0765 +vn 0.8303 -0.5521 -0.0765 +vn 0.8313 -0.5505 -0.0766 +vn -0.0000 -0.2719 -0.9623 +vn -0.9407 -0.1092 -0.3213 +vn 0.8647 0.4793 -0.1504 +vn 0.8643 0.4800 -0.1501 +vn 0.8650 0.4787 -0.1506 +vn 0.8641 0.4805 -0.1499 +vn -0.0000 0.8164 -0.5775 +vn -0.0000 0.8160 -0.5781 +vn -0.0000 0.8166 -0.5772 +vn -0.3689 0.9289 -0.0340 +vn -0.3691 0.9288 -0.0340 +vn -0.0728 0.0396 -0.9966 +vn -0.0734 0.0396 -0.9965 +vn -0.0739 0.0397 -0.9965 +vn -0.0724 0.0395 -0.9966 +vn 0.1277 0.2691 -0.9546 +vn 0.3326 0.0388 -0.9423 +vn 0.3324 0.0389 -0.9423 +vn 0.3320 0.0389 -0.9425 +vn 0.3330 0.0388 -0.9421 +vn 0.4485 0.8928 0.0413 +vn 0.4487 0.8927 0.0413 +vn -0.0000 
0.7602 0.6497 +vn -0.0000 0.7597 0.6502 +vn -0.0000 0.7606 0.6492 +vn -0.6465 0.6677 0.3690 +vn -0.0000 0.2711 0.9626 +vn 0.9405 0.1101 0.3214 +vn 0.9405 0.1100 0.3214 +vn 0.9406 0.1096 0.3213 +vn 0.9404 0.1105 0.3215 +vn -0.0000 -0.3778 0.9259 +vn -0.0000 -0.3769 0.9263 +vn -0.0000 -0.3786 0.9255 +vn -0.0000 -0.3760 0.9266 +vn -0.7681 -0.6364 0.0707 +vn -0.5746 -0.8164 0.0568 +vn -0.5757 -0.8157 0.0564 +vn -0.5742 -0.8170 0.0529 +vn -0.5765 -0.8151 0.0569 +vn -0.2923 -0.0372 0.9556 +vn -0.2924 -0.0372 0.9556 +vn -0.0864 -0.2651 0.9603 +vn -0.0868 -0.2651 0.9603 +vn -0.0870 -0.2651 0.9603 +vn -0.0862 -0.2651 0.9604 +vn 0.1199 -0.2692 0.9556 +vn 0.1196 -0.2692 0.9556 +vn 0.1195 -0.2692 0.9556 +vn 0.3227 -0.0392 0.9457 +vn 0.3230 -0.0391 0.9456 +vn 0.3235 -0.0391 0.9454 +vn 0.3225 -0.0392 0.9458 +vn 0.4472 -0.8940 -0.0268 +vn 0.4473 -0.8940 -0.0268 +vn 0.6497 -0.7578 -0.0599 +vn 0.6494 -0.7581 -0.0598 +vn 0.6499 -0.7577 -0.0599 +vn 0.6492 -0.7582 -0.0598 +vn -0.0000 -0.5507 -0.8347 +vn -0.8300 -0.4219 -0.3648 +vn 0.9473 0.2074 -0.2442 +vn -0.0000 0.6359 -0.7718 +vn -0.0000 0.6366 -0.7712 +vn -0.0000 0.6354 -0.7722 +vn -0.5761 0.8156 -0.0531 +vn -0.5762 0.8156 -0.0531 +vn -0.5757 0.8159 -0.0530 +vn -0.2846 0.0374 -0.9579 +vn -0.2849 0.0374 -0.9578 +vn -0.2853 0.0375 -0.9577 +vn -0.2842 0.0373 -0.9580 +vn -0.0768 0.2658 -0.9610 +vn -0.0772 0.2658 -0.9609 +vn -0.0773 0.2659 -0.9609 +vn 0.1194 0.4905 -0.8632 +vn 0.1196 0.4905 -0.8632 +vn 0.1190 0.4906 -0.8632 +vn 0.1197 0.4905 -0.8632 +vn 0.3284 0.2610 -0.9078 +vn 0.3285 0.2610 -0.9077 +vn 0.5284 0.0359 -0.8482 +vn 0.5295 0.0357 -0.8475 +vn 0.5302 0.0357 -0.8471 +vn 0.5278 0.0361 -0.8486 +vn 0.6474 0.7598 0.0596 +vn 0.6475 0.7597 0.0596 +vn 0.6471 0.7600 0.0596 +vn -0.8306 0.4208 0.3648 +vn 0.9473 -0.2071 0.2443 +vn 0.9473 -0.2073 0.2442 +vn 0.9474 -0.2068 0.2443 +vn 0.9473 -0.2075 0.2442 +vn -0.0000 -0.6354 0.7722 +vn -0.3685 -0.9287 0.0410 +vn -0.3681 -0.9289 0.0409 +vn -0.3689 -0.9289 0.0340 +vn -0.3682 -0.9289 0.0410 
+vn -0.0833 -0.0397 0.9957 +vn -0.0826 -0.0396 0.9958 +vn -0.0820 -0.0396 0.9958 +vn -0.0838 -0.0397 0.9957 +vn 0.1228 -0.0402 0.9916 +vn 0.1226 -0.0402 0.9916 +vn 0.1222 -0.0403 0.9917 +vn 0.1233 -0.0402 0.9916 +vn 0.2456 -0.9693 -0.0093 +vn 0.2455 -0.9693 -0.0093 +vn 0.2445 -0.9694 -0.0225 +vn 0.4459 -0.8942 -0.0411 +vn 0.4457 -0.8942 -0.0411 +vn -0.0000 -0.7593 -0.6508 +vn -0.0000 -0.7592 -0.6508 +vn -0.6465 -0.6678 -0.3690 +vn -0.6465 -0.6677 -0.3690 +vn 0.9407 -0.1093 -0.3213 +vn 0.9407 -0.1092 -0.3213 +vn -0.7683 0.6362 -0.0707 +vn -0.7686 0.6358 -0.0708 +vn -0.7688 0.6355 -0.0708 +vn -0.7680 0.6366 -0.0707 +vn -0.5004 0.0330 -0.8652 +vn -0.5008 0.0330 -0.8649 +vn -0.5011 0.0331 -0.8648 +vn -0.5002 0.0330 -0.8653 +vn -0.2884 0.2505 -0.9241 +vn -0.2888 0.2505 -0.9240 +vn -0.2888 0.2505 -0.9241 +vn -0.2883 0.2505 -0.9242 +vn -0.0852 0.4846 -0.8706 +vn -0.0856 0.4846 -0.8705 +vn -0.0859 0.4846 -0.8705 +vn -0.0849 0.4846 -0.8706 +vn 0.1053 0.6939 -0.7124 +vn 0.1052 0.6939 -0.7123 +vn 0.1048 0.6939 -0.7124 +vn 0.1053 0.6939 -0.7123 +vn 0.3205 0.4758 -0.8190 +vn 0.3208 0.4758 -0.8190 +vn 0.3212 0.4757 -0.8189 +vn 0.3201 0.4759 -0.8192 +vn 0.5271 0.2401 -0.8152 +vn 0.5275 0.2400 -0.8150 +vn 0.7174 0.0305 -0.6959 +vn 0.7179 0.0304 -0.6955 +vn 0.7181 0.0304 -0.6952 +vn 0.7172 0.0307 -0.6962 +vn 0.8299 0.5526 0.0764 +vn 0.8300 0.5525 0.0765 +vn 0.8303 0.5520 0.0765 +vn 0.8296 0.5531 0.0764 +vn 0.0001 0.2711 0.9625 +vn 0.0001 0.2711 0.9626 +vn -0.9406 0.1096 0.3213 +vn -0.0000 -0.3761 0.9266 +vn 0.8648 -0.4791 0.1504 +vn 0.8644 -0.4799 0.1501 +vn 0.8651 -0.4783 0.1507 +vn 0.8641 -0.4805 0.1499 +vn -0.0000 -0.8172 0.5763 +vn -0.0000 -0.8176 0.5757 +vn -0.0000 -0.8170 0.5767 +vn -0.0000 -0.8178 0.5754 +vn -0.3691 -0.9288 0.0340 +vn -0.1641 -0.9861 0.0248 +vn -0.1637 -0.9862 0.0247 +vn -0.1652 -0.9861 0.0152 +vn -0.1634 -0.9862 0.0247 +vn 0.0398 -0.9992 0.0081 +vn 0.0399 -0.9992 0.0080 +vn 0.0384 -0.9993 -0.0035 +vn 0.0398 -0.9992 0.0080 +vn -0.0000 -0.8939 -0.4482 +vn 
-0.0000 -0.8939 -0.4483 +vn -0.4287 -0.8358 -0.3429 +vn -0.4286 -0.8359 -0.3429 +vn 0.8300 -0.4219 -0.3649 +vn 0.8300 -0.4219 -0.3648 +vn -0.0001 0.0587 -0.9983 +vn -0.9222 0.3773 -0.0850 +vn -0.9219 0.3779 -0.0850 +vn -0.9219 0.3781 -0.0849 +vn -0.9223 0.3771 -0.0849 +vn -0.7109 0.0258 -0.7028 +vn -0.7108 0.0259 -0.7029 +vn -0.7110 0.0258 -0.7027 +vn -0.7105 0.0259 -0.7032 +vn -0.5032 0.2205 -0.8356 +vn -0.5025 0.2205 -0.8360 +vn -0.5020 0.2206 -0.8363 +vn -0.5037 0.2204 -0.8353 +vn -0.2959 0.4564 -0.8391 +vn -0.2947 0.4565 -0.8395 +vn -0.2939 0.4566 -0.8397 +vn -0.2966 0.4563 -0.8389 +vn -0.0983 0.6854 -0.7215 +vn -0.0971 0.6854 -0.7216 +vn -0.0965 0.6854 -0.7217 +vn -0.0990 0.6853 -0.7215 +vn 0.0859 0.8613 -0.5008 +vn 0.0861 0.8613 -0.5008 +vn 0.0864 0.8613 -0.5007 +vn 0.3086 0.6728 -0.6724 +vn 0.3090 0.6727 -0.6723 +vn 0.3084 0.6728 -0.6724 +vn 0.3093 0.6727 -0.6721 +vn 0.5196 0.4376 -0.7338 +vn 0.5186 0.4380 -0.7343 +vn 0.5180 0.4383 -0.7346 +vn 0.5202 0.4375 -0.7335 +vn 0.7150 0.2039 -0.6687 +vn 0.7149 0.2040 -0.6688 +vn 0.7147 0.2040 -0.6690 +vn 0.7152 0.2038 -0.6685 +vn 0.8809 0.0221 -0.4728 +vn 0.8806 0.0222 -0.4734 +vn 0.8803 0.0223 -0.4739 +vn 0.8812 0.0220 -0.4723 +vn 0.9590 0.2695 0.0883 +vn 0.9588 0.2701 0.0883 +vn 0.9586 0.2706 0.0883 +vn 0.9591 0.2688 0.0883 +vn 0.0001 -0.0589 0.9983 +vn -0.9474 -0.2068 0.2444 +vn 0.7245 -0.6871 0.0546 +vn 0.7251 -0.6865 0.0550 +vn 0.7241 -0.6875 0.0544 +vn 0.7255 -0.6860 0.0552 +vn -0.0000 -0.9287 0.3709 +vn -0.0000 -0.9286 0.3710 +vn -0.1651 -0.9862 0.0152 +vn -0.0000 -0.9694 -0.2454 +vn -0.0014 -0.9697 -0.2444 +vn -0.2085 -0.9315 -0.2980 +vn 0.6465 -0.6678 -0.3690 +vn 0.6465 -0.6677 -0.3690 +vn -0.9940 0.0599 -0.0916 +vn -0.9941 0.0586 -0.0916 +vn -0.9941 0.0577 -0.0916 +vn -0.9939 0.0608 -0.0916 +vn -0.8874 0.0153 -0.4608 +vn -0.8870 0.0154 -0.4615 +vn -0.8876 0.0152 -0.4603 +vn -0.8867 0.0155 -0.4621 +vn -0.7123 0.1720 -0.6805 +vn -0.7120 0.1721 -0.6808 +vn -0.7124 0.1719 -0.6804 +vn -0.7120 0.1721 -0.6807 +vn 
-0.5089 0.4019 -0.7612 +vn -0.5087 0.4019 -0.7614 +vn -0.5090 0.4019 -0.7612 +vn -0.3065 0.6459 -0.6992 +vn -0.3073 0.6458 -0.6990 +vn -0.3078 0.6457 -0.6988 +vn -0.3060 0.6461 -0.6993 +vn -0.1158 0.8510 -0.5123 +vn -0.1153 0.8510 -0.5123 +vn -0.1150 0.8510 -0.5125 +vn -0.1160 0.8509 -0.5124 +vn 0.0630 0.9694 -0.2371 +vn 0.0637 0.9694 -0.2370 +vn 0.0626 0.9694 -0.2372 +vn 0.0642 0.9694 -0.2369 +vn 0.2908 0.8350 -0.4671 +vn 0.2909 0.8350 -0.4671 +vn 0.5059 0.6199 -0.5998 +vn 0.5056 0.6200 -0.6000 +vn 0.5054 0.6201 -0.6000 +vn 0.7101 0.3711 -0.5984 +vn 0.7093 0.3716 -0.5990 +vn 0.7088 0.3719 -0.5994 +vn 0.7105 0.3709 -0.5980 +vn 0.8789 0.1480 -0.4534 +vn 0.8792 0.1478 -0.4529 +vn 0.8794 0.1477 -0.4526 +vn 0.8787 0.1481 -0.4537 +vn 0.9833 0.0109 -0.1814 +vn 0.9836 0.0107 -0.1803 +vn 0.9837 0.0106 -0.1796 +vn 0.9832 0.0110 -0.1820 +vn 0.9941 -0.0579 0.0916 +vn 0.9941 -0.0578 0.0916 +vn 0.0001 -0.3760 0.9266 +vn 0.0001 -0.3761 0.9266 +vn -0.8640 -0.4807 0.1498 +vn -0.8639 -0.4809 0.1497 +vn -0.8641 -0.4804 0.1499 +vn -0.0000 -0.8179 0.5754 +vn 0.5603 -0.8277 -0.0318 +vn 0.5604 -0.8276 -0.0318 +vn -0.0000 -0.9862 0.1656 +vn -0.0000 -0.9861 0.1659 +vn -0.0000 -0.9861 0.1660 +vn -0.0000 -0.9993 -0.0386 +vn -0.0029 -0.9699 -0.2434 +vn -0.0001 -0.9694 -0.2455 +vn 0.4286 -0.8359 -0.3429 +vn 0.4287 -0.8358 -0.3429 +vn -0.9586 -0.2707 -0.0883 +vn -0.9588 -0.2701 -0.0883 +vn -0.9589 -0.2696 -0.0883 +vn -0.9584 -0.2714 -0.0883 +vn -0.9889 0.0024 -0.1488 +vn -0.9888 0.0024 -0.1490 +vn -0.8882 0.1029 -0.4479 +vn -0.8884 0.1027 -0.4473 +vn -0.8881 0.1030 -0.4480 +vn -0.8885 0.1026 -0.4472 +vn -0.7175 0.3135 -0.6220 +vn -0.7183 0.3130 -0.6214 +vn -0.7171 0.3138 -0.6224 +vn -0.7188 0.3127 -0.6210 +vn -0.5205 0.5684 -0.6372 +vn -0.5210 0.5682 -0.6370 +vn -0.5214 0.5681 -0.6368 +vn -0.5202 0.5685 -0.6374 +vn -0.3245 0.8017 -0.5020 +vn -0.3251 0.8015 -0.5019 +vn -0.3255 0.8014 -0.5018 +vn -0.3241 0.8018 -0.5020 +vn -0.1397 0.9576 -0.2520 +vn -0.1404 0.9575 -0.2521 +vn -0.1394 0.9576 
-0.2520 +vn -0.1407 0.9574 -0.2522 +vn 0.0391 0.9977 0.0556 +vn 0.0388 0.9977 0.0555 +vn 0.0385 0.9977 0.0555 +vn 0.2682 0.9399 -0.2114 +vn 0.2685 0.9398 -0.2114 +vn 0.2681 0.9399 -0.2115 +vn 0.4892 0.7694 -0.4108 +vn 0.4897 0.7692 -0.4105 +vn 0.4889 0.7695 -0.4109 +vn 0.4901 0.7690 -0.4104 +vn 0.7003 0.5249 -0.4838 +vn 0.7004 0.5248 -0.4838 +vn 0.8745 0.2698 -0.4030 +vn 0.8745 0.2698 -0.4031 +vn 0.9827 0.0718 -0.1707 +vn 0.9825 0.0721 -0.1717 +vn 0.9824 0.0724 -0.1724 +vn 0.9828 0.0716 -0.1701 +vn 0.9889 -0.0025 0.1488 +vn 0.9889 -0.0025 0.1489 +vn 0.9891 -0.0163 0.1465 +vn 0.9220 -0.3777 0.0849 +vn 0.9218 -0.3782 0.0849 +vn 0.9217 -0.3786 0.0849 +vn 0.9222 -0.3773 0.0849 +vn 0.0001 -0.6359 0.7718 +vn -0.0000 -0.6366 0.7712 +vn 0.0001 -0.6354 0.7721 +vn -0.0000 -0.6371 0.7708 +vn -0.7253 -0.6862 0.0551 +vn -0.7255 -0.6860 0.0552 +vn -0.0000 -0.9287 0.3710 +vn 0.3828 -0.9173 -0.1097 +vn 0.3826 -0.9174 -0.1098 +vn 0.1960 -0.9640 -0.1797 +vn 0.1962 -0.9639 -0.1797 +vn 0.1959 -0.9640 -0.1798 +vn 0.0014 -0.9697 -0.2444 +vn 0.2085 -0.9315 -0.2980 +vn 0.2086 -0.9315 -0.2980 +vn -0.8300 -0.5525 -0.0765 +vn -0.8299 -0.5526 -0.0764 +vn -0.8297 -0.5529 -0.0764 +vn -0.8303 -0.5521 -0.0765 +vn -0.9838 -0.0107 0.1792 +vn -0.9837 -0.0107 0.1796 +vn -0.9890 0.0165 -0.1472 +vn -0.9888 0.0169 -0.1482 +vn -0.9890 0.0164 -0.1468 +vn -0.9887 0.0171 -0.1487 +vn -0.8915 0.1872 -0.4124 +vn -0.8917 0.1871 -0.4122 +vn -0.8915 0.1872 -0.4125 +vn -0.7268 0.4429 -0.5250 +vn -0.7259 0.4436 -0.5256 +vn -0.7273 0.4424 -0.5247 +vn -0.7255 0.4441 -0.5258 +vn -0.5365 0.7049 -0.4639 +vn -0.5365 0.7051 -0.4637 +vn -0.5368 0.7049 -0.4636 +vn -0.3468 0.9022 -0.2564 +vn -0.3461 0.9025 -0.2565 +vn -0.3458 0.9025 -0.2565 +vn -0.3471 0.9021 -0.2564 +vn -0.1659 0.9855 0.0367 +vn -0.1660 0.9854 0.0367 +vn 0.0136 0.9398 0.3413 +vn 0.0137 0.9398 0.3413 +vn 0.0133 0.9399 0.3413 +vn 0.2420 0.9676 0.0719 +vn 0.2408 0.9679 0.0716 +vn 0.2428 0.9674 0.0720 +vn 0.2400 0.9681 0.0715 +vn 0.4691 0.8656 -0.1751 +vn 
0.4687 0.8658 -0.1753 +vn 0.4692 0.8656 -0.1751 +vn 0.4686 0.8659 -0.1753 +vn 0.6844 0.6528 -0.3246 +vn 0.6855 0.6519 -0.3241 +vn 0.6860 0.6515 -0.3239 +vn 0.6839 0.6532 -0.3248 +vn 0.8669 0.3819 -0.3204 +vn 0.8671 0.3816 -0.3201 +vn 0.8673 0.3814 -0.3199 +vn 0.8667 0.3821 -0.3206 +vn 0.9805 0.1311 -0.1466 +vn 0.9806 0.1309 -0.1462 +vn 0.9807 0.1306 -0.1457 +vn 0.9804 0.1313 -0.1471 +vn 0.9891 -0.0163 0.1464 +vn 0.9891 -0.0162 0.1463 +vn 0.8873 -0.0154 0.4608 +vn 0.8873 -0.0154 0.4609 +vn 0.8871 -0.0155 0.4613 +vn 0.8874 -0.0154 0.4608 +vn 0.7687 -0.6357 0.0708 +vn 0.7684 -0.6361 0.0707 +vn 0.7681 -0.6364 0.0707 +vn 0.7688 -0.6355 0.0708 +vn -0.0000 -0.8176 0.5758 +vn -0.0000 -0.8169 0.5767 +vn -0.5604 -0.8276 -0.0318 +vn -0.5603 -0.8277 -0.0318 +vn -0.6486 -0.7588 -0.0597 +vn -0.6490 -0.7585 -0.0598 +vn -0.8797 -0.0221 0.4750 +vn -0.8798 -0.0221 0.4748 +vn -0.8796 -0.0222 0.4751 +vn -0.8800 -0.0222 0.4745 +vn -0.9827 -0.0717 0.1707 +vn -0.9824 -0.0723 0.1723 +vn -0.9830 -0.0713 0.1695 +vn -0.9822 -0.0727 0.1735 +vn -0.9893 0.0309 -0.1427 +vn -0.9893 0.0307 -0.1424 +vn -0.9892 0.0311 -0.1430 +vn -0.9894 0.0304 -0.1420 +vn -0.8969 0.2643 -0.3545 +vn -0.8969 0.2644 -0.3545 +vn -0.8967 0.2648 -0.3548 +vn -0.8970 0.2642 -0.3545 +vn -0.7376 0.5509 -0.3904 +vn -0.7384 0.5501 -0.3901 +vn -0.7372 0.5514 -0.3905 +vn -0.7388 0.5496 -0.3900 +vn -0.5551 0.7940 -0.2478 +vn -0.5553 0.7939 -0.2478 +vn -0.5557 0.7936 -0.2477 +vn -0.5547 0.7943 -0.2479 +vn -0.3711 0.9284 0.0154 +vn -0.3705 0.9287 0.0155 +vn -0.3716 0.9283 0.0153 +vn -0.3701 0.9289 0.0156 +vn -0.1904 0.9284 0.3191 +vn -0.1908 0.9284 0.3190 +vn -0.1908 0.9283 0.3190 +vn -0.0088 0.8082 0.5888 +vn -0.0086 0.8082 0.5888 +vn 0.2159 0.9118 0.3492 +vn 0.2169 0.9116 0.3493 +vn 0.2154 0.9120 0.3491 +vn 0.2175 0.9114 0.3493 +vn 0.4461 0.8908 0.0859 +vn 0.4475 0.8901 0.0861 +vn 0.4453 0.8913 0.0857 +vn 0.4483 0.8897 0.0863 +vn 0.6667 0.7348 -0.1249 +vn 0.6672 0.7344 -0.1247 +vn 0.6663 0.7351 -0.1250 +vn 0.6675 0.7341 -0.1245 
+vn 0.8566 0.4739 -0.2039 +vn 0.8564 0.4742 -0.2041 +vn 0.8563 0.4746 -0.2040 +vn 0.8567 0.4738 -0.2038 +vn 0.9768 0.1856 -0.1066 +vn 0.9768 0.1857 -0.1067 +vn 0.9895 -0.0303 0.1415 +vn 0.9895 -0.0301 0.1412 +vn 0.9896 -0.0298 0.1409 +vn 0.9894 -0.0306 0.1418 +vn 0.8890 -0.1023 0.4464 +vn 0.8889 -0.1024 0.4466 +vn 0.8887 -0.1025 0.4469 +vn 0.8892 -0.1022 0.4460 +vn 0.7107 -0.0258 0.7030 +vn 0.7110 -0.0257 0.7027 +vn 0.7105 -0.0258 0.7032 +vn 0.5740 -0.8171 0.0529 +vn 0.5740 -0.8172 0.0529 +vn 0.5743 -0.8169 0.0529 +vn -0.3821 -0.9176 -0.1100 +vn -0.3823 -0.9175 -0.1099 +vn -0.3818 -0.9177 -0.1101 +vn -0.3826 -0.9174 -0.1098 +vn -0.1963 -0.9639 -0.1797 +vn -0.1960 -0.9640 -0.1798 +vn -0.1965 -0.9639 -0.1796 +vn -0.4467 -0.8937 -0.0411 +vn -0.4465 -0.8939 -0.0411 +vn -0.7184 -0.0305 0.6949 +vn -0.7182 -0.0305 0.6951 +vn -0.7185 -0.0305 0.6949 +vn -0.8784 -0.1482 0.4543 +vn -0.8789 -0.1479 0.4535 +vn -0.8782 -0.1484 0.4547 +vn -0.8791 -0.1477 0.4531 +vn -0.9801 -0.1320 0.1483 +vn -0.9803 -0.1314 0.1474 +vn -0.9800 -0.1323 0.1488 +vn -0.9804 -0.1312 0.1470 +vn -0.9903 0.0427 -0.1322 +vn -0.9904 0.0419 -0.1315 +vn -0.9902 0.0432 -0.1328 +vn -0.9905 0.0415 -0.1311 +vn -0.9039 0.3285 -0.2739 +vn -0.9035 0.3293 -0.2743 +vn -0.9042 0.3280 -0.2737 +vn -0.9032 0.3298 -0.2745 +vn -0.7538 0.6186 -0.2216 +vn -0.5763 0.8172 -0.0085 +vn -0.5764 0.8171 -0.0086 +vn -0.5758 0.8175 -0.0084 +vn -0.5769 0.8168 -0.0087 +vn -0.3939 0.8749 0.2818 +vn -0.3943 0.8748 0.2816 +vn -0.3934 0.8751 0.2819 +vn -0.3945 0.8747 0.2815 +vn -0.2120 0.7984 0.5636 +vn -0.2116 0.7984 0.5637 +vn -0.2121 0.7984 0.5636 +vn -0.2114 0.7984 0.5638 +vn -0.0253 0.6261 0.7794 +vn -0.0246 0.6261 0.7794 +vn -0.0244 0.6260 0.7794 +vn -0.0256 0.6260 0.7794 +vn 0.1955 0.7840 0.5891 +vn 0.1961 0.7839 0.5891 +vn 0.1965 0.7838 0.5891 +vn 0.1951 0.7840 0.5892 +vn 0.4233 0.8392 0.3413 +vn 0.4246 0.8387 0.3411 +vn 0.4254 0.8383 0.3409 +vn 0.4225 0.8396 0.3414 +vn 0.6479 0.7556 0.0969 +vn 0.6472 0.7561 0.0968 +vn 0.6483 0.7551 
0.0970 +vn 0.6467 0.7566 0.0967 +vn 0.8442 0.5329 -0.0584 +vn 0.8439 0.5333 -0.0585 +vn 0.8436 0.5337 -0.0587 +vn 0.8444 0.5325 -0.0584 +vn 0.9718 0.2304 -0.0500 +vn 0.9717 0.2310 -0.0502 +vn 0.9902 -0.0436 0.1329 +vn 0.9902 -0.0433 0.1327 +vn 0.9902 -0.0431 0.1326 +vn 0.8918 -0.1870 0.4120 +vn 0.8920 -0.1868 0.4116 +vn 0.8922 -0.1865 0.4113 +vn 0.8916 -0.1872 0.4123 +vn 0.7124 -0.1719 0.6804 +vn 0.7123 -0.1719 0.6804 +vn 0.4994 -0.0330 0.8657 +vn 0.4997 -0.0330 0.8656 +vn 0.4990 -0.0330 0.8660 +vn 0.5001 -0.0330 0.8653 +vn 0.3688 -0.9289 0.0340 +vn 0.3689 -0.9289 0.0340 +vn 0.3691 -0.9288 0.0340 +vn -0.2444 -0.9694 -0.0225 +vn -0.2442 -0.9695 -0.0225 +vn -0.2439 -0.9695 -0.0225 +vn -0.2447 -0.9693 -0.0225 +vn -0.5295 -0.0359 0.8476 +vn -0.5293 -0.0358 0.8477 +vn -0.5290 -0.0359 0.8479 +vn -0.7160 -0.2036 0.6677 +vn -0.7158 -0.2037 0.6679 +vn -0.7161 -0.2036 0.6677 +vn -0.7156 -0.2038 0.6681 +vn -0.8745 -0.2699 0.4030 +vn -0.8745 -0.2700 0.4029 +vn -0.8747 -0.2697 0.4027 +vn -0.9768 -0.1857 0.1067 +vn -0.9768 -0.1856 0.1066 +vn -0.9917 0.0513 -0.1181 +vn -0.9916 0.0516 -0.1183 +vn -0.9122 0.3710 -0.1736 +vn -0.9124 0.3706 -0.1735 +vn -0.9121 0.3713 -0.1736 +vn -0.9126 0.3703 -0.1735 +vn -0.7701 0.6370 -0.0351 +vn -0.7694 0.6378 -0.0351 +vn -0.7691 0.6382 -0.0351 +vn -0.7705 0.6365 -0.0351 +vn -0.5971 0.7698 0.2256 +vn -0.5962 0.7703 0.2260 +vn -0.5976 0.7695 0.2254 +vn -0.5957 0.7707 0.2262 +vn -0.4153 0.7521 0.5118 +vn -0.4151 0.7521 0.5119 +vn -0.4154 0.7521 0.5117 +vn -0.4150 0.7522 0.5119 +vn -0.2285 0.6184 0.7519 +vn -0.2284 0.6185 0.7519 +vn -0.2282 0.6185 0.7519 +vn -0.0369 0.4149 0.9091 +vn -0.0381 0.4147 0.9091 +vn -0.0389 0.4147 0.9091 +vn -0.0361 0.4149 0.9091 +vn 0.1787 0.6072 0.7741 +vn 0.1787 0.6073 0.7741 +vn 0.1790 0.6072 0.7741 +vn 0.4029 0.7221 0.5623 +vn 0.4031 0.7220 0.5623 +vn 0.4029 0.7222 0.5623 +vn 0.4032 0.7220 0.5623 +vn 0.6282 0.7122 0.3132 +vn 0.6280 0.7124 0.3133 +vn 0.6279 0.7124 0.3133 +vn 0.6284 0.7121 0.3132 +vn 0.8301 0.5482 0.1021 
+vn 0.8298 0.5486 0.1021 +vn 0.9652 0.2606 0.0204 +vn 0.9654 0.2601 0.0204 +vn 0.9654 0.2600 0.0205 +vn 0.9653 0.2604 0.0205 +vn 0.9913 -0.0541 0.1197 +vn 0.9914 -0.0539 0.1195 +vn 0.9914 -0.0538 0.1195 +vn 0.8967 -0.2647 0.3548 +vn 0.8966 -0.2648 0.3548 +vn 0.7174 -0.3137 0.6221 +vn 0.7177 -0.3135 0.6218 +vn 0.7177 -0.3134 0.6219 +vn 0.7173 -0.3136 0.6222 +vn 0.5017 -0.2206 0.8364 +vn 0.5020 -0.2206 0.8363 +vn 0.2843 -0.0374 0.9580 +vn 0.2846 -0.0375 0.9579 +vn 0.2842 -0.0374 0.9580 +vn 0.1651 -0.9862 0.0152 +vn 0.1649 -0.9862 0.0152 +vn -0.0385 -0.9993 -0.0035 +vn -0.0384 -0.9993 -0.0035 +vn -0.3326 -0.0390 0.9423 +vn -0.3320 -0.0390 0.9425 +vn -0.3328 -0.0389 0.9422 +vn -0.3317 -0.0391 0.9426 +vn -0.5258 -0.2403 0.8159 +vn -0.5260 -0.2403 0.8159 +vn -0.7098 -0.3714 0.5985 +vn -0.7095 -0.3716 0.5988 +vn -0.8671 -0.3815 0.3203 +vn -0.8669 -0.3818 0.3206 +vn -0.8673 -0.3813 0.3200 +vn -0.8668 -0.3820 0.3206 +vn -0.9719 -0.2300 0.0495 +vn -0.9722 -0.2291 0.0491 +vn -0.9718 -0.2306 0.0498 +vn -0.9723 -0.2285 0.0488 +vn -0.9930 0.0586 -0.1029 +vn -0.9928 0.0603 -0.1031 +vn -0.9930 0.0577 -0.1027 +vn -0.9928 0.0613 -0.1032 +vn -0.9227 0.3806 -0.0621 +vn -0.9230 0.3798 -0.0620 +vn -0.9233 0.3791 -0.0620 +vn -0.9224 0.3813 -0.0621 +vn -0.7858 0.6006 0.1477 +vn -0.7862 0.6001 0.1475 +vn -0.7866 0.5996 0.1474 +vn -0.7854 0.6010 0.1478 +vn -0.6139 0.6627 0.4289 +vn -0.6140 0.6626 0.4289 +vn -0.6135 0.6628 0.4292 +vn -0.6145 0.6623 0.4286 +vn -0.4304 0.5827 0.6894 +vn -0.4303 0.5827 0.6894 +vn -0.4301 0.5828 0.6895 +vn -0.2395 0.4097 0.8802 +vn -0.2400 0.4096 0.8801 +vn -0.2401 0.4095 0.8801 +vn -0.2395 0.4096 0.8803 +vn -0.0446 0.1897 0.9808 +vn -0.0451 0.1896 0.9808 +vn 0.1682 0.4023 0.8999 +vn 0.1692 0.4023 0.8997 +vn 0.1698 0.4023 0.8997 +vn 0.1676 0.4023 0.9000 +vn 0.3880 0.5591 0.7327 +vn 0.3884 0.5590 0.7325 +vn 0.3886 0.5591 0.7324 +vn 0.3877 0.5592 0.7328 +vn 0.6130 0.6115 0.5004 +vn 0.6122 0.6119 0.5007 +vn 0.6118 0.6122 0.5009 +vn 0.6133 0.6113 0.5002 +vn 0.8157 
0.5171 0.2593 +vn 0.8156 0.5172 0.2594 +vn 0.8154 0.5176 0.2593 +vn 0.9584 0.2676 0.0989 +vn 0.9584 0.2679 0.0989 +vn 0.9585 0.2674 0.0989 +vn 0.9583 0.2680 0.0989 +vn 0.9928 -0.0609 0.1032 +vn 0.9928 -0.0606 0.1032 +vn 0.9032 -0.3298 0.2748 +vn 0.9035 -0.3291 0.2745 +vn 0.9038 -0.3287 0.2742 +vn 0.9029 -0.3304 0.2750 +vn 0.7265 -0.4431 0.5252 +vn 0.7262 -0.4434 0.5254 +vn 0.7259 -0.4437 0.5256 +vn 0.7268 -0.4428 0.5250 +vn 0.5087 -0.4021 0.7613 +vn 0.5090 -0.4020 0.7611 +vn 0.2880 -0.2504 0.9243 +vn 0.2881 -0.2504 0.9243 +vn 0.2882 -0.2504 0.9242 +vn 0.2876 -0.2504 0.9244 +vn 0.0744 -0.0397 0.9964 +vn 0.0741 -0.0397 0.9965 +vn 0.0748 -0.0398 0.9964 +vn -0.1309 -0.0403 0.9906 +vn -0.1313 -0.0402 0.9905 +vn -0.1304 -0.0403 0.9906 +vn -0.1317 -0.0402 0.9905 +vn -0.3287 -0.2609 0.9077 +vn -0.3294 -0.2608 0.9075 +vn -0.3283 -0.2610 0.9078 +vn -0.3298 -0.2607 0.9073 +vn -0.5187 -0.4381 0.7342 +vn -0.5185 -0.4383 0.7342 +vn -0.5190 -0.4380 0.7340 +vn -0.6992 -0.5255 0.4847 +vn -0.6993 -0.5254 0.4847 +vn -0.6995 -0.5253 0.4846 +vn -0.8566 -0.4741 0.2037 +vn -0.8568 -0.4737 0.2035 +vn -0.8563 -0.4745 0.2039 +vn -0.8571 -0.4733 0.2034 +vn -0.9659 -0.2581 -0.0210 +vn -0.9655 -0.2596 -0.0207 +vn -0.9661 -0.2572 -0.0211 +vn -0.9653 -0.2605 -0.0206 +vn -0.9944 0.0625 -0.0848 +vn -0.9945 0.0616 -0.0847 +vn -0.9945 0.0612 -0.0847 +vn -0.9944 0.0630 -0.0848 +vn -0.9328 0.3573 0.0467 +vn -0.9327 0.3576 0.0468 +vn -0.9326 0.3579 0.0469 +vn -0.9329 0.3572 0.0467 +vn -0.8002 0.5161 0.3056 +vn -0.7998 0.5165 0.3059 +vn -0.7995 0.5169 0.3061 +vn -0.8004 0.5158 0.3054 +vn -0.6288 0.5128 0.5845 +vn -0.6297 0.5123 0.5840 +vn -0.6302 0.5120 0.5837 +vn -0.6284 0.5130 0.5848 +vn -0.4411 0.3858 0.8103 +vn -0.4412 0.3858 0.8103 +vn -0.4414 0.3858 0.8102 +vn -0.4409 0.3859 0.8104 +vn -0.2464 0.1873 0.9509 +vn -0.2465 0.1874 0.9508 +vn -0.2460 0.1874 0.9510 +vn -0.2466 0.1874 0.9508 +vn -0.0456 -0.0403 0.9981 +vn -0.0459 -0.0402 0.9981 +vn 0.1635 0.1840 0.9692 +vn 0.1630 0.1839 0.9693 +vn 0.1628 
0.1839 0.9694 +vn 0.1637 0.1840 0.9692 +vn 0.3786 0.3703 0.8483 +vn 0.3785 0.3703 0.8483 +vn 0.3784 0.3703 0.8483 +vn 0.6004 0.4736 0.6444 +vn 0.5998 0.4738 0.6448 +vn 0.5994 0.4740 0.6450 +vn 0.6008 0.4734 0.6441 +vn 0.8033 0.4449 0.3959 +vn 0.8034 0.4448 0.3959 +vn 0.9521 0.2508 0.1750 +vn 0.9519 0.2515 0.1752 +vn 0.9523 0.2502 0.1749 +vn 0.9517 0.2520 0.1753 +vn 0.9945 -0.0616 0.0848 +vn 0.9944 -0.0623 0.0848 +vn 0.9945 -0.0610 0.0849 +vn 0.9944 -0.0627 0.0849 +vn 0.9122 -0.3710 0.1737 +vn 0.9121 -0.3713 0.1738 +vn 0.9119 -0.3717 0.1738 +vn 0.9123 -0.3708 0.1737 +vn 0.7393 -0.5492 0.3897 +vn 0.7388 -0.5497 0.3899 +vn 0.7386 -0.5499 0.3900 +vn 0.7396 -0.5488 0.3897 +vn 0.5191 -0.5690 0.6378 +vn 0.5197 -0.5687 0.6375 +vn 0.5187 -0.5691 0.6381 +vn 0.5201 -0.5685 0.6374 +vn 0.2949 -0.4566 0.8394 +vn 0.2952 -0.4566 0.8393 +vn 0.2946 -0.4566 0.8395 +vn 0.2955 -0.4565 0.8392 +vn 0.0776 -0.2659 0.9609 +vn 0.0773 -0.2659 0.9609 +vn 0.0780 -0.2659 0.9608 +vn 0.0770 -0.2658 0.9609 +vn -0.1292 -0.2690 0.9544 +vn -0.1281 -0.2691 0.9546 +vn -0.1300 -0.2689 0.9544 +vn -0.1274 -0.2692 0.9546 +vn -0.3208 -0.4760 0.8189 +vn -0.3213 -0.4759 0.8187 +vn -0.3205 -0.4760 0.8190 +vn -0.5073 -0.6192 0.5993 +vn -0.5074 -0.6192 0.5993 +vn -0.5073 -0.6192 0.5994 +vn -0.5078 -0.6191 0.5990 +vn -0.6851 -0.6523 0.3242 +vn -0.6849 -0.6525 0.3243 +vn -0.6854 -0.6521 0.3240 +vn -0.6845 -0.6528 0.3244 +vn -0.8443 -0.5327 0.0586 +vn -0.8444 -0.5325 0.0586 +vn -0.9584 -0.2678 -0.0987 +vn -0.9586 -0.2671 -0.0987 +vn -0.9587 -0.2667 -0.0988 +vn -0.9583 -0.2682 -0.0986 +vn -0.9960 0.0583 -0.0671 +vn -0.9960 0.0593 -0.0668 +vn -0.9960 0.0599 -0.0668 +vn -0.9961 0.0577 -0.0674 +vn -0.9411 0.3074 0.1409 +vn -0.9410 0.3077 0.1411 +vn -0.9411 0.3073 0.1408 +vn -0.8106 0.3999 0.4277 +vn -0.8105 0.4000 0.4278 +vn -0.6394 0.3391 0.6900 +vn -0.6390 0.3393 0.6903 +vn -0.6387 0.3395 0.6905 +vn -0.6397 0.3391 0.6898 +vn -0.4471 0.1766 0.8769 +vn -0.4466 0.1767 0.8771 +vn -0.4465 0.1767 0.8771 +vn -0.4472 0.1766 
0.8768 +vn -0.2473 -0.0397 0.9681 +vn -0.2475 -0.0397 0.9681 +vn -0.2476 -0.0397 0.9681 +vn -0.0425 -0.2691 0.9622 +vn -0.0423 -0.2691 0.9622 +vn -0.0429 -0.2691 0.9622 +vn -0.0419 -0.2691 0.9622 +vn 0.1610 -0.0391 0.9862 +vn 0.1611 -0.0391 0.9862 +vn 0.3735 0.1694 0.9120 +vn 0.3734 0.1693 0.9121 +vn 0.3738 0.1693 0.9119 +vn 0.3730 0.1693 0.9122 +vn 0.5902 0.3142 0.7436 +vn 0.5900 0.3143 0.7437 +vn 0.5898 0.3143 0.7439 +vn 0.5904 0.3142 0.7434 +vn 0.7940 0.3448 0.5007 +vn 0.7941 0.3447 0.5006 +vn 0.9459 0.2165 0.2416 +vn 0.9462 0.2157 0.2412 +vn 0.9457 0.2170 0.2420 +vn 0.9464 0.2151 0.2410 +vn 0.9961 -0.0575 0.0675 +vn 0.9961 -0.0573 0.0676 +vn 0.9961 -0.0569 0.0676 +vn 0.9223 -0.3815 0.0617 +vn 0.9222 -0.3818 0.0617 +vn 0.7543 -0.6180 0.2215 +vn 0.7542 -0.6182 0.2215 +vn 0.5345 -0.7062 0.4642 +vn 0.5348 -0.7061 0.4641 +vn 0.5345 -0.7063 0.4643 +vn 0.5350 -0.7060 0.4640 +vn 0.3087 -0.6454 0.6986 +vn 0.3074 -0.6457 0.6990 +vn 0.3095 -0.6453 0.6984 +vn 0.3067 -0.6458 0.6992 +vn 0.0844 -0.4845 0.8707 +vn 0.0854 -0.4845 0.8706 +vn 0.0838 -0.4844 0.8708 +vn 0.0861 -0.4845 0.8706 +vn -0.1183 -0.4907 0.8633 +vn -0.1204 -0.4904 0.8631 +vn -0.1170 -0.4909 0.8633 +vn -0.1217 -0.4903 0.8630 +vn -0.3085 -0.6727 0.6725 +vn -0.3084 -0.6727 0.6725 +vn -0.4903 -0.7689 0.4103 +vn -0.4906 -0.7688 0.4102 +vn -0.4909 -0.7687 0.4101 +vn -0.4900 -0.7690 0.4105 +vn -0.6674 -0.7342 0.1247 +vn -0.6670 -0.7345 0.1248 +vn -0.6666 -0.7349 0.1250 +vn -0.6678 -0.7339 0.1245 +vn -0.8298 -0.5486 -0.1021 +vn -0.8294 -0.5493 -0.1020 +vn -0.8291 -0.5497 -0.1019 +vn -0.8302 -0.5480 -0.1021 +vn -0.9518 -0.2516 -0.1756 +vn -0.9515 -0.2524 -0.1758 +vn -0.9514 -0.2528 -0.1759 +vn -0.9519 -0.2512 -0.1755 +vn -0.9974 0.0509 -0.0511 +vn -0.9974 0.0501 -0.0517 +vn -0.9974 0.0497 -0.0519 +vn -0.9974 0.0514 -0.0508 +vn -0.9474 0.2384 0.2137 +vn -0.9472 0.2388 0.2141 +vn -0.9470 0.2391 0.2145 +vn -0.9475 0.2381 0.2135 +vn -0.8178 0.2651 0.5109 +vn -0.8174 0.2653 0.5113 +vn -0.8172 0.2655 0.5116 +vn -0.8180 
0.2650 0.5106 +vn -0.6442 0.1553 0.7489 +vn -0.6444 0.1553 0.7487 +vn -0.6447 0.1552 0.7485 +vn -0.6440 0.1554 0.7491 +vn -0.4482 -0.0375 0.8931 +vn -0.4486 -0.0374 0.8929 +vn -0.4480 -0.0375 0.8932 +vn -0.4489 -0.0374 0.8928 +vn -0.2441 -0.2659 0.9326 +vn -0.2443 -0.2658 0.9325 +vn -0.2440 -0.2659 0.9326 +vn -0.2446 -0.2658 0.9325 +vn -0.0336 -0.4906 0.8707 +vn -0.0337 -0.4905 0.8708 +vn -0.0338 -0.4905 0.8708 +vn -0.0332 -0.4906 0.8708 +vn 0.1636 -0.2610 0.9514 +vn 0.1630 -0.2610 0.9515 +vn 0.1642 -0.2610 0.9513 +vn 0.1626 -0.2610 0.9516 +vn 0.3722 -0.0359 0.9275 +vn 0.3714 -0.0359 0.9278 +vn 0.3727 -0.0360 0.9273 +vn 0.3709 -0.0358 0.9280 +vn 0.5851 0.1436 0.7982 +vn 0.5852 0.1436 0.7981 +vn 0.5855 0.1437 0.7979 +vn 0.5849 0.1437 0.7983 +vn 0.7887 0.2276 0.5710 +vn 0.7882 0.2280 0.5716 +vn 0.7890 0.2275 0.5707 +vn 0.7879 0.2283 0.5719 +vn 0.9414 0.1676 0.2926 +vn 0.9413 0.1679 0.2929 +vn 0.9416 0.1673 0.2922 +vn 0.9412 0.1681 0.2931 +vn 0.9974 -0.0494 0.0522 +vn 0.9974 -0.0492 0.0523 +vn 0.9974 -0.0498 0.0520 +vn 0.9974 -0.0489 0.0525 +vn 0.9324 -0.3583 -0.0471 +vn 0.9321 -0.3591 -0.0474 +vn 0.9326 -0.3578 -0.0470 +vn 0.9319 -0.3596 -0.0475 +vn 0.7705 -0.6365 0.0350 +vn 0.7706 -0.6364 0.0350 +vn 0.5542 -0.7946 0.2480 +vn 0.5539 -0.7948 0.2480 +vn 0.5539 -0.7948 0.2481 +vn 0.3266 -0.8010 0.5017 +vn 0.3262 -0.8011 0.5018 +vn 0.3267 -0.8010 0.5017 +vn 0.3261 -0.8012 0.5018 +vn 0.0969 -0.6856 0.7215 +vn 0.0965 -0.6856 0.7215 +vn 0.0964 -0.6856 0.7215 +vn 0.0973 -0.6856 0.7214 +vn -0.1046 -0.6939 0.7124 +vn -0.1040 -0.6940 0.7125 +vn -0.1052 -0.6938 0.7125 +vn -0.1035 -0.6940 0.7125 +vn -0.2894 -0.8353 0.4674 +vn -0.2898 -0.8353 0.4673 +vn -0.2902 -0.8352 0.4672 +vn -0.2889 -0.8354 0.4676 +vn -0.4691 -0.8656 0.1751 +vn -0.4690 -0.8657 0.1751 +vn -0.4686 -0.8659 0.1752 +vn -0.6491 -0.7545 -0.0967 +vn -0.6486 -0.7550 -0.0966 +vn -0.6482 -0.7553 -0.0966 +vn -0.6494 -0.7542 -0.0968 +vn -0.8153 -0.5175 -0.2596 +vn -0.8153 -0.5176 -0.2596 +vn -0.9458 -0.2169 -0.2417 +vn 
-0.9463 -0.2155 -0.2409 +vn -0.9466 -0.2147 -0.2406 +vn -0.9455 -0.2176 -0.2421 +vn -0.9984 0.0390 -0.0399 +vn -0.9984 0.0396 -0.0393 +vn -0.9984 0.0399 -0.0390 +vn -0.9984 0.0386 -0.0403 +vn -0.9514 0.1584 0.2641 +vn -0.9513 0.1585 0.2643 +vn -0.8211 0.1214 0.5577 +vn -0.8213 0.1214 0.5574 +vn -0.6454 -0.0328 0.7632 +vn -0.6442 -0.0330 0.7642 +vn -0.6461 -0.0327 0.7625 +vn -0.6434 -0.0331 0.7648 +vn -0.4455 -0.2503 0.8596 +vn -0.4447 -0.2505 0.8600 +vn -0.4459 -0.2502 0.8594 +vn -0.4442 -0.2506 0.8602 +vn -0.2365 -0.4846 0.8421 +vn -0.2364 -0.4846 0.8422 +vn -0.2368 -0.4845 0.8421 +vn -0.2364 -0.4846 0.8421 +vn -0.0200 -0.6939 0.7198 +vn -0.0199 -0.6939 0.7198 +vn 0.1711 -0.4759 0.8627 +vn 0.1715 -0.4759 0.8626 +vn 0.1707 -0.4760 0.8628 +vn 0.1718 -0.4759 0.8626 +vn 0.3742 -0.2402 0.8957 +vn 0.3748 -0.2402 0.8955 +vn 0.3739 -0.2402 0.8958 +vn 0.3751 -0.2402 0.8953 +vn 0.5844 -0.0304 0.8109 +vn 0.5852 -0.0304 0.8103 +vn 0.5841 -0.0303 0.8111 +vn 0.5856 -0.0305 0.8101 +vn 0.7837 0.1047 0.6123 +vn 0.7847 0.1042 0.6110 +vn 0.7830 0.1050 0.6130 +vn 0.7854 0.1040 0.6102 +vn 0.9385 0.1107 0.3271 +vn 0.9384 0.1108 0.3273 +vn 0.9984 -0.0395 0.0394 +vn 0.9984 -0.0390 0.0399 +vn 0.9984 -0.0398 0.0390 +vn 0.9984 -0.0386 0.0403 +vn 0.9408 -0.3081 -0.1413 +vn 0.9410 -0.3076 -0.1411 +vn 0.9410 -0.3077 -0.1411 +vn 0.7856 -0.6009 -0.1477 +vn 0.7862 -0.6001 -0.1475 +vn 0.7852 -0.6013 -0.1478 +vn 0.7866 -0.5997 -0.1474 +vn 0.5752 -0.8179 0.0082 +vn 0.5755 -0.8178 0.0082 +vn 0.5751 -0.8181 0.0081 +vn 0.3478 -0.9018 0.2565 +vn 0.3479 -0.9018 0.2565 +vn 0.3479 -0.9017 0.2565 +vn 0.3477 -0.9018 0.2565 +vn 0.1153 -0.8509 0.5125 +vn 0.1159 -0.8509 0.5124 +vn 0.1150 -0.8509 0.5125 +vn 0.1161 -0.8509 0.5124 +vn -0.0877 -0.8612 0.5006 +vn -0.0870 -0.8612 0.5007 +vn -0.0866 -0.8612 0.5008 +vn -0.0880 -0.8613 0.5005 +vn -0.2673 -0.9401 0.2115 +vn -0.2667 -0.9403 0.2117 +vn -0.2664 -0.9403 0.2118 +vn -0.2676 -0.9400 0.2115 +vn -0.4460 -0.8909 -0.0860 +vn -0.4455 -0.8911 -0.0859 +vn -0.4453 
-0.8913 -0.0859 +vn -0.4464 -0.8907 -0.0861 +vn -0.6300 -0.7107 -0.3131 +vn -0.6297 -0.7109 -0.3132 +vn -0.8034 -0.4450 -0.3957 +vn -0.8033 -0.4450 -0.3957 +vn -0.8034 -0.4449 -0.3957 +vn -0.9418 -0.1667 -0.2919 +vn -0.9414 -0.1677 -0.2928 +vn -0.9411 -0.1682 -0.2933 +vn -0.9421 -0.1662 -0.2913 +vn -0.9992 0.0263 -0.0306 +vn -0.9992 0.0263 -0.0307 +vn -0.9992 0.0262 -0.0309 +vn -0.9537 0.0725 0.2917 +vn -0.9539 0.0724 0.2913 +vn -0.9537 0.0725 0.2918 +vn -0.9538 0.0723 0.2916 +vn -0.8227 -0.0256 0.5679 +vn -0.8223 -0.0257 0.5685 +vn -0.8228 -0.0255 0.5678 +vn -0.8221 -0.0257 0.5688 +vn -0.6413 -0.2204 0.7349 +vn -0.6419 -0.2202 0.7345 +vn -0.6410 -0.2206 0.7352 +vn -0.6422 -0.2202 0.7343 +vn -0.4373 -0.4568 0.7746 +vn -0.4383 -0.4566 0.7743 +vn -0.4366 -0.4570 0.7749 +vn -0.4390 -0.4564 0.7740 +vn -0.2230 -0.6855 0.6931 +vn -0.2226 -0.6855 0.6932 +vn -0.2233 -0.6854 0.6931 +vn -0.2225 -0.6856 0.6932 +vn -0.0015 -0.8611 0.5084 +vn -0.0016 -0.8611 0.5084 +vn 0.1846 -0.6730 0.7163 +vn 0.1838 -0.6730 0.7164 +vn 0.1850 -0.6730 0.7162 +vn 0.1834 -0.6731 0.7164 +vn 0.3816 -0.4381 0.8139 +vn 0.3808 -0.4382 0.8143 +vn 0.3823 -0.4380 0.8137 +vn 0.3802 -0.4383 0.8144 +vn 0.5882 -0.2036 0.7827 +vn 0.5881 -0.2036 0.7828 +vn 0.7825 -0.0223 0.6222 +vn 0.7826 -0.0222 0.6221 +vn 0.7827 -0.0222 0.6220 +vn 0.7824 -0.0223 0.6224 +vn 0.9371 0.0506 0.3454 +vn 0.9369 0.0507 0.3458 +vn 0.9373 0.0505 0.3449 +vn 0.9367 0.0509 0.3464 +vn 0.9991 -0.0252 0.0328 +vn 0.9992 -0.0259 0.0315 +vn 0.9991 -0.0247 0.0336 +vn 0.9992 -0.0264 0.0308 +vn 0.9477 -0.2378 -0.2129 +vn 0.9475 -0.2383 -0.2134 +vn 0.9479 -0.2374 -0.2125 +vn 0.9473 -0.2386 -0.2138 +vn 0.7996 -0.5167 -0.3061 +vn 0.7994 -0.5169 -0.3062 +vn 0.7998 -0.5164 -0.3059 +vn 0.7991 -0.5172 -0.3064 +vn 0.5966 -0.7701 -0.2257 +vn 0.5962 -0.7704 -0.2259 +vn 0.5959 -0.7706 -0.2261 +vn 0.5969 -0.7699 -0.2256 +vn 0.3716 -0.9283 -0.0155 +vn 0.3714 -0.9283 -0.0155 +vn 0.1402 -0.9575 0.2520 +vn 0.1398 -0.9576 0.2519 +vn 0.1394 -0.9576 0.2519 +vn 
0.1407 -0.9574 0.2521 +vn -0.0649 -0.9694 0.2368 +vn -0.0648 -0.9694 0.2368 +vn -0.2429 -0.9674 -0.0720 +vn -0.2426 -0.9675 -0.0720 +vn -0.2430 -0.9674 -0.0721 +vn -0.2422 -0.9676 -0.0719 +vn -0.4230 -0.8395 -0.3411 +vn -0.4220 -0.8399 -0.3413 +vn -0.4235 -0.8393 -0.3410 +vn -0.4216 -0.8401 -0.3415 +vn -0.6128 -0.6117 -0.5003 +vn -0.6126 -0.6118 -0.5004 +vn -0.6132 -0.6115 -0.5001 +vn -0.6123 -0.6120 -0.5005 +vn -0.7941 -0.3446 -0.5007 +vn -0.7945 -0.3442 -0.5003 +vn -0.7946 -0.3441 -0.5002 +vn -0.7940 -0.3447 -0.5007 +vn -0.9379 -0.1115 -0.3285 +vn -0.9380 -0.1114 -0.3283 +vn -0.9378 -0.1117 -0.3288 +vn -0.9996 0.0119 -0.0271 +vn -0.9995 0.0116 -0.0280 +vn -0.9995 0.0114 -0.0287 +vn -0.9996 0.0120 -0.0265 +vn -0.9545 -0.0153 0.2979 +vn -0.9545 -0.0153 0.2977 +vn -0.9544 -0.0154 0.2983 +vn -0.8215 -0.1717 0.5437 +vn -0.8210 -0.1720 0.5444 +vn -0.8219 -0.1715 0.5432 +vn -0.8207 -0.1721 0.5448 +vn -0.6350 -0.4020 0.6597 +vn -0.6343 -0.4024 0.6601 +vn -0.6355 -0.4018 0.6593 +vn -0.6338 -0.4026 0.6605 +vn -0.4258 -0.6456 0.6339 +vn -0.4262 -0.6456 0.6337 +vn -0.4258 -0.6457 0.6339 +vn -0.4266 -0.6455 0.6335 +vn -0.2042 -0.8507 0.4843 +vn -0.2047 -0.8507 0.4842 +vn -0.2052 -0.8506 0.4841 +vn -0.2039 -0.8508 0.4844 +vn 0.0222 -0.9694 0.2445 +vn 0.0218 -0.9694 0.2444 +vn 0.0216 -0.9694 0.2444 +vn 0.2026 -0.8353 0.5111 +vn 0.2025 -0.8353 0.5111 +vn 0.2029 -0.8353 0.5111 +vn 0.2021 -0.8354 0.5112 +vn 0.3929 -0.6197 0.6794 +vn 0.3941 -0.6194 0.6790 +vn 0.3921 -0.6199 0.6797 +vn 0.3949 -0.6192 0.6787 +vn 0.5946 -0.3709 0.7134 +vn 0.5948 -0.3708 0.7132 +vn 0.5943 -0.3710 0.7136 +vn 0.5951 -0.3707 0.7130 +vn 0.7844 -0.1485 0.6023 +vn 0.7845 -0.1484 0.6022 +vn 0.7845 -0.1484 0.6021 +vn 0.9367 -0.0107 0.3500 +vn 0.9364 -0.0107 0.3506 +vn 0.9362 -0.0108 0.3513 +vn 0.9369 -0.0106 0.3495 +vn 0.9995 -0.0116 0.0288 +vn 0.9995 -0.0116 0.0294 +vn 0.9996 -0.0046 0.0282 +vn 0.9995 -0.0114 0.0294 +vn 0.9517 -0.1579 -0.2634 +vn 0.9521 -0.1574 -0.2623 +vn 0.9514 -0.1583 -0.2640 +vn 0.9523 
-0.1570 -0.2617 +vn 0.8105 -0.4001 -0.4278 +vn 0.8106 -0.4000 -0.4278 +vn 0.6143 -0.6625 -0.4287 +vn 0.6145 -0.6623 -0.4285 +vn 0.6148 -0.6622 -0.4283 +vn 0.6141 -0.6626 -0.4289 +vn 0.3942 -0.8748 -0.2817 +vn 0.3948 -0.8746 -0.2814 +vn 0.3952 -0.8745 -0.2813 +vn 0.3939 -0.8749 -0.2818 +vn 0.1644 -0.9857 -0.0368 +vn 0.1655 -0.9855 -0.0365 +vn 0.1661 -0.9854 -0.0364 +vn 0.1639 -0.9858 -0.0369 +vn -0.0372 -0.9978 -0.0551 +vn -0.0380 -0.9977 -0.0553 +vn -0.0384 -0.9977 -0.0554 +vn -0.0368 -0.9978 -0.0550 +vn -0.2176 -0.9115 -0.3490 +vn -0.2177 -0.9115 -0.3490 +vn -0.2175 -0.9115 -0.3490 +vn -0.2180 -0.9114 -0.3491 +vn -0.4026 -0.7222 -0.5625 +vn -0.4037 -0.7218 -0.5621 +vn -0.4019 -0.7224 -0.5627 +vn -0.4044 -0.7216 -0.5619 +vn -0.5986 -0.4744 -0.6455 +vn -0.5992 -0.4742 -0.6451 +vn -0.5984 -0.4745 -0.6456 +vn -0.5994 -0.4741 -0.6450 +vn -0.7880 -0.2283 -0.5718 +vn -0.7878 -0.2284 -0.5720 +vn -0.7875 -0.2285 -0.5723 +vn -0.7883 -0.2281 -0.5714 +vn -0.9364 -0.0510 -0.3472 +vn -0.9370 -0.0505 -0.3457 +vn -0.9372 -0.0502 -0.3450 +vn -0.9361 -0.0512 -0.3480 +vn -0.9997 -0.0025 -0.0260 +vn -0.9996 -0.0024 -0.0269 +vn -0.9997 -0.0026 -0.0252 +vn -0.9996 -0.0023 -0.0276 +vn -0.9530 -0.1029 0.2848 +vn -0.9532 -0.1027 0.2843 +vn -0.9529 -0.1030 0.2852 +vn -0.9533 -0.1026 0.2839 +vn -0.8165 -0.3130 0.4851 +vn -0.8167 -0.3129 0.4850 +vn -0.8164 -0.3131 0.4853 +vn -0.8169 -0.3127 0.4847 +vn -0.6238 -0.5686 0.5362 +vn -0.6233 -0.5689 0.5364 +vn -0.6242 -0.5684 0.5360 +vn -0.6230 -0.5691 0.5366 +vn -0.4089 -0.8012 0.4368 +vn -0.4091 -0.8012 0.4368 +vn -0.4093 -0.8011 0.4366 +vn -0.4086 -0.8013 0.4369 +vn -0.1824 -0.9575 0.2233 +vn -0.1821 -0.9576 0.2234 +vn -0.1825 -0.9575 0.2233 +vn -0.1820 -0.9576 0.2234 +vn 0.0472 -0.9977 -0.0477 +vn 0.0463 -0.9978 -0.0479 +vn 0.0459 -0.9978 -0.0480 +vn 0.0476 -0.9977 -0.0476 +vn 0.2252 -0.9403 0.2553 +vn 0.2251 -0.9403 0.2552 +vn 0.2249 -0.9404 0.2552 +vn 0.2252 -0.9403 0.2552 +vn 0.4112 -0.7685 0.4902 +vn 0.4114 -0.7685 0.4901 +vn 0.6045 
-0.5247 0.5993 +vn 0.6036 -0.5254 0.5997 +vn 0.6031 -0.5258 0.5999 +vn 0.6050 -0.5244 0.5991 +vn 0.7889 -0.2705 0.5518 +vn 0.7889 -0.2706 0.5518 +vn 0.7888 -0.2706 0.5518 +vn 0.9371 -0.0720 0.3415 +vn 0.9373 -0.0719 0.3411 +vn 0.9373 -0.0719 0.3409 +vn 0.9370 -0.0722 0.3418 +vn 0.9997 0.0026 0.0253 +vn 0.9996 0.0023 0.0276 +vn 0.9538 -0.0725 -0.2914 +vn 0.9539 -0.0724 -0.2914 +vn 0.9538 -0.0725 -0.2917 +vn 0.9539 -0.0723 -0.2911 +vn 0.8180 -0.2650 -0.5106 +vn 0.8180 -0.2650 -0.5105 +vn 0.6278 -0.5132 -0.5852 +vn 0.6281 -0.5130 -0.5851 +vn 0.6277 -0.5133 -0.5853 +vn 0.6282 -0.5130 -0.5850 +vn 0.4148 -0.7523 -0.5119 +vn 0.4145 -0.7523 -0.5120 +vn 0.4143 -0.7524 -0.5121 +vn 0.4149 -0.7523 -0.5118 +vn 0.1897 -0.9285 -0.3191 +vn 0.1894 -0.9286 -0.3192 +vn 0.1890 -0.9286 -0.3192 +vn 0.1899 -0.9285 -0.3190 +vn -0.0123 -0.9399 -0.3411 +vn -0.0124 -0.9399 -0.3412 +vn -0.1965 -0.7838 -0.5891 +vn -0.1956 -0.7840 -0.5892 +vn -0.1969 -0.7837 -0.5890 +vn -0.1951 -0.7840 -0.5893 +vn -0.3904 -0.5585 -0.7319 +vn -0.3899 -0.5586 -0.7321 +vn -0.3907 -0.5584 -0.7318 +vn -0.3895 -0.5587 -0.7322 +vn -0.5909 -0.3140 -0.7431 +vn -0.5902 -0.3142 -0.7436 +vn -0.5914 -0.3139 -0.7428 +vn -0.5898 -0.3143 -0.7439 +vn -0.7838 -0.1044 -0.6122 +vn -0.7834 -0.1046 -0.6127 +vn -0.7830 -0.1047 -0.6131 +vn -0.7841 -0.1043 -0.6118 +vn -0.9357 0.0109 -0.3527 +vn -0.9364 0.0106 -0.3507 +vn -0.9352 0.0110 -0.3539 +vn -0.9369 0.0104 -0.3495 +vn -0.9994 -0.0165 -0.0293 +vn -0.9995 -0.0169 -0.0282 +vn -0.9994 -0.0162 -0.0300 +vn -0.9995 -0.0171 -0.0276 +vn -0.9502 -0.1872 0.2493 +vn -0.9500 -0.1875 0.2498 +vn -0.9503 -0.1870 0.2489 +vn -0.9499 -0.1878 0.2500 +vn -0.8077 -0.4429 0.3891 +vn -0.8076 -0.4431 0.3892 +vn -0.6087 -0.7057 0.3626 +vn -0.6089 -0.7056 0.3626 +vn -0.3869 -0.9021 0.1914 +vn -0.3857 -0.9025 0.1917 +vn -0.3847 -0.9028 0.1920 +vn -0.3878 -0.9017 0.1911 +vn -0.1565 -0.9855 -0.0652 +vn -0.1556 -0.9857 -0.0650 +vn -0.1552 -0.9858 -0.0649 +vn -0.1570 -0.9854 -0.0653 +vn 0.0717 -0.9399 -0.3338 
+vn 0.0726 -0.9399 -0.3336 +vn 0.0731 -0.9399 -0.3335 +vn 0.0712 -0.9399 -0.3339 +vn 0.2501 -0.9678 -0.0283 +vn 0.2505 -0.9677 -0.0282 +vn 0.2508 -0.9676 -0.0281 +vn 0.2498 -0.9679 -0.0283 +vn 0.4325 -0.8649 0.2547 +vn 0.4321 -0.8651 0.2547 +vn 0.4319 -0.8652 0.2547 +vn 0.4326 -0.8648 0.2548 +vn 0.6177 -0.6521 0.4396 +vn 0.6174 -0.6523 0.4397 +vn 0.6180 -0.6519 0.4394 +vn 0.6171 -0.6525 0.4399 +vn 0.7968 -0.3822 0.4681 +vn 0.7965 -0.3825 0.4683 +vn 0.7968 -0.3821 0.4681 +vn 0.7965 -0.3825 0.4684 +vn 0.9394 -0.1311 0.3168 +vn 0.9393 -0.1312 0.3169 +vn 0.9392 -0.1314 0.3173 +vn 0.9395 -0.1309 0.3165 +vn 0.9995 0.0168 0.0281 +vn 0.9994 0.0163 0.0293 +vn 0.9994 0.0162 0.0298 +vn 0.9995 0.0170 0.0276 +vn 0.9544 0.0153 -0.2981 +vn 0.9544 0.0153 -0.2983 +vn 0.9545 0.0152 -0.2977 +vn 0.8215 -0.1215 -0.5571 +vn 0.8219 -0.1213 -0.5566 +vn 0.8212 -0.1216 -0.5575 +vn 0.8222 -0.1212 -0.5562 +vn 0.6383 -0.3395 -0.6909 +vn 0.6375 -0.3399 -0.6915 +vn 0.6387 -0.3393 -0.6906 +vn 0.6371 -0.3400 -0.6918 +vn 0.4311 -0.5823 -0.6892 +vn 0.4308 -0.5825 -0.6893 +vn 0.4315 -0.5822 -0.6891 +vn 0.4303 -0.5827 -0.6894 +vn 0.2117 -0.7984 -0.5637 +vn 0.2118 -0.7984 -0.5636 +vn 0.2119 -0.7984 -0.5636 +vn 0.0092 -0.8083 -0.5887 +vn 0.0097 -0.8083 -0.5887 +vn 0.0100 -0.8084 -0.5886 +vn 0.0087 -0.8082 -0.5888 +vn -0.1790 -0.6071 -0.7742 +vn -0.1791 -0.6071 -0.7742 +vn -0.3798 -0.3700 -0.8479 +vn -0.3801 -0.3700 -0.8478 +vn -0.3795 -0.3699 -0.8480 +vn -0.3804 -0.3699 -0.8476 +vn -0.5859 -0.1437 -0.7975 +vn -0.5862 -0.1437 -0.7973 +vn -0.5855 -0.1437 -0.7979 +vn -0.5867 -0.1437 -0.7970 +vn -0.7837 0.0219 -0.6208 +vn -0.7829 0.0221 -0.6218 +vn -0.7842 0.0218 -0.6201 +vn -0.7823 0.0222 -0.6225 +vn -0.9369 0.0721 -0.3420 +vn -0.9364 0.0726 -0.3434 +vn -0.9373 0.0718 -0.3412 +vn -0.9360 0.0730 -0.3443 +vn -0.9989 -0.0302 -0.0346 +vn -0.9989 -0.0299 -0.0351 +vn -0.9990 -0.0306 -0.0340 +vn -0.9989 -0.0296 -0.0356 +vn -0.9452 -0.2646 0.1912 +vn -0.9452 -0.2646 0.1913 +vn -0.9453 -0.2644 0.1910 +vn -0.7948 
-0.5509 0.2547 +vn -0.7952 -0.5504 0.2545 +vn -0.7945 -0.5512 0.2550 +vn -0.7956 -0.5499 0.2543 +vn -0.5899 -0.7941 0.1466 +vn -0.5908 -0.7935 0.1462 +vn -0.5912 -0.7932 0.1461 +vn -0.5895 -0.7943 0.1468 +vn -0.3611 -0.9291 -0.0803 +vn -0.3622 -0.9286 -0.0805 +vn -0.3630 -0.9283 -0.0806 +vn -0.3603 -0.9294 -0.0802 +vn -0.1314 -0.9284 -0.3476 +vn -0.1328 -0.9281 -0.3478 +vn -0.1337 -0.9280 -0.3479 +vn -0.1304 -0.9286 -0.3475 +vn 0.0947 -0.8083 -0.5812 +vn 0.0939 -0.8083 -0.5813 +vn 0.0935 -0.8083 -0.5813 +vn 0.0951 -0.8083 -0.5811 +vn 0.2753 -0.9115 -0.3055 +vn 0.4546 -0.8907 -0.0061 +vn 0.4545 -0.8907 -0.0061 +vn 0.4543 -0.8908 -0.0061 +vn 0.4546 -0.8907 -0.0060 +vn 0.6351 -0.7341 0.2402 +vn 0.6343 -0.7348 0.2402 +vn 0.6338 -0.7353 0.2402 +vn 0.6357 -0.7336 0.2402 +vn 0.8071 -0.4744 0.3516 +vn 0.8069 -0.4747 0.3516 +vn 0.8066 -0.4751 0.3518 +vn 0.8073 -0.4741 0.3515 +vn 0.9428 -0.1859 0.2768 +vn 0.9989 0.0300 0.0352 +vn 0.9989 0.0303 0.0346 +vn 0.9989 0.0306 0.0342 +vn 0.9989 0.0298 0.0356 +vn 0.9532 0.1028 -0.2842 +vn 0.9532 0.1029 -0.2843 +vn 0.9531 0.1030 -0.2845 +vn 0.9533 0.1027 -0.2840 +vn 0.8223 0.0257 -0.5684 +vn 0.8226 0.0257 -0.5680 +vn 0.8230 0.0256 -0.5675 +vn 0.8220 0.0258 -0.5689 +vn 0.6445 -0.1553 -0.7487 +vn 0.6441 -0.1554 -0.7490 +vn 0.6447 -0.1553 -0.7485 +vn 0.6439 -0.1555 -0.7491 +vn 0.4402 -0.3861 -0.8106 +vn 0.4411 -0.3859 -0.8102 +vn 0.4396 -0.3863 -0.8109 +vn 0.4417 -0.3857 -0.8100 +vn 0.2284 -0.6183 -0.7521 +vn 0.2286 -0.6182 -0.7520 +vn 0.2282 -0.6184 -0.7520 +vn 0.2289 -0.6182 -0.7519 +vn 0.0261 -0.6259 -0.7795 +vn 0.0258 -0.6259 -0.7795 +vn 0.0263 -0.6259 -0.7795 +vn 0.0255 -0.6259 -0.7795 +vn -0.1674 -0.4023 -0.9001 +vn -0.1678 -0.4023 -0.9000 +vn -0.1670 -0.4023 -0.9001 +vn -0.1681 -0.4023 -0.8999 +vn -0.3739 -0.1693 -0.9119 +vn -0.3735 -0.1694 -0.9120 +vn -0.3738 -0.1694 -0.9119 +vn -0.5851 0.0306 -0.8104 +vn -0.5844 0.0305 -0.8109 +vn -0.5837 0.0305 -0.8114 +vn -0.5855 0.0306 -0.8101 +vn -0.7859 0.1479 -0.6004 +vn -0.7859 0.1480 
-0.6004 +vn -0.7856 0.1481 -0.6007 +vn -0.7862 0.1478 -0.6001 +vn -0.9391 0.1316 -0.3175 +vn -0.9393 0.1312 -0.3169 +vn -0.9390 0.1317 -0.3176 +vn -0.9394 0.1311 -0.3168 +vn -0.9981 -0.0431 -0.0439 +vn -0.9981 -0.0431 -0.0438 +vn -0.9981 -0.0427 -0.0442 +vn -0.9981 -0.0434 -0.0435 +vn -0.9385 -0.3273 0.1102 +vn -0.9382 -0.3279 0.1106 +vn -0.9386 -0.3270 0.1101 +vn -0.9381 -0.3282 0.1107 +vn -0.7791 -0.6210 0.0864 +vn -0.7792 -0.6207 0.0863 +vn -0.7789 -0.6212 0.0864 +vn -0.7795 -0.6205 0.0862 +vn -0.5688 -0.8172 -0.0930 +vn -0.5696 -0.8166 -0.0930 +vn -0.5683 -0.8175 -0.0930 +vn -0.5699 -0.8164 -0.0929 +vn -0.3381 -0.8749 -0.3467 +vn -0.3386 -0.8748 -0.3467 +vn -0.3376 -0.8751 -0.3468 +vn -0.3391 -0.8746 -0.3466 +vn -0.1109 -0.7982 -0.5921 +vn -0.1116 -0.7982 -0.5920 +vn -0.1104 -0.7982 -0.5921 +vn -0.1121 -0.7981 -0.5920 +vn 0.1107 -0.6259 -0.7720 +vn 0.1103 -0.6260 -0.7720 +vn 0.1099 -0.6260 -0.7720 +vn 0.2968 -0.7838 -0.5454 +vn 0.2973 -0.7838 -0.5453 +vn 0.2978 -0.7837 -0.5451 +vn 0.2964 -0.7839 -0.5456 +vn 0.4774 -0.8389 -0.2613 +vn 0.4770 -0.8391 -0.2615 +vn 0.4775 -0.8389 -0.2613 +vn 0.6543 -0.7560 0.0185 +vn 0.6551 -0.7553 0.0187 +vn 0.6557 -0.7548 0.0189 +vn 0.6537 -0.7566 0.0183 +vn 0.8198 -0.5342 0.2064 +vn 0.8204 -0.5333 0.2064 +vn 0.8208 -0.5327 0.2063 +vn 0.8194 -0.5348 0.2065 +vn 0.9479 -0.2304 0.2200 +vn 0.9480 -0.2303 0.2199 +vn 0.9481 -0.2300 0.2198 +vn 0.9478 -0.2307 0.2202 +vn 0.9981 0.0430 0.0437 +vn 0.9981 0.0430 0.0436 +vn 0.9981 0.0427 0.0440 +vn 0.9502 0.1873 -0.2493 +vn 0.9502 0.1872 -0.2492 +vn 0.9503 0.1871 -0.2490 +vn 0.9501 0.1874 -0.2495 +vn 0.8208 0.1722 -0.5447 +vn 0.8209 0.1720 -0.5445 +vn 0.6461 0.0330 -0.7625 +vn 0.6463 0.0329 -0.7624 +vn 0.4461 -0.1767 -0.8774 +vn 0.4456 -0.1768 -0.8776 +vn 0.4465 -0.1765 -0.8772 +vn 0.4452 -0.1768 -0.8778 +vn 0.2395 -0.4098 -0.8802 +vn 0.2395 -0.4097 -0.8802 +vn 0.0383 -0.4148 -0.9091 +vn 0.0380 -0.4148 -0.9091 +vn 0.0379 -0.4148 -0.9091 +vn -0.1623 -0.1841 -0.9694 +vn -0.1616 -0.1840 -0.9695 
+vn -0.1628 -0.1841 -0.9693 +vn -0.1613 -0.1840 -0.9696 +vn -0.3714 0.0360 -0.9278 +vn -0.3722 0.0361 -0.9275 +vn -0.3727 0.0361 -0.9273 +vn -0.3709 0.0359 -0.9280 +vn -0.5864 0.2038 -0.7839 +vn -0.5872 0.2037 -0.7834 +vn -0.5876 0.2037 -0.7831 +vn -0.5860 0.2039 -0.7842 +vn -0.7896 0.2701 -0.5509 +vn -0.7901 0.2697 -0.5504 +vn -0.7895 0.2702 -0.5511 +vn -0.7902 0.2696 -0.5504 +vn -0.9434 0.1845 -0.2756 +vn -0.9429 0.1855 -0.2765 +vn -0.9437 0.1839 -0.2751 +vn -0.9426 0.1861 -0.2771 +vn -0.9970 -0.0529 -0.0571 +vn -0.9970 -0.0531 -0.0570 +vn -0.9970 -0.0531 -0.0569 +vn -0.9295 -0.3686 0.0099 +vn -0.9297 -0.3682 0.0099 +vn -0.9295 -0.3687 0.0100 +vn -0.9297 -0.3681 0.0098 +vn -0.7636 -0.6378 -0.1010 +vn -0.7628 -0.6387 -0.1008 +vn -0.7623 -0.6393 -0.1007 +vn -0.7641 -0.6372 -0.1010 +vn -0.5477 -0.7700 -0.3272 +vn -0.5478 -0.7699 -0.3272 +vn -0.3171 -0.7527 -0.5769 +vn -0.3166 -0.7529 -0.5770 +vn -0.3172 -0.7527 -0.5769 +vn -0.0933 -0.6184 -0.7803 +vn -0.0930 -0.6184 -0.7804 +vn -0.0934 -0.6183 -0.7803 +vn -0.0928 -0.6184 -0.7804 +vn 0.0544 0.7810 -0.6222 +vn 0.0520 0.7782 -0.6259 +vn 0.0544 0.7828 -0.6199 +vn 0.0542 0.7771 -0.6270 +vn 0.0551 0.7792 -0.6243 +vn 0.0551 0.7791 -0.6245 +vn 0.0551 0.7797 -0.6237 +vn 0.0553 0.7809 -0.6222 +vn 0.0554 0.7808 -0.6224 +vn 0.0553 0.7788 -0.6248 +vn 0.0553 0.7796 -0.6238 +vn 0.0551 0.7797 -0.6238 +vn 0.0551 0.7796 -0.6239 +vn 0.0552 0.7797 -0.6237 +vn 0.0552 0.7788 -0.6248 +vn 0.0554 0.7795 -0.6239 +vn 0.0551 0.7785 -0.6252 +vn 0.0546 0.7787 -0.6250 +vn 0.0550 0.7807 -0.6225 +vn 0.0550 0.7782 -0.6255 +vn 0.0552 0.7779 -0.6259 +vn 0.0554 0.7780 -0.6258 +vn 0.0554 0.7777 -0.6262 +vn 0.0554 0.7773 -0.6266 +vn 0.3136 -0.6069 -0.7303 +vn 0.3127 -0.6071 -0.7305 +vn 0.3141 -0.6068 -0.7302 +vn 0.3122 -0.6073 -0.7306 +vn 0.4966 -0.7216 -0.4823 +vn 0.4969 -0.7215 -0.4822 +vn 0.4970 -0.7215 -0.4822 +vn 0.6751 -0.7108 -0.1975 +vn 0.6747 -0.7111 -0.1977 +vn 0.8347 -0.5488 0.0452 +vn 0.8342 -0.5496 0.0452 +vn 0.8351 -0.5482 0.0452 +vn 0.8339 
-0.5501 0.0452 +vn 0.9542 -0.2592 0.1495 +vn 0.9543 -0.2589 0.1494 +vn 0.9970 0.0531 0.0569 +vn 0.9452 0.2646 -0.1913 +vn 0.9453 0.2643 -0.1911 +vn 0.9454 0.2641 -0.1911 +vn 0.9452 0.2645 -0.1914 +vn 0.8156 0.3136 -0.4862 +vn 0.8156 0.3137 -0.4863 +vn 0.8158 0.3135 -0.4859 +vn 0.8155 0.3137 -0.4863 +vn 0.6429 0.2203 -0.7336 +vn 0.6430 0.2203 -0.7335 +vn 0.6433 0.2201 -0.7333 +vn 0.6427 0.2204 -0.7337 +vn 0.4480 0.0375 -0.8933 +vn 0.4478 0.0375 -0.8933 +vn 0.2460 -0.1874 -0.9510 +vn 0.2461 -0.1874 -0.9510 +vn 0.0446 -0.1897 -0.9808 +vn -0.1611 0.0390 -0.9862 +vn -0.1610 0.0390 -0.9862 +vn -0.3746 0.2402 -0.8955 +vn -0.3741 0.2402 -0.8957 +vn -0.3739 0.2402 -0.8958 +vn -0.3748 0.2402 -0.8955 +vn -0.5931 0.3716 -0.7142 +vn -0.5924 0.3718 -0.7147 +vn -0.5919 0.3719 -0.7150 +vn -0.5936 0.3714 -0.7139 +vn -0.7966 0.3825 -0.4681 +vn -0.7967 0.3824 -0.4681 +vn -0.7970 0.3821 -0.4678 +vn -0.9482 0.2295 -0.2196 +vn -0.9485 0.2287 -0.2192 +vn -0.9481 0.2300 -0.2197 +vn -0.9487 0.2282 -0.2189 +vn -0.9955 -0.0597 -0.0732 +vn -0.9955 -0.0595 -0.0732 +vn -0.9192 -0.3806 -0.1013 +vn -0.9194 -0.3800 -0.1014 +vn -0.9196 -0.3795 -0.1014 +vn -0.9190 -0.3811 -0.1013 +vn -0.7473 -0.6010 -0.2836 +vn -0.7476 -0.6006 -0.2835 +vn -0.7479 -0.6003 -0.2835 +vn -0.7470 -0.6013 -0.2837 +vn -0.5311 -0.6615 -0.5295 +vn -0.5303 -0.6619 -0.5298 +vn -0.5316 -0.6613 -0.5292 +vn -0.5298 -0.6622 -0.5300 +vn -0.3017 -0.5827 -0.7546 +vn -0.3032 -0.5824 -0.7542 +vn -0.3008 -0.5828 -0.7549 +vn -0.3041 -0.5823 -0.7540 +vn 0.3017 0.5827 0.7546 +vn 0.3032 0.5825 0.7542 +vn 0.3041 0.5824 0.7539 +vn 0.3009 0.5829 0.7548 +vn 0.5163 0.5127 0.6860 +vn 0.5156 0.5129 0.6863 +vn 0.5151 0.5131 0.6866 +vn 0.5168 0.5125 0.6857 +vn 0.7231 0.3998 0.5633 +vn 0.7232 0.3997 0.5632 +vn 0.7236 0.3994 0.5629 +vn 0.8944 0.2392 0.3780 +vn 0.8945 0.2389 0.3778 +vn 0.8942 0.2395 0.3782 +vn 0.8947 0.2387 0.3776 +vn 0.9899 0.0389 0.1363 +vn 0.9899 0.0388 0.1362 +vn 0.9898 0.0393 0.1366 +vn 0.9900 0.0384 0.1358 +vn 0.9782 -0.1676 
-0.1228 +vn 0.9780 -0.1680 -0.1233 +vn 0.9782 -0.1674 -0.1226 +vn 0.9780 -0.1682 -0.1235 +vn 0.8698 -0.3445 -0.3532 +vn 0.8699 -0.3443 -0.3531 +vn 0.8697 -0.3446 -0.3535 +vn 0.8700 -0.3442 -0.3531 +vn 0.7034 -0.4740 -0.5297 +vn 0.7032 -0.4741 -0.5298 +vn 0.7036 -0.4738 -0.5296 +vn 0.7030 -0.4742 -0.5300 +vn 0.5122 -0.5587 -0.6523 +vn 0.5130 -0.5584 -0.6520 +vn 0.5117 -0.5589 -0.6525 +vn 0.5134 -0.5582 -0.6518 +vn -0.5174 -0.5122 -0.6855 +vn -0.5168 -0.5124 -0.6859 +vn -0.5178 -0.5120 -0.6854 +vn -0.5163 -0.5124 -0.6861 +vn -0.7221 -0.4004 -0.5642 +vn -0.7219 -0.4006 -0.5643 +vn -0.7217 -0.4007 -0.5644 +vn -0.7222 -0.4002 -0.5641 +vn -0.8947 -0.2387 -0.3776 +vn -0.8947 -0.2386 -0.3776 +vn -0.9901 -0.0381 -0.1354 +vn -0.9901 -0.0377 -0.1351 +vn -0.9902 -0.0374 -0.1347 +vn -0.9900 -0.0384 -0.1358 +vn -0.9785 0.1666 0.1216 +vn -0.9783 0.1673 0.1222 +vn -0.9782 0.1677 0.1227 +vn -0.9786 0.1663 0.1214 +vn -0.8702 0.3440 0.3527 +vn -0.8705 0.3437 0.3524 +vn -0.8706 0.3435 0.3521 +vn -0.8700 0.3443 0.3529 +vn -0.7034 0.4739 0.5298 +vn -0.7023 0.4747 0.5305 +vn -0.7017 0.4750 0.5310 +vn -0.7039 0.4735 0.5295 +vn -0.5117 0.5586 0.6528 +vn -0.5111 0.5589 0.6530 +vn -0.5110 0.5589 0.6531 +vn -0.3142 0.6067 0.7302 +vn -0.3143 0.6067 0.7302 +vn -0.3141 0.6068 0.7302 +vn -0.1109 0.6259 0.7720 +vn -0.1114 0.6258 0.7720 +vn -0.1119 0.6258 0.7719 +vn -0.1106 0.6259 0.7720 +vn 0.0937 0.6183 0.7803 +vn 0.0927 0.6184 0.7804 +vn 0.0921 0.6184 0.7805 +vn 0.0942 0.6184 0.7802 +vn 0.6911 -0.6117 -0.3849 +vn 0.6909 -0.6119 -0.3850 +vn 0.6913 -0.6115 -0.3848 +vn 0.6907 -0.6120 -0.3851 +vn 0.8478 -0.5182 -0.1125 +vn 0.8482 -0.5176 -0.1123 +vn 0.8483 -0.5174 -0.1123 +vn 0.9611 -0.2669 0.0713 +vn 0.9612 -0.2665 0.0713 +vn 0.9955 0.0595 0.0733 +vn 0.9955 0.0597 0.0733 +vn 0.9955 0.0598 0.0733 +vn 0.9381 0.3281 -0.1108 +vn 0.9378 0.3289 -0.1112 +vn 0.9376 0.3293 -0.1113 +vn 0.9383 0.3276 -0.1105 +vn 0.8068 0.4437 -0.3901 +vn 0.8065 0.4440 -0.3904 +vn 0.8062 0.4443 -0.3906 +vn 0.8070 0.4435 
-0.3899 +vn 0.6355 0.4018 -0.6593 +vn 0.6359 0.4016 -0.6590 +vn 0.6363 0.4014 -0.6588 +vn 0.6351 0.4020 -0.6596 +vn 0.4446 0.2505 -0.8600 +vn 0.4445 0.2505 -0.8600 +vn 0.4449 0.2504 -0.8598 +vn 0.4442 0.2506 -0.8602 +vn 0.2476 0.0398 -0.9680 +vn 0.2477 0.0398 -0.9680 +vn 0.2474 0.0398 -0.9681 +vn 0.0454 0.0403 -0.9982 +vn 0.0455 0.0402 -0.9982 +vn 0.0459 0.0402 -0.9981 +vn -0.1646 0.2610 -0.9512 +vn -0.1642 0.2610 -0.9513 +vn -0.1647 0.2610 -0.9512 +vn -0.3828 0.4379 -0.8135 +vn -0.3827 0.4379 -0.8135 +vn -0.3824 0.4380 -0.8136 +vn -0.3832 0.4378 -0.8133 +vn -0.6031 0.5256 -0.6000 +vn -0.6035 0.5253 -0.5998 +vn -0.6029 0.5258 -0.6001 +vn -0.6037 0.5252 -0.5997 +vn -0.8077 0.4736 -0.3512 +vn -0.8074 0.4740 -0.3514 +vn -0.8079 0.4733 -0.3511 +vn -0.8071 0.4743 -0.3515 +vn -0.9541 0.2595 -0.1494 +vn -0.9542 0.2591 -0.1493 +vn -0.9541 0.2597 -0.1494 +vn -0.9543 0.2589 -0.1493 +vn -0.9940 -0.0605 -0.0913 +vn -0.9940 -0.0608 -0.0913 +vn -0.9939 -0.0613 -0.0912 +vn -0.9940 -0.0602 -0.0913 +vn -0.9093 -0.3590 -0.2104 +vn -0.9093 -0.3591 -0.2104 +vn -0.9093 -0.3592 -0.2104 +vn -0.7327 -0.5173 -0.4422 +vn -0.7329 -0.5171 -0.4421 +vn -0.7332 -0.5169 -0.4419 +vn -0.7325 -0.5175 -0.4423 +vn 0.3184 0.7523 0.5768 +vn 0.3192 0.7521 0.5766 +vn 0.3197 0.7520 0.5765 +vn 0.3179 0.7524 0.5770 +vn 0.5306 0.6618 0.5296 +vn 0.5295 0.6623 0.5301 +vn 0.5289 0.6626 0.5303 +vn 0.5313 0.6615 0.5292 +vn 0.7340 0.5161 0.4414 +vn 0.7340 0.5162 0.4414 +vn 0.7337 0.5164 0.4415 +vn 0.7344 0.5157 0.4412 +vn 0.9011 0.3085 0.3049 +vn 0.9009 0.3087 0.3050 +vn 0.9012 0.3082 0.3047 +vn 0.9007 0.3092 0.3052 +vn 0.9910 0.0495 0.1241 +vn 0.9910 0.0502 0.1244 +vn 0.9911 0.0491 0.1239 +vn 0.9909 0.0506 0.1247 +vn 0.9737 -0.2164 -0.0714 +vn 0.9738 -0.2161 -0.0712 +vn 0.9739 -0.2157 -0.0713 +vn 0.9737 -0.2164 -0.0715 +vn 0.8603 -0.4452 -0.2483 +vn 0.8605 -0.4450 -0.2482 +vn 0.8605 -0.4449 -0.2482 +vn -0.9010 -0.3086 -0.3049 +vn -0.9009 -0.3088 -0.3050 +vn -0.9008 -0.3089 -0.3050 +vn -0.9011 -0.3084 -0.3049 +vn 
-0.9911 -0.0491 -0.1236 +vn -0.9911 -0.0489 -0.1235 +vn -0.9911 -0.0490 -0.1239 +vn -0.9912 -0.0484 -0.1232 +vn -0.9737 0.2162 0.0713 +vn -0.9738 0.2162 0.0713 +vn -0.9738 0.2159 0.0711 +vn -0.9737 0.2164 0.0715 +vn -0.8612 0.4440 0.2475 +vn -0.8610 0.4443 0.2476 +vn -0.8608 0.4445 0.2478 +vn -0.8614 0.4437 0.2473 +vn -0.6892 0.6132 0.3861 +vn -0.6897 0.6127 0.3858 +vn -0.6901 0.6124 0.3856 +vn -0.6887 0.6136 0.3862 +vn -0.4969 0.7216 0.4821 +vn -0.4967 0.7217 0.4822 +vn -0.2974 0.7838 0.5451 +vn -0.2968 0.7839 0.5454 +vn -0.2978 0.7837 0.5450 +vn -0.2964 0.7839 0.5456 +vn -0.0949 0.8082 0.5812 +vn -0.0948 0.8083 0.5811 +vn 0.1088 0.7985 0.5921 +vn 0.1087 0.7985 0.5921 +vn 0.9679 -0.2514 -0.0054 +vn 0.9679 -0.2512 -0.0053 +vn 0.9678 -0.2515 -0.0054 +vn 0.9680 -0.2511 -0.0053 +vn 0.9939 0.0617 0.0913 +vn 0.9939 0.0624 0.0913 +vn 0.9939 0.0613 0.0913 +vn 0.9938 0.0630 0.0913 +vn 0.9289 0.3702 -0.0102 +vn 0.9293 0.3692 -0.0100 +vn 0.9295 0.3687 -0.0099 +vn 0.9287 0.3707 -0.0103 +vn 0.7946 0.5510 -0.2549 +vn 0.7954 0.5501 -0.2544 +vn 0.7958 0.5496 -0.2542 +vn 0.7942 0.5515 -0.2551 +vn 0.6241 0.5684 -0.5360 +vn 0.6242 0.5684 -0.5360 +vn 0.4371 0.4568 -0.7748 +vn 0.4369 0.4568 -0.7749 +vn 0.4366 0.4569 -0.7750 +vn 0.4372 0.4567 -0.7748 +vn 0.2458 0.2657 -0.9322 +vn 0.2450 0.2658 -0.9324 +vn 0.2446 0.2659 -0.9325 +vn 0.2462 0.2656 -0.9321 +vn 0.0422 0.2691 -0.9622 +vn 0.0420 0.2692 -0.9622 +vn -0.1709 0.4760 -0.8627 +vn -0.1716 0.4760 -0.8625 +vn -0.1720 0.4759 -0.8625 +vn -0.1706 0.4759 -0.8628 +vn -0.3939 0.6194 -0.6792 +vn -0.3946 0.6192 -0.6789 +vn -0.3952 0.6190 -0.6788 +vn -0.3933 0.6195 -0.6794 +vn -0.6170 0.6525 -0.4400 +vn -0.6168 0.6526 -0.4400 +vn -0.6167 0.6527 -0.4401 +vn -0.6171 0.6524 -0.4399 +vn -0.8210 0.5325 -0.2060 +vn -0.8211 0.5323 -0.2060 +vn -0.8207 0.5329 -0.2060 +vn -0.9608 0.2677 -0.0714 +vn -0.9610 0.2672 -0.0713 +vn -0.9610 0.2670 -0.0713 +vn -0.9608 0.2679 -0.0714 +vn -0.9925 -0.0571 -0.1085 +vn -0.9925 -0.0568 -0.1084 +vn -0.9925 -0.0567 
-0.1084 +vn -0.9925 -0.0572 -0.1085 +vn 0.3383 0.8748 0.3467 +vn 0.3383 0.8748 0.3468 +vn 0.3384 0.8748 0.3467 +vn 0.3378 0.8750 0.3468 +vn 0.5488 0.7693 0.3272 +vn 0.5490 0.7692 0.3271 +vn 0.5491 0.7691 0.3271 +vn 0.5487 0.7693 0.3272 +vn 0.7468 0.6015 0.2838 +vn 0.7475 0.6006 0.2837 +vn 0.7465 0.6018 0.2838 +vn 0.7478 0.6002 0.2836 +vn 0.9098 0.3579 0.2101 +vn 0.9097 0.3583 0.2101 +vn 0.9100 0.3576 0.2100 +vn 0.9095 0.3586 0.2102 +vn 0.9923 0.0587 0.1090 +vn 0.9924 0.0578 0.1087 +vn 0.9923 0.0593 0.1091 +vn 0.9924 0.0572 0.1086 +vn -0.9679 0.2514 0.0054 +vn -0.9678 0.2518 0.0056 +vn -0.9676 0.2522 0.0056 +vn -0.9680 0.2510 0.0053 +vn -0.8492 0.5162 0.1116 +vn -0.8496 0.5155 0.1114 +vn -0.8499 0.5150 0.1113 +vn -0.8489 0.5166 0.1117 +vn -0.6737 0.7120 0.1980 +vn -0.6734 0.7123 0.1981 +vn -0.6737 0.7120 0.1979 +vn -0.4775 0.8389 0.2611 +vn -0.4774 0.8390 0.2611 +vn -0.4775 0.8390 0.2611 +vn -0.4770 0.8391 0.2613 +vn -0.2747 0.9117 0.3055 +vn -0.2738 0.9119 0.3058 +vn -0.2753 0.9116 0.3053 +vn -0.2732 0.9120 0.3060 +vn -0.0731 0.9398 0.3337 +vn -0.0733 0.9398 0.3336 +vn -0.0730 0.9398 0.3338 +vn -0.0734 0.9398 0.3336 +vn 0.1310 0.9284 0.3477 +vn 0.1314 0.9283 0.3478 +vn 0.1306 0.9285 0.3477 +vn 0.1319 0.9282 0.3478 +vn 0.9196 0.3796 0.1011 +vn 0.9196 0.3795 0.1012 +vn 0.7804 0.6194 -0.0860 +vn 0.7794 0.6206 -0.0862 +vn 0.7790 0.6211 -0.0862 +vn 0.7808 0.6189 -0.0859 +vn 0.6089 0.7056 -0.3625 +vn 0.6087 0.7057 -0.3626 +vn 0.4246 0.6460 -0.6344 +vn 0.4249 0.6459 -0.6343 +vn 0.4243 0.6460 -0.6345 +vn 0.4250 0.6459 -0.6342 +vn 0.2382 0.4845 -0.8417 +vn 0.2379 0.4845 -0.8418 +vn 0.2377 0.4845 -0.8418 +vn 0.2384 0.4844 -0.8417 +vn 0.0338 0.4906 -0.8707 +vn 0.0333 0.4907 -0.8707 +vn 0.0332 0.4907 -0.8707 +vn -0.1850 0.6729 -0.7163 +vn -0.1839 0.6730 -0.7164 +vn -0.1833 0.6730 -0.7165 +vn -0.1856 0.6728 -0.7161 +vn -0.4105 0.7688 -0.4903 +vn -0.4110 0.7687 -0.4902 +vn -0.4114 0.7685 -0.4901 +vn -0.4101 0.7690 -0.4904 +vn -0.6343 0.7349 -0.2401 +vn -0.6339 0.7352 -0.2401 +vn 
-0.6338 0.7353 -0.2401 +vn -0.6344 0.7348 -0.2401 +vn -0.8358 0.5472 -0.0456 +vn -0.8354 0.5478 -0.0456 +vn -0.8351 0.5482 -0.0456 +vn -0.8361 0.5467 -0.0456 +vn 0.3615 0.9289 0.0802 +vn 0.3621 0.9287 0.0803 +vn 0.3610 0.9291 0.0801 +vn 0.3626 0.9285 0.0804 +vn 0.5699 0.8164 0.0929 +vn 0.7626 0.6389 0.1008 +vn 0.7624 0.6392 0.1007 +vn 0.7627 0.6389 0.1008 +vn -0.6542 0.7561 -0.0185 +vn -0.6540 0.7563 -0.0185 +vn -0.6544 0.7559 -0.0186 +vn -0.6538 0.7565 -0.0184 +vn -0.4545 0.8907 0.0063 +vn -0.4544 0.8908 0.0063 +vn -0.4543 0.8908 0.0063 +vn -0.4546 0.8907 0.0063 +vn -0.2495 0.9680 0.0284 +vn -0.2501 0.9678 0.0283 +vn -0.2490 0.9681 0.0285 +vn -0.2504 0.9677 0.0282 +vn -0.0480 0.9977 0.0477 +vn -0.0476 0.9977 0.0478 +vn -0.0482 0.9977 0.0477 +vn -0.0475 0.9977 0.0478 +vn 0.1565 0.9855 0.0652 +vn 0.1554 0.9857 0.0649 +vn 0.1572 0.9854 0.0653 +vn 0.1546 0.9858 0.0648 +vn 0.5907 0.7935 -0.1461 +vn 0.5899 0.7941 -0.1465 +vn 0.5912 0.7932 -0.1460 +vn 0.5895 0.7943 -0.1467 +vn 0.4086 0.8015 -0.4366 +vn 0.4074 0.8018 -0.4371 +vn 0.4093 0.8013 -0.4363 +vn 0.4067 0.8020 -0.4375 +vn 0.2249 0.6851 -0.6928 +vn 0.2248 0.6852 -0.6928 +vn 0.2247 0.6852 -0.6928 +vn 0.0212 0.6938 -0.7199 +vn 0.0203 0.6939 -0.7198 +vn 0.0198 0.6939 -0.7198 +vn 0.0218 0.6938 -0.7199 +vn -0.2034 0.8351 -0.5110 +vn -0.2032 0.8352 -0.5111 +vn -0.2029 0.8352 -0.5112 +vn -0.2038 0.8351 -0.5110 +vn -0.4320 0.8651 -0.2548 +vn -0.4324 0.8649 -0.2549 +vn -0.4318 0.8652 -0.2548 +vn -0.4326 0.8648 -0.2549 +vn 0.3873 0.9019 -0.1914 +vn 0.3872 0.9019 -0.1914 +vn 0.3869 0.9020 -0.1915 +vn 0.3878 0.9017 -0.1912 +vn -0.2252 0.9403 -0.2553 +vn -0.2253 0.9402 -0.2553 +vn -0.0211 0.9694 -0.2445 +vn -0.0203 0.9695 -0.2444 +vn -0.0215 0.9694 -0.2447 +vn -0.0194 0.9695 -0.2443 +vn 0.1800 0.9579 -0.2237 +vn 0.1795 0.9580 -0.2238 +vn 0.1802 0.9579 -0.2236 +vn 0.1793 0.9580 -0.2239 +vn 0.2037 0.8509 -0.4843 +vn 0.2053 0.8507 -0.4838 +vn 0.2026 0.8510 -0.4846 +vn 0.2063 0.8506 -0.4836 +vn 0.0034 0.8612 -0.5082 +vn 0.0030 
0.8612 -0.5083 +vn 0.0035 0.8613 -0.5082 +vn 0.0028 0.8612 -0.5083 +vn 0.0181 -0.8616 0.5073 +vn 0.0189 -0.8617 0.5071 +vn 0.0176 -0.8615 0.5074 +vn 0.0193 -0.8617 0.5071 +vn -0.0077 -0.9698 0.2436 +vn -0.0068 -0.9699 0.2434 +vn -0.0081 -0.9698 0.2437 +vn 0.2197 -0.8489 0.4808 +vn 0.2187 -0.8490 0.4811 +vn 0.2203 -0.8488 0.4806 +vn 0.2181 -0.8490 0.4812 +vn 0.0388 -0.6943 0.7187 +vn 0.0387 -0.6943 0.7187 +vn 0.0388 -0.6943 0.7186 +vn -0.1866 -0.8382 0.5124 +vn -0.1874 -0.8380 0.5125 +vn -0.1862 -0.8383 0.5124 +vn -0.1878 -0.8379 0.5125 +vn -0.0345 -0.9982 -0.0488 +vn -0.0354 -0.9982 -0.0486 +vn -0.0339 -0.9982 -0.0489 +vn -0.0360 -0.9982 -0.0485 +vn 0.1972 -0.9552 0.2207 +vn 0.1964 -0.9553 0.2209 +vn 0.1976 -0.9551 0.2206 +vn 0.1959 -0.9554 0.2210 +vn -0.2113 -0.9433 0.2558 +vn -0.2108 -0.9435 0.2558 +vn -0.2117 -0.9432 0.2559 +vn -0.2104 -0.9436 0.2557 +vn 0.4228 -0.7971 0.4312 +vn 0.4233 -0.7970 0.4309 +vn 0.4225 -0.7972 0.4314 +vn 0.4235 -0.7969 0.4309 +vn 0.2386 -0.6839 0.6894 +vn 0.2381 -0.6841 0.6895 +vn 0.2377 -0.6842 0.6895 +vn 0.2390 -0.6839 0.6893 +vn 0.0538 -0.4908 0.8696 +vn 0.0532 -0.4909 0.8696 +vn 0.0527 -0.4909 0.8696 +vn 0.0542 -0.4908 0.8696 +vn -0.1688 -0.6750 0.7182 +vn -0.1685 -0.6751 0.7183 +vn -0.1682 -0.6751 0.7183 +vn -0.1693 -0.6750 0.7181 +vn -0.3948 -0.7745 0.4942 +vn -0.3954 -0.7743 0.4941 +vn -0.3959 -0.7741 0.4941 +vn -0.3943 -0.7746 0.4945 +vn -0.0592 -0.9404 -0.3350 +vn -0.0601 -0.9404 -0.3348 +vn -0.0586 -0.9404 -0.3351 +vn -0.0607 -0.9404 -0.3347 +vn 0.1695 -0.9832 -0.0670 +vn 0.1690 -0.9833 -0.0669 +vn 0.1711 -0.9830 -0.0673 +vn -0.2384 -0.9708 -0.0282 +vn -0.2383 -0.9708 -0.0282 +vn -0.2387 -0.9707 -0.0281 +vn -0.2379 -0.9709 -0.0283 +vn 0.3989 -0.8976 0.1875 +vn 0.3991 -0.8976 0.1874 +vn 0.3987 -0.8977 0.1875 +vn 0.3994 -0.8975 0.1873 +vn -0.4179 -0.8714 0.2570 +vn -0.4178 -0.8714 0.2570 +vn 0.6215 -0.6986 0.3545 +vn 0.6208 -0.6990 0.3548 +vn 0.6219 -0.6984 0.3542 +vn 0.6204 -0.6993 0.3551 +vn 0.2529 -0.4835 0.8380 +vn 0.2530 
-0.4835 0.8380 +vn 0.0601 -0.2693 0.9612 +vn 0.0617 -0.2692 0.9611 +vn 0.0625 -0.2691 0.9611 +vn -0.1553 -0.4774 0.8649 +vn -0.3768 -0.6240 0.6845 +vn -0.3769 -0.6240 0.6845 +vn -0.3765 -0.6241 0.6847 +vn -0.3770 -0.6240 0.6845 +vn -0.6052 -0.6595 0.4459 +vn -0.0834 -0.8088 -0.5822 +vn -0.0825 -0.8087 -0.5824 +vn -0.0841 -0.8088 -0.5821 +vn -0.0819 -0.8087 -0.5825 +vn 0.1429 -0.9262 -0.3488 +vn -0.2657 -0.9142 -0.3061 +vn -0.2650 -0.9143 -0.3063 +vn -0.2659 -0.9141 -0.3060 +vn -0.2647 -0.9144 -0.3063 +vn 0.3754 -0.9231 -0.0832 +vn 0.3763 -0.9227 -0.0834 +vn 0.3732 -0.9241 -0.0828 +vn 0.6010 -0.7867 0.1409 +vn 0.6014 -0.7864 0.1407 +vn 0.6018 -0.7862 0.1406 +vn -0.6227 -0.7434 0.2441 +vn -0.6221 -0.7439 0.2441 +vn -0.6242 -0.7421 0.2442 +vn 0.8055 -0.5402 0.2438 +vn 0.8048 -0.5409 0.2441 +vn 0.8044 -0.5415 0.2444 +vn 0.8059 -0.5396 0.2435 +vn 0.6369 -0.5632 0.5265 +vn 0.6365 -0.5634 0.5267 +vn 0.6362 -0.5636 0.5270 +vn 0.6371 -0.5630 0.5264 +vn 0.4541 -0.4540 0.7666 +vn 0.4544 -0.4539 0.7665 +vn 0.4538 -0.4541 0.7667 +vn 0.2623 -0.2654 0.9278 +vn 0.2618 -0.2654 0.9279 +vn 0.2633 -0.2652 0.9275 +vn -0.1447 -0.2618 0.9542 +vn -0.3655 -0.4409 0.8197 +vn -0.3649 -0.4410 0.8200 +vn -0.3646 -0.4411 0.8200 +vn -0.3659 -0.4409 0.8196 +vn -0.5899 -0.5313 0.6080 +vn -0.5897 -0.5314 0.6081 +vn -0.5900 -0.5313 0.6080 +vn -0.5897 -0.5314 0.6082 +vn -0.7983 -0.4830 0.3598 +vn -0.7981 -0.4833 0.3598 +vn -0.1019 -0.6262 -0.7730 +vn -0.1022 -0.6262 -0.7729 +vn -0.1025 -0.6262 -0.7729 +vn -0.1018 -0.6262 -0.7730 +vn 0.1193 -0.7968 -0.5924 +vn 0.1195 -0.7968 -0.5924 +vn 0.1201 -0.7967 -0.5923 +vn -0.2874 -0.7863 -0.5469 +vn -0.2880 -0.7862 -0.5467 +vn -0.2871 -0.7863 -0.5470 +vn -0.2883 -0.7862 -0.5466 +vn 0.3506 -0.8697 -0.3475 +vn 0.3491 -0.8702 -0.3477 +vn 0.3482 -0.8705 -0.3479 +vn 0.3515 -0.8693 -0.3473 +vn -0.4652 -0.8453 -0.2627 +vn -0.4656 -0.8452 -0.2625 +vn -0.4660 -0.8450 -0.2623 +vn 0.5785 -0.8100 -0.0963 +vn 0.5779 -0.8104 -0.0964 +vn 0.5776 -0.8106 -0.0964 +vn 0.7894 
-0.6088 0.0787 +vn 0.7898 -0.6083 0.0785 +vn 0.7901 -0.6079 0.0784 +vn 0.7891 -0.6092 0.0787 +vn -0.8120 -0.5438 0.2121 +vn -0.8122 -0.5435 0.2120 +vn -0.8117 -0.5441 0.2121 +vn -0.8124 -0.5432 0.2120 +vn 0.8173 -0.4357 0.3770 +vn 0.8179 -0.4351 0.3764 +vn 0.8184 -0.4347 0.3759 +vn 0.8169 -0.4363 0.3774 +vn 0.6487 -0.3980 0.6486 +vn 0.4608 -0.2493 0.8517 +vn 0.4614 -0.2492 0.8515 +vn 0.4619 -0.2491 0.8512 +vn 0.4605 -0.2494 0.8519 +vn 0.2651 -0.0396 0.9634 +vn 0.2659 -0.0395 0.9632 +vn 0.2663 -0.0395 0.9631 +vn 0.2647 -0.0396 0.9635 +vn 0.0626 0.1898 0.9798 +vn 0.0630 0.1897 0.9798 +vn 0.0622 0.1898 0.9799 +vn 0.0634 0.1896 0.9798 +vn -0.1433 -0.0392 0.9889 +vn -0.5787 -0.3758 0.7238 +vn -0.5789 -0.3758 0.7237 +vn -0.5786 -0.3758 0.7238 +vn -0.7870 -0.3892 0.4786 +vn -0.7871 -0.3892 0.4785 +vn -0.7870 -0.3893 0.4786 +vn -0.9424 -0.2420 0.2309 +vn -0.9423 -0.2421 0.2310 +vn -0.0581 0.7774 -0.6264 +vn -0.0581 0.7769 -0.6269 +vn -0.0581 0.7771 -0.6267 +vn -0.0582 0.7781 -0.6255 +vn -0.0583 0.7795 -0.6236 +vn -0.0584 0.7806 -0.6223 +vn -0.0584 0.7801 -0.6229 +vn -0.0583 0.7793 -0.6239 +vn -0.0582 0.7791 -0.6242 +vn -0.0581 0.7794 -0.6238 +vn -0.0581 0.7798 -0.6233 +vn -0.0581 0.7789 -0.6244 +vn -0.0581 0.7800 -0.6231 +vn -0.0580 0.7785 -0.6250 +vn -0.0582 0.7799 -0.6232 +vn -0.0584 0.7797 -0.6235 +vn -0.0586 0.7794 -0.6238 +vn -0.0587 0.7810 -0.6218 +vn -0.0582 0.7786 -0.6248 +vn -0.0582 0.7779 -0.6257 +vn -0.0581 0.7770 -0.6268 +vn 0.1019 -0.6169 -0.7804 +vn 0.1024 -0.6169 -0.7804 +vn -0.3040 -0.6089 -0.7327 +vn 0.3279 -0.7483 -0.5767 +vn 0.3295 -0.7479 -0.5763 +vn 0.3304 -0.7476 -0.5761 +vn 0.3270 -0.7485 -0.5769 +vn -0.4864 -0.7268 -0.4848 +vn -0.4859 -0.7270 -0.4851 +vn -0.4867 -0.7267 -0.4847 +vn -0.4856 -0.7271 -0.4852 +vn 0.5564 -0.7633 -0.3283 +vn 0.5573 -0.7628 -0.3280 +vn 0.5578 -0.7625 -0.3279 +vn 0.5558 -0.7637 -0.3284 +vn -0.6648 -0.7202 -0.1986 +vn -0.6651 -0.7200 -0.1984 +vn -0.6645 -0.7204 -0.1987 +vn -0.6653 -0.7198 -0.1983 +vn 0.7727 -0.6261 -0.1047 
+vn 0.7719 -0.6270 -0.1046 +vn 0.7730 -0.6256 -0.1048 +vn 0.7716 -0.6275 -0.1045 +vn -0.8273 -0.5598 0.0480 +vn -0.8272 -0.5598 0.0480 +vn -0.8270 -0.5602 0.0480 +vn -0.8275 -0.5595 0.0480 +vn 0.9338 -0.3578 0.0024 +vn 0.9337 -0.3581 0.0024 +vn 0.9339 -0.3577 0.0023 +vn -0.9490 -0.2732 0.1572 +vn -0.9491 -0.2731 0.1572 +vn -0.9489 -0.2735 0.1572 +vn -0.9492 -0.2727 0.1571 +vn 0.9968 -0.0424 -0.0682 +vn 0.9956 -0.0452 -0.0815 +vn 0.9978 -0.0351 -0.0569 +vn 0.9506 -0.2552 0.1767 +vn 0.9504 -0.2558 0.1772 +vn 0.9509 -0.2546 0.1762 +vn 0.8260 -0.3082 0.4719 +vn 0.8258 -0.3084 0.4722 +vn 0.8261 -0.3081 0.4718 +vn 0.6561 -0.2182 0.7224 +vn 0.6550 -0.2186 0.7233 +vn 0.6568 -0.2180 0.7219 +vn 0.4644 -0.0372 0.8848 +vn 0.4660 -0.0370 0.8840 +vn 0.2635 0.1869 0.9464 +vn 0.2631 0.1869 0.9465 +vn 0.2629 0.1870 0.9465 +vn 0.0554 0.4149 0.9082 +vn 0.0555 0.4149 0.9082 +vn -0.1459 0.1846 0.9719 +vn -0.3549 -0.0360 0.9342 +vn -0.3553 -0.0361 0.9341 +vn -0.5712 -0.2065 0.7944 +vn -0.5716 -0.2064 0.7941 +vn -0.5721 -0.2064 0.7938 +vn -0.7788 -0.2755 0.5636 +vn -0.7792 -0.2752 0.5632 +vn -0.7787 -0.2756 0.5637 +vn -0.9367 -0.1953 0.2906 +vn -0.9368 -0.1950 0.2904 +vn -0.9364 -0.1960 0.2912 +vn -0.9968 0.0417 0.0685 +vn -0.9968 0.0422 0.0682 +vn -0.9968 0.0414 0.0686 +vn -0.9978 0.0353 0.0568 +vn -0.3125 0.5792 0.7529 +vn -0.3126 0.5792 0.7528 +vn -0.3128 0.5792 0.7528 +vn -0.1021 0.6169 0.7804 +vn -0.1019 0.6169 0.7804 +vn -0.1016 0.6170 0.7804 +vn 0.1025 0.6262 0.7729 +vn 0.1024 0.6262 0.7729 +vn 0.3025 0.6093 0.7329 +vn 0.3017 0.6096 0.7331 +vn 0.3031 0.6092 0.7328 +vn 0.3012 0.6097 0.7332 +vn 0.5035 0.5627 0.6556 +vn 0.5041 0.5625 0.6554 +vn 0.5043 0.5623 0.6553 +vn 0.6959 0.4798 0.5344 +vn 0.6961 0.4797 0.5342 +vn 0.6958 0.4798 0.5344 +vn 0.8641 0.3523 0.3595 +vn 0.8642 0.3522 0.3594 +vn 0.9761 0.1755 0.1282 +vn 0.9761 0.1755 0.1283 +vn 0.9760 0.1758 0.1286 +vn 0.9909 -0.0313 -0.1312 +vn 0.9910 -0.0307 -0.1306 +vn 0.9910 -0.0305 -0.1305 +vn 0.8993 -0.2302 -0.3718 +vn 0.8983 
-0.2318 -0.3732 +vn 0.8999 -0.2293 -0.3710 +vn 0.8977 -0.2326 -0.3741 +vn 0.7304 -0.3925 -0.5589 +vn 0.7307 -0.3924 -0.5588 +vn 0.7302 -0.3927 -0.5591 +vn 0.7309 -0.3921 -0.5586 +vn 0.5225 -0.5085 -0.6844 +vn 0.5228 -0.5084 -0.6843 +vn 0.5229 -0.5085 -0.6841 +vn 0.5223 -0.5086 -0.6845 +vn 0.3138 -0.5789 -0.7526 +vn 0.3133 -0.5790 -0.7527 +vn -0.5032 -0.5625 -0.6560 +vn -0.5031 -0.5626 -0.6560 +vn -0.6961 -0.4795 -0.5343 +vn -0.6959 -0.4797 -0.5344 +vn -0.6959 -0.4797 -0.5345 +vn -0.8652 -0.3512 -0.3579 +vn -0.8654 -0.3509 -0.3576 +vn -0.9761 -0.1756 -0.1280 +vn -0.9910 0.0302 0.1303 +vn -0.9910 0.0304 0.1307 +vn -0.8990 0.2306 0.3724 +vn -0.8985 0.2313 0.3730 +vn -0.8983 0.2317 0.3733 +vn -0.8992 0.2303 0.3720 +vn -0.7297 0.3931 0.5595 +vn -0.7302 0.3927 0.5591 +vn -0.5239 0.5081 0.6837 +vn -0.5241 0.5080 0.6836 +vn 0.5384 -0.6560 -0.5290 +vn 0.5375 -0.6564 -0.5294 +vn 0.5369 -0.6567 -0.5296 +vn -0.6824 -0.6194 -0.3881 +vn -0.6826 -0.6193 -0.3881 +vn 0.7546 -0.5913 -0.2845 +vn -0.8424 -0.5270 -0.1120 +vn -0.8423 -0.5273 -0.1121 +vn -0.8425 -0.5270 -0.1120 +vn -0.8422 -0.5273 -0.1121 +vn 0.9237 -0.3682 -0.1054 +vn 0.9242 -0.3671 -0.1055 +vn 0.9235 -0.3688 -0.1053 +vn 0.9244 -0.3664 -0.1055 +vn -0.9568 -0.2810 0.0746 +vn -0.9568 -0.2811 0.0746 +vn -0.9567 -0.2815 0.0746 +vn -0.9569 -0.2808 0.0746 +vn 0.9956 -0.0478 -0.0811 +vn -0.9956 0.0476 0.0813 +vn 0.9423 0.2421 -0.2310 +vn 0.9424 0.2419 -0.2310 +vn 0.9977 -0.0341 -0.0578 +vn 0.9984 -0.0234 -0.0524 +vn 0.9556 -0.1808 0.2326 +vn 0.9559 -0.1804 0.2320 +vn 0.9560 -0.1801 0.2316 +vn 0.9555 -0.1811 0.2331 +vn 0.8305 -0.1693 0.5307 +vn 0.8310 -0.1691 0.5299 +vn 0.8314 -0.1689 0.5294 +vn 0.6593 -0.0324 0.7512 +vn 0.6590 -0.0325 0.7514 +vn 0.4631 0.1756 0.8688 +vn 0.4641 0.1754 0.8682 +vn 0.4648 0.1753 0.8679 +vn 0.2571 0.4087 0.8757 +vn 0.2574 0.4087 0.8756 +vn 0.2575 0.4087 0.8756 +vn 0.0442 0.6263 0.7783 +vn 0.0438 0.6263 0.7783 +vn 0.0444 0.6264 0.7783 +vn 0.0437 0.6264 0.7783 +vn -0.1522 0.4034 0.9023 +vn -0.3559 
0.1705 0.9188 +vn -0.3555 0.1705 0.9190 +vn -0.5681 -0.0308 0.8224 +vn -0.7742 -0.1511 0.6146 +vn -0.7738 -0.1513 0.6150 +vn -0.7744 -0.1510 0.6144 +vn -0.9325 -0.1382 0.3336 +vn -0.9329 -0.1378 0.3329 +vn -0.9323 -0.1386 0.3340 +vn -0.9331 -0.1375 0.3323 +vn -0.9978 0.0343 0.0575 +vn -0.9984 0.0233 0.0524 +vn -0.9432 0.3171 -0.0992 +vn -0.9436 0.3160 -0.0987 +vn -0.9429 0.3177 -0.0995 +vn -0.3295 0.7478 0.5764 +vn -0.3291 0.7479 0.5765 +vn -0.3298 0.7478 0.5763 +vn -0.3288 0.7480 0.5766 +vn -0.1194 0.7967 0.5924 +vn -0.1197 0.7967 0.5924 +vn -0.1190 0.7968 0.5924 +vn -0.1201 0.7967 0.5923 +vn 0.0842 0.8088 0.5820 +vn 0.0838 0.8088 0.5821 +vn 0.0843 0.8088 0.5820 +vn 0.0838 0.8088 0.5822 +vn 0.2857 0.7867 0.5472 +vn 0.2847 0.7868 0.5476 +vn 0.2839 0.7869 0.5479 +vn 0.2865 0.7866 0.5469 +vn 0.4877 0.7263 0.4844 +vn 0.4880 0.7262 0.4843 +vn 0.4883 0.7261 0.4841 +vn 0.4873 0.7264 0.4847 +vn 0.6825 0.6194 0.3880 +vn 0.6824 0.6194 0.3881 +vn 0.6825 0.6193 0.3880 +vn 0.8550 0.4537 0.2511 +vn 0.8545 0.4544 0.2516 +vn 0.8553 0.4532 0.2509 +vn 0.8542 0.4549 0.2519 +vn 0.9709 0.2273 0.0751 +vn 0.9711 0.2268 0.0747 +vn 0.9708 0.2277 0.0753 +vn 0.9929 -0.0466 -0.1097 +vn 0.9062 -0.2967 -0.3013 +vn 0.9063 -0.2966 -0.3012 +vn 0.9061 -0.2969 -0.3014 +vn 0.9064 -0.2964 -0.3011 +vn 0.7403 -0.5083 -0.4399 +vn 0.7410 -0.5076 -0.4396 +vn 0.7399 -0.5087 -0.4402 +vn 0.7414 -0.5072 -0.4394 +vn -0.8555 -0.4531 -0.2507 +vn -0.9711 -0.2267 -0.0748 +vn -0.9709 -0.2273 -0.0751 +vn -0.9708 -0.2276 -0.0754 +vn -0.9712 -0.2262 -0.0746 +vn -0.9920 0.0381 0.1205 +vn -0.9928 0.0467 0.1099 +vn -0.9051 0.2988 0.3025 +vn -0.9055 0.2979 0.3020 +vn -0.9049 0.2993 0.3027 +vn -0.7409 0.5077 0.4396 +vn -0.7403 0.5083 0.4399 +vn -0.7399 0.5087 0.4401 +vn -0.7413 0.5072 0.4395 +vn -0.5384 0.6560 0.5290 +vn -0.5386 0.6559 0.5289 +vn -0.5381 0.6562 0.5290 +vn -0.5390 0.6557 0.5287 +vn 0.9146 -0.3452 -0.2104 +vn -0.9644 -0.2646 -0.0056 +vn 0.9942 -0.0491 -0.0955 +vn 0.9491 0.2730 -0.1570 +vn 0.9490 0.2733 
-0.1571 +vn 0.9492 0.2727 -0.1570 +vn -0.9339 0.3576 -0.0022 +vn -0.9339 0.3575 -0.0022 +vn -0.9340 0.3571 -0.0021 +vn -0.9338 0.3577 -0.0022 +vn 0.7984 0.4827 -0.3599 +vn 0.7975 0.4839 -0.3604 +vn 0.7970 0.4844 -0.3606 +vn 0.7989 0.4820 -0.3597 +vn 0.9365 0.1957 -0.2910 +vn 0.9985 -0.0238 -0.0495 +vn 0.9988 -0.0138 -0.0460 +vn 0.9586 -0.0992 0.2667 +vn 0.8326 -0.0251 0.5533 +vn 0.8323 -0.0252 0.5538 +vn 0.8322 -0.0252 0.5538 +vn 0.6574 0.1536 0.7377 +vn 0.6574 0.1537 0.7378 +vn 0.6573 0.1537 0.7378 +vn 0.4570 0.3839 0.8023 +vn 0.4565 0.3841 0.8026 +vn 0.4574 0.3838 0.8022 +vn 0.4561 0.3842 0.8027 +vn 0.2448 0.6170 0.7479 +vn 0.0266 0.8087 0.5876 +vn 0.0258 0.8087 0.5877 +vn 0.0254 0.8087 0.5877 +vn 0.0270 0.8088 0.5875 +vn -0.1628 0.6091 0.7762 +vn -0.1637 0.6090 0.7761 +vn -0.1625 0.6091 0.7763 +vn -0.1641 0.6090 0.7760 +vn -0.3618 0.3727 0.8545 +vn -0.5700 0.1456 0.8087 +vn -0.5697 0.1456 0.8089 +vn -0.5702 0.1455 0.8085 +vn -0.7730 -0.0224 0.6340 +vn -0.7733 -0.0224 0.6336 +vn -0.9304 -0.0758 0.3586 +vn -0.9308 -0.0755 0.3578 +vn -0.9302 -0.0760 0.3590 +vn -0.9985 0.0238 0.0495 +vn -0.9988 0.0136 0.0462 +vn -0.9510 0.2545 -0.1759 +vn -0.9509 0.2547 -0.1761 +vn -0.9510 0.2543 -0.1757 +vn -0.8051 0.5405 -0.2442 +vn -0.8039 0.5419 -0.2449 +vn -0.8032 0.5428 -0.2452 +vn -0.3506 0.8696 0.3475 +vn -0.3503 0.8698 0.3476 +vn -0.3502 0.8698 0.3476 +vn -0.3507 0.8696 0.3475 +vn 0.0609 0.9404 0.3347 +vn 0.2648 0.9144 0.3063 +vn 0.2643 0.9145 0.3064 +vn 0.2638 0.9146 0.3066 +vn 0.2653 0.9143 0.3061 +vn 0.4663 0.8448 0.2624 +vn 0.4667 0.8446 0.2622 +vn 0.4669 0.8446 0.2621 +vn 0.4660 0.8450 0.2625 +vn 0.6641 0.7207 0.1989 +vn 0.6644 0.7204 0.1987 +vn 0.6645 0.7204 0.1987 +vn 0.6640 0.7208 0.1990 +vn 0.8424 0.5270 0.1121 +vn 0.9644 0.2645 0.0057 +vn 0.9929 -0.0464 -0.1096 +vn -0.9142 0.3461 0.2108 +vn -0.7546 0.5913 0.2847 +vn -0.7548 0.5910 0.2847 +vn -0.7545 0.5914 0.2847 +vn -0.5581 0.7621 0.3281 +vn -0.5585 0.7619 0.3280 +vn -0.5578 0.7623 0.3282 +vn -0.5588 0.7617 
0.3279 +vn 0.9567 0.2813 -0.0747 +vn -0.9240 0.3675 0.1054 +vn 0.8112 0.5449 -0.2121 +vn 0.8116 0.5444 -0.2121 +vn 0.8117 0.5441 -0.2121 +vn -0.7895 0.6087 -0.0786 +vn -0.7905 0.6074 -0.0784 +vn 0.6046 0.6598 -0.4463 +vn 0.6033 0.6606 -0.4468 +vn 0.6053 0.6593 -0.4460 +vn 0.6025 0.6611 -0.4471 +vn 0.7876 0.3888 -0.4780 +vn 0.7879 0.3886 -0.4778 +vn 0.7870 0.3895 -0.4784 +vn 0.9327 0.1381 -0.3331 +vn 0.9329 0.1379 -0.3328 +vn 0.9988 -0.0127 -0.0480 +vn 0.9991 -0.0021 -0.0429 +vn 0.9599 -0.0147 0.2801 +vn 0.8320 0.1191 0.5418 +vn 0.6521 0.3364 0.6794 +vn 0.6520 0.3365 0.6795 +vn 0.6519 0.3365 0.6795 +vn 0.6522 0.3364 0.6793 +vn 0.4462 0.5793 0.6821 +vn 0.4465 0.5793 0.6820 +vn 0.2269 0.7967 0.5601 +vn 0.2271 0.7967 0.5601 +vn 0.2274 0.7966 0.5601 +vn 0.2271 0.7967 0.5602 +vn 0.0015 0.9403 0.3403 +vn 0.0007 0.9403 0.3404 +vn 0.0001 0.9402 0.3405 +vn 0.0021 0.9404 0.3401 +vn -0.1800 0.7866 0.5907 +vn -0.1799 0.7866 0.5907 +vn -0.3743 0.5624 0.7373 +vn -0.3734 0.5625 0.7376 +vn -0.3749 0.5622 0.7371 +vn -0.3728 0.5627 0.7379 +vn -0.5758 0.3178 0.7533 +vn -0.5754 0.3178 0.7536 +vn -0.5761 0.3177 0.7531 +vn -0.5750 0.3179 0.7538 +vn -0.7739 0.1063 0.6244 +vn -0.7740 0.1063 0.6243 +vn -0.7741 0.1062 0.6241 +vn -0.7736 0.1064 0.6247 +vn -0.9294 -0.0112 0.3689 +vn -0.9293 -0.0112 0.3692 +vn -0.9988 0.0123 0.0483 +vn -0.9989 0.0021 0.0460 +vn -0.9559 0.1802 -0.2320 +vn -0.9556 0.1806 -0.2327 +vn -0.9555 0.1809 -0.2331 +vn -0.8160 0.4370 -0.3784 +vn -0.8166 0.4364 -0.3778 +vn -0.8157 0.4373 -0.3787 +vn -0.8169 0.4361 -0.3776 +vn -0.6209 0.6991 -0.3546 +vn -0.6206 0.6993 -0.3547 +vn -0.6211 0.6990 -0.3546 +vn -0.3754 0.9231 0.0832 +vn -0.3753 0.9232 0.0832 +vn -0.1690 0.9833 0.0671 +vn 0.0336 0.9982 0.0490 +vn 0.0340 0.9982 0.0489 +vn 0.2383 0.9708 0.0283 +vn 0.2388 0.9707 0.0282 +vn 0.2392 0.9706 0.0281 +vn 0.4421 0.8969 0.0053 +vn 0.4422 0.8969 0.0053 +vn 0.4418 0.8971 0.0054 +vn 0.4423 0.8969 0.0053 +vn 0.6446 0.7642 -0.0204 +vn 0.6440 0.7648 -0.0202 +vn 0.6437 0.7650 
-0.0201 +vn 0.6449 0.7640 -0.0205 +vn 0.8271 0.5601 -0.0480 +vn 0.8273 0.5597 -0.0480 +vn 0.8270 0.5602 -0.0480 +vn 0.8275 0.5595 -0.0480 +vn -0.7719 0.6271 0.1047 +vn -0.7719 0.6270 0.1047 +vn -0.7716 0.6275 0.1046 +vn -0.5802 0.8088 0.0962 +vn -0.5793 0.8094 0.0963 +vn -0.5807 0.8084 0.0962 +vn -0.5788 0.8097 0.0963 +vn 0.6241 0.7423 -0.2441 +vn 0.6238 0.7425 -0.2441 +vn 0.6237 0.7426 -0.2441 +vn 0.6242 0.7422 -0.2441 +vn -0.6005 0.7871 -0.1410 +vn 0.3970 0.7736 -0.4939 +vn 0.3968 0.7737 -0.4940 +vn 0.5881 0.5324 -0.6089 +vn 0.5891 0.5319 -0.6083 +vn 0.5875 0.5327 -0.6092 +vn 0.5897 0.5317 -0.6079 +vn 0.7795 0.2750 -0.5628 +vn 0.7794 0.2751 -0.5629 +vn 0.7791 0.2752 -0.5632 +vn 0.7797 0.2748 -0.5626 +vn 0.9304 0.0755 -0.3586 +vn 0.9305 0.0752 -0.3585 +vn 0.9988 0.0030 -0.0491 +vn 0.9990 0.0038 -0.0434 +vn 0.9594 0.0698 0.2734 +vn 0.8284 0.2604 0.4959 +vn 0.8278 0.2609 0.4967 +vn 0.8287 0.2602 0.4955 +vn 0.8275 0.2611 0.4970 +vn 0.6417 0.5079 0.5747 +vn 0.6420 0.5077 0.5744 +vn 0.6412 0.5082 0.5749 +vn 0.4303 0.7479 0.5055 +vn 0.4301 0.7479 0.5055 +vn 0.4298 0.7480 0.5057 +vn 0.2055 0.9262 0.3161 +vn 0.2066 0.9261 0.3158 +vn 0.2072 0.9259 0.3158 +vn 0.2049 0.9263 0.3164 +vn -0.0259 0.9982 0.0545 +vn -0.0247 0.9982 0.0542 +vn -0.0238 0.9983 0.0541 +vn -0.0267 0.9981 0.0546 +vn -0.2021 0.9147 0.3500 +vn -0.2016 0.9148 0.3500 +vn -0.2012 0.9149 0.3500 +vn -0.2024 0.9146 0.3501 +vn -0.3886 0.7266 0.5667 +vn -0.3900 0.7261 0.5663 +vn -0.3877 0.7268 0.5669 +vn -0.3908 0.7258 0.5661 +vn -0.5842 0.4801 0.6544 +vn -0.5848 0.4799 0.6540 +vn -0.5837 0.4803 0.6546 +vn -0.5853 0.4797 0.6537 +vn -0.9300 0.0533 0.3637 +vn -0.9299 0.0533 0.3639 +vn -0.9297 0.0534 0.3644 +vn -0.9992 -0.0038 0.0406 +vn -0.9586 0.0994 -0.2667 +vn -0.9587 0.0993 -0.2664 +vn -0.9589 0.0991 -0.2661 +vn -0.8257 0.3084 -0.4724 +vn -0.8256 0.3085 -0.4725 +vn -0.6372 0.5630 -0.5264 +vn -0.6372 0.5629 -0.5263 +vn -0.4231 0.7970 -0.4310 +vn -0.4228 0.7971 -0.4311 +vn -0.4224 0.7972 -0.4313 +vn -0.4235 0.7969 
-0.4309 +vn -0.3998 0.8973 -0.1872 +vn -0.4004 0.8971 -0.1870 +vn -0.1958 0.9555 -0.2208 +vn -0.1954 0.9555 -0.2209 +vn -0.1953 0.9555 -0.2209 +vn -0.1959 0.9555 -0.2208 +vn 0.0059 0.9699 -0.2433 +vn 0.0064 0.9699 -0.2434 +vn 0.2107 0.9435 -0.2558 +vn 0.2113 0.9433 -0.2559 +vn 0.2117 0.9432 -0.2560 +vn 0.4180 0.8713 -0.2572 +vn 0.4189 0.8708 -0.2573 +vn 0.1857 0.8384 -0.5124 +vn 0.1859 0.8384 -0.5124 +vn 0.1854 0.8385 -0.5125 +vn 0.3790 0.6233 -0.6840 +vn 0.3792 0.6232 -0.6839 +vn 0.3789 0.6233 -0.6841 +vn 0.3793 0.6232 -0.6839 +vn 0.5785 0.3759 -0.7240 +vn 0.5787 0.3758 -0.7238 +vn 0.5780 0.3760 -0.7242 +vn 0.7738 0.1512 -0.6152 +vn 0.7745 0.1508 -0.6143 +vn 0.7749 0.1506 -0.6138 +vn 0.7733 0.1514 -0.6157 +vn 0.9986 0.0204 -0.0483 +vn 0.9570 0.1526 0.2466 +vn 0.9569 0.1528 0.2470 +vn 0.9569 0.1528 0.2471 +vn 0.9570 0.1526 0.2465 +vn 0.8199 0.3938 0.4156 +vn 0.8198 0.3939 0.4157 +vn 0.8199 0.3938 0.4155 +vn 0.6273 0.6559 0.4200 +vn 0.6274 0.6558 0.4199 +vn 0.6277 0.6557 0.4197 +vn 0.6271 0.6560 0.4200 +vn 0.4085 0.8698 0.2767 +vn 0.4078 0.8701 0.2770 +vn 0.4072 0.8703 0.2772 +vn 0.4090 0.8696 0.2765 +vn 0.1802 0.9830 0.0343 +vn 0.1791 0.9832 0.0345 +vn 0.1784 0.9834 0.0347 +vn 0.1809 0.9829 0.0341 +vn -0.0508 0.9699 -0.2380 +vn -0.0510 0.9699 -0.2380 +vn -0.0513 0.9699 -0.2379 +vn -0.2277 0.9711 0.0719 +vn -0.2282 0.9709 0.0720 +vn -0.2287 0.9708 0.0721 +vn -0.2272 0.9712 0.0718 +vn -0.4089 0.8452 0.3442 +vn -0.4088 0.8452 0.3443 +vn -0.5996 0.6188 0.5076 +vn -0.5984 0.6194 0.5081 +vn -0.6004 0.6183 0.5072 +vn -0.5977 0.6199 0.5084 +vn -0.7839 0.3512 0.5120 +vn -0.7841 0.3509 0.5118 +vn -0.7836 0.3515 0.5123 +vn -0.9319 0.1165 0.3435 +vn -0.9317 0.1169 0.3440 +vn -0.9320 0.1164 0.3433 +vn -0.9986 -0.0203 0.0484 +vn -0.9600 0.0147 -0.2796 +vn -0.9601 0.0147 -0.2794 +vn -0.6487 0.3981 -0.6487 +vn -0.4414 0.6421 -0.6268 +vn -0.4417 0.6421 -0.6266 +vn -0.4417 0.6420 -0.6267 +vn -0.4413 0.6420 -0.6270 +vn -0.2203 0.8488 -0.4807 +vn -0.2206 0.8487 -0.4806 +vn -0.0176 
0.8617 -0.5071 +vn -0.0172 0.8616 -0.5072 +vn -0.0172 0.8616 -0.5073 +vn 0.1663 0.6753 -0.7185 +vn 0.1669 0.6753 -0.7184 +vn 0.1662 0.6754 -0.7185 +vn 0.3669 0.4405 -0.8194 +vn 0.3670 0.4405 -0.8193 +vn 0.5719 0.2063 -0.7940 +vn 0.5716 0.2063 -0.7942 +vn 0.7713 0.0227 -0.6360 +vn 0.7718 0.0226 -0.6354 +vn 0.9299 -0.0533 -0.3639 +vn 0.9297 -0.0534 -0.3644 +vn 0.9986 0.0203 -0.0485 +vn 0.9980 0.0289 -0.0569 +vn 0.9525 0.2304 0.1991 +vn 0.9526 0.2301 0.1988 +vn 0.9523 0.2310 0.1996 +vn 0.8085 0.5088 0.2956 +vn 0.6091 0.7622 0.2191 +vn 0.6096 0.7619 0.2189 +vn 0.6100 0.7616 0.2188 +vn 0.6088 0.7624 0.2193 +vn 0.3827 0.9238 0.0124 +vn 0.3829 0.9237 0.0123 +vn 0.3832 0.9236 0.0123 +vn 0.3825 0.9239 0.0124 +vn 0.1513 0.9555 -0.2532 +vn 0.1516 0.9555 -0.2533 +vn 0.1512 0.9555 -0.2532 +vn -0.0758 0.8616 -0.5019 +vn -0.0767 0.8616 -0.5018 +vn -0.0771 0.8616 -0.5017 +vn -0.0754 0.8616 -0.5020 +vn -0.2556 0.9432 -0.2123 +vn -0.2563 0.9430 -0.2121 +vn -0.2566 0.9430 -0.2120 +vn -0.2553 0.9433 -0.2124 +vn -0.4335 0.8970 0.0872 +vn -0.4334 0.8970 0.0872 +vn -0.6173 0.7195 0.3182 +vn -0.6177 0.7192 0.3181 +vn -0.6169 0.7198 0.3184 +vn -0.6179 0.7191 0.3181 +vn -0.7937 0.4537 0.4051 +vn -0.7935 0.4541 0.4052 +vn -0.7934 0.4541 0.4053 +vn -0.9354 0.1757 0.3069 +vn -0.9986 -0.0204 0.0482 +vn -0.9980 -0.0293 0.0568 +vn -0.9594 -0.0698 -0.2732 +vn -0.9593 -0.0699 -0.2737 +vn -0.9592 -0.0699 -0.2738 +vn -0.9594 -0.0698 -0.2733 +vn -0.8333 0.0252 -0.5523 +vn -0.8334 0.0251 -0.5522 +vn -0.6554 0.2184 -0.7230 +vn -0.6552 0.2185 -0.7232 +vn -0.6558 0.2183 -0.7227 +vn -0.6549 0.2186 -0.7234 +vn -0.4542 0.4540 -0.7666 +vn -0.2395 0.6839 -0.6891 +vn -0.2394 0.6839 -0.6892 +vn -0.2399 0.6838 -0.6891 +vn -0.2390 0.6840 -0.6892 +vn -0.0383 0.6941 -0.7188 +vn -0.0372 0.6942 -0.7188 +vn -0.0387 0.6941 -0.7188 +vn 0.1532 0.4775 -0.8652 +vn 0.1527 0.4775 -0.8653 +vn 0.1538 0.4776 -0.8650 +vn 0.1522 0.4776 -0.8653 +vn 0.3584 0.2420 -0.9017 +vn 0.3572 0.2420 -0.9021 +vn 0.3591 0.2420 -0.9014 +vn 0.3566 
0.2420 -0.9024 +vn 0.7732 -0.1065 -0.6251 +vn 0.7726 -0.1067 -0.6259 +vn 0.7735 -0.1064 -0.6249 +vn 0.9318 -0.1167 -0.3437 +vn 0.9319 -0.1166 -0.3435 +vn 0.9980 0.0308 -0.0554 +vn 0.9971 0.0380 -0.0655 +vn 0.9459 0.2979 0.1289 +vn 0.9457 0.2982 0.1291 +vn 0.9457 0.2983 0.1291 +vn 0.7942 0.5913 0.1399 +vn 0.5881 0.8087 -0.0128 +vn 0.5879 0.8088 -0.0128 +vn 0.5884 0.8085 -0.0129 +vn 0.3579 0.8974 -0.2580 +vn 0.3582 0.8973 -0.2579 +vn 0.3583 0.8973 -0.2580 +vn 0.1275 0.8487 -0.5132 +vn 0.1270 0.8488 -0.5133 +vn 0.1277 0.8487 -0.5132 +vn 0.1268 0.8488 -0.5133 +vn -0.0967 0.6943 -0.7132 +vn -0.0968 0.6943 -0.7132 +vn -0.0967 0.6942 -0.7132 +vn -0.2795 0.8380 -0.4687 +vn -0.2782 0.8382 -0.4691 +vn -0.2775 0.8383 -0.4693 +vn -0.2803 0.8378 -0.4684 +vn -0.4576 0.8717 -0.1755 +vn -0.4574 0.8718 -0.1756 +vn -0.4569 0.8720 -0.1757 +vn -0.4579 0.8715 -0.1754 +vn -0.8067 0.5274 0.2665 +vn -0.8066 0.5276 0.2665 +vn -0.8068 0.5274 0.2665 +vn -0.9401 0.2276 0.2539 +vn -0.9403 0.2270 0.2535 +vn -0.9405 0.2266 0.2533 +vn -0.9399 0.2280 0.2541 +vn -0.9980 -0.0310 0.0554 +vn -0.9569 -0.1528 -0.2471 +vn -0.9569 -0.1528 -0.2469 +vn -0.9570 -0.1525 -0.2466 +vn -0.8326 -0.1192 -0.5408 +vn -0.6582 0.0324 -0.7522 +vn -0.6585 0.0324 -0.7519 +vn -0.6590 0.0323 -0.7515 +vn -0.4621 0.2491 -0.8511 +vn -0.4626 0.2490 -0.8509 +vn -0.4618 0.2492 -0.8513 +vn -0.4630 0.2489 -0.8507 +vn -0.2538 0.4834 -0.8378 +vn -0.0522 0.4908 -0.8697 +vn -0.0514 0.4908 -0.8697 +vn -0.0528 0.4907 -0.8697 +vn -0.0509 0.4909 -0.8698 +vn 0.1450 0.2620 -0.9541 +vn 0.1462 0.2620 -0.9539 +vn 0.1442 0.2619 -0.9543 +vn 0.3552 0.0362 -0.9341 +vn 0.3535 0.0360 -0.9347 +vn 0.7773 -0.2327 -0.5845 +vn 0.7774 -0.2327 -0.5844 +vn 0.7770 -0.2329 -0.5848 +vn 0.9354 -0.1757 -0.3069 +vn 0.9355 -0.1755 -0.3067 +vn 0.9373 0.3465 0.0379 +vn 0.9370 0.3471 0.0381 +vn 0.9370 0.3473 0.0382 +vn 0.7777 0.6274 -0.0400 +vn 0.7776 0.6275 -0.0400 +vn 0.7774 0.6278 -0.0400 +vn 0.7779 0.6271 -0.0400 +vn 0.5660 0.7858 -0.2495 +vn 0.5658 0.7859 -0.2495 
+vn 0.5663 0.7856 -0.2495 +vn 0.5656 0.7860 -0.2496 +vn 0.3343 0.7975 -0.5023 +vn 0.3350 0.7973 -0.5021 +vn 0.3339 0.7976 -0.5024 +vn 0.3355 0.7971 -0.5020 +vn 0.1087 0.6838 -0.7216 +vn 0.1084 0.6838 -0.7216 +vn 0.1091 0.6838 -0.7215 +vn 0.1082 0.6838 -0.7216 +vn -0.1105 0.4907 -0.8643 +vn -0.1108 0.4907 -0.8642 +vn -0.2972 0.6754 -0.6749 +vn -0.2978 0.6753 -0.6747 +vn -0.2982 0.6753 -0.6746 +vn -0.2968 0.6754 -0.6751 +vn -0.4799 0.7743 -0.4125 +vn -0.4809 0.7739 -0.4121 +vn -0.4815 0.7737 -0.4118 +vn -0.4793 0.7745 -0.4128 +vn -0.6577 0.7429 -0.1245 +vn -0.6576 0.7430 -0.1246 +vn -0.6579 0.7428 -0.1244 +vn -0.8219 0.5597 0.1060 +vn -0.8217 0.5599 0.1059 +vn -0.8216 0.5602 0.1059 +vn -0.8221 0.5593 0.1060 +vn -0.9464 0.2651 0.1845 +vn -0.9463 0.2653 0.1846 +vn -0.9960 -0.0464 0.0758 +vn -0.9523 -0.2309 -0.1993 +vn -0.9525 -0.2306 -0.1988 +vn -0.9526 -0.2303 -0.1986 +vn -0.9522 -0.2311 -0.1996 +vn -0.8288 -0.2602 -0.4953 +vn -0.6574 -0.1539 -0.7377 +vn -0.6578 -0.1538 -0.7374 +vn -0.6573 -0.1539 -0.7377 +vn -0.6578 -0.1538 -0.7373 +vn -0.4638 0.0372 -0.8852 +vn -0.4659 0.0369 -0.8840 +vn -0.4631 0.0373 -0.8855 +vn -0.2628 0.2651 -0.9277 +vn -0.2630 0.2651 -0.9277 +vn -0.2624 0.2652 -0.9278 +vn -0.2633 0.2650 -0.9276 +vn -0.0591 0.2692 -0.9613 +vn -0.0595 0.2691 -0.9613 +vn -0.0596 0.2691 -0.9613 +vn 0.1422 0.0389 -0.9891 +vn 0.1416 0.0389 -0.9892 +vn 0.3578 -0.1705 -0.9181 +vn 0.3574 -0.1705 -0.9183 +vn 0.5755 -0.3180 -0.7534 +vn 0.5749 -0.3182 -0.7538 +vn 0.7841 -0.3509 -0.5119 +vn 0.7837 -0.3513 -0.5123 +vn 0.7843 -0.3508 -0.5117 +vn 0.7836 -0.3515 -0.5124 +vn 0.9405 -0.2265 -0.2532 +vn 0.9408 -0.2258 -0.2528 +vn 0.9405 -0.2267 -0.2532 +vn 0.9960 0.0443 -0.0770 +vn 0.9948 0.0470 -0.0904 +vn 0.9271 0.3687 -0.0675 +vn 0.9271 0.3686 -0.0675 +vn 0.7606 0.6095 -0.2235 +vn 0.7605 0.6097 -0.2236 +vn 0.7602 0.6099 -0.2236 +vn 0.7608 0.6093 -0.2236 +vn 0.5461 0.6980 -0.4632 +vn 0.5462 0.6979 -0.4632 +vn 0.5460 0.6980 -0.4632 +vn 0.3150 0.6428 -0.6983 +vn 0.3151 0.6428 
-0.6982 +vn 0.3146 0.6429 -0.6984 +vn 0.3155 0.6427 -0.6981 +vn 0.0948 0.4834 -0.8703 +vn 0.0947 0.4834 -0.8703 +vn 0.0950 0.4834 -0.8702 +vn -0.1196 0.2693 -0.9556 +vn -0.1192 0.2694 -0.9556 +vn -0.3112 0.4775 -0.8217 +vn -0.3118 0.4774 -0.8215 +vn -0.3107 0.4776 -0.8218 +vn -0.3123 0.4773 -0.8214 +vn -0.4989 0.6234 -0.6021 +vn -0.4993 0.6233 -0.6019 +vn -0.4992 0.6233 -0.6019 +vn -0.4988 0.6234 -0.6021 +vn -0.6760 0.6604 -0.3270 +vn -0.6758 0.6605 -0.3271 +vn -0.6758 0.6606 -0.3271 +vn -0.8375 0.5433 -0.0577 +vn -0.8375 0.5434 -0.0576 +vn -0.9540 0.2812 0.1036 +vn -0.9540 0.2813 0.1036 +vn -0.9539 0.2817 0.1036 +vn -0.9542 0.2807 0.1037 +vn -0.9452 -0.2994 -0.1302 +vn -0.9455 -0.2986 -0.1296 +vn -0.9457 -0.2981 -0.1293 +vn -0.9450 -0.3000 -0.1305 +vn -0.8202 -0.3935 -0.4151 +vn -0.8208 -0.3930 -0.4146 +vn -0.8211 -0.3927 -0.4142 +vn -0.8199 -0.3939 -0.4154 +vn -0.6520 -0.3364 -0.6795 +vn -0.6521 -0.3363 -0.6794 +vn -0.4619 -0.1758 -0.8693 +vn -0.4614 -0.1759 -0.8696 +vn 0.1436 -0.1847 -0.9722 +vn 0.3633 -0.3725 -0.8539 +vn 0.3622 -0.3726 -0.8544 +vn 0.3618 -0.3727 -0.8545 +vn 0.3638 -0.3725 -0.8538 +vn 0.5843 -0.4800 -0.6544 +vn 0.5850 -0.4797 -0.6540 +vn 0.5841 -0.4800 -0.6546 +vn 0.7935 -0.4540 -0.4053 +vn 0.7937 -0.4537 -0.4052 +vn 0.7934 -0.4540 -0.4054 +vn 0.7937 -0.4537 -0.4053 +vn 0.9937 0.0531 -0.0987 +vn 0.9942 0.0462 -0.0974 +vn 0.9170 0.3582 -0.1753 +vn 0.9173 0.3576 -0.1752 +vn 0.9174 0.3573 -0.1752 +vn 0.9170 0.3584 -0.1753 +vn 0.7449 0.5417 -0.3895 +vn 0.7447 0.5419 -0.3896 +vn 0.5303 0.5623 -0.6345 +vn 0.5306 0.5622 -0.6344 +vn 0.5307 0.5621 -0.6343 +vn 0.3024 0.4543 -0.8380 +vn 0.3019 0.4543 -0.8381 +vn 0.3025 0.4544 -0.8379 +vn 0.3019 0.4544 -0.8381 +vn 0.0865 0.2652 -0.9603 +vn 0.0864 0.2652 -0.9603 +vn 0.0870 0.2653 -0.9602 +vn -0.1227 0.0403 -0.9916 +vn -0.1223 0.0403 -0.9917 +vn -0.1232 0.0402 -0.9916 +vn -0.3197 0.2619 -0.9106 +vn -0.3197 0.2620 -0.9106 +vn -0.3200 0.2619 -0.9105 +vn -0.3194 0.2620 -0.9107 +vn -0.5118 0.4408 -0.7374 +vn 
-0.5122 0.4406 -0.7372 +vn -0.5117 0.4408 -0.7374 +vn -0.6912 0.5320 -0.4891 +vn -0.6910 0.5322 -0.4892 +vn -0.8514 0.4825 -0.2056 +vn -0.8514 0.4826 -0.2056 +vn -0.9618 0.2728 0.0217 +vn -0.9618 0.2730 0.0216 +vn -0.9617 0.2734 0.0216 +vn -0.9618 0.2727 0.0217 +vn -0.9948 -0.0478 0.0904 +vn -0.9371 -0.3469 -0.0382 +vn -0.9366 -0.3481 -0.0385 +vn -0.9363 -0.3490 -0.0387 +vn -0.9374 -0.3463 -0.0380 +vn -0.8095 -0.5077 -0.2949 +vn -0.8090 -0.5083 -0.2953 +vn -0.8085 -0.5088 -0.2957 +vn -0.8099 -0.5072 -0.2946 +vn -0.6424 -0.5075 -0.5742 +vn -0.6425 -0.5074 -0.5742 +vn -0.6420 -0.5077 -0.5745 +vn -0.4556 -0.3841 -0.8030 +vn -0.4561 -0.3839 -0.8029 +vn -0.4561 -0.3839 -0.8028 +vn -0.2650 -0.1869 -0.9459 +vn -0.2643 -0.1870 -0.9461 +vn -0.2656 -0.1869 -0.9458 +vn -0.0615 -0.1899 -0.9799 +vn -0.0619 -0.1898 -0.9799 +vn -0.0622 -0.1898 -0.9799 +vn 0.1500 -0.4035 -0.9026 +vn 0.3732 -0.5626 -0.7377 +vn 0.5992 -0.6191 -0.5077 +vn 0.5999 -0.6187 -0.5073 +vn 0.6004 -0.6184 -0.5070 +vn 0.8067 -0.5274 -0.2664 +vn 0.9548 -0.2787 -0.1036 +vn 0.9549 -0.2781 -0.1037 +vn 0.9935 0.0458 -0.1042 +vn 0.9923 0.0411 -0.1170 +vn 0.9082 0.3177 -0.2725 +vn 0.9084 0.3172 -0.2723 +vn 0.7326 0.4364 -0.5224 +vn 0.7328 0.4362 -0.5222 +vn 0.7329 0.4361 -0.5222 +vn 0.7325 0.4365 -0.5225 +vn 0.5188 0.3975 -0.7569 +vn 0.5192 0.3974 -0.7567 +vn 0.5187 0.3975 -0.7570 +vn 0.5191 0.3974 -0.7567 +vn 0.2952 0.2492 -0.9224 +vn 0.2949 0.2492 -0.9225 +vn 0.2956 0.2492 -0.9222 +vn 0.0842 0.0396 -0.9957 +vn 0.0840 0.0395 -0.9957 +vn 0.0838 0.0395 -0.9957 +vn -0.0419 0.9991 -0.0075 +vn -0.0402 0.9992 -0.0080 +vn -0.0384 0.9993 0.0035 +vn -0.0423 0.9991 -0.0078 +vn -0.5186 0.2420 -0.8201 +vn -0.5192 0.2418 -0.8197 +vn -0.5181 0.2421 -0.8203 +vn -0.5196 0.2417 -0.8195 +vn -0.7020 0.3762 -0.6047 +vn -0.7018 0.3764 -0.6048 +vn -0.7018 0.3764 -0.6049 +vn -0.8624 0.3889 -0.3241 +vn -0.8623 0.3890 -0.3241 +vn -0.8621 0.3893 -0.3244 +vn -0.9685 0.2434 -0.0532 +vn -0.9683 0.2439 -0.0535 +vn -0.9686 0.2429 -0.0531 +vn 
-0.9934 -0.0464 0.1045 +vn -0.9935 -0.0457 0.1043 +vn -0.9270 -0.3690 0.0675 +vn -0.9273 -0.3683 0.0675 +vn -0.9275 -0.3677 0.0675 +vn -0.9267 -0.3696 0.0675 +vn -0.7954 -0.5899 -0.1391 +vn -0.7955 -0.5897 -0.1390 +vn -0.6278 -0.6556 -0.4197 +vn -0.6282 -0.6554 -0.4194 +vn -0.6277 -0.6556 -0.4198 +vn -0.6284 -0.6552 -0.4193 +vn -0.4452 -0.5796 -0.6825 +vn -0.4460 -0.5793 -0.6822 +vn -0.4465 -0.5792 -0.6820 +vn -0.2571 -0.4086 -0.8758 +vn -0.2572 -0.4086 -0.8757 +vn -0.2575 -0.4086 -0.8757 +vn -0.2571 -0.4086 -0.8757 +vn -0.0555 -0.4148 -0.9082 +vn -0.0554 -0.4148 -0.9082 +vn -0.0556 -0.4148 -0.9082 +vn 0.1625 -0.6092 -0.7762 +vn 0.1623 -0.6092 -0.7763 +vn 0.1622 -0.6092 -0.7763 +vn 0.3890 -0.7266 -0.5663 +vn 0.3883 -0.7268 -0.5665 +vn 0.3878 -0.7270 -0.5667 +vn 0.3895 -0.7264 -0.5662 +vn 0.6175 -0.7193 -0.3182 +vn 0.6173 -0.7195 -0.3183 +vn 0.6169 -0.7197 -0.3184 +vn 0.6179 -0.7191 -0.3181 +vn 0.8214 -0.5604 -0.1059 +vn 0.8212 -0.5608 -0.1058 +vn 0.8216 -0.5602 -0.1059 +vn 0.8210 -0.5610 -0.1058 +vn 0.9626 -0.2701 -0.0222 +vn 0.9625 -0.2704 -0.0222 +vn 0.9624 -0.2706 -0.0222 +vn 0.9922 0.0416 -0.1175 +vn 0.9913 0.0340 -0.1275 +vn 0.9008 0.2562 -0.3506 +vn 0.9008 0.2562 -0.3507 +vn 0.9006 0.2565 -0.3508 +vn 0.9009 0.2560 -0.3504 +vn 0.7237 0.3085 -0.6173 +vn 0.7235 0.3086 -0.6175 +vn 0.7232 0.3088 -0.6177 +vn 0.5112 0.2183 -0.8313 +vn 0.5099 0.2184 -0.8321 +vn 0.5118 0.2182 -0.8309 +vn 0.2924 0.0372 -0.9556 +vn 0.1635 0.9862 -0.0246 +vn 0.1623 0.9864 -0.0246 +vn 0.1627 0.9866 -0.0150 +vn -0.2435 0.9699 0.0094 +vn -0.2451 0.9695 0.0092 +vn -0.2446 0.9694 0.0225 +vn -0.5211 0.0362 -0.8527 +vn -0.5215 0.0361 -0.8525 +vn -0.7098 0.2059 -0.6736 +vn -0.7089 0.2063 -0.6745 +vn -0.7104 0.2058 -0.6731 +vn -0.7084 0.2066 -0.6750 +vn -0.8700 0.2752 -0.4091 +vn -0.8700 0.2753 -0.4091 +vn -0.9743 0.1951 -0.1122 +vn -0.9741 0.1960 -0.1129 +vn -0.9739 0.1965 -0.1133 +vn -0.9167 -0.3589 0.1756 +vn -0.9168 -0.3587 0.1755 +vn -0.7781 -0.6269 0.0399 +vn -0.7786 -0.6263 0.0399 +vn 
-0.7788 -0.6260 0.0399 +vn -0.7779 -0.6271 0.0400 +vn -0.6083 -0.7629 -0.2192 +vn -0.6087 -0.7626 -0.2190 +vn -0.6087 -0.7625 -0.2190 +vn -0.4301 -0.7479 -0.5056 +vn -0.4302 -0.7479 -0.5055 +vn -0.4298 -0.7480 -0.5057 +vn -0.2456 -0.6170 -0.7477 +vn -0.2455 -0.6170 -0.7477 +vn -0.2456 -0.6170 -0.7476 +vn -0.0436 -0.6263 -0.7783 +vn -0.0433 -0.6263 -0.7783 +vn -0.0429 -0.6264 -0.7784 +vn -0.0437 -0.6263 -0.7784 +vn 0.1799 -0.7865 -0.5907 +vn 0.1795 -0.7866 -0.5908 +vn 0.1794 -0.7866 -0.5908 +vn 0.1800 -0.7865 -0.5907 +vn 0.4090 -0.8451 -0.3443 +vn 0.4097 -0.8448 -0.3442 +vn 0.4101 -0.8446 -0.3441 +vn 0.4086 -0.8453 -0.3444 +vn 0.6370 -0.7645 -0.0993 +vn 0.6372 -0.7643 -0.0993 +vn 0.6368 -0.7646 -0.0992 +vn 0.8361 -0.5454 0.0585 +vn 0.9693 -0.2405 0.0515 +vn 0.9914 0.0332 -0.1269 +vn 0.9904 0.0244 -0.1360 +vn 0.8958 0.1808 -0.4060 +vn 0.8963 0.1803 -0.4052 +vn 0.8955 0.1812 -0.4065 +vn 0.7184 0.1693 -0.6747 +vn 0.7192 0.1690 -0.6740 +vn 0.7197 0.1687 -0.6735 +vn 0.7179 0.1695 -0.6752 +vn 0.5080 0.0326 -0.8608 +vn 0.5074 0.0326 -0.8611 +vn 0.5073 0.0325 -0.8611 +vn 0.3702 0.9281 -0.0409 +vn 0.3690 0.9285 -0.0410 +vn 0.3704 0.9280 -0.0411 +vn -0.0000 0.9993 0.0386 +vn -0.4471 0.8941 0.0268 +vn -0.4473 0.8940 0.0268 +vn -0.7119 0.0309 -0.7016 +vn -0.7125 0.0308 -0.7010 +vn -0.7115 0.0310 -0.7020 +vn -0.7130 0.0307 -0.7005 +vn -0.8741 0.1513 -0.4617 +vn -0.8737 0.1515 -0.4622 +vn -0.9782 0.1384 -0.1551 +vn -0.9783 0.1380 -0.1544 +vn -0.9780 0.1388 -0.1555 +vn -0.9904 -0.0244 0.1361 +vn -0.9089 -0.3164 0.2717 +vn -0.9082 -0.3179 0.2723 +vn -0.9093 -0.3154 0.2714 +vn -0.9078 -0.3188 0.2727 +vn -0.7610 -0.6090 0.2237 +vn -0.7614 -0.6085 0.2237 +vn -0.5876 -0.8091 0.0126 +vn -0.5875 -0.8092 0.0126 +vn -0.5878 -0.8089 0.0127 +vn -0.5872 -0.8094 0.0125 +vn -0.4083 -0.8699 -0.2768 +vn -0.4086 -0.8698 -0.2767 +vn -0.4078 -0.8701 -0.2769 +vn -0.4090 -0.8696 -0.2765 +vn -0.2277 -0.7966 -0.5600 +vn -0.2283 -0.7965 -0.5599 +vn -0.2274 -0.7966 -0.5601 +vn -0.2284 -0.7965 -0.5598 +vn 
-0.0254 -0.8087 -0.5877 +vn -0.0254 -0.8086 -0.5878 +vn -0.0251 -0.8086 -0.5878 +vn 0.2021 -0.9147 -0.3500 +vn 0.4338 -0.8968 -0.0872 +vn 0.4342 -0.8966 -0.0873 +vn 0.4336 -0.8969 -0.0873 +vn 0.4345 -0.8964 -0.0874 +vn 0.6576 -0.7430 0.1246 +vn 0.6578 -0.7428 0.1245 +vn 0.6572 -0.7433 0.1248 +vn 0.6581 -0.7426 0.1244 +vn 0.8501 -0.4844 0.2067 +vn 0.8504 -0.4840 0.2065 +vn 0.8503 -0.4840 0.2065 +vn 0.9745 -0.1947 0.1116 +vn 0.9739 -0.1969 0.1134 +vn 0.9748 -0.1938 0.1108 +vn 0.9906 0.0238 -0.1351 +vn 0.9899 0.0136 -0.1409 +vn 0.8933 0.0988 -0.4384 +vn 0.8928 0.0992 -0.4394 +vn 0.7173 0.0251 -0.6963 +vn 0.7167 0.0253 -0.6970 +vn 0.7177 0.0251 -0.6959 +vn 0.5749 0.8163 -0.0566 +vn 0.5754 0.8159 -0.0568 +vn 0.5751 0.8163 -0.0532 +vn 0.5746 0.8164 -0.0567 +vn 0.3695 0.9286 -0.0340 +vn -0.0000 0.9866 -0.1632 +vn -0.1965 0.9639 0.1796 +vn -0.1964 0.9639 0.1796 +vn 0.0033 0.9696 0.2448 +vn -0.0000 0.9694 0.2454 +vn 0.0014 0.9697 0.2444 +vn -0.4457 0.8943 0.0410 +vn -0.4459 0.8942 0.0411 +vn -0.6505 0.7582 0.0451 +vn -0.6503 0.7584 0.0451 +vn -0.9809 0.0753 -0.1794 +vn -0.9810 0.0751 -0.1786 +vn -0.9805 0.0762 -0.1814 +vn -0.9903 -0.0129 0.1381 +vn -0.9013 -0.2555 0.3499 +vn -0.9021 -0.2541 0.3488 +vn -0.7448 -0.5418 0.3895 +vn -0.7454 -0.5411 0.3893 +vn -0.7445 -0.5422 0.3896 +vn -0.7457 -0.5408 0.3892 +vn -0.5643 -0.7869 0.2498 +vn -0.5639 -0.7872 0.2499 +vn -0.3829 -0.9237 -0.0125 +vn -0.3828 -0.9237 -0.0125 +vn -0.3825 -0.9239 -0.0126 +vn -0.2043 -0.9264 -0.3164 +vn -0.0027 -0.9404 -0.3402 +vn -0.0031 -0.9404 -0.3401 +vn -0.0021 -0.9403 -0.3403 +vn 0.2284 -0.9709 -0.0720 +vn 0.2277 -0.9711 -0.0719 +vn 0.2287 -0.9708 -0.0721 +vn 0.2274 -0.9711 -0.0718 +vn 0.4585 -0.8712 0.1753 +vn 0.4582 -0.8714 0.1753 +vn 0.4579 -0.8715 0.1755 +vn 0.6763 -0.6602 0.3267 +vn 0.6771 -0.6595 0.3265 +vn 0.6760 -0.6604 0.3269 +vn 0.8616 -0.3900 0.3250 +vn 0.8619 -0.3895 0.3246 +vn 0.8621 -0.3893 0.3244 +vn 0.8614 -0.3902 0.3252 +vn 0.9778 -0.1391 0.1564 +vn 0.9780 -0.1388 0.1559 +vn 0.9901 
0.0022 -0.1400 +vn 0.8910 0.0150 -0.4538 +vn 0.8915 0.0148 -0.4528 +vn 0.7682 0.6363 -0.0707 +vn 0.7680 0.6365 -0.0707 +vn 0.7688 0.6356 -0.0708 +vn 0.5757 0.8159 -0.0530 +vn 0.5749 0.8165 -0.0530 +vn -0.3800 0.9183 0.1108 +vn -0.0002 0.9698 0.2440 +vn 0.0017 0.9696 0.2448 +vn -0.0000 0.8939 0.4482 +vn -0.6495 0.7580 0.0598 +vn -0.6496 0.7579 0.0598 +vn -0.6500 0.7575 0.0599 +vn -0.6492 0.7582 0.0598 +vn -0.8315 0.5521 0.0624 +vn -0.8313 0.5523 0.0624 +vn -0.8316 0.5518 0.0624 +vn -0.8311 0.5526 0.0624 +vn -0.9821 0.0112 -0.1883 +vn -0.9821 0.0112 -0.1882 +vn -0.9902 -0.0019 0.1399 +vn -0.8960 -0.1806 0.4057 +vn -0.8959 -0.1807 0.4059 +vn -0.8960 -0.1805 0.4056 +vn -0.8957 -0.1809 0.4061 +vn -0.7320 -0.4369 0.5228 +vn -0.5447 -0.6988 0.4637 +vn -0.5455 -0.6983 0.4634 +vn -0.5459 -0.6981 0.4632 +vn -0.5443 -0.6990 0.4639 +vn 0.0237 -0.9982 -0.0544 +vn 0.2549 -0.9433 0.2124 +vn 0.4801 -0.7742 0.4125 +vn 0.4807 -0.7739 0.4123 +vn 0.4798 -0.7743 0.4127 +vn 0.6923 -0.5313 0.4882 +vn 0.6923 -0.5314 0.4882 +vn 0.6923 -0.5314 0.4883 +vn 0.8702 -0.2751 0.4088 +vn 0.8704 -0.2748 0.4085 +vn 0.8705 -0.2747 0.4084 +vn 0.8700 -0.2752 0.4090 +vn 0.9804 -0.0761 0.1817 +vn 0.9902 0.0018 -0.1398 +vn 0.9219 0.3783 -0.0842 +vn 0.7683 0.6362 -0.0708 +vn -0.5604 0.8276 0.0318 +vn 0.1960 0.9640 0.1797 +vn 0.1960 0.9640 0.1798 +vn -0.0000 0.9693 0.2460 +vn -0.0030 0.9699 0.2433 +vn 0.2081 0.9316 0.2979 +vn 0.2080 0.9316 0.2979 +vn 0.2085 0.9315 0.2980 +vn 0.2078 0.9317 0.2979 +vn -0.0000 0.7581 0.6521 +vn -0.8298 0.5529 0.0764 +vn -0.8301 0.5523 0.0765 +vn -0.8304 0.5519 0.0765 +vn -0.8294 0.5534 0.0764 +vn -0.9595 0.2712 0.0762 +vn -0.9604 0.2662 0.0824 +vn -0.9604 0.2667 0.0811 +vn -0.9602 0.2688 0.0763 +vn -0.8920 -0.0997 0.4410 +vn -0.8925 -0.0993 0.4400 +vn -0.8916 -0.1000 0.4417 +vn -0.8929 -0.0990 0.4392 +vn -0.7233 -0.3088 0.6177 +vn -0.7236 -0.3085 0.6175 +vn -0.7236 -0.3085 0.6174 +vn -0.7232 -0.3088 0.6177 +vn -0.5306 -0.5622 0.6343 +vn -0.5307 -0.5622 0.6343 +vn -0.3345 
-0.7974 0.5022 +vn -0.3337 -0.7976 0.5024 +vn -0.3331 -0.7978 0.5025 +vn -0.1535 -0.9551 0.2536 +vn -0.1525 -0.9553 0.2534 +vn -0.1541 -0.9549 0.2537 +vn 0.0526 -0.9699 0.2379 +vn 0.0518 -0.9699 0.2381 +vn 0.0531 -0.9699 0.2378 +vn 0.0514 -0.9698 0.2383 +vn 0.2794 -0.8379 0.4688 +vn 0.4979 -0.6238 0.6025 +vn 0.4977 -0.6238 0.6026 +vn 0.4981 -0.6237 0.6024 +vn 0.4974 -0.6239 0.6028 +vn 0.7031 -0.3757 0.6037 +vn 0.7027 -0.3760 0.6041 +vn 0.7034 -0.3756 0.6034 +vn 0.8753 -0.1508 0.4596 +vn 0.8753 -0.1507 0.4595 +vn 0.9814 -0.0114 0.1917 +vn 0.9814 -0.0114 0.1916 +vn 0.9948 0.0581 -0.0837 +vn 0.9942 0.0583 -0.0908 +vn 0.3806 0.9181 0.1106 +vn 0.3808 0.9180 0.1105 +vn -0.0031 0.9695 0.2452 +vn -0.0014 0.9697 0.2444 +vn -0.0000 0.8944 0.4472 +vn 0.4295 0.8354 0.3430 +vn -0.0000 0.5526 0.8334 +vn -0.0000 0.5534 0.8329 +vn -0.0000 0.5540 0.8325 +vn -0.0000 0.5520 0.8338 +vn -0.9949 -0.0581 0.0829 +vn -0.9947 -0.0541 0.0875 +vn -0.9946 -0.0543 0.0882 +vn -0.7203 -0.1685 0.6729 +vn -0.7189 -0.1691 0.6742 +vn -0.7211 -0.1682 0.6721 +vn -0.7182 -0.1695 0.6749 +vn -0.5188 -0.3976 0.7569 +vn -0.5184 -0.3976 0.7571 +vn -0.5181 -0.3977 0.7572 +vn -0.5191 -0.3975 0.7566 +vn -0.3154 -0.6426 0.6983 +vn -0.3163 -0.6424 0.6980 +vn -0.3147 -0.6427 0.6985 +vn -0.1293 -0.8485 0.5131 +vn -0.1297 -0.8485 0.5131 +vn -0.1292 -0.8485 0.5131 +vn -0.1298 -0.8485 0.5131 +vn 0.0764 -0.8616 0.5017 +vn 0.0771 -0.8617 0.5016 +vn 0.0761 -0.8617 0.5018 +vn 0.0774 -0.8617 0.5015 +vn 0.2971 -0.6753 0.6750 +vn 0.2979 -0.6753 0.6747 +vn 0.2966 -0.6754 0.6752 +vn 0.2984 -0.6752 0.6746 +vn 0.5109 -0.4411 0.7379 +vn 0.5109 -0.4411 0.7378 +vn 0.5112 -0.4409 0.7377 +vn 0.5106 -0.4411 0.7380 +vn 0.7087 -0.2064 0.6747 +vn 0.8771 -0.0225 0.4798 +vn 0.8769 -0.0226 0.4801 +vn 0.9595 -0.2713 -0.0763 +vn 0.9593 -0.2720 -0.0762 +vn 0.9587 -0.2706 -0.0881 +vn 0.9594 -0.2714 -0.0762 +vn 0.5603 0.8276 0.0318 +vn -0.0000 0.9865 -0.1635 +vn -0.0000 0.9865 -0.1636 +vn 0.0385 0.9993 0.0035 +vn -0.2082 0.9316 0.2980 +vn 
-0.2085 0.9315 0.2980 +vn -0.2078 0.9317 0.2979 +vn 0.6447 0.6696 0.3688 +vn 0.6450 0.6692 0.3689 +vn 0.6444 0.6698 0.3688 +vn 0.6452 0.6691 0.3689 +vn -0.9941 -0.0581 0.0916 +vn -0.9941 -0.0577 0.0916 +vn -0.9222 -0.3784 0.0805 +vn -0.9213 -0.3801 0.0825 +vn -0.9222 -0.3779 0.0827 +vn -0.9217 -0.3796 0.0805 +vn -0.7186 -0.0252 0.6950 +vn -0.7190 -0.0251 0.6946 +vn -0.5107 -0.2182 0.8316 +vn -0.5102 -0.2182 0.8319 +vn -0.5099 -0.2182 0.8321 +vn -0.3041 -0.4541 0.8374 +vn -0.3042 -0.4541 0.8374 +vn -0.1095 -0.6836 0.7216 +vn -0.1082 -0.6837 0.7217 +vn -0.1074 -0.6837 0.7218 +vn -0.1102 -0.6836 0.7215 +vn 0.0965 -0.6943 0.7132 +vn 0.0966 -0.6943 0.7132 +vn 0.0968 -0.6942 0.7133 +vn 0.3108 -0.4776 0.8218 +vn 0.3114 -0.4774 0.8216 +vn 0.3118 -0.4773 0.8215 +vn 0.3103 -0.4777 0.8219 +vn 0.5191 -0.2418 0.8198 +vn 0.8315 -0.5520 -0.0624 +vn 0.8320 -0.5512 -0.0625 +vn 0.8310 -0.5528 -0.0624 +vn 0.9583 -0.2717 -0.0883 +vn 0.9586 -0.2706 -0.0883 +vn -0.9473 0.2071 -0.2442 +vn -0.9474 0.2068 -0.2443 +vn 0.7257 0.6858 -0.0554 +vn 0.7262 0.6852 -0.0557 +vn 0.7254 0.6861 -0.0552 +vn -0.1626 0.9866 -0.0150 +vn -0.1627 0.9866 -0.0150 +vn 0.1300 0.0401 -0.9907 +vn 0.1306 0.0400 -0.9906 +vn 0.1296 0.0401 -0.9908 +vn 0.2441 0.9695 0.0225 +vn 0.2440 0.9695 0.0225 +vn -0.0000 0.8939 0.4483 +vn -0.4289 0.8357 0.3430 +vn -0.4290 0.8357 0.3429 +vn -0.4287 0.8359 0.3429 +vn -0.4295 0.8354 0.3430 +vn 0.8310 0.4200 0.3647 +vn -0.0000 -0.0578 0.9983 +vn -0.0000 -0.0565 0.9984 +vn -0.0000 -0.0556 0.9985 +vn -0.0000 -0.0587 0.9983 +vn -0.9220 -0.3777 0.0849 +vn -0.9223 -0.3771 0.0849 +vn -0.7684 -0.6361 0.0707 +vn -0.7685 -0.6359 0.0707 +vn -0.5075 -0.0326 0.8610 +vn -0.5082 -0.0327 0.8606 +vn -0.5073 -0.0326 0.8611 +vn -0.2964 -0.2491 0.9220 +vn -0.2960 -0.2491 0.9221 +vn -0.2956 -0.2491 0.9223 +vn -0.0936 -0.4835 0.8703 +vn 0.1104 -0.4908 0.8642 +vn 0.1101 -0.4908 0.8643 +vn 0.1099 -0.4909 0.8643 +vn 0.1105 -0.4907 0.8643 +vn 0.3193 -0.2620 0.9107 +vn 0.3196 -0.2620 0.9106 +vn 0.3199 -0.2618 
0.9105 +vn 0.5222 -0.0362 0.8521 +vn 0.5216 -0.0362 0.8524 +vn 0.6492 -0.7593 -0.0450 +vn 0.6499 -0.7587 -0.0451 +vn 0.6489 -0.7595 -0.0450 +vn 0.6502 -0.7585 -0.0451 +vn 0.8310 -0.5510 -0.0766 +vn 0.8305 -0.5517 -0.0765 +vn 0.8313 -0.5506 -0.0766 +vn -0.0000 -0.2711 -0.9625 +vn -0.9406 -0.1096 -0.3213 +vn -0.0000 0.3761 -0.9266 +vn 0.8647 0.4792 -0.1504 +vn 0.8644 0.4799 -0.1501 +vn 0.8649 0.4787 -0.1506 +vn -0.0728 0.0395 -0.9966 +vn -0.0735 0.0397 -0.9965 +vn -0.0740 0.0397 -0.9965 +vn -0.0723 0.0395 -0.9966 +vn 0.3327 0.0388 -0.9422 +vn 0.3325 0.0389 -0.9423 +vn 0.3321 0.0389 -0.9424 +vn 0.3331 0.0388 -0.9421 +vn 0.4465 0.8938 0.0411 +vn 0.4467 0.8937 0.0411 +vn -0.0000 0.7589 0.6512 +vn -0.0000 0.7585 0.6517 +vn -0.0000 0.7592 0.6508 +vn -0.6452 0.6691 0.3689 +vn 0.9406 0.1098 0.3214 +vn 0.9407 0.1092 0.3213 +vn 0.9405 0.1102 0.3214 +vn -0.0000 -0.3789 0.9254 +vn -0.0000 -0.3779 0.9259 +vn -0.0000 -0.3797 0.9251 +vn -0.0000 -0.3771 0.9262 +vn -0.7681 -0.6364 0.0708 +vn -0.5753 -0.8160 0.0568 +vn -0.5748 -0.8163 0.0566 +vn -0.5761 -0.8156 0.0532 +vn -0.5747 -0.8164 0.0568 +vn -0.0860 -0.2651 0.9604 +vn -0.0866 -0.2652 0.9603 +vn -0.0870 -0.2652 0.9603 +vn -0.0856 -0.2651 0.9604 +vn 0.1192 -0.2692 0.9557 +vn 0.1198 -0.2691 0.9556 +vn 0.1189 -0.2693 0.9557 +vn 0.3228 -0.0392 0.9457 +vn 0.3231 -0.0391 0.9456 +vn 0.3234 -0.0391 0.9454 +vn 0.4493 -0.8930 -0.0270 +vn 0.4492 -0.8930 -0.0270 +vn 0.6480 -0.7593 -0.0597 +vn 0.6478 -0.7594 -0.0597 +vn 0.6483 -0.7590 -0.0597 +vn 0.6477 -0.7596 -0.0597 +vn -0.8306 -0.4208 -0.3647 +vn -0.8306 -0.4208 -0.3648 +vn -0.0000 0.0589 -0.9983 +vn 0.9473 0.2070 -0.2443 +vn 0.9473 0.2073 -0.2443 +vn 0.9474 0.2068 -0.2444 +vn -0.0000 0.6367 -0.7711 +vn -0.5759 0.8158 -0.0530 +vn -0.5758 0.8159 -0.0530 +vn -0.2847 0.0374 -0.9579 +vn -0.2850 0.0375 -0.9578 +vn -0.0775 0.2658 -0.9609 +vn -0.0778 0.2659 -0.9609 +vn -0.0780 0.2659 -0.9609 +vn -0.0774 0.2658 -0.9609 +vn 0.1197 0.4906 -0.8631 +vn 0.1202 0.4906 -0.8631 +vn 0.1202 0.4905 
-0.8631 +vn 0.3290 0.2609 -0.9076 +vn 0.3291 0.2609 -0.9075 +vn 0.5284 0.0359 -0.8483 +vn 0.5295 0.0357 -0.8476 +vn 0.5277 0.0361 -0.8487 +vn 0.6485 0.7588 0.0597 +vn 0.6487 0.7587 0.0597 +vn 0.6490 0.7585 0.0598 +vn 0.9472 -0.2081 0.2440 +vn 0.9472 -0.2079 0.2441 +vn 0.9473 -0.2074 0.2442 +vn -0.5760 -0.8157 0.0531 +vn -0.3686 -0.9287 0.0410 +vn -0.3682 -0.9289 0.0409 +vn -0.3690 -0.9288 0.0340 +vn -0.3682 -0.9288 0.0410 +vn 0.1227 -0.0402 0.9916 +vn 0.1232 -0.0402 0.9916 +vn 0.2429 -0.9700 -0.0091 +vn 0.2431 -0.9700 -0.0091 +vn 0.2421 -0.9700 -0.0223 +vn 0.2433 -0.9699 -0.0091 +vn 0.4479 -0.8931 -0.0413 +vn 0.4481 -0.8930 -0.0413 +vn 0.4477 -0.8932 -0.0412 +vn -0.0000 -0.7606 -0.6492 +vn -0.0000 -0.2711 -0.9626 +vn 0.9406 -0.1096 -0.3213 +vn -0.7682 0.6363 -0.0707 +vn -0.7689 0.6355 -0.0708 +vn -0.5005 0.0331 -0.8651 +vn -0.5007 0.0331 -0.8650 +vn -0.5010 0.0331 -0.8648 +vn -0.5002 0.0331 -0.8653 +vn -0.2882 0.2505 -0.9242 +vn -0.0859 0.4845 -0.8706 +vn -0.0858 0.4845 -0.8706 +vn -0.0861 0.4845 -0.8705 +vn 0.1065 0.6939 -0.7122 +vn 0.1066 0.6939 -0.7122 +vn 0.1069 0.6939 -0.7121 +vn 0.3213 0.4758 -0.8188 +vn 0.3212 0.4758 -0.8188 +vn 0.5269 0.2401 -0.8153 +vn 0.5266 0.2402 -0.8155 +vn 0.7174 0.0306 -0.6960 +vn 0.7178 0.0305 -0.6956 +vn 0.7170 0.0307 -0.6964 +vn 0.8290 0.5540 0.0763 +vn 0.8292 0.5537 0.0764 +vn 0.8294 0.5534 0.0764 +vn 0.8287 0.5544 0.0763 +vn -0.9407 0.1093 0.3213 +vn -0.9407 0.1092 0.3213 +vn 0.8648 -0.4790 0.1505 +vn 0.8651 -0.4784 0.1507 +vn -0.0000 -0.8160 0.5781 +vn -0.0000 -0.8163 0.5776 +vn -0.0000 -0.8157 0.5785 +vn -0.0000 -0.8166 0.5772 +vn -0.1638 -0.9862 0.0247 +vn -0.1634 -0.9863 0.0246 +vn 0.0413 -0.9991 0.0080 +vn 0.0400 -0.9992 0.0078 +vn 0.0408 -0.9992 -0.0038 +vn 0.0399 -0.9992 0.0081 +vn 0.2419 -0.9700 -0.0223 +vn 0.2423 -0.9700 -0.0223 +vn -0.0000 -0.8929 -0.4503 +vn -0.4267 -0.8370 -0.3426 +vn 0.8306 -0.4208 -0.3648 +vn -0.9222 0.3773 -0.0849 +vn -0.9220 0.3777 -0.0849 +vn -0.7108 0.0259 -0.7030 +vn -0.7107 0.0259 -0.7030 +vn 
-0.5028 0.2205 -0.8358 +vn -0.5024 0.2206 -0.8360 +vn -0.2954 0.4565 -0.8393 +vn -0.2955 0.4564 -0.8393 +vn -0.2950 0.4565 -0.8394 +vn -0.2955 0.4564 -0.8392 +vn -0.0987 0.6854 -0.7215 +vn -0.0984 0.6854 -0.7215 +vn -0.0981 0.6854 -0.7216 +vn 0.0880 0.8612 -0.5006 +vn 0.0884 0.8612 -0.5005 +vn 0.0885 0.8612 -0.5005 +vn 0.3077 0.6729 -0.6727 +vn 0.3080 0.6728 -0.6726 +vn 0.5189 0.4379 -0.7341 +vn 0.5183 0.4381 -0.7344 +vn 0.5180 0.4383 -0.7345 +vn 0.5192 0.4378 -0.7340 +vn 0.7153 0.2039 -0.6685 +vn 0.7147 0.2041 -0.6690 +vn 0.7144 0.2042 -0.6693 +vn 0.7156 0.2037 -0.6681 +vn 0.8802 0.0223 -0.4740 +vn 0.8812 0.0220 -0.4722 +vn 0.9590 0.2694 0.0883 +vn 0.9588 0.2700 0.0883 +vn 0.0001 -0.0587 0.9983 +vn -0.9473 -0.2074 0.2442 +vn 0.7257 -0.6858 0.0553 +vn 0.7262 -0.6852 0.0556 +vn 0.7253 -0.6862 0.0551 +vn 0.7266 -0.6848 0.0558 +vn -0.0000 -0.9700 -0.2431 +vn -0.0003 -0.9700 -0.2429 +vn -0.2107 -0.9308 -0.2986 +vn -0.8870 0.0154 -0.4616 +vn -0.8877 0.0152 -0.4603 +vn -0.7125 0.1719 -0.6803 +vn -0.7127 0.1718 -0.6801 +vn -0.5090 0.4020 -0.7612 +vn -0.5087 0.4020 -0.7613 +vn -0.3075 0.6458 -0.6989 +vn -0.1165 0.8508 -0.5124 +vn -0.1166 0.8508 -0.5125 +vn -0.1171 0.8507 -0.5125 +vn -0.1160 0.8508 -0.5125 +vn 0.0646 0.9694 -0.2368 +vn 0.0648 0.9694 -0.2368 +vn 0.0649 0.9694 -0.2367 +vn 0.2889 0.8354 -0.4675 +vn 0.5054 0.6200 -0.6002 +vn 0.5056 0.6199 -0.6001 +vn 0.5059 0.6198 -0.5999 +vn 0.7102 0.3711 -0.5982 +vn 0.7098 0.3714 -0.5985 +vn 0.7095 0.3715 -0.5988 +vn 0.7105 0.3709 -0.5979 +vn 0.8788 0.1481 -0.4535 +vn 0.8793 0.1478 -0.4527 +vn 0.8796 0.1476 -0.4523 +vn 0.8785 0.1482 -0.4541 +vn 0.9834 0.0109 -0.1814 +vn 0.9837 0.0106 -0.1795 +vn 0.9832 0.0110 -0.1821 +vn 0.9941 -0.0577 0.0916 +vn 0.9941 -0.0576 0.0916 +vn 0.9941 -0.0574 0.0917 +vn 0.0001 -0.3771 0.9262 +vn -0.8640 -0.4808 0.1498 +vn -0.8641 -0.4806 0.1499 +vn -0.8641 -0.4805 0.1499 +vn -0.8639 -0.4808 0.1498 +vn -0.0000 -0.9992 -0.0410 +vn 0.4267 -0.8370 -0.3426 +vn -0.9588 -0.2700 -0.0883 +vn -0.9590 -0.2693 
-0.0883 +vn -0.9591 -0.2688 -0.0883 +vn -0.9587 -0.2703 -0.0883 +vn -0.8882 0.1029 -0.4478 +vn -0.8885 0.1027 -0.4473 +vn -0.8879 0.1031 -0.4483 +vn -0.8887 0.1025 -0.4469 +vn -0.7176 0.3134 -0.6220 +vn -0.7171 0.3138 -0.6223 +vn -0.7187 0.3127 -0.6210 +vn -0.5201 0.5685 -0.6374 +vn -0.5201 0.5685 -0.6373 +vn -0.5200 0.5685 -0.6374 +vn -0.3255 0.8013 -0.5019 +vn -0.3260 0.8012 -0.5018 +vn -0.3261 0.8011 -0.5018 +vn -0.1391 0.9577 -0.2520 +vn -0.1389 0.9577 -0.2520 +vn -0.1384 0.9578 -0.2519 +vn 0.0386 0.9977 0.0552 +vn 0.0391 0.9977 0.0553 +vn 0.0385 0.9977 0.0552 +vn 0.0392 0.9977 0.0553 +vn 0.2660 0.9404 -0.2118 +vn 0.2663 0.9404 -0.2117 +vn 0.2659 0.9404 -0.2118 +vn 0.4898 0.7692 -0.4105 +vn 0.7003 0.5248 -0.4839 +vn 0.7004 0.5247 -0.4839 +vn 0.8744 0.2699 -0.4033 +vn 0.8742 0.2701 -0.4035 +vn 0.8741 0.2702 -0.4036 +vn 0.9827 0.0718 -0.1708 +vn 0.9825 0.0722 -0.1718 +vn 0.9823 0.0724 -0.1725 +vn 0.9889 -0.0025 0.1487 +vn 0.9216 -0.3789 0.0849 +vn 0.9214 -0.3793 0.0849 +vn 0.9212 -0.3797 0.0848 +vn 0.9218 -0.3784 0.0849 +vn 0.0001 -0.6354 0.7722 +vn -0.7265 -0.6849 0.0558 +vn -0.7266 -0.6848 0.0558 +vn 0.1938 -0.9643 -0.1805 +vn 0.0003 -0.9700 -0.2429 +vn 0.2107 -0.9308 -0.2986 +vn 0.2110 -0.9307 -0.2987 +vn -0.8301 -0.5524 -0.0765 +vn -0.8298 -0.5529 -0.0764 +vn -0.9837 -0.0107 0.1795 +vn -0.9890 0.0165 -0.1473 +vn -0.9888 0.0168 -0.1482 +vn -0.9891 0.0163 -0.1466 +vn -0.9887 0.0171 -0.1488 +vn -0.8918 0.1869 -0.4119 +vn -0.8919 0.1869 -0.4118 +vn -0.8916 0.1872 -0.4124 +vn -0.7270 0.4427 -0.5249 +vn -0.7266 0.4430 -0.5252 +vn -0.7263 0.4433 -0.5253 +vn -0.5352 0.7059 -0.4641 +vn -0.5352 0.7058 -0.4642 +vn -0.5349 0.7060 -0.4641 +vn -0.3473 0.9020 -0.2566 +vn -0.3476 0.9019 -0.2565 +vn -0.3479 0.9017 -0.2565 +vn -0.3471 0.9020 -0.2566 +vn -0.1643 0.9857 0.0368 +vn -0.1652 0.9856 0.0366 +vn -0.1637 0.9858 0.0369 +vn -0.1658 0.9855 0.0365 +vn 0.0137 0.9398 0.3414 +vn 0.0133 0.9399 0.3412 +vn 0.2411 0.9679 0.0717 +vn 0.2417 0.9677 0.0718 +vn 0.2406 0.9680 0.0716 
+vn 0.2422 0.9676 0.0719 +vn 0.4695 0.8654 -0.1749 +vn 0.4702 0.8651 -0.1747 +vn 0.4705 0.8649 -0.1747 +vn 0.6845 0.6528 -0.3246 +vn 0.9804 0.1313 -0.1469 +vn 0.9806 0.1308 -0.1462 +vn 0.9803 0.1315 -0.1474 +vn 0.8872 -0.0155 0.4611 +vn 0.7685 -0.6359 0.0708 +vn 0.7683 -0.6362 0.0707 +vn -0.0000 -0.8164 0.5775 +vn -0.6470 -0.7602 -0.0596 +vn -0.6474 -0.7598 -0.0596 +vn -0.8796 -0.0222 0.4752 +vn -0.8798 -0.0220 0.4749 +vn -0.9827 -0.0718 0.1708 +vn -0.9824 -0.0723 0.1724 +vn -0.9829 -0.0713 0.1697 +vn -0.9822 -0.0727 0.1734 +vn -0.9893 0.0310 -0.1428 +vn -0.9893 0.0306 -0.1423 +vn -0.9892 0.0312 -0.1431 +vn -0.8966 0.2647 -0.3550 +vn -0.8967 0.2647 -0.3549 +vn -0.7384 0.5500 -0.3901 +vn -0.7386 0.5499 -0.3900 +vn -0.7382 0.5503 -0.3902 +vn -0.5545 0.7944 -0.2479 +vn -0.5543 0.7945 -0.2480 +vn -0.5540 0.7947 -0.2481 +vn -0.5547 0.7942 -0.2479 +vn -0.3718 0.9282 0.0156 +vn -0.3721 0.9281 0.0155 +vn -0.3716 0.9283 0.0156 +vn -0.3722 0.9280 0.0154 +vn 0.2175 0.9115 0.3491 +vn 0.4470 0.8904 0.0860 +vn 0.4467 0.8906 0.0860 +vn 0.4473 0.8902 0.0861 +vn 0.4463 0.8907 0.0859 +vn 0.6661 0.7353 -0.1250 +vn 0.6663 0.7352 -0.1249 +vn 0.6661 0.7354 -0.1250 +vn 0.8573 0.4729 -0.2033 +vn 0.8572 0.4731 -0.2034 +vn 0.8571 0.4733 -0.2035 +vn 0.8574 0.4728 -0.2032 +vn 0.9767 0.1860 -0.1070 +vn 0.9767 0.1860 -0.1071 +vn 0.9895 -0.0299 0.1411 +vn 0.8891 -0.1023 0.4462 +vn 0.8888 -0.1025 0.4468 +vn 0.8885 -0.1026 0.4472 +vn 0.8893 -0.1021 0.4457 +vn 0.7106 -0.0258 0.7032 +vn 0.5760 -0.8157 0.0531 +vn 0.5757 -0.8159 0.0530 +vn -0.3822 -0.9175 -0.1100 +vn -0.3822 -0.9175 -0.1099 +vn -0.1938 -0.9643 -0.1805 +vn -0.1943 -0.9642 -0.1804 +vn -0.4487 -0.8927 -0.0413 +vn -0.4485 -0.8929 -0.0413 +vn -0.7182 -0.0304 0.6952 +vn -0.7184 -0.0304 0.6950 +vn -0.7182 -0.0305 0.6952 +vn -0.8789 -0.1479 0.4534 +vn -0.9802 -0.1317 0.1478 +vn -0.9904 0.0418 -0.1315 +vn -0.9902 0.0432 -0.1325 +vn -0.9905 0.0414 -0.1310 +vn -0.9036 0.3290 -0.2743 +vn -0.9033 0.3296 -0.2745 +vn -0.9037 0.3288 -0.2741 +vn 
-0.9033 0.3298 -0.2746 +vn -0.5755 0.8177 -0.0083 +vn -0.5754 0.8178 -0.0083 +vn -0.5751 0.8180 -0.0082 +vn -0.3960 0.8741 0.2813 +vn -0.3962 0.8740 0.2812 +vn -0.3954 0.8743 0.2815 +vn -0.3966 0.8739 0.2811 +vn -0.2119 0.7984 0.5636 +vn -0.2122 0.7983 0.5636 +vn -0.0259 0.6259 0.7795 +vn 0.1965 0.7837 0.5892 +vn 0.1970 0.7837 0.5891 +vn 0.1970 0.7836 0.5891 +vn 0.4243 0.8388 0.3411 +vn 0.4238 0.8390 0.3412 +vn 0.4235 0.8392 0.3413 +vn 0.4245 0.8388 0.3410 +vn 0.6468 0.7565 0.0967 +vn 0.8445 0.5324 -0.0584 +vn 0.9717 0.2310 -0.0503 +vn 0.9902 -0.0435 0.1329 +vn 0.9902 -0.0436 0.1330 +vn 0.8919 -0.1869 0.4119 +vn 0.8925 -0.1862 0.4109 +vn 0.7121 -0.1720 0.6807 +vn 0.7120 -0.1721 0.6807 +vn 0.4992 -0.0330 0.8658 +vn 0.4999 -0.0330 0.8655 +vn 0.4991 -0.0330 0.8659 +vn 0.3689 -0.9288 0.0340 +vn -0.2421 -0.9700 -0.0223 +vn -0.2420 -0.9700 -0.0223 +vn -0.2417 -0.9701 -0.0223 +vn -0.2424 -0.9699 -0.0223 +vn -0.5291 -0.0358 0.8478 +vn -0.5294 -0.0358 0.8476 +vn -0.5291 -0.0359 0.8478 +vn -0.5296 -0.0359 0.8475 +vn -0.7160 -0.2037 0.6678 +vn -0.7161 -0.2036 0.6676 +vn -0.8747 -0.2697 0.4028 +vn -0.9767 -0.1861 0.1071 +vn -0.9767 -0.1860 0.1070 +vn -0.9917 0.0512 -0.1180 +vn -0.9917 0.0514 -0.1182 +vn -0.9917 0.0515 -0.1182 +vn -0.9123 0.3710 -0.1736 +vn -0.9124 0.3707 -0.1734 +vn -0.7695 0.6376 -0.0351 +vn -0.5959 0.7706 0.2261 +vn -0.5959 0.7705 0.2261 +vn -0.4165 0.7517 0.5113 +vn -0.4157 0.7520 0.5116 +vn -0.4171 0.7516 0.5110 +vn -0.2282 0.6184 0.7520 +vn -0.2284 0.6184 0.7520 +vn -0.0374 0.4148 0.9091 +vn -0.0376 0.4148 0.9091 +vn -0.0379 0.4147 0.9092 +vn -0.0371 0.4148 0.9091 +vn 0.1800 0.6072 0.7739 +vn 0.1792 0.6072 0.7740 +vn 0.1787 0.6072 0.7742 +vn 0.1805 0.6071 0.7738 +vn 0.4036 0.7218 0.5623 +vn 0.4042 0.7216 0.5621 +vn 0.4045 0.7215 0.5620 +vn 0.4031 0.7219 0.5624 +vn 0.6271 0.7130 0.3136 +vn 0.6276 0.7127 0.3134 +vn 0.6269 0.7132 0.3137 +vn 0.8301 0.5483 0.1021 +vn 0.9653 0.2603 0.0204 +vn 0.9653 0.2604 0.0204 +vn 0.9913 -0.0540 0.1196 +vn 0.9913 -0.0537 
0.1198 +vn 0.8967 -0.2647 0.3549 +vn 0.8967 -0.2648 0.3548 +vn 0.7171 -0.3137 0.6223 +vn 0.7171 -0.3137 0.6224 +vn 0.1650 -0.9862 0.0152 +vn -0.0409 -0.9992 -0.0038 +vn -0.0408 -0.9992 -0.0038 +vn -0.5263 -0.2402 0.8156 +vn -0.5259 -0.2403 0.8159 +vn -0.5264 -0.2402 0.8156 +vn -0.7098 -0.3715 0.5985 +vn -0.7102 -0.3713 0.5982 +vn -0.7095 -0.3717 0.5987 +vn -0.7105 -0.3711 0.5978 +vn -0.8673 -0.3814 0.3199 +vn -0.8673 -0.3814 0.3200 +vn -0.9718 -0.2305 0.0499 +vn -0.9720 -0.2297 0.0495 +vn -0.9716 -0.2311 0.0502 +vn -0.9930 0.0584 -0.1027 +vn -0.9929 0.0601 -0.1030 +vn -0.9931 0.0572 -0.1026 +vn -0.9928 0.0611 -0.1031 +vn -0.9226 0.3806 -0.0621 +vn -0.9224 0.3813 -0.0620 +vn -0.7856 0.6009 0.1479 +vn -0.7854 0.6010 0.1480 +vn -0.7856 0.6008 0.1479 +vn -0.6137 0.6628 0.4289 +vn -0.6142 0.6626 0.4286 +vn -0.6135 0.6629 0.4291 +vn -0.6145 0.6625 0.4285 +vn -0.4301 0.5827 0.6896 +vn -0.4303 0.5827 0.6895 +vn -0.2399 0.4097 0.8801 +vn -0.2407 0.4095 0.8800 +vn -0.2411 0.4095 0.8799 +vn -0.0446 0.1896 0.9808 +vn -0.0441 0.1897 0.9809 +vn 0.1682 0.4022 0.9000 +vn 0.1692 0.4022 0.8998 +vn 0.1698 0.4022 0.8997 +vn 0.1676 0.4022 0.9001 +vn 0.3884 0.5590 0.7326 +vn 0.3893 0.5588 0.7322 +vn 0.3899 0.5588 0.7320 +vn 0.6117 0.6124 0.5009 +vn 0.6109 0.6128 0.5012 +vn 0.6105 0.6131 0.5015 +vn 0.6121 0.6121 0.5007 +vn 0.8160 0.5166 0.2592 +vn 0.8159 0.5168 0.2592 +vn 0.8162 0.5164 0.2591 +vn 0.9585 0.2675 0.0989 +vn 0.9928 -0.0607 0.1032 +vn 0.9928 -0.0610 0.1032 +vn 0.9032 -0.3299 0.2748 +vn 0.9037 -0.3287 0.2743 +vn 0.9029 -0.3304 0.2749 +vn 0.7272 -0.4426 0.5248 +vn 0.7264 -0.4433 0.5252 +vn 0.7259 -0.4438 0.5255 +vn 0.7277 -0.4421 0.5244 +vn 0.5089 -0.4020 0.7612 +vn 0.2883 -0.2504 0.9242 +vn 0.2888 -0.2504 0.9240 +vn 0.0743 -0.0398 0.9964 +vn 0.0748 -0.0397 0.9964 +vn -0.1308 -0.0403 0.9906 +vn -0.1314 -0.0402 0.9905 +vn -0.1318 -0.0402 0.9905 +vn -0.3288 -0.2609 0.9076 +vn -0.3295 -0.2608 0.9074 +vn -0.5183 -0.4383 0.7344 +vn -0.5183 -0.4382 0.7343 +vn -0.5180 -0.4384 0.7345 
+vn -0.7001 -0.5249 0.4841 +vn -0.6997 -0.5252 0.4844 +vn -0.7003 -0.5247 0.4840 +vn -0.6995 -0.5254 0.4845 +vn -0.8570 -0.4734 0.2034 +vn -0.8571 -0.4733 0.2033 +vn -0.9657 -0.2587 -0.0209 +vn -0.9654 -0.2598 -0.0207 +vn -0.9659 -0.2579 -0.0210 +vn -0.9652 -0.2605 -0.0206 +vn -0.9944 0.0624 -0.0848 +vn -0.9945 0.0614 -0.0847 +vn -0.9945 0.0610 -0.0847 +vn -0.9944 0.0628 -0.0848 +vn -0.9328 0.3574 0.0468 +vn -0.8002 0.5160 0.3056 +vn -0.8003 0.5159 0.3055 +vn -0.7996 0.5167 0.3060 +vn -0.6289 0.5127 0.5846 +vn -0.6297 0.5122 0.5841 +vn -0.6302 0.5119 0.5838 +vn -0.6284 0.5129 0.5849 +vn -0.4405 0.3860 0.8105 +vn -0.2467 0.1874 0.9508 +vn -0.2464 0.1874 0.9509 +vn -0.2470 0.1873 0.9507 +vn -0.0460 -0.0402 0.9981 +vn -0.0455 -0.0403 0.9982 +vn 0.1633 0.1839 0.9693 +vn 0.1627 0.1839 0.9694 +vn 0.1624 0.1840 0.9694 +vn 0.3790 0.3701 0.8482 +vn 0.3789 0.3701 0.8482 +vn 0.3787 0.3702 0.8483 +vn 0.3793 0.3701 0.8480 +vn 0.5994 0.4741 0.6449 +vn 0.5988 0.4743 0.6454 +vn 0.5984 0.4745 0.6456 +vn 0.5998 0.4739 0.6447 +vn 0.8037 0.4445 0.3956 +vn 0.8039 0.4443 0.3955 +vn 0.8034 0.4449 0.3958 +vn 0.8041 0.4441 0.3953 +vn 0.9521 0.2507 0.1751 +vn 0.9518 0.2515 0.1753 +vn 0.9523 0.2502 0.1750 +vn 0.9517 0.2519 0.1754 +vn 0.9945 -0.0615 0.0849 +vn 0.9944 -0.0622 0.0849 +vn 0.9944 -0.0625 0.0849 +vn 0.9122 -0.3711 0.1736 +vn 0.9121 -0.3714 0.1737 +vn 0.9119 -0.3718 0.1737 +vn 0.9124 -0.3708 0.1736 +vn 0.7396 -0.5487 0.3897 +vn 0.5198 -0.5688 0.6375 +vn 0.5187 -0.5692 0.6379 +vn 0.5201 -0.5686 0.6373 +vn 0.2959 -0.4564 0.8391 +vn 0.2964 -0.4564 0.8390 +vn 0.2957 -0.4564 0.8392 +vn 0.2966 -0.4564 0.8389 +vn 0.0770 -0.2659 0.9609 +vn -0.1291 -0.2690 0.9545 +vn -0.1276 -0.2691 0.9546 +vn -0.1300 -0.2688 0.9544 +vn -0.1267 -0.2693 0.9547 +vn -0.3205 -0.4760 0.8189 +vn -0.5073 -0.6193 0.5992 +vn -0.5064 -0.6196 0.5997 +vn -0.5059 -0.6197 0.6000 +vn -0.5078 -0.6192 0.5989 +vn -0.6851 -0.6522 0.3244 +vn -0.6847 -0.6526 0.3245 +vn -0.6854 -0.6520 0.3242 +vn -0.6846 -0.6527 0.3245 +vn 
-0.8443 -0.5327 0.0584 +vn -0.8444 -0.5325 0.0584 +vn -0.9585 -0.2676 -0.0987 +vn -0.9585 -0.2675 -0.0987 +vn -0.9960 0.0583 -0.0672 +vn -0.9960 0.0592 -0.0670 +vn -0.9961 0.0575 -0.0674 +vn -0.9409 0.3078 0.1411 +vn -0.9408 0.3081 0.1413 +vn -0.9408 0.3081 0.1414 +vn -0.8109 0.3997 0.4275 +vn -0.8108 0.3998 0.4275 +vn -0.8112 0.3994 0.4271 +vn -0.6394 0.3393 0.6900 +vn -0.6390 0.3394 0.6903 +vn -0.6387 0.3396 0.6905 +vn -0.6397 0.3391 0.6897 +vn -0.4469 0.1766 0.8770 +vn -0.4468 0.1767 0.8770 +vn -0.4470 0.1766 0.8769 +vn -0.2474 -0.0397 0.9681 +vn -0.0422 -0.2691 0.9622 +vn 0.1609 -0.0391 0.9862 +vn 0.1612 -0.0391 0.9861 +vn 0.3734 0.1694 0.9121 +vn 0.3739 0.1694 0.9119 +vn 0.3743 0.1694 0.9117 +vn 0.7944 0.3443 0.5003 +vn 0.7942 0.3445 0.5005 +vn 0.7947 0.3442 0.5000 +vn 0.9459 0.2165 0.2417 +vn 0.9464 0.2152 0.2409 +vn 0.9961 -0.0572 0.0676 +vn 0.9961 -0.0573 0.0675 +vn 0.9961 -0.0569 0.0677 +vn 0.9223 -0.3815 0.0618 +vn 0.9223 -0.3814 0.0618 +vn 0.9221 -0.3819 0.0618 +vn 0.7543 -0.6181 0.2214 +vn 0.7542 -0.6182 0.2214 +vn 0.5351 -0.7058 0.4642 +vn 0.5360 -0.7054 0.4638 +vn 0.5345 -0.7061 0.4644 +vn 0.5365 -0.7051 0.4636 +vn 0.3082 -0.6456 0.6988 +vn 0.3080 -0.6456 0.6988 +vn 0.3079 -0.6456 0.6988 +vn 0.0840 -0.4845 0.8707 +vn 0.0847 -0.4846 0.8706 +vn 0.0838 -0.4845 0.8708 +vn 0.0850 -0.4846 0.8706 +vn -0.1192 -0.4907 0.8631 +vn -0.1207 -0.4905 0.8630 +vn -0.1181 -0.4908 0.8632 +vn -0.1217 -0.4904 0.8630 +vn -0.3084 -0.6729 0.6724 +vn -0.3085 -0.6729 0.6724 +vn -0.3086 -0.6729 0.6723 +vn -0.4903 -0.7688 0.4105 +vn -0.4908 -0.7686 0.4103 +vn -0.4909 -0.7686 0.4103 +vn -0.4901 -0.7689 0.4106 +vn -0.6665 -0.7350 0.1248 +vn -0.6663 -0.7351 0.1249 +vn -0.8300 -0.5484 -0.1019 +vn -0.8302 -0.5480 -0.1019 +vn -0.9516 -0.2522 -0.1758 +vn -0.9517 -0.2519 -0.1757 +vn -0.9974 0.0510 -0.0511 +vn -0.9974 0.0500 -0.0517 +vn -0.9974 0.0495 -0.0519 +vn -0.9974 0.0514 -0.0509 +vn -0.9471 0.2388 0.2143 +vn -0.9469 0.2392 0.2147 +vn -0.9468 0.2395 0.2151 +vn -0.9473 0.2385 0.2141 
+vn -0.8176 0.2652 0.5110 +vn -0.8172 0.2656 0.5115 +vn -0.6441 0.1554 0.7490 +vn -0.6443 0.1553 0.7488 +vn -0.6439 0.1554 0.7491 +vn -0.4481 -0.0375 0.8932 +vn -0.4484 -0.0374 0.8931 +vn -0.4479 -0.0375 0.8933 +vn -0.4488 -0.0374 0.8929 +vn -0.0334 -0.4906 0.8707 +vn 0.1638 -0.2610 0.9513 +vn 0.3721 -0.0359 0.9275 +vn 0.3713 -0.0359 0.9278 +vn 0.3726 -0.0360 0.9273 +vn 0.3708 -0.0358 0.9280 +vn 0.5852 0.1437 0.7981 +vn 0.7883 0.2280 0.5715 +vn 0.7884 0.2279 0.5714 +vn 0.7886 0.2278 0.5712 +vn 0.9415 0.1675 0.2925 +vn 0.9416 0.1672 0.2923 +vn 0.9412 0.1681 0.2932 +vn 0.9974 -0.0495 0.0523 +vn 0.9974 -0.0491 0.0524 +vn 0.9974 -0.0497 0.0521 +vn 0.9324 -0.3583 -0.0472 +vn 0.9321 -0.3591 -0.0475 +vn 0.9326 -0.3578 -0.0471 +vn 0.9319 -0.3595 -0.0476 +vn 0.7705 -0.6365 0.0352 +vn 0.7706 -0.6364 0.0352 +vn 0.5553 -0.7939 0.2477 +vn 0.5547 -0.7943 0.2479 +vn 0.5557 -0.7936 0.2476 +vn 0.3247 -0.8016 0.5020 +vn 0.3247 -0.8016 0.5019 +vn 0.3248 -0.8016 0.5019 +vn 0.3242 -0.8018 0.5021 +vn 0.0977 -0.6854 0.7215 +vn 0.0976 -0.6854 0.7216 +vn 0.0981 -0.6854 0.7215 +vn 0.0973 -0.6855 0.7216 +vn -0.1052 -0.6939 0.7123 +vn -0.1051 -0.6940 0.7123 +vn -0.2903 -0.8350 0.4674 +vn -0.2905 -0.8350 0.4673 +vn -0.2902 -0.8350 0.4674 +vn -0.2908 -0.8350 0.4672 +vn -0.4701 -0.8651 0.1746 +vn -0.4695 -0.8655 0.1749 +vn -0.4705 -0.8649 0.1745 +vn -0.6476 -0.7559 -0.0967 +vn -0.6471 -0.7563 -0.0966 +vn -0.6467 -0.7566 -0.0965 +vn -0.6479 -0.7556 -0.0968 +vn -0.8159 -0.5168 -0.2594 +vn -0.8156 -0.5172 -0.2594 +vn -0.8154 -0.5175 -0.2595 +vn -0.8161 -0.5164 -0.2593 +vn -0.9458 -0.2168 -0.2417 +vn -0.9463 -0.2154 -0.2410 +vn -0.9455 -0.2176 -0.2422 +vn -0.9984 0.0389 -0.0400 +vn -0.9984 0.0395 -0.0394 +vn -0.9984 0.0398 -0.0390 +vn -0.9984 0.0385 -0.0405 +vn -0.9513 0.1586 0.2643 +vn -0.9514 0.1584 0.2639 +vn -0.9516 0.1581 0.2636 +vn -0.9512 0.1587 0.2648 +vn -0.8212 0.1214 0.5575 +vn -0.8211 0.1215 0.5578 +vn -0.8210 0.1215 0.5579 +vn -0.8213 0.1214 0.5575 +vn -0.4450 -0.2504 0.8598 +vn -0.4445 
-0.2505 0.8600 +vn -0.4454 -0.2503 0.8596 +vn -0.2369 -0.4845 0.8421 +vn -0.0200 -0.6937 0.7200 +vn -0.0198 -0.6938 0.7199 +vn -0.0201 -0.6937 0.7200 +vn 0.1706 -0.4759 0.8628 +vn 0.1707 -0.4759 0.8628 +vn 0.3738 -0.2403 0.8958 +vn 0.3745 -0.2402 0.8955 +vn 0.3733 -0.2403 0.8960 +vn 0.5845 -0.0304 0.8108 +vn 0.5842 -0.0303 0.8111 +vn 0.5855 -0.0305 0.8101 +vn 0.7838 0.1047 0.6121 +vn 0.7847 0.1043 0.6110 +vn 0.7832 0.1049 0.6128 +vn 0.7852 0.1041 0.6105 +vn 0.9385 0.1107 0.3270 +vn 0.9384 0.1109 0.3273 +vn 0.9384 0.1109 0.3274 +vn 0.9385 0.1107 0.3269 +vn 0.9984 -0.0396 0.0394 +vn 0.9984 -0.0389 0.0400 +vn 0.9984 -0.0399 0.0389 +vn 0.9984 -0.0385 0.0404 +vn 0.9408 -0.3080 -0.1413 +vn 0.9410 -0.3077 -0.1410 +vn 0.7848 -0.6018 -0.1482 +vn 0.7860 -0.6004 -0.1478 +vn 0.7842 -0.6025 -0.1484 +vn 0.7866 -0.5996 -0.1476 +vn 0.5764 -0.8171 0.0085 +vn 0.5761 -0.8174 0.0084 +vn 0.5768 -0.8168 0.0086 +vn 0.3456 -0.9026 0.2566 +vn 0.3458 -0.9026 0.2565 +vn 0.1167 -0.8507 0.5126 +vn 0.1162 -0.8507 0.5126 +vn 0.1171 -0.8506 0.5125 +vn 0.1160 -0.8507 0.5127 +vn -0.0861 -0.8612 0.5009 +vn -0.0865 -0.8612 0.5008 +vn -0.0859 -0.8612 0.5009 +vn -0.2663 -0.9403 0.2118 +vn -0.2677 -0.9400 0.2114 +vn -0.2685 -0.9398 0.2112 +vn -0.2654 -0.9405 0.2121 +vn -0.4465 -0.8906 -0.0859 +vn -0.4471 -0.8903 -0.0860 +vn -0.4474 -0.8902 -0.0860 +vn -0.4463 -0.8907 -0.0858 +vn -0.6289 -0.7115 -0.3133 +vn -0.6294 -0.7112 -0.3131 +vn -0.6285 -0.7118 -0.3134 +vn -0.6297 -0.7110 -0.3130 +vn -0.8030 -0.4453 -0.3961 +vn -0.8029 -0.4455 -0.3962 +vn -0.8026 -0.4458 -0.3963 +vn -0.8033 -0.4450 -0.3958 +vn -0.9418 -0.1669 -0.2919 +vn -0.9413 -0.1679 -0.2929 +vn -0.9409 -0.1686 -0.2936 +vn -0.9992 0.0262 -0.0306 +vn -0.9538 0.0725 0.2914 +vn -0.9539 0.0724 0.2911 +vn -0.8223 -0.0257 0.5684 +vn -0.8231 -0.0255 0.5673 +vn -0.8222 -0.0257 0.5687 +vn -0.6413 -0.2206 0.7349 +vn -0.6418 -0.2204 0.7345 +vn -0.6409 -0.2206 0.7352 +vn -0.4383 -0.4565 0.7743 +vn -0.4389 -0.4564 0.7740 +vn -0.2235 -0.6854 0.6930 +vn 
-0.2239 -0.6853 0.6930 +vn -0.2242 -0.6852 0.6930 +vn -0.0015 -0.8612 0.5082 +vn -0.0016 -0.8612 0.5082 +vn 0.1834 -0.6731 0.7165 +vn 0.3819 -0.4379 0.8139 +vn 0.3816 -0.4380 0.8140 +vn 0.3823 -0.4379 0.8137 +vn 0.3812 -0.4381 0.8141 +vn 0.5881 -0.2036 0.7827 +vn 0.7825 -0.0223 0.6223 +vn 0.7827 -0.0222 0.6221 +vn 0.7828 -0.0222 0.6219 +vn 0.7823 -0.0223 0.6225 +vn 0.9369 0.0507 0.3459 +vn 0.9373 0.0505 0.3450 +vn 0.9992 -0.0259 0.0314 +vn 0.9992 -0.0264 0.0307 +vn 0.9479 -0.2374 -0.2124 +vn 0.9475 -0.2383 -0.2133 +vn 0.9481 -0.2370 -0.2120 +vn 0.9473 -0.2387 -0.2137 +vn 0.7987 -0.5177 -0.3068 +vn 0.7986 -0.5177 -0.3068 +vn 0.7990 -0.5174 -0.3066 +vn 0.7982 -0.5182 -0.3072 +vn 0.5975 -0.7696 -0.2253 +vn 0.5972 -0.7697 -0.2255 +vn 0.5975 -0.7695 -0.2253 +vn 0.3694 -0.9291 -0.0157 +vn 0.3693 -0.9292 -0.0157 +vn 0.1404 -0.9575 0.2520 +vn 0.1397 -0.9576 0.2519 +vn 0.1394 -0.9577 0.2519 +vn -0.0642 -0.9694 0.2369 +vn -0.0631 -0.9694 0.2372 +vn -0.0626 -0.9694 0.2373 +vn -0.2418 -0.9677 -0.0718 +vn -0.2412 -0.9678 -0.0717 +vn -0.2408 -0.9679 -0.0716 +vn -0.4235 -0.8392 -0.3413 +vn -0.6129 -0.6116 -0.5003 +vn -0.6127 -0.6118 -0.5004 +vn -0.7939 -0.3447 -0.5009 +vn -0.7946 -0.3440 -0.5002 +vn -0.7952 -0.3435 -0.4997 +vn -0.7934 -0.3452 -0.5013 +vn -0.9377 -0.1118 -0.3290 +vn -0.9379 -0.1116 -0.3286 +vn -0.9377 -0.1119 -0.3291 +vn -0.9379 -0.1115 -0.3286 +vn -0.9996 0.0119 -0.0270 +vn -0.9995 0.0114 -0.0286 +vn -0.9544 -0.0154 0.2981 +vn -0.9543 -0.0154 0.2983 +vn -0.8215 -0.1718 0.5437 +vn -0.8213 -0.1719 0.5440 +vn -0.8216 -0.1716 0.5435 +vn -0.6343 -0.4024 0.6602 +vn -0.4259 -0.6457 0.6338 +vn -0.4249 -0.6459 0.6343 +vn -0.4243 -0.6460 0.6346 +vn -0.4265 -0.6455 0.6336 +vn -0.2057 -0.8506 0.4839 +vn -0.2055 -0.8506 0.4840 +vn -0.2052 -0.8507 0.4840 +vn -0.2058 -0.8506 0.4839 +vn 0.0213 -0.9694 0.2446 +vn 0.0201 -0.9695 0.2444 +vn 0.0193 -0.9695 0.2442 +vn 0.0221 -0.9693 0.2448 +vn 0.2026 -0.8354 0.5109 +vn 0.2024 -0.8354 0.5110 +vn 0.2029 -0.8353 0.5109 +vn 0.2021 
-0.8355 0.5110 +vn 0.3940 -0.6193 0.6791 +vn 0.3945 -0.6192 0.6790 +vn 0.3936 -0.6195 0.6792 +vn 0.3949 -0.6191 0.6789 +vn 0.5944 -0.3709 0.7135 +vn 0.5950 -0.3707 0.7132 +vn 0.7843 -0.1485 0.6023 +vn 0.7844 -0.1484 0.6022 +vn 0.9364 -0.0107 0.3507 +vn 0.9995 -0.0116 0.0289 +vn 0.9996 -0.0046 0.0281 +vn 0.9995 -0.0113 0.0294 +vn 0.9517 -0.1578 -0.2632 +vn 0.9522 -0.1572 -0.2621 +vn 0.9514 -0.1584 -0.2641 +vn 0.9525 -0.1568 -0.2611 +vn 0.8104 -0.4001 -0.4280 +vn 0.8100 -0.4004 -0.4284 +vn 0.8106 -0.3999 -0.4278 +vn 0.8098 -0.4006 -0.4286 +vn 0.6154 -0.6618 -0.4282 +vn 0.6150 -0.6620 -0.4285 +vn 0.6148 -0.6621 -0.4285 +vn 0.6155 -0.6618 -0.4281 +vn 0.3952 -0.8745 -0.2814 +vn 0.3932 -0.8751 -0.2820 +vn 0.3959 -0.8742 -0.2811 +vn 0.1661 -0.9854 -0.0367 +vn 0.1662 -0.9854 -0.0367 +vn -0.0389 -0.9977 -0.0555 +vn -0.0385 -0.9977 -0.0554 +vn -0.0392 -0.9977 -0.0555 +vn -0.2178 -0.9113 -0.3493 +vn -0.2177 -0.9114 -0.3492 +vn -0.2175 -0.9114 -0.3492 +vn -0.2180 -0.9113 -0.3493 +vn -0.4038 -0.7218 -0.5621 +vn -0.4042 -0.7217 -0.5620 +vn -0.4036 -0.7218 -0.5622 +vn -0.5986 -0.4743 -0.6455 +vn -0.5990 -0.4741 -0.6453 +vn -0.5984 -0.4744 -0.6457 +vn -0.5994 -0.4740 -0.6451 +vn -0.7885 -0.2280 -0.5713 +vn -0.7883 -0.2281 -0.5715 +vn -0.7879 -0.2283 -0.5719 +vn -0.7887 -0.2278 -0.5710 +vn -0.9364 -0.0510 -0.3473 +vn -0.9369 -0.0505 -0.3458 +vn -0.9360 -0.0512 -0.3482 +vn -0.9997 -0.0025 -0.0258 +vn -0.9996 -0.0024 -0.0270 +vn -0.9996 -0.0023 -0.0277 +vn -0.9531 -0.1028 0.2845 +vn -0.9533 -0.1027 0.2841 +vn -0.9531 -0.1029 0.2846 +vn -0.8161 -0.3132 0.4857 +vn -0.8161 -0.3131 0.4856 +vn -0.8158 -0.3135 0.4860 +vn -0.8163 -0.3130 0.4854 +vn -0.6239 -0.5685 0.5363 +vn -0.6233 -0.5688 0.5365 +vn -0.6242 -0.5683 0.5361 +vn -0.6231 -0.5690 0.5367 +vn -0.4068 -0.8020 0.4374 +vn -0.4074 -0.8018 0.4372 +vn -0.4075 -0.8018 0.4372 +vn -0.4068 -0.8020 0.4375 +vn -0.1815 -0.9577 0.2235 +vn -0.1807 -0.9578 0.2237 +vn -0.1802 -0.9578 0.2238 +vn 0.0460 -0.9978 -0.0479 +vn 0.0474 -0.9977 -0.0477 
+vn 0.0482 -0.9977 -0.0475 +vn 0.0453 -0.9978 -0.0481 +vn 0.2258 -0.9401 0.2553 +vn 0.2265 -0.9399 0.2554 +vn 0.2271 -0.9398 0.2555 +vn 0.4099 -0.7691 0.4903 +vn 0.4109 -0.7688 0.4901 +vn 0.4096 -0.7693 0.4904 +vn 0.4112 -0.7686 0.4900 +vn 0.6046 -0.5247 0.5993 +vn 0.6043 -0.5249 0.5994 +vn 0.6042 -0.5250 0.5995 +vn 0.7887 -0.2707 0.5520 +vn 0.7888 -0.2706 0.5519 +vn 0.7885 -0.2708 0.5522 +vn 0.9372 -0.0720 0.3414 +vn 0.9373 -0.0719 0.3410 +vn 0.9374 -0.0719 0.3408 +vn 0.9371 -0.0721 0.3416 +vn 0.9538 -0.0724 -0.2914 +vn 0.9538 -0.0725 -0.2915 +vn 0.8180 -0.2651 -0.5105 +vn 0.6280 -0.5130 -0.5851 +vn 0.6289 -0.5126 -0.5846 +vn 0.6293 -0.5123 -0.5844 +vn 0.4140 -0.7525 -0.5122 +vn 0.4151 -0.7522 -0.5117 +vn 0.4159 -0.7520 -0.5114 +vn 0.4133 -0.7527 -0.5125 +vn 0.1903 -0.9284 -0.3192 +vn 0.1909 -0.9283 -0.3190 +vn 0.1913 -0.9283 -0.3189 +vn 0.1899 -0.9284 -0.3193 +vn -0.0131 -0.9399 -0.3413 +vn -0.0139 -0.9398 -0.3415 +vn -0.0145 -0.9397 -0.3416 +vn -0.0125 -0.9399 -0.3412 +vn -0.1970 -0.7838 -0.5890 +vn -0.1970 -0.7837 -0.5890 +vn -0.3895 -0.5587 -0.7323 +vn -0.3894 -0.5587 -0.7323 +vn -0.5903 -0.3142 -0.7436 +vn -0.5897 -0.3143 -0.7439 +vn -0.7840 -0.1044 -0.6120 +vn -0.7835 -0.1045 -0.6125 +vn -0.7832 -0.1047 -0.6129 +vn -0.7843 -0.1043 -0.6115 +vn -0.9995 -0.0171 -0.0275 +vn -0.9502 -0.1872 0.2492 +vn -0.9501 -0.1873 0.2494 +vn -0.9500 -0.1875 0.2496 +vn -0.8080 -0.4427 0.3888 +vn -0.8073 -0.4433 0.3894 +vn -0.8084 -0.4423 0.3885 +vn -0.8070 -0.4437 0.3898 +vn -0.6092 -0.7054 0.3624 +vn -0.6098 -0.7050 0.3620 +vn -0.6101 -0.7049 0.3618 +vn -0.6089 -0.7055 0.3626 +vn -0.3859 -0.9024 0.1917 +vn -0.3866 -0.9021 0.1915 +vn -0.3869 -0.9020 0.1914 +vn -0.3857 -0.9025 0.1918 +vn -0.1555 -0.9857 -0.0649 +vn -0.1566 -0.9855 -0.0651 +vn -0.1574 -0.9854 -0.0654 +vn -0.1547 -0.9858 -0.0648 +vn 0.0731 -0.9398 -0.3337 +vn 0.0731 -0.9399 -0.3337 +vn 0.0734 -0.9398 -0.3337 +vn 0.2518 -0.9674 -0.0281 +vn 0.2512 -0.9675 -0.0282 +vn 0.2508 -0.9676 -0.0283 +vn 0.2522 -0.9673 -0.0280 
+vn 0.4309 -0.8657 0.2547 +vn 0.4314 -0.8654 0.2547 +vn 0.4318 -0.8652 0.2547 +vn 0.4307 -0.8658 0.2547 +vn 0.6185 -0.6515 0.4394 +vn 0.6182 -0.6516 0.4395 +vn 0.6179 -0.6518 0.4396 +vn 0.7960 -0.3830 0.4687 +vn 0.7965 -0.3824 0.4683 +vn 0.7958 -0.3832 0.4689 +vn 0.9394 -0.1310 0.3167 +vn 0.9395 -0.1309 0.3166 +vn 0.9393 -0.1311 0.3170 +vn 0.9995 0.0168 0.0283 +vn 0.9994 0.0164 0.0293 +vn 0.9994 0.0162 0.0299 +vn 0.9545 0.0152 -0.2979 +vn 0.9544 0.0153 -0.2982 +vn 0.8213 -0.1216 -0.5573 +vn 0.8210 -0.1217 -0.5578 +vn 0.6375 -0.3399 -0.6914 +vn 0.6387 -0.3394 -0.6905 +vn 0.6370 -0.3401 -0.6917 +vn 0.4298 -0.5827 -0.6897 +vn 0.4294 -0.5829 -0.6899 +vn 0.4302 -0.5826 -0.6896 +vn 0.4290 -0.5830 -0.6900 +vn 0.2132 -0.7983 -0.5632 +vn 0.2123 -0.7984 -0.5635 +vn 0.2138 -0.7983 -0.5630 +vn 0.0088 -0.8083 -0.5888 +vn -0.1805 -0.6069 -0.7740 +vn -0.1806 -0.6069 -0.7740 +vn -0.3789 -0.3701 -0.8482 +vn -0.3791 -0.3701 -0.8481 +vn -0.3786 -0.3701 -0.8483 +vn -0.3796 -0.3701 -0.8479 +vn -0.5856 -0.1437 -0.7977 +vn -0.5862 -0.1437 -0.7974 +vn -0.5852 -0.1437 -0.7981 +vn -0.7836 0.0219 -0.6208 +vn -0.7828 0.0221 -0.6218 +vn -0.7842 0.0218 -0.6202 +vn -0.9370 0.0721 -0.3418 +vn -0.9365 0.0726 -0.3431 +vn -0.9374 0.0717 -0.3409 +vn -0.9361 0.0729 -0.3441 +vn -0.9989 -0.0303 -0.0346 +vn -0.9989 -0.0300 -0.0351 +vn -0.9990 -0.0306 -0.0341 +vn -0.9989 -0.0297 -0.0355 +vn -0.7951 -0.5506 0.2545 +vn -0.7960 -0.5495 0.2539 +vn -0.7945 -0.5513 0.2548 +vn -0.7965 -0.5489 0.2536 +vn -0.5911 -0.7932 0.1461 +vn -0.5912 -0.7932 0.1460 +vn -0.3621 -0.9287 -0.0805 +vn -0.3612 -0.9290 -0.0803 +vn -0.3608 -0.9292 -0.0802 +vn -0.3626 -0.9285 -0.0806 +vn -0.1323 -0.9282 -0.3477 +vn -0.1319 -0.9283 -0.3477 +vn -0.1315 -0.9284 -0.3476 +vn -0.1327 -0.9281 -0.3478 +vn 0.0947 -0.8083 -0.5811 +vn 0.0939 -0.8083 -0.5812 +vn 0.2754 -0.9115 -0.3055 +vn 0.4544 -0.8908 -0.0061 +vn 0.6352 -0.7341 0.2400 +vn 0.6343 -0.7349 0.2400 +vn 0.6338 -0.7353 0.2400 +vn 0.6357 -0.7337 0.2400 +vn 0.8073 -0.4740 0.3514 +vn 
0.8073 -0.4741 0.3514 +vn 0.9427 -0.1860 0.2769 +vn 0.9426 -0.1863 0.2772 +vn 0.9430 -0.1855 0.2764 +vn 0.9989 0.0301 0.0351 +vn 0.9989 0.0305 0.0345 +vn 0.9989 0.0298 0.0355 +vn 0.9533 0.1027 -0.2839 +vn 0.8226 0.0257 -0.5681 +vn 0.8229 0.0256 -0.5677 +vn 0.8221 0.0258 -0.5688 +vn 0.6444 -0.1554 -0.7487 +vn 0.6441 -0.1555 -0.7490 +vn 0.4400 -0.3862 -0.8107 +vn 0.4405 -0.3860 -0.8105 +vn 0.4408 -0.3859 -0.8104 +vn 0.2288 -0.6184 -0.7518 +vn 0.2297 -0.6182 -0.7517 +vn 0.2282 -0.6185 -0.7519 +vn 0.2304 -0.6181 -0.7516 +vn 0.0272 -0.6260 -0.7794 +vn 0.0261 -0.6261 -0.7793 +vn 0.0278 -0.6259 -0.7794 +vn 0.0255 -0.6261 -0.7794 +vn -0.1684 -0.4022 -0.8999 +vn -0.1687 -0.4022 -0.8999 +vn -0.1680 -0.4022 -0.9000 +vn -0.1690 -0.4022 -0.8998 +vn -0.3739 -0.1694 -0.9119 +vn -0.3734 -0.1694 -0.9121 +vn -0.3742 -0.1694 -0.9117 +vn -0.3730 -0.1694 -0.9122 +vn -0.5852 0.0306 -0.8103 +vn -0.5845 0.0305 -0.8108 +vn -0.5841 0.0305 -0.8111 +vn -0.5856 0.0306 -0.8100 +vn -0.7857 0.1480 -0.6007 +vn -0.7857 0.1480 -0.6006 +vn -0.7854 0.1482 -0.6010 +vn -0.9391 0.1316 -0.3174 +vn -0.9393 0.1312 -0.3170 +vn -0.9389 0.1320 -0.3179 +vn -0.9395 0.1309 -0.3165 +vn -0.9981 -0.0429 -0.0441 +vn -0.9981 -0.0430 -0.0439 +vn -0.9981 -0.0433 -0.0436 +vn -0.9382 -0.3280 0.1106 +vn -0.7791 -0.6209 0.0863 +vn -0.7792 -0.6208 0.0863 +vn -0.5700 -0.8164 -0.0929 +vn -0.5701 -0.8163 -0.0929 +vn -0.3376 -0.8752 -0.3466 +vn -0.3371 -0.8753 -0.3467 +vn -0.3369 -0.8754 -0.3467 +vn -0.1089 -0.7985 -0.5921 +vn -0.1098 -0.7984 -0.5921 +vn -0.1085 -0.7985 -0.5921 +vn -0.1102 -0.7984 -0.5920 +vn 0.1101 -0.6261 -0.7719 +vn 0.1105 -0.6261 -0.7719 +vn 0.1100 -0.6261 -0.7719 +vn 0.1107 -0.6260 -0.7719 +vn 0.2959 -0.7841 -0.5455 +vn 0.2963 -0.7840 -0.5454 +vn 0.2964 -0.7840 -0.5454 +vn 0.4767 -0.8393 -0.2614 +vn 0.4759 -0.8396 -0.2617 +vn 0.4756 -0.8398 -0.2618 +vn 0.4770 -0.8392 -0.2613 +vn 0.6542 -0.7561 0.0186 +vn 0.6537 -0.7565 0.0185 +vn 0.8204 -0.5333 0.2062 +vn 0.8206 -0.5331 0.2062 +vn 0.8208 -0.5327 0.2062 +vn 
0.8202 -0.5335 0.2062 +vn 0.9477 -0.2310 0.2203 +vn 0.9476 -0.2313 0.2205 +vn 0.9479 -0.2305 0.2201 +vn 0.9981 0.0428 0.0439 +vn 0.9981 0.0428 0.0437 +vn 0.9981 0.0431 0.0435 +vn 0.8209 0.1722 -0.5445 +vn 0.8206 0.1722 -0.5449 +vn 0.4464 -0.1766 -0.8772 +vn 0.4458 -0.1767 -0.8775 +vn 0.4469 -0.1765 -0.8770 +vn 0.0393 -0.4148 -0.9091 +vn 0.0389 -0.4148 -0.9091 +vn -0.1621 -0.1841 -0.9695 +vn -0.1624 -0.1841 -0.9694 +vn -0.1617 -0.1840 -0.9695 +vn -0.3721 0.0360 -0.9275 +vn -0.3726 0.0361 -0.9273 +vn -0.5870 0.2037 -0.7836 +vn -0.5881 0.2036 -0.7828 +vn -0.5865 0.2038 -0.7839 +vn -0.7897 0.2700 -0.5509 +vn -0.7899 0.2698 -0.5506 +vn -0.9432 0.1849 -0.2760 +vn -0.9428 0.1859 -0.2769 +vn -0.9435 0.1842 -0.2755 +vn -0.9425 0.1865 -0.2775 +vn -0.9297 -0.3682 0.0098 +vn -0.7636 -0.6377 -0.1010 +vn -0.7628 -0.6388 -0.1008 +vn -0.7624 -0.6393 -0.1007 +vn -0.7640 -0.6372 -0.1010 +vn -0.5497 -0.7687 -0.3270 +vn -0.5496 -0.7688 -0.3270 +vn -0.5494 -0.7689 -0.3270 +vn -0.3180 -0.7524 -0.5769 +vn -0.3176 -0.7525 -0.5770 +vn -0.3184 -0.7523 -0.5768 +vn -0.3171 -0.7526 -0.5771 +vn -0.0926 -0.6183 -0.7804 +vn -0.0936 -0.6183 -0.7803 +vn -0.0919 -0.6184 -0.7805 +vn -0.0942 -0.6183 -0.7803 +vn 0.0554 0.7785 -0.6252 +vn 0.0568 0.7781 -0.6255 +vn 0.0554 0.7771 -0.6270 +vn 0.0552 0.7809 -0.6223 +vn 0.0551 0.7790 -0.6246 +vn 0.0550 0.7794 -0.6240 +vn 0.0551 0.7787 -0.6250 +vn 0.0553 0.7792 -0.6243 +vn 0.0554 0.7797 -0.6237 +vn 0.0552 0.7785 -0.6252 +vn 0.0551 0.7798 -0.6236 +vn 0.0555 0.7808 -0.6224 +vn 0.0552 0.7801 -0.6232 +vn 0.0555 0.7803 -0.6229 +vn 0.0551 0.7762 -0.6281 +vn 0.0549 0.7749 -0.6297 +vn 0.0546 0.7731 -0.6320 +vn 0.0544 0.7831 -0.6195 +vn 0.4957 -0.7220 -0.4826 +vn 0.4963 -0.7218 -0.4824 +vn 0.4966 -0.7217 -0.4822 +vn 0.4954 -0.7222 -0.4828 +vn 0.6740 -0.7117 -0.1978 +vn 0.6744 -0.7114 -0.1976 +vn 0.6747 -0.7112 -0.1975 +vn 0.6737 -0.7120 -0.1980 +vn 0.8350 -0.5483 0.0454 +vn 0.8351 -0.5483 0.0454 +vn 0.8347 -0.5488 0.0454 +vn 0.9541 -0.2596 0.1495 +vn 0.9542 -0.2593 
0.1495 +vn 0.9542 -0.2592 0.1494 +vn 0.9970 0.0530 0.0569 +vn 0.9452 0.2644 -0.1913 +vn 0.9451 0.2647 -0.1915 +vn 0.8158 0.3135 -0.4860 +vn 0.8161 0.3132 -0.4856 +vn 0.8164 0.3131 -0.4853 +vn 0.6426 0.2204 -0.7338 +vn 0.6438 0.2200 -0.7329 +vn 0.6422 0.2205 -0.7341 +vn 0.4479 0.0375 -0.8933 +vn 0.2460 -0.1873 -0.9510 +vn 0.2461 -0.1873 -0.9510 +vn 0.0444 -0.1897 -0.9808 +vn 0.0447 -0.1896 -0.9808 +vn 0.0441 -0.1897 -0.9808 +vn 0.0451 -0.1896 -0.9808 +vn -0.1612 0.0390 -0.9861 +vn -0.3742 0.2402 -0.8957 +vn -0.5932 0.3715 -0.7142 +vn -0.5928 0.3717 -0.7145 +vn -0.7973 0.3818 -0.4675 +vn -0.7976 0.3814 -0.4673 +vn -0.7976 0.3814 -0.4672 +vn -0.9480 0.2302 -0.2199 +vn -0.9483 0.2293 -0.2195 +vn -0.9477 0.2310 -0.2202 +vn -0.9485 0.2288 -0.2193 +vn -0.9189 -0.3814 -0.1012 +vn -0.9193 -0.3802 -0.1013 +vn -0.9185 -0.3822 -0.1012 +vn -0.7475 -0.6007 -0.2836 +vn -0.7470 -0.6012 -0.2837 +vn -0.5304 -0.6618 -0.5298 +vn -0.5309 -0.6616 -0.5296 +vn -0.5301 -0.6620 -0.5299 +vn -0.5313 -0.6614 -0.5294 +vn -0.3023 -0.5826 -0.7545 +vn -0.3027 -0.5825 -0.7544 +vn -0.3022 -0.5826 -0.7545 +vn 0.3026 0.5826 0.7543 +vn 0.3023 0.5826 0.7544 +vn 0.3027 0.5826 0.7543 +vn 0.5167 0.5126 0.6857 +vn 0.5164 0.5127 0.6859 +vn 0.5167 0.5125 0.6858 +vn 0.7226 0.4001 0.5638 +vn 0.7225 0.4001 0.5638 +vn 0.7228 0.3999 0.5636 +vn 0.7223 0.4003 0.5639 +vn 0.8947 0.2387 0.3775 +vn 0.8945 0.2390 0.3777 +vn 0.9899 0.0392 0.1365 +vn 0.9900 0.0387 0.1360 +vn 0.9898 0.0393 0.1367 +vn 0.9782 -0.1676 -0.1229 +vn 0.9781 -0.1679 -0.1231 +vn 0.8700 -0.3442 -0.3530 +vn 0.8702 -0.3440 -0.3528 +vn 0.8695 -0.3448 -0.3537 +vn 0.7039 -0.4736 -0.5294 +vn 0.7040 -0.4736 -0.5292 +vn 0.7037 -0.4738 -0.5295 +vn -0.5164 -0.5126 -0.6860 +vn -0.5166 -0.5125 -0.6859 +vn -0.7225 -0.4001 -0.5638 +vn -0.7225 -0.4000 -0.5639 +vn -0.7223 -0.4003 -0.5640 +vn -0.8947 -0.2387 -0.3775 +vn -0.8950 -0.2382 -0.3771 +vn -0.9900 -0.0381 -0.1355 +vn -0.9901 -0.0378 -0.1351 +vn -0.9783 0.1672 0.1222 +vn -0.9782 0.1675 0.1228 +vn -0.9786 
0.1663 0.1213 +vn -0.8702 0.3440 0.3528 +vn -0.8700 0.3442 0.3530 +vn -0.7027 0.4744 0.5303 +vn -0.7030 0.4741 0.5301 +vn -0.7026 0.4744 0.5303 +vn -0.5117 0.5587 0.6526 +vn -0.5122 0.5585 0.6525 +vn -0.5117 0.5587 0.6527 +vn -0.5122 0.5585 0.6524 +vn -0.3133 0.6069 0.7304 +vn -0.3138 0.6068 0.7303 +vn -0.3142 0.6068 0.7302 +vn -0.3129 0.6071 0.7305 +vn -0.1111 0.6260 0.7719 +vn -0.1114 0.6260 0.7718 +vn -0.1118 0.6259 0.7718 +vn -0.1107 0.6261 0.7719 +vn 0.0931 0.6184 0.7804 +vn 0.0933 0.6183 0.7803 +vn 0.0936 0.6184 0.7803 +vn 0.8487 -0.5169 -0.1120 +vn 0.8490 -0.5164 -0.1118 +vn 0.8486 -0.5170 -0.1120 +vn 0.8491 -0.5163 -0.1118 +vn 0.9610 -0.2672 0.0713 +vn 0.9610 -0.2674 0.0713 +vn 0.9609 -0.2677 0.0713 +vn 0.9955 0.0595 0.0734 +vn 0.9379 0.3286 -0.1112 +vn 0.9378 0.3289 -0.1113 +vn 0.9376 0.3292 -0.1114 +vn 0.9381 0.3281 -0.1110 +vn 0.8075 0.4430 -0.3894 +vn 0.8072 0.4434 -0.3897 +vn 0.8070 0.4436 -0.3899 +vn 0.8078 0.4428 -0.3891 +vn 0.6354 0.4018 -0.6594 +vn 0.6354 0.4019 -0.6594 +vn 0.6355 0.4018 -0.6594 +vn 0.4444 0.2505 -0.8601 +vn 0.4446 0.2504 -0.8600 +vn 0.4449 0.2504 -0.8599 +vn 0.4442 0.2505 -0.8602 +vn 0.2476 0.0398 -0.9681 +vn 0.2473 0.0398 -0.9681 +vn 0.0460 0.0402 -0.9981 +vn 0.0456 0.0402 -0.9981 +vn 0.0456 0.0403 -0.9981 +vn -0.1650 0.2609 -0.9512 +vn -0.1646 0.2609 -0.9512 +vn -0.1642 0.2609 -0.9513 +vn -0.1653 0.2609 -0.9511 +vn -0.3819 0.4381 -0.8138 +vn -0.3821 0.4381 -0.8137 +vn -0.3814 0.4382 -0.8140 +vn -0.6023 0.5261 -0.6004 +vn -0.6032 0.5255 -0.6000 +vn -0.6017 0.5264 -0.6007 +vn -0.6038 0.5251 -0.5998 +vn -0.8085 0.4726 -0.3507 +vn -0.8082 0.4730 -0.3509 +vn -0.8087 0.4723 -0.3506 +vn -0.8079 0.4734 -0.3510 +vn -0.9542 0.2592 -0.1493 +vn -0.9542 0.2593 -0.1493 +vn -0.9940 -0.0603 -0.0913 +vn -0.9940 -0.0609 -0.0912 +vn -0.9939 -0.0614 -0.0912 +vn -0.9940 -0.0600 -0.0913 +vn -0.9091 -0.3594 -0.2106 +vn -0.9090 -0.3597 -0.2106 +vn -0.9089 -0.3600 -0.2107 +vn -0.9092 -0.3591 -0.2105 +vn -0.7332 -0.5169 -0.4418 +vn -0.7333 -0.5169 
-0.4418 +vn -0.7335 -0.5166 -0.4417 +vn 0.3180 0.7524 0.5769 +vn 0.3179 0.7524 0.5769 +vn 0.5300 0.6621 0.5298 +vn 0.5302 0.6620 0.5297 +vn 0.5304 0.6619 0.5296 +vn 0.5298 0.6622 0.5299 +vn 0.7334 0.5167 0.4417 +vn 0.7335 0.5167 0.4416 +vn 0.7338 0.5164 0.4415 +vn 0.7334 0.5168 0.4417 +vn 0.9011 0.3085 0.3047 +vn 0.9012 0.3082 0.3046 +vn 0.9909 0.0504 0.1245 +vn 0.9909 0.0508 0.1248 +vn 0.8610 -0.4443 -0.2476 +vn 0.8611 -0.4441 -0.2475 +vn -0.9009 -0.3088 -0.3051 +vn -0.9008 -0.3090 -0.3050 +vn -0.9011 -0.3084 -0.3048 +vn -0.9912 -0.0487 -0.1233 +vn -0.9911 -0.0491 -0.1235 +vn -0.9738 0.2160 0.0711 +vn -0.8605 0.4448 0.2482 +vn -0.8604 0.4451 0.2483 +vn -0.8602 0.4454 0.2485 +vn -0.8607 0.4446 0.2481 +vn -0.6904 0.6123 0.3853 +vn -0.6909 0.6119 0.3851 +vn -0.6913 0.6115 0.3849 +vn -0.6899 0.6127 0.3855 +vn -0.2973 0.7838 0.5452 +vn -0.2967 0.7839 0.5454 +vn -0.2978 0.7837 0.5451 +vn -0.0949 0.8083 0.5811 +vn 0.1107 0.7983 0.5921 +vn 0.1106 0.7982 0.5921 +vn 0.9678 -0.2518 -0.0056 +vn 0.9679 -0.2513 -0.0055 +vn 0.9677 -0.2522 -0.0057 +vn 0.9680 -0.2510 -0.0053 +vn 0.9939 0.0616 0.0913 +vn 0.9939 0.0623 0.0913 +vn 0.9938 0.0628 0.0912 +vn 0.9289 0.3702 -0.0101 +vn 0.9293 0.3693 -0.0099 +vn 0.9295 0.3687 -0.0098 +vn 0.9287 0.3708 -0.0102 +vn 0.7954 0.5501 -0.2545 +vn 0.7956 0.5498 -0.2543 +vn 0.7952 0.5504 -0.2546 +vn 0.6239 0.5685 -0.5362 +vn 0.6232 0.5689 -0.5366 +vn 0.6230 0.5691 -0.5367 +vn 0.6242 0.5683 -0.5361 +vn 0.4371 0.4568 -0.7747 +vn 0.4369 0.4569 -0.7748 +vn 0.4366 0.4570 -0.7749 +vn 0.4372 0.4568 -0.7747 +vn 0.2461 0.2656 -0.9321 +vn 0.2447 0.2658 -0.9324 +vn 0.2440 0.2660 -0.9326 +vn 0.2468 0.2655 -0.9320 +vn 0.0425 0.2692 -0.9622 +vn 0.0425 0.2692 -0.9621 +vn 0.0429 0.2691 -0.9621 +vn 0.0419 0.2692 -0.9622 +vn -0.1721 0.4759 -0.8625 +vn -0.1728 0.4758 -0.8624 +vn -0.1731 0.4757 -0.8624 +vn -0.1718 0.4758 -0.8626 +vn -0.3944 0.6192 -0.6790 +vn -0.3940 0.6193 -0.6791 +vn -0.3937 0.6193 -0.6793 +vn -0.3948 0.6191 -0.6789 +vn -0.6161 0.6532 -0.4402 +vn 
-0.6164 0.6530 -0.4400 +vn -0.6167 0.6528 -0.4399 +vn -0.6158 0.6534 -0.4403 +vn -0.8213 0.5321 -0.2058 +vn -0.8215 0.5318 -0.2058 +vn -0.8211 0.5323 -0.2058 +vn -0.8216 0.5316 -0.2058 +vn -0.9609 0.2675 -0.0714 +vn -0.9612 0.2666 -0.0713 +vn -0.9613 0.2663 -0.0713 +vn -0.9608 0.2679 -0.0713 +vn -0.9925 -0.0569 -0.1085 +vn -0.9925 -0.0566 -0.1084 +vn 0.3394 0.8745 0.3466 +vn 0.3388 0.8747 0.3467 +vn 0.3398 0.8743 0.3465 +vn 0.5473 0.7702 0.3273 +vn 0.5470 0.7704 0.3274 +vn 0.5474 0.7702 0.3273 +vn 0.5470 0.7704 0.3275 +vn 0.7468 0.6015 0.2837 +vn 0.7475 0.6006 0.2836 +vn 0.7479 0.6002 0.2836 +vn 0.9096 0.3583 0.2101 +vn 0.9923 0.0586 0.1090 +vn 0.9924 0.0577 0.1088 +vn 0.9923 0.0592 0.1091 +vn -0.9678 0.2515 0.0054 +vn -0.9678 0.2518 0.0055 +vn -0.9677 0.2522 0.0056 +vn -0.8486 0.5171 0.1119 +vn -0.8495 0.5157 0.1115 +vn -0.8481 0.5178 0.1121 +vn -0.6741 0.7117 0.1978 +vn -0.6730 0.7125 0.1984 +vn -0.6747 0.7112 0.1974 +vn -0.6723 0.7131 0.1987 +vn -0.4771 0.8390 0.2616 +vn -0.4774 0.8389 0.2614 +vn -0.4775 0.8389 0.2614 +vn -0.2753 0.9115 0.3056 +vn -0.0734 0.9398 0.3337 +vn 0.1326 0.9282 0.3477 +vn 0.1321 0.9283 0.3476 +vn 0.1329 0.9281 0.3477 +vn 0.1319 0.9283 0.3476 +vn 0.9197 0.3795 0.1012 +vn 0.7803 0.6194 -0.0858 +vn 0.7794 0.6206 -0.0860 +vn 0.7789 0.6212 -0.0861 +vn 0.7808 0.6188 -0.0857 +vn 0.6094 0.7053 -0.3622 +vn 0.6081 0.7060 -0.3630 +vn 0.6101 0.7049 -0.3618 +vn 0.6075 0.7064 -0.3633 +vn 0.4256 0.6456 -0.6341 +vn 0.4253 0.6457 -0.6342 +vn 0.4258 0.6456 -0.6339 +vn 0.4251 0.6457 -0.6343 +vn 0.2385 0.4845 -0.8416 +vn 0.2384 0.4845 -0.8416 +vn 0.2389 0.4844 -0.8416 +vn 0.2384 0.4846 -0.8416 +vn 0.0337 0.4906 -0.8707 +vn 0.0336 0.4906 -0.8707 +vn -0.1842 0.6729 -0.7164 +vn -0.1847 0.6729 -0.7163 +vn -0.1850 0.6729 -0.7162 +vn -0.1840 0.6730 -0.7164 +vn -0.4111 0.7686 -0.4901 +vn -0.4102 0.7689 -0.4904 +vn -0.4095 0.7692 -0.4906 +vn -0.4119 0.7683 -0.4899 +vn -0.6342 0.7349 -0.2401 +vn -0.6343 0.7348 -0.2401 +vn -0.8360 0.5468 -0.0456 +vn 0.3631 0.9283 
0.0805 +vn 0.5682 0.8177 0.0928 +vn 0.7624 0.6393 0.1007 +vn -0.6532 0.7570 -0.0185 +vn -0.6533 0.7568 -0.0186 +vn -0.6529 0.7573 -0.0184 +vn -0.6537 0.7565 -0.0187 +vn -0.4545 0.8907 0.0060 +vn -0.4544 0.8908 0.0060 +vn -0.4543 0.8908 0.0060 +vn -0.2511 0.9675 0.0281 +vn -0.2506 0.9677 0.0282 +vn -0.2513 0.9675 0.0280 +vn -0.2505 0.9677 0.0282 +vn -0.0481 0.9977 0.0477 +vn -0.0477 0.9977 0.0478 +vn 0.1571 0.9854 0.0653 +vn 0.5900 0.7940 -0.1465 +vn 0.5906 0.7936 -0.1463 +vn 0.5911 0.7932 -0.1460 +vn 0.4077 0.8017 -0.4370 +vn 0.4083 0.8016 -0.4368 +vn 0.4075 0.8018 -0.4372 +vn 0.4086 0.8015 -0.4367 +vn 0.2237 0.6852 -0.6931 +vn 0.2245 0.6851 -0.6930 +vn 0.2250 0.6850 -0.6929 +vn 0.2231 0.6854 -0.6932 +vn 0.0213 0.6936 -0.7200 +vn 0.0203 0.6937 -0.7200 +vn 0.0198 0.6938 -0.7199 +vn 0.0218 0.6936 -0.7200 +vn -0.2021 0.8354 -0.5112 +vn -0.2025 0.8353 -0.5112 +vn -0.2018 0.8354 -0.5113 +vn -0.4315 0.8654 -0.2548 +vn -0.4310 0.8657 -0.2547 +vn -0.4306 0.8658 -0.2547 +vn 0.3866 0.9021 -0.1916 +vn 0.3860 0.9023 -0.1918 +vn 0.3857 0.9024 -0.1919 +vn -0.2252 0.9403 -0.2550 +vn -0.2254 0.9403 -0.2551 +vn -0.0218 0.9694 -0.2444 +vn -0.0216 0.9694 -0.2444 +vn -0.0215 0.9694 -0.2444 +vn -0.0222 0.9694 -0.2445 +vn 0.1821 0.9575 -0.2235 +vn 0.1820 0.9576 -0.2235 +vn 0.1825 0.9575 -0.2233 +vn 0.1816 0.9576 -0.2236 +vn 0.2043 0.8508 -0.4841 +vn 0.2046 0.8508 -0.4840 +vn 0.2047 0.8508 -0.4840 +vn 0.0019 0.8612 -0.5082 +vn 0.0023 0.8613 -0.5082 +vn 0.0015 0.8612 -0.5083 +vn 0.0028 0.8613 -0.5081 +vn 0.0176 -0.8616 0.5074 +vn 0.0176 -0.8616 0.5073 +vn -0.0060 -0.9699 0.2432 +vn -0.0062 -0.9699 0.2433 +vn -0.0058 -0.9700 0.2432 +vn 0.0371 -0.6942 0.7188 +vn -0.1859 -0.8383 0.5126 +vn -0.1861 -0.8382 0.5126 +vn -0.1862 -0.8382 0.5125 +vn -0.1858 -0.8383 0.5126 +vn -0.0336 -0.9982 -0.0487 +vn -0.0339 -0.9982 -0.0487 +vn 0.1959 -0.9555 0.2208 +vn -0.2114 -0.9433 0.2559 +vn -0.2107 -0.9435 0.2558 +vn 0.4221 -0.7973 0.4315 +vn 0.4220 -0.7973 0.4315 +vn 0.4224 -0.7972 0.4314 +vn 0.4217 
-0.7974 0.4316 +vn 0.2404 -0.6837 0.6890 +vn 0.2393 -0.6840 0.6891 +vn 0.0520 -0.4909 0.8697 +vn -0.1673 -0.6753 0.7183 +vn -0.1668 -0.6754 0.7184 +vn -0.1666 -0.6754 0.7184 +vn -0.1676 -0.6753 0.7182 +vn -0.3954 -0.7742 0.4942 +vn -0.3943 -0.7746 0.4944 +vn -0.0607 -0.9403 -0.3349 +vn -0.2381 -0.9708 -0.0282 +vn 0.4003 -0.8971 0.1870 +vn 0.3998 -0.8973 0.1872 +vn 0.6225 -0.6980 0.3538 +vn 0.6212 -0.6989 0.3546 +vn 0.6233 -0.6975 0.3534 +vn 0.6204 -0.6993 0.3550 +vn 0.4399 -0.6425 0.6275 +vn 0.4403 -0.6424 0.6273 +vn 0.2541 -0.4835 0.8377 +vn 0.0600 -0.2693 0.9612 +vn 0.0611 -0.2692 0.9611 +vn -0.1542 -0.4775 0.8650 +vn -0.1543 -0.4775 0.8650 +vn -0.3778 -0.6236 0.6844 +vn -0.3771 -0.6238 0.6846 +vn -0.3765 -0.6240 0.6848 +vn -0.0838 -0.8088 -0.5821 +vn -0.0840 -0.8088 -0.5821 +vn -0.0838 -0.8088 -0.5822 +vn -0.2642 -0.9145 -0.3065 +vn -0.2643 -0.9145 -0.3064 +vn -0.2638 -0.9145 -0.3066 +vn 0.3759 -0.9229 -0.0831 +vn 0.3757 -0.9230 -0.0831 +vn 0.3763 -0.9228 -0.0832 +vn 0.3753 -0.9232 -0.0830 +vn -0.4420 -0.8970 -0.0054 +vn 0.6015 -0.7864 0.1407 +vn 0.8053 -0.5404 0.2440 +vn 0.8042 -0.5417 0.2446 +vn 0.8035 -0.5426 0.2449 +vn 0.6377 -0.5626 0.5261 +vn 0.6367 -0.5632 0.5267 +vn 0.6383 -0.5622 0.5258 +vn 0.4538 -0.4540 0.7668 +vn 0.4535 -0.4541 0.7669 +vn 0.4534 -0.4541 0.7669 +vn 0.2630 -0.2653 0.9276 +vn 0.2624 -0.2654 0.9278 +vn -0.1448 -0.2619 0.9542 +vn -0.1458 -0.2619 0.9540 +vn -0.1465 -0.2620 0.9539 +vn -0.1441 -0.2619 0.9543 +vn -0.3666 -0.4407 0.8193 +vn -0.3661 -0.4408 0.8196 +vn -0.3657 -0.4409 0.8197 +vn -0.3669 -0.4407 0.8193 +vn -0.5889 -0.5318 0.6086 +vn -0.5896 -0.5315 0.6082 +vn -0.5885 -0.5320 0.6088 +vn -0.7986 -0.4826 0.3596 +vn -0.7989 -0.4822 0.3595 +vn -0.1017 -0.6261 -0.7731 +vn -0.1010 -0.6262 -0.7731 +vn -0.1019 -0.6261 -0.7731 +vn 0.1202 -0.7965 -0.5925 +vn 0.1209 -0.7965 -0.5924 +vn 0.1212 -0.7965 -0.5923 +vn 0.1200 -0.7966 -0.5925 +vn -0.2869 -0.7865 -0.5469 +vn -0.2871 -0.7865 -0.5469 +vn 0.3511 -0.8694 -0.3476 +vn 0.3506 -0.8696 
-0.3477 +vn 0.3502 -0.8697 -0.3478 +vn 0.3515 -0.8693 -0.3476 +vn -0.4652 -0.8453 -0.2626 +vn -0.4657 -0.8452 -0.2624 +vn 0.7884 -0.6100 0.0790 +vn 0.7880 -0.6105 0.0792 +vn -0.8126 -0.5429 0.2119 +vn 0.8168 -0.4363 0.3775 +vn 0.8178 -0.4352 0.3766 +vn 0.8184 -0.4346 0.3760 +vn 0.8161 -0.4369 0.3781 +vn 0.6481 -0.3983 0.6491 +vn 0.6489 -0.3979 0.6485 +vn 0.6495 -0.3976 0.6481 +vn 0.4614 -0.2493 0.8514 +vn 0.4610 -0.2493 0.8516 +vn 0.0631 0.1898 0.9798 +vn 0.0633 0.1897 0.9798 +vn -0.1432 -0.0392 0.9889 +vn -0.1420 -0.0391 0.9891 +vn -0.1413 -0.0390 0.9892 +vn -0.1440 -0.0392 0.9888 +vn -0.3590 -0.2418 0.9015 +vn -0.3588 -0.2418 0.9016 +vn -0.3591 -0.2418 0.9014 +vn -0.5778 -0.3761 0.7244 +vn -0.7876 -0.3888 0.4781 +vn -0.7872 -0.3891 0.4784 +vn -0.9424 -0.2419 0.2308 +vn -0.9423 -0.2424 0.2311 +vn -0.0589 0.7819 -0.6206 +vn -0.0590 0.7826 -0.6197 +vn -0.0587 0.7809 -0.6219 +vn -0.0584 0.7786 -0.6248 +vn -0.0583 0.7798 -0.6234 +vn -0.0583 0.7804 -0.6226 +vn -0.0584 0.7785 -0.6250 +vn -0.0586 0.7810 -0.6218 +vn -0.0590 0.7829 -0.6193 +vn -0.0590 0.7828 -0.6194 +vn 0.1031 -0.6167 -0.7804 +vn 0.1026 -0.6167 -0.7805 +vn 0.1025 -0.6168 -0.7805 +vn 0.1033 -0.6167 -0.7804 +vn 0.3288 -0.7480 -0.5765 +vn -0.4876 -0.7263 -0.4844 +vn -0.4883 -0.7261 -0.4841 +vn 0.5575 -0.7626 -0.3282 +vn -0.6658 -0.7193 -0.1982 +vn -0.6655 -0.7196 -0.1984 +vn -0.6653 -0.7197 -0.1985 +vn 0.7719 -0.6270 -0.1048 +vn -0.8277 -0.5591 0.0482 +vn -0.8279 -0.5589 0.0482 +vn 0.9337 -0.3581 0.0023 +vn 0.9337 -0.3580 0.0024 +vn -0.9493 -0.2725 0.1571 +vn 0.9968 -0.0422 -0.0683 +vn 0.9968 -0.0414 -0.0687 +vn 0.9957 -0.0449 -0.0815 +vn 0.9978 -0.0350 -0.0570 +vn 0.9506 -0.2551 0.1767 +vn 0.9504 -0.2558 0.1773 +vn 0.9502 -0.2562 0.1776 +vn 0.9508 -0.2546 0.1762 +vn 0.8255 -0.3085 0.4727 +vn 0.8253 -0.3086 0.4729 +vn 0.8251 -0.3089 0.4730 +vn 0.8257 -0.3084 0.4724 +vn 0.6557 -0.2183 0.7227 +vn 0.6549 -0.2186 0.7234 +vn 0.4656 -0.0371 0.8842 +vn 0.4640 -0.0373 0.8850 +vn 0.2634 0.1869 0.9464 +vn 0.2633 0.1869 
0.9464 +vn 0.0558 0.4150 0.9081 +vn 0.0559 0.4150 0.9081 +vn 0.0555 0.4150 0.9081 +vn 0.0564 0.4149 0.9081 +vn -0.1463 0.1845 0.9719 +vn -0.1464 0.1845 0.9719 +vn -0.1459 0.1845 0.9719 +vn -0.5712 -0.2064 0.7944 +vn -0.7790 -0.2753 0.5634 +vn -0.9364 -0.1958 0.2911 +vn -0.9968 0.0418 0.0685 +vn -0.9968 0.0418 0.0687 +vn -0.9978 0.0352 0.0569 +vn -0.3114 0.5795 0.7531 +vn 0.1006 0.6263 0.7731 +vn 0.1009 0.6262 0.7731 +vn 0.3031 0.6094 0.7327 +vn 0.6958 0.4798 0.5345 +vn 0.6953 0.4801 0.5349 +vn 0.6949 0.4804 0.5351 +vn 0.9762 0.1753 0.1279 +vn 0.9760 0.1758 0.1284 +vn 0.9763 0.1748 0.1275 +vn 0.9908 -0.0313 -0.1313 +vn 0.9918 -0.0398 -0.1217 +vn 0.9910 -0.0304 -0.1304 +vn 0.8995 -0.2299 -0.3714 +vn 0.8984 -0.2316 -0.3731 +vn 0.7301 -0.3927 -0.5592 +vn 0.5228 -0.5083 -0.6844 +vn 0.5237 -0.5080 -0.6839 +vn 0.5241 -0.5079 -0.6836 +vn 0.5223 -0.5085 -0.6845 +vn 0.3124 -0.5793 -0.7529 +vn 0.3120 -0.5794 -0.7530 +vn 0.3119 -0.5793 -0.7531 +vn 0.3124 -0.5792 -0.7529 +vn -0.5043 -0.5622 -0.6554 +vn -0.6953 -0.4801 -0.5349 +vn -0.6958 -0.4797 -0.5345 +vn -0.8646 -0.3518 -0.3587 +vn -0.9760 -0.1761 -0.1285 +vn -0.9910 0.0301 0.1302 +vn -0.9911 0.0296 0.1297 +vn -0.9918 0.0391 0.1215 +vn -0.8989 0.2308 0.3725 +vn -0.8987 0.2311 0.3727 +vn -0.7290 0.3937 0.5599 +vn -0.5251 0.5076 0.6831 +vn 0.5383 -0.6562 -0.5288 +vn 0.5375 -0.6566 -0.5291 +vn 0.5369 -0.6568 -0.5294 +vn 0.5390 -0.6558 -0.5285 +vn -0.6829 -0.6190 -0.3880 +vn -0.6833 -0.6187 -0.3878 +vn -0.6837 -0.6184 -0.3875 +vn 0.7547 -0.5911 -0.2845 +vn 0.7546 -0.5913 -0.2846 +vn -0.8425 -0.5270 -0.1121 +vn 0.9237 -0.3683 -0.1054 +vn 0.9238 -0.3681 -0.1055 +vn 0.9235 -0.3688 -0.1054 +vn 0.9239 -0.3678 -0.1055 +vn -0.9567 -0.2813 0.0747 +vn -0.9567 -0.2815 0.0747 +vn -0.9956 0.0478 0.0811 +vn -0.9956 0.0472 0.0812 +vn -0.9955 0.0482 0.0811 +vn -0.9956 0.0467 0.0814 +vn 0.9423 0.2421 -0.2311 +vn 0.9422 0.2425 -0.2313 +vn 0.9984 -0.0233 -0.0525 +vn 0.9556 -0.1807 0.2326 +vn 0.9559 -0.1803 0.2320 +vn 0.8304 -0.1694 0.5308 +vn 
0.8309 -0.1691 0.5302 +vn 0.6589 -0.0325 0.7515 +vn 0.6592 -0.0324 0.7512 +vn 0.2566 0.4087 0.8758 +vn 0.2566 0.4088 0.8758 +vn 0.2565 0.4088 0.8758 +vn 0.0431 0.6263 0.7784 +vn 0.0434 0.6263 0.7784 +vn 0.0437 0.6263 0.7784 +vn -0.1517 0.4034 0.9023 +vn -0.1527 0.4034 0.9022 +vn -0.1512 0.4034 0.9025 +vn -0.1531 0.4033 0.9021 +vn -0.3553 0.1706 0.9191 +vn -0.3558 0.1706 0.9189 +vn -0.3552 0.1705 0.9191 +vn -0.3560 0.1705 0.9188 +vn -0.5683 -0.0308 0.8222 +vn -0.7742 -0.1511 0.6147 +vn -0.7741 -0.1512 0.6148 +vn -0.7738 -0.1512 0.6151 +vn -0.9326 -0.1382 0.3335 +vn -0.9330 -0.1376 0.3326 +vn -0.9324 -0.1386 0.3339 +vn -0.9332 -0.1372 0.3321 +vn -0.9432 0.3170 -0.0992 +vn -0.9436 0.3160 -0.0986 +vn -0.3280 0.7482 0.5767 +vn -0.1193 0.7968 0.5924 +vn -0.1190 0.7968 0.5925 +vn 0.0832 0.8087 0.5823 +vn 0.0827 0.8087 0.5824 +vn 0.0823 0.8087 0.5825 +vn 0.2863 0.7865 0.5472 +vn 0.2860 0.7865 0.5473 +vn 0.2858 0.7866 0.5474 +vn 0.2865 0.7865 0.5471 +vn 0.4863 0.7269 0.4849 +vn 0.4876 0.7264 0.4843 +vn 0.4883 0.7262 0.4839 +vn 0.6829 0.6191 0.3877 +vn 0.6836 0.6185 0.3874 +vn 0.8549 0.4539 0.2514 +vn 0.8540 0.4551 0.2521 +vn 0.8553 0.4532 0.2511 +vn 0.9710 0.2271 0.0749 +vn 0.9714 0.2257 0.0741 +vn 0.9929 -0.0467 -0.1098 +vn 0.9061 -0.2970 -0.3014 +vn 0.9067 -0.2957 -0.3007 +vn 0.7404 -0.5083 -0.4398 +vn 0.7410 -0.5077 -0.4395 +vn 0.7399 -0.5088 -0.4401 +vn 0.7414 -0.5073 -0.4393 +vn -0.8550 -0.4538 -0.2512 +vn -0.8551 -0.4536 -0.2511 +vn -0.9712 -0.2262 -0.0745 +vn -0.9920 0.0382 0.1204 +vn -0.9929 0.0467 0.1098 +vn -0.9054 0.2983 0.3022 +vn -0.9056 0.2979 0.3020 +vn -0.9053 0.2986 0.3023 +vn -0.7401 0.5085 0.4401 +vn -0.5394 0.6554 0.5286 +vn -0.5396 0.6553 0.5285 +vn 0.9142 -0.3462 -0.2107 +vn -0.9644 -0.2645 -0.0056 +vn 0.9492 0.2728 -0.1570 +vn -0.9340 0.3573 -0.0021 +vn 0.7986 0.4825 -0.3597 +vn 0.7982 0.4830 -0.3600 +vn 0.7979 0.4834 -0.3601 +vn 0.7989 0.4821 -0.3596 +vn 0.9364 0.1960 -0.2912 +vn 0.9365 0.1956 -0.2910 +vn 0.9366 0.1954 -0.2909 +vn 0.9985 -0.0238 
-0.0496 +vn 0.9588 -0.0992 0.2663 +vn 0.9587 -0.0993 0.2667 +vn 0.6572 0.1536 0.7379 +vn 0.6576 0.1535 0.7376 +vn 0.6578 0.1535 0.7374 +vn 0.4570 0.3839 0.8024 +vn 0.2456 0.6168 0.7478 +vn 0.2446 0.6171 0.7479 +vn 0.2462 0.6167 0.7477 +vn 0.2441 0.6172 0.7480 +vn 0.0254 0.8088 0.5876 +vn 0.0251 0.8087 0.5876 +vn 0.0251 0.8087 0.5877 +vn -0.1626 0.6092 0.7762 +vn -0.3615 0.3728 0.8546 +vn -0.3612 0.3728 0.8547 +vn -0.5701 0.1455 0.8086 +vn -0.5699 0.1455 0.8088 +vn -0.5706 0.1455 0.8083 +vn -0.5693 0.1455 0.8091 +vn -0.7731 -0.0224 0.6339 +vn -0.7725 -0.0226 0.6347 +vn -0.9307 -0.0756 0.3579 +vn -0.9304 -0.0759 0.3586 +vn -0.9308 -0.0755 0.3576 +vn -0.9303 -0.0760 0.3589 +vn -0.9510 0.2542 -0.1757 +vn -0.8053 0.5403 -0.2441 +vn -0.8046 0.5412 -0.2445 +vn -0.8042 0.5417 -0.2447 +vn -0.1429 0.9263 0.3487 +vn 0.2649 0.9144 0.3062 +vn 0.2641 0.9145 0.3065 +vn 0.4655 0.8452 0.2627 +vn 0.6645 0.7203 0.1989 +vn 0.6655 0.7196 0.1984 +vn 0.8425 0.5268 0.1120 +vn 0.9929 -0.0463 -0.1095 +vn -0.9929 0.0463 0.1096 +vn -0.9143 0.3459 0.2106 +vn -0.9145 0.3456 0.2106 +vn -0.7548 0.5910 0.2845 +vn -0.7549 0.5910 0.2844 +vn -0.5575 0.7626 0.3281 +vn -0.5573 0.7627 0.3282 +vn -0.5571 0.7629 0.3282 +vn 0.9568 0.2811 -0.0747 +vn 0.9568 0.2810 -0.0747 +vn -0.9244 0.3666 0.1055 +vn -0.9242 0.3671 0.1054 +vn 0.8122 0.5435 -0.2118 +vn 0.8124 0.5433 -0.2118 +vn 0.8126 0.5429 -0.2119 +vn 0.8120 0.5439 -0.2119 +vn -0.7901 0.6079 -0.0785 +vn 0.7872 0.3891 -0.4783 +vn 0.7875 0.3888 -0.4781 +vn 0.7878 0.3885 -0.4779 +vn 0.9601 -0.0146 0.2794 +vn 0.9600 -0.0146 0.2796 +vn 0.6515 0.3366 0.6799 +vn 0.4465 0.5793 0.6819 +vn 0.4465 0.5794 0.6819 +vn 0.2286 0.7965 0.5598 +vn 0.2279 0.7966 0.5599 +vn 0.2290 0.7965 0.5596 +vn 0.0022 0.9403 0.3404 +vn 0.0021 0.9403 0.3404 +vn -0.3733 0.5627 0.7376 +vn -0.5765 0.3176 0.7529 +vn -0.5768 0.3175 0.7527 +vn -0.7740 0.1063 0.6242 +vn -0.7739 0.1063 0.6243 +vn -0.9294 -0.0112 0.3688 +vn -0.9559 0.1803 -0.2320 +vn -0.9556 0.1807 -0.2327 +vn -0.9555 0.1810 
-0.2330 +vn -0.8164 0.4366 -0.3780 +vn -0.6211 0.6989 -0.3547 +vn -0.6208 0.6991 -0.3549 +vn -0.6206 0.6992 -0.3549 +vn -0.6211 0.6988 -0.3548 +vn 0.2382 0.9708 0.0283 +vn 0.2389 0.9706 0.0282 +vn 0.6434 0.7653 -0.0202 +vn 0.8275 0.5594 -0.0481 +vn 0.8279 0.5589 -0.0481 +vn -0.5789 0.8097 0.0963 +vn 0.6224 0.7436 -0.2443 +vn 0.6222 0.7438 -0.2443 +vn 0.6221 0.7438 -0.2443 +vn 0.3964 0.7738 -0.4941 +vn 0.3956 0.7740 -0.4943 +vn 0.7791 0.2753 -0.5631 +vn 0.9305 0.0754 -0.3584 +vn 0.9303 0.0756 -0.3589 +vn 0.9306 0.0753 -0.3582 +vn 0.9988 0.0029 -0.0492 +vn 0.9593 0.0698 0.2735 +vn 0.8285 0.2604 0.4958 +vn 0.8280 0.2608 0.4965 +vn 0.6415 0.5081 0.5748 +vn 0.6420 0.5078 0.5744 +vn 0.4294 0.7482 0.5058 +vn 0.4289 0.7483 0.5060 +vn -0.2027 0.9145 0.3501 +vn -0.2033 0.9144 0.3502 +vn -0.2034 0.9143 0.3502 +vn -0.3882 0.7268 0.5666 +vn -0.3887 0.7267 0.5665 +vn -0.3877 0.7270 0.5667 +vn -0.5844 0.4800 0.6543 +vn -0.5837 0.4803 0.6547 +vn -0.5864 0.4792 0.6531 +vn -0.9298 0.0534 0.3641 +vn -0.9300 0.0532 0.3638 +vn -0.9992 -0.0038 0.0407 +vn -0.9588 0.0991 -0.2664 +vn -0.8258 0.3083 -0.4722 +vn -0.8250 0.3089 -0.4732 +vn -0.6376 0.5628 -0.5260 +vn -0.6372 0.5631 -0.5262 +vn -0.4219 0.7974 -0.4315 +vn -0.4222 0.7973 -0.4314 +vn -0.3998 0.8972 -0.1873 +vn -0.4005 0.8970 -0.1871 +vn -0.1959 0.9554 -0.2208 +vn 0.0063 0.9699 -0.2434 +vn 0.2108 0.9435 -0.2559 +vn 0.2113 0.9433 -0.2560 +vn 0.2103 0.9436 -0.2558 +vn 0.4182 0.8712 -0.2572 +vn 0.1871 0.8380 -0.5126 +vn 0.3788 0.6235 -0.6840 +vn 0.5783 0.3760 -0.7241 +vn 0.5786 0.3759 -0.7238 +vn 0.5778 0.3761 -0.7244 +vn 0.7749 0.1506 -0.6139 +vn 0.9297 0.0113 -0.3683 +vn 0.9296 0.0113 -0.3684 +vn 0.9990 0.0097 -0.0439 +vn 0.8205 0.3933 0.4148 +vn 0.6263 0.6564 0.4206 +vn 0.6277 0.6557 0.4196 +vn 0.4086 0.8698 0.2767 +vn 0.4092 0.8696 0.2765 +vn 0.1792 0.9832 0.0345 +vn 0.1801 0.9830 0.0343 +vn 0.1786 0.9833 0.0346 +vn -0.0516 0.9699 -0.2382 +vn -0.0537 0.9699 -0.2376 +vn -0.2289 0.9708 0.0718 +vn -0.2296 0.9706 0.0720 +vn -0.5997 
0.6189 0.5074 +vn -0.5984 0.6196 0.5080 +vn -0.6004 0.6184 0.5070 +vn -0.9317 0.1168 0.3440 +vn -0.9319 0.1166 0.3435 +vn -0.9989 -0.0090 0.0468 +vn -0.9599 0.0148 -0.2799 +vn -0.8314 0.1690 -0.5294 +vn -0.8314 0.1691 -0.5294 +vn -0.8308 0.1693 -0.5301 +vn -0.6490 0.3980 -0.6484 +vn -0.4398 0.6425 -0.6275 +vn -0.4403 0.6424 -0.6273 +vn -0.2205 0.8487 -0.4807 +vn -0.2206 0.8487 -0.4807 +vn -0.0188 0.8616 -0.5072 +vn -0.0181 0.8616 -0.5073 +vn -0.0176 0.8615 -0.5074 +vn -0.0193 0.8617 -0.5071 +vn 0.1673 0.6754 -0.7183 +vn 0.3664 0.4406 -0.8195 +vn 0.7715 0.0227 -0.6358 +vn 0.9298 -0.0533 -0.3641 +vn 0.9297 -0.0534 -0.3643 +vn 0.9523 0.2307 0.1996 +vn 0.9521 0.2312 0.2000 +vn 0.8092 0.5081 0.2951 +vn 0.8088 0.5085 0.2954 +vn 0.8094 0.5078 0.2950 +vn 0.6085 0.7627 0.2192 +vn 0.3846 0.9230 0.0119 +vn 0.1529 0.9552 -0.2535 +vn 0.1522 0.9553 -0.2534 +vn 0.1516 0.9554 -0.2533 +vn -0.2548 0.9434 -0.2123 +vn -0.2544 0.9435 -0.2124 +vn -0.2553 0.9433 -0.2121 +vn -0.6175 0.7193 0.3181 +vn -0.7933 0.4543 0.4054 +vn -0.7939 0.4536 0.4050 +vn -0.7929 0.4547 0.4056 +vn -0.9354 0.1756 0.3070 +vn -0.9353 0.1758 0.3071 +vn -0.9594 -0.0699 -0.2733 +vn -0.9593 -0.0700 -0.2736 +vn -0.9594 -0.0698 -0.2731 +vn -0.8335 0.0251 -0.5520 +vn -0.6555 0.2185 -0.7229 +vn -0.6551 0.2186 -0.7232 +vn -0.6557 0.2184 -0.7228 +vn -0.4538 0.4540 -0.7668 +vn -0.4537 0.4541 -0.7668 +vn -0.4534 0.4541 -0.7670 +vn -0.4539 0.4540 -0.7667 +vn -0.2402 0.6837 -0.6891 +vn -0.2405 0.6837 -0.6890 +vn -0.2406 0.6836 -0.6890 +vn -0.0376 0.6942 -0.7188 +vn -0.0370 0.6942 -0.7188 +vn 0.1537 0.4774 -0.8652 +vn 0.3571 0.2420 -0.9022 +vn 0.3566 0.2421 -0.9024 +vn 0.7734 -0.1064 -0.6249 +vn 0.9318 -0.1168 -0.3438 +vn 0.9317 -0.1169 -0.3440 +vn 0.9971 0.0381 -0.0654 +vn 0.9454 0.2990 0.1295 +vn 0.5875 0.8092 -0.0126 +vn 0.3578 0.8975 -0.2580 +vn 0.1271 0.8488 -0.5133 +vn -0.2787 0.8381 -0.4689 +vn -0.2791 0.8381 -0.4688 +vn -0.2783 0.8381 -0.4691 +vn -0.4585 0.8713 -0.1752 +vn -0.8064 0.5279 0.2665 +vn -0.8062 0.5282 
0.2666 +vn -0.8059 0.5287 0.2667 +vn -0.9402 0.2272 0.2537 +vn -0.9404 0.2268 0.2535 +vn -0.9402 0.2274 0.2537 +vn -0.9980 -0.0310 0.0555 +vn -0.9570 -0.1526 -0.2467 +vn -0.8327 -0.1192 -0.5408 +vn -0.6581 0.0324 -0.7523 +vn -0.6586 0.0323 -0.7518 +vn -0.4622 0.2492 -0.8510 +vn -0.2537 0.4835 -0.8378 +vn -0.2534 0.4835 -0.8379 +vn -0.2540 0.4833 -0.8378 +vn -0.2530 0.4836 -0.8379 +vn -0.0521 0.4908 -0.8697 +vn -0.0516 0.4908 -0.8697 +vn -0.0521 0.4907 -0.8697 +vn 0.1470 0.2620 -0.9538 +vn 0.5704 -0.1454 -0.8084 +vn 0.7771 -0.2328 -0.5847 +vn 0.7772 -0.2327 -0.5846 +vn 0.9961 0.0438 -0.0772 +vn 0.9376 0.3458 0.0376 +vn 0.7785 0.6263 -0.0402 +vn 0.7773 0.6278 -0.0402 +vn 0.5648 0.7865 -0.2498 +vn 0.5654 0.7861 -0.2496 +vn 0.5656 0.7860 -0.2495 +vn 0.3343 0.7975 -0.5022 +vn 0.3351 0.7973 -0.5021 +vn 0.3354 0.7971 -0.5020 +vn 0.1081 0.6839 -0.7215 +vn 0.1078 0.6839 -0.7216 +vn 0.1082 0.6839 -0.7215 +vn -0.1096 0.4907 -0.8644 +vn -0.2994 0.6751 -0.6743 +vn -0.2998 0.6750 -0.6742 +vn -0.4806 0.7740 -0.4122 +vn -0.4802 0.7742 -0.4124 +vn -0.8214 0.5605 0.1060 +vn -0.8214 0.5603 0.1060 +vn -0.9465 0.2649 0.1844 +vn -0.9466 0.2646 0.1843 +vn -0.9467 0.2643 0.1843 +vn -0.9960 -0.0463 0.0758 +vn -0.9520 -0.2315 -0.2001 +vn -0.8291 -0.2600 -0.4950 +vn -0.8288 -0.2602 -0.4954 +vn -0.8287 -0.2603 -0.4954 +vn -0.8292 -0.2599 -0.4948 +vn -0.2622 0.2652 -0.9279 +vn -0.2618 0.2653 -0.9279 +vn -0.2634 0.2650 -0.9276 +vn -0.0598 0.2692 -0.9612 +vn -0.0594 0.2692 -0.9612 +vn -0.0602 0.2691 -0.9612 +vn 0.1424 0.0389 -0.9890 +vn 0.3574 -0.1705 -0.9182 +vn 0.7842 -0.3509 -0.5118 +vn 0.9408 -0.2259 -0.2528 +vn 0.9270 0.3690 -0.0675 +vn 0.7617 0.6082 -0.2233 +vn 0.7616 0.6084 -0.2233 +vn 0.5447 0.6989 -0.4636 +vn 0.3161 0.6426 -0.6980 +vn 0.3162 0.6426 -0.6980 +vn 0.0940 0.4835 -0.8703 +vn -0.1195 0.2694 -0.9556 +vn -0.1189 0.2695 -0.9556 +vn -0.3124 0.4773 -0.8214 +vn -0.3134 0.4771 -0.8211 +vn -0.4975 0.6239 -0.6027 +vn -0.4979 0.6237 -0.6026 +vn -0.6767 0.6599 -0.3265 +vn -0.8372 0.5438 
-0.0579 +vn -0.8370 0.5442 -0.0579 +vn -0.8367 0.5446 -0.0580 +vn -0.9540 0.2813 0.1037 +vn -0.9960 -0.0450 0.0767 +vn -0.9451 -0.2996 -0.1303 +vn -0.9453 -0.2992 -0.1300 +vn -0.9455 -0.2988 -0.1297 +vn -0.9449 -0.3000 -0.1306 +vn -0.8209 -0.3930 -0.4143 +vn -0.8214 -0.3926 -0.4138 +vn -0.8206 -0.3933 -0.4147 +vn -0.6517 -0.3366 -0.6798 +vn -0.6518 -0.3365 -0.6797 +vn -0.6519 -0.3364 -0.6796 +vn -0.4621 -0.1758 -0.8692 +vn -0.4625 -0.1757 -0.8690 +vn -0.4628 -0.1757 -0.8689 +vn -0.4618 -0.1759 -0.8694 +vn 0.1435 -0.1848 -0.9722 +vn 0.1436 -0.1848 -0.9722 +vn 0.1431 -0.1847 -0.9723 +vn 0.5840 -0.4800 -0.6546 +vn 0.9474 -0.2623 -0.1835 +vn 0.9168 0.3587 -0.1754 +vn 0.7457 0.5409 -0.3890 +vn 0.7459 0.5407 -0.3890 +vn 0.5293 0.5628 -0.6349 +vn 0.5291 0.5628 -0.6350 +vn 0.5294 0.5628 -0.6348 +vn 0.5290 0.5629 -0.6351 +vn 0.3035 0.4541 -0.8377 +vn 0.3030 0.4542 -0.8378 +vn 0.3035 0.4541 -0.8376 +vn 0.0871 0.2653 -0.9602 +vn -0.1230 0.0402 -0.9916 +vn -0.5111 0.4409 -0.7378 +vn -0.5112 0.4408 -0.7378 +vn -0.6919 0.5316 -0.4885 +vn -0.8509 0.4833 -0.2060 +vn -0.8512 0.4829 -0.2057 +vn -0.8506 0.4836 -0.2062 +vn -0.9617 0.2732 0.0215 +vn -0.9619 0.2727 0.0216 +vn -0.9371 -0.3470 -0.0382 +vn -0.9366 -0.3482 -0.0385 +vn -0.8096 -0.5076 -0.2947 +vn -0.6424 -0.5075 -0.5743 +vn -0.6421 -0.5076 -0.5744 +vn -0.4566 -0.3839 -0.8026 +vn -0.4559 -0.3841 -0.8029 +vn -0.4570 -0.3838 -0.8024 +vn -0.2648 -0.1869 -0.9460 +vn -0.2640 -0.1870 -0.9462 +vn -0.0616 -0.1899 -0.9799 +vn -0.0618 -0.1898 -0.9799 +vn -0.0622 -0.1898 -0.9798 +vn 0.1492 -0.4035 -0.9027 +vn 0.3727 -0.5627 -0.7379 +vn 0.5988 -0.6193 -0.5078 +vn 0.9083 0.3174 -0.2723 +vn 0.7331 0.4360 -0.5220 +vn 0.7333 0.4358 -0.5218 +vn 0.5178 0.3978 -0.7574 +vn 0.5182 0.3977 -0.7572 +vn 0.5177 0.3978 -0.7575 +vn 0.2951 0.2492 -0.9224 +vn -0.0412 0.9991 -0.0080 +vn -0.7025 0.3760 -0.6042 +vn -0.8620 0.3893 -0.3246 +vn -0.8619 0.3894 -0.3247 +vn -0.9685 0.2433 -0.0532 +vn -0.9934 -0.0463 0.1045 +vn -0.9273 -0.3682 0.0675 +vn -0.9274 
-0.3680 0.0675 +vn -0.6281 -0.6554 -0.4195 +vn -0.6277 -0.6556 -0.4197 +vn -0.6284 -0.6552 -0.4194 +vn -0.4453 -0.5796 -0.6825 +vn -0.4459 -0.5794 -0.6823 +vn -0.0551 -0.4147 -0.9083 +vn -0.0548 -0.4148 -0.9083 +vn -0.0546 -0.4147 -0.9083 +vn -0.0554 -0.4147 -0.9083 +vn 0.3895 -0.7263 -0.5664 +vn 0.6165 -0.7201 -0.3184 +vn 0.6167 -0.7200 -0.3183 +vn 0.6170 -0.7198 -0.3182 +vn 0.8211 -0.5608 -0.1060 +vn 0.8205 -0.5617 -0.1059 +vn 0.8216 -0.5602 -0.1061 +vn 0.7238 0.3084 -0.6173 +vn 0.7239 0.3084 -0.6172 +vn 0.5106 0.2183 -0.8316 +vn 0.5110 0.2183 -0.8314 +vn 0.5113 0.2183 -0.8312 +vn 0.1634 0.9862 -0.0247 +vn 0.1638 0.9862 -0.0247 +vn 0.1649 0.9862 -0.0152 +vn -0.2433 0.9699 0.0091 +vn -0.2436 0.9698 0.0091 +vn -0.7100 0.2059 -0.6735 +vn -0.7092 0.2062 -0.6742 +vn -0.7103 0.2058 -0.6731 +vn -0.7087 0.2065 -0.6746 +vn -0.9744 0.1950 -0.1120 +vn -0.9742 0.1958 -0.1127 +vn -0.9172 -0.3578 0.1753 +vn -0.7782 -0.6268 0.0400 +vn -0.7785 -0.6263 0.0399 +vn -0.6072 -0.7635 -0.2197 +vn -0.6082 -0.7629 -0.2193 +vn -0.6067 -0.7639 -0.2200 +vn -0.4299 -0.7480 -0.5057 +vn -0.2446 -0.6170 -0.7480 +vn -0.0424 -0.6263 -0.7785 +vn 0.1791 -0.7868 -0.5906 +vn 0.1794 -0.7868 -0.5906 +vn 0.4099 -0.8447 -0.3442 +vn 0.4087 -0.8452 -0.3443 +vn 0.4081 -0.8454 -0.3445 +vn 0.4105 -0.8444 -0.3441 +vn 0.6380 -0.7636 -0.0995 +vn 0.6378 -0.7638 -0.0995 +vn 0.8356 -0.5462 0.0587 +vn 0.8360 -0.5456 0.0585 +vn 0.9691 -0.2413 0.0519 +vn 0.9690 -0.2414 0.0519 +vn 0.9690 -0.2413 0.0519 +vn 0.9691 -0.2411 0.0518 +vn 0.9904 0.0245 -0.1360 +vn 0.8960 0.1806 -0.4056 +vn 0.8963 0.1802 -0.4051 +vn 0.8957 0.1809 -0.4061 +vn 0.7187 0.1691 -0.6744 +vn 0.7192 0.1689 -0.6739 +vn 0.7182 0.1693 -0.6749 +vn 0.5079 0.0326 -0.8608 +vn 0.5076 0.0326 -0.8610 +vn 0.3683 0.9288 -0.0410 +vn 0.3700 0.9284 -0.0342 +vn -0.2423 0.9700 0.0223 +vn -0.4493 0.8930 0.0270 +vn -0.8747 0.1510 -0.4605 +vn -0.9784 0.1378 -0.1539 +vn -0.9910 -0.0349 0.1290 +vn -0.9090 -0.3161 0.2716 +vn -0.9084 -0.3173 0.2721 +vn -0.9093 -0.3154 0.2713 
+vn -0.5872 -0.8094 0.0127 +vn -0.5860 -0.8102 0.0124 +vn -0.5854 -0.8106 0.0123 +vn -0.4093 -0.8695 -0.2766 +vn -0.2271 -0.7967 -0.5600 +vn -0.2268 -0.7968 -0.5601 +vn -0.2274 -0.7967 -0.5599 +vn -0.0251 -0.8088 -0.5876 +vn -0.0254 -0.8088 -0.5876 +vn 0.2042 -0.9141 -0.3503 +vn 0.6593 -0.7416 0.1240 +vn 0.6587 -0.7421 0.1243 +vn 0.8508 -0.4834 0.2061 +vn 0.8511 -0.4829 0.2060 +vn 0.9744 -0.1950 0.1120 +vn 0.9739 -0.1964 0.1133 +vn 0.9737 -0.1973 0.1141 +vn 0.9747 -0.1942 0.1112 +vn 0.9905 0.0237 -0.1351 +vn 0.5748 0.8163 -0.0567 +vn 0.5750 0.8164 -0.0532 +vn 0.5747 0.8164 -0.0568 +vn -0.4480 0.8931 0.0413 +vn -0.4477 0.8932 0.0412 +vn -0.6492 0.7593 0.0449 +vn -0.6499 0.7587 0.0450 +vn -0.6487 0.7597 0.0449 +vn -0.8763 0.0227 -0.4812 +vn -0.8759 0.0228 -0.4819 +vn -0.8765 0.0226 -0.4808 +vn -0.9809 0.0754 -0.1792 +vn -0.9806 0.0759 -0.1806 +vn -0.9804 0.0762 -0.1815 +vn -0.9905 -0.0240 0.1354 +vn -0.9013 -0.2555 0.3497 +vn -0.9018 -0.2548 0.3491 +vn -0.9010 -0.2561 0.3503 +vn -0.9021 -0.2542 0.3487 +vn -0.7440 -0.5426 0.3898 +vn -0.7451 -0.5415 0.3894 +vn -0.7435 -0.5433 0.3900 +vn -0.3836 -0.9234 -0.0123 +vn -0.3842 -0.9232 -0.0121 +vn -0.2045 -0.9264 -0.3163 +vn -0.0028 -0.9404 -0.3401 +vn -0.0023 -0.9403 -0.3402 +vn 0.2291 -0.9707 -0.0719 +vn 0.2288 -0.9708 -0.0718 +vn 0.2297 -0.9706 -0.0720 +vn 0.4582 -0.8714 0.1754 +vn 0.6763 -0.6602 0.3268 +vn 0.9778 -0.1391 0.1565 +vn 0.9780 -0.1388 0.1560 +vn 0.9781 -0.1385 0.1555 +vn 0.9777 -0.1394 0.1568 +vn 0.9901 0.0021 -0.1400 +vn 0.8913 0.0149 -0.4532 +vn 0.5758 0.8159 -0.0530 +vn -0.6494 0.7581 0.0598 +vn -0.6497 0.7578 0.0599 +vn -0.6500 0.7576 0.0599 +vn -0.8317 0.5517 0.0624 +vn -0.8319 0.5514 0.0625 +vn -0.8957 -0.1808 0.4061 +vn -0.7324 -0.4365 0.5225 +vn -0.7316 -0.4373 0.5231 +vn -0.7329 -0.4361 0.5223 +vn -0.7311 -0.4377 0.5234 +vn -0.5452 -0.6985 0.4635 +vn -0.5467 -0.6977 0.4630 +vn -0.5475 -0.6973 0.4626 +vn -0.1782 -0.9834 -0.0347 +vn 0.2549 -0.9434 0.2124 +vn 0.4815 -0.7737 0.4119 +vn 0.4815 -0.7736 
0.4118 +vn 0.6918 -0.5317 0.4885 +vn 0.6913 -0.5321 0.4889 +vn 0.8701 -0.2751 0.4089 +vn 0.9902 0.0018 -0.1397 +vn 0.9222 0.3782 -0.0812 +vn 0.7694 0.6349 -0.0709 +vn -0.0001 0.9699 0.2436 +vn -0.0006 0.9701 0.2427 +vn 0.2105 0.9309 0.2985 +vn 0.2102 0.9310 0.2985 +vn -0.8926 -0.0993 0.4399 +vn -0.8929 -0.0991 0.4392 +vn -0.7238 -0.3084 0.6173 +vn -0.5309 -0.5620 0.6342 +vn -0.5299 -0.5624 0.6347 +vn -0.5315 -0.5618 0.6339 +vn -0.3344 -0.7975 0.5022 +vn -0.3337 -0.7977 0.5024 +vn 0.0508 -0.9699 0.2381 +vn 0.0511 -0.9699 0.2380 +vn 0.2780 -0.8383 0.4691 +vn 0.2789 -0.8381 0.4688 +vn 0.2794 -0.8381 0.4686 +vn 0.4985 -0.6235 0.6023 +vn 0.7034 -0.3755 0.6035 +vn 0.7023 -0.3762 0.6044 +vn 0.7016 -0.3767 0.6049 +vn 0.8751 -0.1508 0.4598 +vn 0.9212 0.3797 -0.0848 +vn -0.0007 0.9700 0.2430 +vn -0.0000 0.8934 0.4493 +vn 0.4289 0.8357 0.3429 +vn 0.4281 0.8362 0.3427 +vn 0.4294 0.8354 0.3430 +vn -0.9949 -0.0580 0.0829 +vn -0.7210 -0.1682 0.6722 +vn -0.5178 -0.3979 0.7573 +vn -0.5176 -0.3979 0.7575 +vn -0.5172 -0.3980 0.7577 +vn -0.3153 -0.6427 0.6982 +vn -0.3163 -0.6425 0.6979 +vn -0.3169 -0.6424 0.6977 +vn -0.3147 -0.6428 0.6984 +vn -0.1282 -0.8486 0.5133 +vn -0.1288 -0.8486 0.5132 +vn 0.0761 -0.8617 0.5017 +vn 0.0754 -0.8616 0.5019 +vn 0.5111 -0.4411 0.7377 +vn 0.5117 -0.4408 0.7374 +vn 0.5121 -0.4407 0.7372 +vn 0.7085 -0.2064 0.6749 +vn 0.7089 -0.2062 0.6745 +vn 0.7082 -0.2064 0.6751 +vn 0.8770 -0.0225 0.4799 +vn 0.9593 -0.2721 -0.0762 +vn 0.5604 0.8276 0.0318 +vn -0.0000 0.9861 -0.1659 +vn -0.2104 0.9309 0.2985 +vn -0.2105 0.9309 0.2985 +vn -0.2107 0.9308 0.2986 +vn -0.2100 0.9310 0.2984 +vn 0.6451 0.6692 0.3689 +vn 0.6459 0.6683 0.3689 +vn -0.9941 -0.0582 0.0916 +vn -0.9221 -0.3785 0.0805 +vn -0.9213 -0.3801 0.0824 +vn -0.7183 -0.0252 0.6953 +vn -0.7186 -0.0252 0.6949 +vn -0.7179 -0.0253 0.6957 +vn -0.5103 -0.2182 0.8318 +vn -0.5104 -0.2182 0.8318 +vn -0.5105 -0.2182 0.8317 +vn -0.1094 -0.6838 0.7214 +vn -0.1081 -0.6838 0.7216 +vn -0.1075 -0.6838 0.7217 +vn 0.0974 
-0.6942 0.7132 +vn 0.5196 -0.2417 0.8195 +vn 0.8316 -0.5518 -0.0624 +vn 0.8319 -0.5515 -0.0625 +vn 0.7262 0.6852 -0.0556 +vn 0.2417 0.9701 0.0223 +vn -0.4278 0.8364 0.3427 +vn -0.4283 0.8361 0.3428 +vn -0.4286 0.8359 0.3429 +vn 0.8308 0.4205 0.3647 +vn 0.8309 0.4202 0.3647 +vn 0.8306 0.4208 0.3647 +vn -0.7688 -0.6356 0.0707 +vn -0.7697 -0.6345 0.0708 +vn -0.5077 -0.0327 0.8609 +vn -0.2963 -0.2490 0.9221 +vn -0.2960 -0.2490 0.9222 +vn -0.0946 -0.4834 0.8703 +vn -0.0948 -0.4835 0.8702 +vn 0.1107 -0.4908 0.8642 +vn 0.3193 -0.2619 0.9107 +vn 0.3192 -0.2619 0.9108 +vn 0.5220 -0.0362 0.8522 +vn 0.5218 -0.0362 0.8523 +vn 0.8301 -0.5523 -0.0765 +vn 0.8297 -0.5530 -0.0764 +vn 0.8294 -0.5534 -0.0764 +vn 0.8304 -0.5519 -0.0765 +vn 0.8649 0.4788 -0.1506 +vn -0.0000 0.8160 -0.5780 +vn -0.0729 0.0396 -0.9966 +vn -0.6461 0.6682 0.3690 +vn -0.6455 0.6688 0.3689 +vn 0.9406 0.1097 0.3214 +vn 0.9405 0.1102 0.3215 +vn -0.5754 -0.8159 0.0567 +vn -0.5748 -0.8164 0.0565 +vn -0.5761 -0.8156 0.0531 +vn -0.0864 -0.2651 0.9604 +vn 0.3229 -0.0392 0.9456 +vn 0.3233 -0.0391 0.9455 +vn 0.4471 -0.8941 -0.0268 +vn 0.6496 -0.7579 -0.0599 +vn -0.0000 -0.5520 -0.8338 +vn -0.2845 0.0374 -0.9580 +vn -0.2848 0.0374 -0.9578 +vn -0.0772 0.2659 -0.9609 +vn 0.5285 0.0360 -0.8482 +vn 0.5295 0.0358 -0.8476 +vn 0.5301 0.0357 -0.8472 +vn 0.9472 -0.2078 0.2441 +vn -0.5761 -0.8157 0.0531 +vn -0.3683 -0.9288 0.0410 +vn -0.0825 -0.0396 0.9958 +vn 0.1229 -0.0402 0.9916 +vn 0.1226 -0.0403 0.9916 +vn 0.2452 -0.9694 -0.0093 +vn 0.2455 -0.9694 -0.0093 +vn 0.4457 -0.8943 -0.0411 +vn -0.6452 -0.6691 -0.3689 +vn -0.5003 0.0330 -0.8652 +vn -0.2884 0.2505 -0.9242 +vn -0.0850 0.4846 -0.8706 +vn 0.1064 0.6938 -0.7123 +vn 0.1054 0.6939 -0.7123 +vn 0.1069 0.6938 -0.7122 +vn 0.3208 0.4758 -0.8189 +vn 0.3205 0.4759 -0.8190 +vn 0.5267 0.2402 -0.8154 +vn 0.5270 0.2401 -0.8153 +vn 0.7174 0.0305 -0.6960 +vn 0.7179 0.0304 -0.6954 +vn 0.7182 0.0304 -0.6952 +vn 0.7171 0.0306 -0.6963 +vn 0.8290 0.5541 0.0763 +vn 0.8644 -0.4798 0.1502 +vn 
-0.1623 -0.9864 0.0246 +vn -0.1633 -0.9863 0.0246 +vn -0.1627 -0.9866 0.0150 +vn 0.0395 -0.9992 0.0081 +vn 0.2446 -0.9694 -0.0225 +vn -0.9220 0.3778 -0.0849 +vn -0.9218 0.3781 -0.0849 +vn -0.5033 0.2205 -0.8355 +vn -0.2962 0.4564 -0.8390 +vn -0.2954 0.4565 -0.8392 +vn -0.0988 0.6854 -0.7215 +vn 0.0868 0.8613 -0.5007 +vn 0.0876 0.8613 -0.5005 +vn 0.0880 0.8613 -0.5004 +vn 0.3075 0.6729 -0.6728 +vn 0.3069 0.6730 -0.6730 +vn 0.5190 0.4379 -0.7341 +vn 0.7154 0.2038 -0.6683 +vn 0.7150 0.2040 -0.6687 +vn 0.7145 0.2041 -0.6692 +vn 0.8803 0.0223 -0.4740 +vn 0.9590 0.2693 0.0883 +vn 0.7257 -0.6858 0.0554 +vn 0.7261 -0.6853 0.0556 +vn 0.6452 -0.6691 -0.3689 +vn -0.9941 0.0585 -0.0916 +vn -0.9941 0.0577 -0.0915 +vn -0.7122 0.1720 -0.6806 +vn -0.5081 0.4021 -0.7617 +vn -0.5080 0.4021 -0.7617 +vn -0.3072 0.6458 -0.6990 +vn -0.3066 0.6459 -0.6991 +vn -0.3062 0.6460 -0.6992 +vn -0.1163 0.8508 -0.5124 +vn -0.1168 0.8508 -0.5124 +vn 0.0646 0.9694 -0.2370 +vn 0.0644 0.9694 -0.2371 +vn 0.0649 0.9693 -0.2370 +vn 0.0642 0.9693 -0.2371 +vn 0.2903 0.8352 -0.4671 +vn 0.2895 0.8353 -0.4673 +vn 0.2909 0.8351 -0.4669 +vn 0.5068 0.6195 -0.5995 +vn 0.5059 0.6198 -0.6000 +vn 0.5073 0.6194 -0.5992 +vn 0.7098 0.3713 -0.5985 +vn 0.8788 0.1481 -0.4536 +vn 0.8792 0.1479 -0.4530 +vn 0.8794 0.1477 -0.4527 +vn 0.9834 0.0109 -0.1813 +vn -0.8639 -0.4809 0.1498 +vn -0.0000 -0.9865 0.1636 +vn -0.0000 -0.9866 0.1632 +vn -0.0028 -0.9699 -0.2435 +vn -0.0001 -0.9694 -0.2456 +vn -0.0000 -0.5520 -0.8339 +vn -0.9588 -0.2699 -0.0883 +vn -0.9590 -0.2694 -0.0883 +vn -0.9586 -0.2706 -0.0883 +vn -0.9888 0.0024 -0.1489 +vn -0.8884 0.1028 -0.4475 +vn -0.7171 0.3137 -0.6224 +vn -0.5195 0.5687 -0.6377 +vn -0.5207 0.5683 -0.6371 +vn -0.5188 0.5690 -0.6380 +vn -0.3236 0.8020 -0.5021 +vn -0.3238 0.8019 -0.5021 +vn -0.3242 0.8018 -0.5020 +vn -0.1403 0.9575 -0.2522 +vn 0.2669 0.9402 -0.2118 +vn 0.2676 0.9400 -0.2116 +vn 0.2663 0.9403 -0.2119 +vn 0.9216 -0.3788 0.0849 +vn 0.9214 -0.3792 0.0849 +vn 0.9213 -0.3794 0.0849 +vn 
-0.7266 -0.6848 0.0559 +vn 0.3808 -0.9180 -0.1105 +vn 0.3806 -0.9181 -0.1106 +vn -0.8292 -0.5537 -0.0764 +vn -0.8291 -0.5538 -0.0763 +vn -0.8289 -0.5542 -0.0763 +vn -0.8294 -0.5534 -0.0764 +vn -0.9838 -0.0107 0.1791 +vn -0.9888 0.0170 -0.1483 +vn -0.7268 0.4428 -0.5251 +vn -0.7260 0.4435 -0.5256 +vn -0.7254 0.4441 -0.5259 +vn -0.5365 0.7050 -0.4638 +vn -0.5365 0.7050 -0.4637 +vn -0.3467 0.9022 -0.2564 +vn -0.3462 0.9024 -0.2565 +vn -0.1661 0.9854 0.0367 +vn 0.2426 0.9675 0.0720 +vn 0.2423 0.9675 0.0720 +vn 0.2429 0.9674 0.0720 +vn 0.4686 0.8658 -0.1753 +vn 0.6855 0.6520 -0.3241 +vn 0.6861 0.6515 -0.3239 +vn 0.8668 0.3820 -0.3206 +vn 0.8667 0.3820 -0.3207 +vn 0.9805 0.1311 -0.1465 +vn 0.9806 0.1308 -0.1461 +vn 0.9804 0.1313 -0.1470 +vn 0.8873 -0.0154 0.4610 +vn -0.0000 -0.8159 0.5781 +vn -0.6490 -0.7584 -0.0598 +vn -0.8798 -0.0222 0.4748 +vn -0.8798 -0.0220 0.4748 +vn -0.9893 0.0309 -0.1426 +vn -0.9894 0.0306 -0.1422 +vn -0.9894 0.0304 -0.1419 +vn -0.8968 0.2647 -0.3547 +vn -0.8968 0.2645 -0.3546 +vn -0.8967 0.2647 -0.3548 +vn -0.5554 0.7938 -0.2478 +vn -0.5548 0.7942 -0.2479 +vn -0.3712 0.9284 0.0154 +vn 0.4455 0.8911 0.0861 +vn 0.4461 0.8908 0.0862 +vn 0.4453 0.8913 0.0860 +vn 0.4464 0.8907 0.0862 +vn 0.8566 0.4740 -0.2038 +vn 0.8563 0.4744 -0.2041 +vn 0.8574 0.4728 -0.2033 +vn 0.8889 -0.1024 0.4465 +vn 0.8887 -0.1026 0.4469 +vn 0.7108 -0.0257 0.7029 +vn 0.5761 -0.8157 0.0531 +vn -0.3800 -0.9183 -0.1108 +vn -0.3803 -0.9182 -0.1107 +vn -0.3798 -0.9184 -0.1109 +vn -0.3806 -0.9181 -0.1106 +vn -0.1963 -0.9640 -0.1796 +vn -0.1960 -0.9640 -0.1797 +vn -0.1964 -0.9639 -0.1796 +vn -0.4464 -0.8939 -0.0411 +vn -0.9801 -0.1321 0.1483 +vn -0.9903 0.0427 -0.1321 +vn -0.9904 0.0418 -0.1314 +vn -0.9903 0.0431 -0.1324 +vn -0.9032 0.3301 -0.2745 +vn -0.5762 0.8172 -0.0085 +vn -0.5763 0.8172 -0.0086 +vn -0.5768 0.8168 -0.0087 +vn -0.3937 0.8750 0.2818 +vn -0.3941 0.8748 0.2817 +vn -0.2119 0.7984 0.5637 +vn -0.0254 0.6261 0.7794 +vn 0.1957 0.7839 0.5892 +vn 0.1965 0.7839 0.5890 +vn 
0.4227 0.8396 0.3412 +vn 0.4232 0.8394 0.3412 +vn 0.4235 0.8392 0.3411 +vn 0.4225 0.8397 0.3412 +vn 0.6478 0.7556 0.0969 +vn 0.6483 0.7552 0.0970 +vn 0.9716 0.2312 -0.0504 +vn 0.9717 0.2308 -0.0502 +vn 0.9715 0.2316 -0.0506 +vn 0.8918 -0.1869 0.4119 +vn 0.7123 -0.1720 0.6805 +vn 0.4993 -0.0330 0.8658 +vn 0.5002 -0.0330 0.8653 +vn 0.3686 -0.9290 0.0340 +vn -0.2443 -0.9694 -0.0225 +vn -0.2440 -0.9695 -0.0225 +vn -0.5295 -0.0359 0.8475 +vn -0.7157 -0.2037 0.6680 +vn -0.9768 -0.1857 0.1068 +vn -0.9768 -0.1858 0.1068 +vn -0.9124 0.3708 -0.1735 +vn -0.7695 0.6377 -0.0351 +vn -0.7704 0.6365 -0.0351 +vn -0.5970 0.7698 0.2256 +vn -0.5962 0.7704 0.2260 +vn -0.2283 0.6185 0.7519 +vn 0.1792 0.6073 0.7740 +vn 0.4031 0.7221 0.5623 +vn 0.9652 0.2609 0.0203 +vn 0.9652 0.2607 0.0203 +vn 0.9914 -0.0539 0.1196 +vn 0.9914 -0.0538 0.1196 +vn 0.5020 -0.2206 0.8362 +vn 0.1627 -0.9866 0.0150 +vn 0.1626 -0.9866 0.0150 +vn -0.3325 -0.0390 0.9423 +vn -0.3321 -0.0391 0.9424 +vn -0.3327 -0.0390 0.9422 +vn -0.5262 -0.2402 0.8157 +vn -0.7098 -0.3715 0.5984 +vn -0.7102 -0.3713 0.5981 +vn -0.7095 -0.3717 0.5988 +vn -0.9230 0.3797 -0.0620 +vn -0.9224 0.3812 -0.0621 +vn -0.7862 0.6000 0.1475 +vn -0.6141 0.6626 0.4288 +vn -0.4306 0.5827 0.6893 +vn -0.4311 0.5825 0.6891 +vn -0.4315 0.5824 0.6890 +vn -0.4303 0.5828 0.6894 +vn -0.2401 0.4095 0.8802 +vn -0.2399 0.4096 0.8802 +vn 0.3883 0.5589 0.7327 +vn 0.3893 0.5587 0.7323 +vn 0.3899 0.5586 0.7321 +vn 0.3877 0.5591 0.7329 +vn 0.6130 0.6115 0.5003 +vn 0.6122 0.6120 0.5007 +vn 0.6134 0.6113 0.5001 +vn 0.8154 0.5175 0.2593 +vn 0.8158 0.5170 0.2593 +vn 0.9031 -0.3299 0.2748 +vn 0.9035 -0.3292 0.2746 +vn 0.7263 -0.4433 0.5254 +vn 0.0742 -0.0398 0.9965 +vn 0.0746 -0.0398 0.9964 +vn 0.0747 -0.0397 0.9964 +vn -0.1310 -0.0403 0.9906 +vn -0.3286 -0.2609 0.9077 +vn -0.3290 -0.2609 0.9076 +vn -0.3292 -0.2608 0.9075 +vn -0.5182 -0.4383 0.7344 +vn -0.5185 -0.4382 0.7343 +vn -0.6999 -0.5250 0.4843 +vn -0.6989 -0.5257 0.4850 +vn -0.7003 -0.5246 0.4841 +vn -0.6984 
-0.5260 0.4853 +vn -0.9659 -0.2582 -0.0210 +vn -0.9945 0.0615 -0.0847 +vn -0.9326 0.3578 0.0469 +vn -0.8004 0.5159 0.3055 +vn -0.8005 0.5158 0.3054 +vn -0.6288 0.5127 0.5846 +vn -0.6283 0.5129 0.5849 +vn -0.4414 0.3858 0.8101 +vn -0.4417 0.3858 0.8100 +vn -0.2462 0.1873 0.9509 +vn 0.3795 0.3701 0.8480 +vn 0.3795 0.3701 0.8479 +vn 0.6001 0.4737 0.6446 +vn 0.5991 0.4741 0.6452 +vn 0.5983 0.4744 0.6457 +vn 0.9519 0.2513 0.1752 +vn 0.9525 0.2495 0.1748 +vn 0.9945 -0.0615 0.0848 +vn 0.9123 -0.3710 0.1737 +vn 0.7393 -0.5491 0.3898 +vn 0.7389 -0.5496 0.3899 +vn 0.5191 -0.5689 0.6379 +vn 0.5198 -0.5687 0.6375 +vn 0.5187 -0.5691 0.6380 +vn 0.2955 -0.4566 0.8392 +vn 0.2957 -0.4565 0.8391 +vn 0.0773 -0.2658 0.9609 +vn -0.1293 -0.2690 0.9544 +vn -0.3209 -0.4759 0.8189 +vn -0.3208 -0.4759 0.8189 +vn -0.5065 -0.6194 0.5998 +vn -0.5060 -0.6196 0.6001 +vn -0.6845 -0.6527 0.3245 +vn -0.6844 -0.6529 0.3246 +vn -0.6841 -0.6531 0.3248 +vn -0.8446 -0.5322 0.0583 +vn -0.8449 -0.5317 0.0583 +vn -0.8451 -0.5314 0.0582 +vn -0.9583 -0.2680 -0.0987 +vn -0.9585 -0.2675 -0.0988 +vn -0.9585 -0.2674 -0.0988 +vn -0.9583 -0.2682 -0.0987 +vn -0.9960 0.0582 -0.0672 +vn -0.9409 0.3078 0.1412 +vn -0.8110 0.3996 0.4273 +vn -0.8109 0.3997 0.4274 +vn -0.6395 0.3392 0.6899 +vn -0.4467 0.1767 0.8770 +vn 0.3737 0.1693 0.9120 +vn 0.3740 0.1693 0.9118 +vn 0.3742 0.1693 0.9117 +vn 0.5895 0.3144 0.7441 +vn 0.5893 0.3145 0.7442 +vn 0.5891 0.3146 0.7443 +vn 0.5898 0.3144 0.7439 +vn 0.9459 0.2164 0.2416 +vn 0.9464 0.2153 0.2410 +vn 0.9457 0.2170 0.2419 +vn 0.9466 0.2146 0.2406 +vn 0.9961 -0.0571 0.0676 +vn 0.9961 -0.0568 0.0676 +vn 0.9222 -0.3819 0.0617 +vn 0.7542 -0.6181 0.2215 +vn 0.5349 -0.7060 0.4641 +vn 0.5345 -0.7063 0.4642 +vn 0.3091 -0.6453 0.6986 +vn 0.3085 -0.6454 0.6988 +vn 0.3095 -0.6452 0.6986 +vn 0.3082 -0.6454 0.6989 +vn 0.0836 -0.4845 0.8708 +vn 0.0852 -0.4846 0.8706 +vn 0.0826 -0.4845 0.8709 +vn 0.0862 -0.4846 0.8705 +vn -0.1182 -0.4907 0.8633 +vn -0.1170 -0.4909 0.8634 +vn -0.1217 -0.4902 0.8630 
+vn -0.3084 -0.6727 0.6726 +vn -0.3086 -0.6727 0.6725 +vn -0.4909 -0.7687 0.4100 +vn -0.4901 -0.7690 0.4105 +vn -0.6666 -0.7349 0.1248 +vn -0.8308 -0.5472 -0.1021 +vn -0.8303 -0.5479 -0.1020 +vn -0.8311 -0.5467 -0.1021 +vn -0.9516 -0.2523 -0.1758 +vn -0.9974 0.0501 -0.0516 +vn -0.9471 0.2388 0.2144 +vn -0.8177 0.2651 0.5109 +vn -0.8175 0.2653 0.5112 +vn -0.6447 0.1552 0.7486 +vn -0.6450 0.1551 0.7482 +vn -0.4486 -0.0374 0.8930 +vn -0.4480 -0.0375 0.8933 +vn 0.1632 -0.2610 0.9515 +vn 0.5847 0.1437 0.7984 +vn 0.5851 0.1437 0.7982 +vn 0.5845 0.1437 0.7985 +vn 0.7887 0.2277 0.5711 +vn 0.9416 0.1673 0.2923 +vn 0.9413 0.1678 0.2928 +vn 0.9418 0.1669 0.2919 +vn 0.9974 -0.0495 0.0522 +vn 0.9974 -0.0492 0.0524 +vn 0.9974 -0.0490 0.0525 +vn 0.9319 -0.3595 -0.0475 +vn 0.5540 -0.7947 0.2480 +vn 0.3261 -0.8011 0.5018 +vn 0.0962 -0.6856 0.7216 +vn 0.0959 -0.6856 0.7216 +vn 0.0956 -0.6856 0.7216 +vn -0.1048 -0.6939 0.7124 +vn -0.1051 -0.6938 0.7125 +vn -0.1036 -0.6940 0.7125 +vn -0.2894 -0.8353 0.4675 +vn -0.2898 -0.8352 0.4673 +vn -0.4701 -0.8652 0.1747 +vn -0.4695 -0.8654 0.1749 +vn -0.4692 -0.8656 0.1750 +vn -0.4706 -0.8649 0.1745 +vn -0.6475 -0.7559 -0.0967 +vn -0.6466 -0.7567 -0.0965 +vn -0.8159 -0.5167 -0.2594 +vn -0.8157 -0.5170 -0.2594 +vn -0.9463 -0.2154 -0.2409 +vn -0.9984 0.0398 -0.0391 +vn -0.9513 0.1587 0.2644 +vn -0.9513 0.1586 0.2642 +vn -0.8212 0.1214 0.5576 +vn -0.6453 -0.0328 0.7632 +vn -0.6460 -0.0327 0.7626 +vn -0.4446 -0.2505 0.8600 +vn -0.0198 -0.6939 0.7198 +vn -0.0201 -0.6939 0.7198 +vn 0.1714 -0.4759 0.8627 +vn 0.3748 -0.2402 0.8954 +vn 0.5842 -0.0304 0.8111 +vn 0.7838 0.1046 0.6121 +vn 0.7848 0.1042 0.6109 +vn 0.9386 0.1106 0.3269 +vn 0.9386 0.1106 0.3267 +vn 0.9984 -0.0395 0.0393 +vn 0.9411 -0.3073 -0.1409 +vn 0.7855 -0.6009 -0.1477 +vn 0.7863 -0.6000 -0.1475 +vn 0.5754 -0.8178 0.0082 +vn 0.5755 -0.8177 0.0082 +vn 0.5758 -0.8175 0.0083 +vn 0.1157 -0.8509 0.5125 +vn 0.1161 -0.8508 0.5124 +vn -0.0876 -0.8613 0.5006 +vn -0.0869 -0.8612 0.5007 +vn -0.2656 
-0.9405 0.2117 +vn -0.2658 -0.9405 0.2116 +vn -0.2663 -0.9404 0.2115 +vn -0.2654 -0.9406 0.2118 +vn -0.4466 -0.8906 -0.0859 +vn -0.4470 -0.8904 -0.0860 +vn -0.4473 -0.8902 -0.0860 +vn -0.4464 -0.8907 -0.0858 +vn -0.6289 -0.7116 -0.3133 +vn -0.6286 -0.7118 -0.3134 +vn -0.8033 -0.4451 -0.3957 +vn -0.9418 -0.1668 -0.2919 +vn -0.9412 -0.1679 -0.2931 +vn -0.9409 -0.1685 -0.2937 +vn -0.9992 0.0262 -0.0310 +vn -0.9538 0.0724 0.2914 +vn -0.8223 -0.0256 0.5684 +vn -0.6413 -0.2205 0.7349 +vn -0.6419 -0.2203 0.7345 +vn -0.2231 -0.6855 0.6931 +vn -0.2225 -0.6855 0.6932 +vn 0.1841 -0.6730 0.7164 +vn 0.1827 -0.6731 0.7166 +vn 0.1850 -0.6729 0.7162 +vn 0.1818 -0.6733 0.7167 +vn 0.3808 -0.4382 0.8142 +vn 0.3823 -0.4380 0.8136 +vn 0.3802 -0.4383 0.8145 +vn 0.9371 0.0505 0.3453 +vn 0.9369 0.0508 0.3460 +vn 0.9480 -0.2373 -0.2123 +vn 0.9476 -0.2379 -0.2130 +vn 0.9475 -0.2382 -0.2133 +vn 0.7991 -0.5172 -0.3065 +vn 0.7990 -0.5173 -0.3066 +vn 0.5962 -0.7704 -0.2260 +vn 0.5970 -0.7699 -0.2256 +vn 0.3715 -0.9283 -0.0155 +vn -0.2419 -0.9677 -0.0718 +vn -0.2407 -0.9679 -0.0716 +vn -0.6129 -0.6117 -0.5002 +vn -0.6126 -0.6119 -0.5004 +vn -0.8210 -0.1719 0.5444 +vn -0.6350 -0.4020 0.6596 +vn -0.4261 -0.6456 0.6337 +vn -0.4260 -0.6456 0.6338 +vn -0.2042 -0.8507 0.4844 +vn -0.2048 -0.8507 0.4842 +vn -0.2038 -0.8508 0.4844 +vn 0.0220 -0.9694 0.2445 +vn 0.0221 -0.9694 0.2445 +vn 0.2022 -0.8355 0.5110 +vn 0.2008 -0.8357 0.5112 +vn 0.2029 -0.8354 0.5109 +vn 0.2001 -0.8358 0.5113 +vn 0.3932 -0.6195 0.6794 +vn 0.3952 -0.6190 0.6787 +vn 0.3921 -0.6198 0.6797 +vn 0.3963 -0.6187 0.6784 +vn 0.5945 -0.3709 0.7135 +vn 0.5949 -0.3707 0.7132 +vn 0.9367 -0.0107 0.3501 +vn 0.9525 -0.1568 -0.2612 +vn 0.8103 -0.4001 -0.4281 +vn 0.6153 -0.6618 -0.4282 +vn 0.6149 -0.6620 -0.4285 +vn 0.6148 -0.6621 -0.4286 +vn 0.3949 -0.8746 -0.2814 +vn 0.1660 -0.9854 -0.0367 +vn 0.1663 -0.9854 -0.0367 +vn -0.0385 -0.9977 -0.0555 +vn -0.2162 -0.9118 -0.3490 +vn -0.2171 -0.9116 -0.3492 +vn -0.2175 -0.9114 -0.3493 +vn -0.2159 -0.9119 
-0.3490 +vn -0.4038 -0.7218 -0.5622 +vn -0.4042 -0.7216 -0.5620 +vn -0.5994 -0.4739 -0.6451 +vn -0.7882 -0.2282 -0.5716 +vn -0.7879 -0.2282 -0.5719 +vn -0.9531 -0.1029 0.2848 +vn -0.9532 -0.1028 0.2844 +vn -0.8166 -0.3129 0.4851 +vn -0.8163 -0.3131 0.4853 +vn -0.6233 -0.5689 0.5365 +vn -0.6241 -0.5684 0.5360 +vn -0.6231 -0.5691 0.5366 +vn -0.4090 -0.8012 0.4367 +vn -0.4092 -0.8012 0.4367 +vn 0.0471 -0.9977 -0.0477 +vn 0.0458 -0.9978 -0.0480 +vn 0.4105 -0.7688 0.4903 +vn 0.4121 -0.7682 0.4900 +vn 0.4095 -0.7692 0.4906 +vn 0.4130 -0.7678 0.4897 +vn 0.6035 -0.5254 0.5997 +vn 0.6031 -0.5257 0.5999 +vn 0.7890 -0.2705 0.5517 +vn 0.9539 -0.0723 -0.2913 +vn 0.6288 -0.5126 -0.5847 +vn 0.4135 -0.7526 -0.5125 +vn 0.4140 -0.7525 -0.5123 +vn 0.4142 -0.7524 -0.5121 +vn 0.4130 -0.7527 -0.5127 +vn 0.1903 -0.9285 -0.3190 +vn 0.1909 -0.9284 -0.3188 +vn 0.1913 -0.9284 -0.3186 +vn -0.0130 -0.9400 -0.3410 +vn -0.0141 -0.9399 -0.3412 +vn -0.0145 -0.9398 -0.3413 +vn -0.0125 -0.9400 -0.3409 +vn -0.1951 -0.7842 -0.5891 +vn -0.1951 -0.7841 -0.5891 +vn -0.1952 -0.7842 -0.5891 +vn -0.5905 -0.3141 -0.7434 +vn -0.5907 -0.3140 -0.7433 +vn -0.5907 -0.3140 -0.7432 +vn -0.5904 -0.3141 -0.7435 +vn -0.9994 -0.0165 -0.0292 +vn -0.9995 -0.0169 -0.0283 +vn -0.9994 -0.0164 -0.0297 +vn -0.9499 -0.1876 0.2498 +vn -0.8076 -0.4431 0.3891 +vn -0.3856 -0.9025 0.1917 +vn -0.3848 -0.9028 0.1920 +vn -0.3877 -0.9017 0.1911 +vn -0.1551 -0.9858 -0.0649 +vn 0.2504 -0.9677 -0.0281 +vn 0.2505 -0.9677 -0.0281 +vn 0.2499 -0.9679 -0.0283 +vn 0.4311 -0.8656 0.2546 +vn 0.4314 -0.8655 0.2546 +vn 0.4318 -0.8652 0.2548 +vn 0.4307 -0.8658 0.2546 +vn 0.6188 -0.6513 0.4393 +vn 0.6177 -0.6519 0.4397 +vn 0.6194 -0.6509 0.4390 +vn 0.6171 -0.6524 0.4400 +vn 0.7966 -0.3823 0.4683 +vn 0.7967 -0.3822 0.4682 +vn 0.7964 -0.3825 0.4684 +vn 0.9394 -0.1311 0.3169 +vn 0.9393 -0.1312 0.3170 +vn 0.9995 0.0167 0.0282 +vn 0.9994 0.0163 0.0294 +vn 0.8218 -0.1213 -0.5567 +vn 0.6382 -0.3395 -0.6909 +vn 0.4299 -0.5827 -0.6897 +vn 0.4292 -0.5829 
-0.6899 +vn 0.4301 -0.5826 -0.6896 +vn 0.0091 -0.8083 -0.5888 +vn 0.0096 -0.8083 -0.5887 +vn 0.0087 -0.8083 -0.5888 +vn -0.3795 -0.3700 -0.8480 +vn -0.5857 -0.1437 -0.7977 +vn -0.5860 -0.1437 -0.7975 +vn -0.5864 -0.1437 -0.7972 +vn -0.9370 0.0721 -0.3419 +vn -0.9364 0.0727 -0.3433 +vn -0.9373 0.0717 -0.3410 +vn -0.9360 0.0730 -0.3442 +vn -0.9989 -0.0303 -0.0345 +vn -0.7948 -0.5508 0.2548 +vn -0.5899 -0.7940 0.1466 +vn -0.5908 -0.7935 0.1463 +vn -0.3611 -0.9290 -0.0803 +vn -0.3629 -0.9283 -0.0806 +vn -0.3604 -0.9293 -0.0802 +vn -0.1310 -0.9285 -0.3475 +vn -0.1312 -0.9284 -0.3476 +vn -0.1305 -0.9286 -0.3475 +vn 0.0940 -0.8083 -0.5812 +vn 0.6363 -0.7332 0.2400 +vn 0.6347 -0.7346 0.2400 +vn 0.6372 -0.7324 0.2400 +vn 0.8071 -0.4744 0.3515 +vn 0.8067 -0.4748 0.3517 +vn 0.9426 -0.1861 0.2771 +vn 0.9989 0.0300 0.0353 +vn 0.9989 0.0304 0.0347 +vn 0.8224 0.0257 -0.5684 +vn 0.8229 0.0256 -0.5676 +vn 0.6443 -0.1554 -0.7488 +vn 0.6442 -0.1554 -0.7489 +vn 0.2287 -0.6182 -0.7520 +vn 0.2282 -0.6183 -0.7520 +vn -0.5842 0.0305 -0.8111 +vn -0.7860 0.1479 -0.6003 +vn -0.9382 -0.3279 0.1105 +vn -0.9381 -0.3283 0.1107 +vn -0.7791 -0.6209 0.0864 +vn -0.7789 -0.6211 0.0864 +vn -0.5696 -0.8167 -0.0930 +vn -0.3393 -0.8745 -0.3466 +vn -0.3396 -0.8744 -0.3465 +vn -0.3390 -0.8746 -0.3466 +vn -0.1102 -0.7982 -0.5922 +vn 0.1106 -0.6259 -0.7720 +vn 0.1105 -0.6259 -0.7720 +vn 0.1100 -0.6260 -0.7720 +vn 0.2966 -0.7839 -0.5455 +vn 0.2974 -0.7838 -0.5452 +vn 0.4771 -0.8391 -0.2615 +vn 0.6542 -0.7561 0.0185 +vn 0.6537 -0.7565 0.0183 +vn 0.9478 -0.2308 0.2202 +vn 0.9481 -0.2300 0.2197 +vn 0.9476 -0.2314 0.2204 +vn 0.9501 0.1873 -0.2494 +vn 0.8211 0.1721 -0.5443 +vn 0.8208 0.1724 -0.5445 +vn 0.6462 0.0329 -0.7624 +vn 0.0387 -0.4148 -0.9091 +vn 0.0385 -0.4148 -0.9091 +vn 0.0389 -0.4147 -0.9091 +vn -0.1622 -0.1841 -0.9694 +vn -0.5872 0.2038 -0.7834 +vn -0.7900 0.2698 -0.5506 +vn -0.7904 0.2694 -0.5502 +vn -0.9970 -0.0528 -0.0571 +vn -0.9296 -0.3683 0.0098 +vn -0.5477 -0.7700 -0.3273 +vn -0.5479 -0.7699 
-0.3272 +vn -0.3173 -0.7526 -0.5770 +vn -0.3183 -0.7523 -0.5768 +vn -0.3166 -0.7528 -0.5772 +vn -0.3190 -0.7522 -0.5767 +vn -0.0931 -0.6183 -0.7804 +vn -0.0930 -0.6183 -0.7804 +vn -0.0934 -0.6184 -0.7803 +vn -0.0928 -0.6183 -0.7804 +vn 0.0545 0.7823 -0.6205 +vn 0.0544 0.7822 -0.6206 +vn 0.0546 0.7802 -0.6231 +vn 0.0546 0.7795 -0.6240 +vn 0.0549 0.7802 -0.6231 +vn 0.0547 0.7786 -0.6251 +vn 0.0552 0.7794 -0.6240 +vn 0.0551 0.7796 -0.6238 +vn 0.0552 0.7798 -0.6236 +vn 0.0552 0.7788 -0.6249 +vn 0.0547 0.7787 -0.6251 +vn 0.0550 0.7807 -0.6224 +vn 0.0547 0.7786 -0.6252 +vn 0.0546 0.7750 -0.6296 +vn 0.0546 0.7732 -0.6318 +vn 0.0546 0.7719 -0.6334 +vn 0.3135 -0.6069 -0.7303 +vn 0.3128 -0.6071 -0.7305 +vn 0.4967 -0.7216 -0.4823 +vn 0.6748 -0.7111 -0.1977 +vn 0.8347 -0.5489 0.0452 +vn 0.8343 -0.5495 0.0452 +vn 0.8351 -0.5483 0.0452 +vn 0.9970 0.0532 0.0568 +vn 0.9970 0.0533 0.0569 +vn 0.9451 0.2647 -0.1916 +vn 0.9451 0.2648 -0.1917 +vn 0.9449 0.2650 -0.1919 +vn 0.8158 0.3136 -0.4860 +vn 0.8161 0.3133 -0.4856 +vn 0.6425 0.2204 -0.7339 +vn 0.6429 0.2202 -0.7336 +vn 0.6423 0.2205 -0.7341 +vn 0.0451 -0.1897 -0.9808 +vn -0.5923 0.3719 -0.7147 +vn -0.7973 0.3817 -0.4675 +vn -0.9482 0.2297 -0.2196 +vn -0.9484 0.2292 -0.2193 +vn -0.9485 0.2288 -0.2192 +vn -0.9193 -0.3803 -0.1013 +vn -0.7473 -0.6009 -0.2836 +vn -0.7476 -0.6006 -0.2836 +vn -0.7478 -0.6003 -0.2836 +vn -0.5298 -0.6623 -0.5298 +vn -0.5301 -0.6621 -0.5297 +vn -0.3013 -0.5828 -0.7547 +vn -0.3022 -0.5827 -0.7544 +vn -0.3008 -0.5829 -0.7548 +vn -0.3027 -0.5826 -0.7543 +vn 0.3018 0.5827 0.7546 +vn 0.3032 0.5825 0.7541 +vn 0.3009 0.5828 0.7548 +vn 0.5168 0.5125 0.6858 +vn 0.7233 0.3996 0.5632 +vn 0.8946 0.2389 0.3777 +vn 0.8942 0.2394 0.3782 +vn 0.9899 0.0392 0.1364 +vn 0.9899 0.0388 0.1361 +vn 0.8698 -0.3444 -0.3532 +vn 0.8696 -0.3446 -0.3535 +vn 0.7039 -0.4737 -0.5293 +vn 0.7037 -0.4739 -0.5295 +vn 0.7036 -0.4739 -0.5295 +vn 0.5117 -0.5588 -0.6525 +vn 0.5122 -0.5588 -0.6523 +vn -0.5168 -0.5125 -0.6858 +vn -0.5167 -0.5125 
-0.6859 +vn -0.7228 -0.3998 -0.5636 +vn -0.7226 -0.4000 -0.5638 +vn -0.7231 -0.3996 -0.5635 +vn -0.9785 0.1667 0.1216 +vn -0.9782 0.1674 0.1229 +vn -0.9786 0.1665 0.1210 +vn -0.8700 0.3442 0.3529 +vn -0.7035 0.4738 0.5296 +vn -0.7030 0.4742 0.5300 +vn -0.7026 0.4744 0.5304 +vn -0.7039 0.4735 0.5294 +vn -0.5113 0.5588 0.6529 +vn -0.5115 0.5587 0.6528 +vn -0.3132 0.6069 0.7304 +vn -0.3137 0.6068 0.7303 +vn -0.3141 0.6067 0.7302 +vn -0.3129 0.6071 0.7304 +vn -0.1119 0.6259 0.7718 +vn -0.1121 0.6259 0.7718 +vn -0.1122 0.6259 0.7718 +vn 0.0937 0.6184 0.7803 +vn 0.0943 0.6184 0.7802 +vn 0.8480 -0.5180 -0.1124 +vn 0.8480 -0.5179 -0.1124 +vn 0.9955 0.0596 0.0733 +vn 0.9955 0.0599 0.0733 +vn 0.9380 0.3285 -0.1110 +vn 0.9380 0.3285 -0.1109 +vn 0.8076 0.4430 -0.3893 +vn 0.6352 0.4020 -0.6595 +vn 0.6351 0.4020 -0.6595 +vn 0.4447 0.2505 -0.8599 +vn -0.3831 0.4378 -0.8134 +vn -0.3824 0.4379 -0.8136 +vn -0.6030 0.5257 -0.6000 +vn -0.6036 0.5253 -0.5998 +vn -0.6028 0.5258 -0.6001 +vn -0.6038 0.5252 -0.5997 +vn -0.8079 0.4733 -0.3510 +vn -0.9940 -0.0604 -0.0913 +vn -0.9940 -0.0607 -0.0912 +vn -0.9092 -0.3592 -0.2106 +vn -0.9090 -0.3598 -0.2106 +vn 0.3198 0.7520 0.5765 +vn 0.5307 0.6618 0.5296 +vn 0.5313 0.6615 0.5293 +vn 0.7341 0.5161 0.4413 +vn 0.7344 0.5158 0.4412 +vn 0.9009 0.3088 0.3050 +vn 0.9910 0.0502 0.1245 +vn 0.9911 0.0491 0.1238 +vn 0.9737 -0.2163 -0.0714 +vn 0.8605 -0.4449 -0.2481 +vn -0.9010 -0.3085 -0.3049 +vn -0.9009 -0.3088 -0.3049 +vn -0.9912 -0.0485 -0.1234 +vn -0.9912 -0.0487 -0.1234 +vn -0.9738 0.2161 0.0712 +vn -0.8607 0.4447 0.2479 +vn -0.6901 0.6125 0.3854 +vn -0.6900 0.6127 0.3855 +vn -0.2968 0.7838 0.5454 +vn 0.9679 -0.2511 -0.0053 +vn 0.9939 0.0618 0.0913 +vn 0.9939 0.0625 0.0913 +vn 0.9292 0.3695 -0.0099 +vn 0.9294 0.3690 -0.0099 +vn 0.9291 0.3697 -0.0100 +vn 0.7949 0.5507 -0.2547 +vn 0.7951 0.5504 -0.2546 +vn 0.6242 0.5685 -0.5360 +vn 0.4375 0.4567 -0.7746 +vn 0.4377 0.4567 -0.7745 +vn 0.2456 0.2657 -0.9322 +vn -0.1710 0.4760 -0.8626 +vn -0.3938 0.6194 
-0.6792 +vn -0.3947 0.6191 -0.6789 +vn -0.3951 0.6190 -0.6788 +vn -0.6170 0.6524 -0.4400 +vn -0.6166 0.6528 -0.4401 +vn -0.8208 0.5327 -0.2060 +vn -0.9609 0.2677 -0.0714 +vn -0.9609 0.2674 -0.0714 +vn -0.9610 0.2671 -0.0713 +vn -0.9924 -0.0572 -0.1085 +vn 0.3380 0.8749 0.3468 +vn 0.3381 0.8749 0.3468 +vn 0.3377 0.8750 0.3468 +vn 0.5490 0.7691 0.3271 +vn 0.7469 0.6013 0.2838 +vn 0.9099 0.3578 0.2100 +vn 0.9096 0.3584 0.2102 +vn 0.9923 0.0587 0.1089 +vn 0.9924 0.0579 0.1087 +vn -0.6730 0.7126 0.1983 +vn -0.6726 0.7129 0.1985 +vn -0.6733 0.7123 0.1981 +vn -0.4778 0.8387 0.2612 +vn -0.4785 0.8384 0.2609 +vn -0.4789 0.8382 0.2608 +vn -0.2737 0.9119 0.3058 +vn -0.0729 0.9398 0.3338 +vn -0.0735 0.9398 0.3336 +vn 0.1311 0.9284 0.3477 +vn 0.1313 0.9283 0.3478 +vn 0.1318 0.9282 0.3478 +vn 0.7795 0.6204 -0.0862 +vn 0.7791 0.6209 -0.0863 +vn 0.7789 0.6212 -0.0863 +vn 0.7797 0.6202 -0.0861 +vn 0.6099 0.7049 -0.3621 +vn 0.6092 0.7053 -0.3625 +vn 0.6101 0.7048 -0.3619 +vn 0.6089 0.7055 -0.3627 +vn 0.4248 0.6459 -0.6343 +vn 0.4251 0.6458 -0.6342 +vn 0.2382 0.4846 -0.8417 +vn 0.2379 0.4846 -0.8418 +vn -0.6340 0.7351 -0.2401 +vn -0.6341 0.7350 -0.2401 +vn -0.8358 0.5471 -0.0456 +vn 0.3614 0.9290 0.0802 +vn 0.3622 0.9286 0.0803 +vn 0.5700 0.8164 0.0929 +vn 0.7623 0.6393 0.1007 +vn -0.6535 0.7567 -0.0186 +vn -0.4558 0.8900 0.0059 +vn -0.4550 0.8905 0.0061 +vn -0.4563 0.8898 0.0058 +vn 0.1573 0.9854 0.0653 +vn 0.1547 0.9858 0.0648 +vn 0.5911 0.7932 -0.1461 +vn 0.4075 0.8018 -0.4371 +vn 0.4068 0.8020 -0.4374 +vn 0.2245 0.6852 -0.6929 +vn 0.2250 0.6851 -0.6928 +vn 0.0204 0.6939 -0.7198 +vn 0.0217 0.6938 -0.7199 +vn -0.2036 0.8351 -0.5110 +vn -0.2033 0.8352 -0.5111 +vn -0.4321 0.8651 -0.2548 +vn -0.4325 0.8649 -0.2549 +vn -0.4327 0.8648 -0.2549 +vn 0.3865 0.9022 -0.1913 +vn 0.3862 0.9023 -0.1914 +vn 0.3869 0.9021 -0.1912 +vn 0.3856 0.9025 -0.1916 +vn -0.2254 0.9402 -0.2554 +vn -0.0205 0.9695 -0.2444 +vn -0.0198 0.9695 -0.2443 +vn 0.1806 0.9578 -0.2235 +vn 0.1811 0.9578 -0.2234 +vn 0.1816 
0.9577 -0.2233 +vn 0.2051 0.8506 -0.4842 +vn 0.2060 0.8505 -0.4840 +vn 0.2047 0.8506 -0.4842 +vn 0.2063 0.8505 -0.4838 +vn 0.0033 0.8612 -0.5082 +vn 0.0036 0.8613 -0.5081 +vn 0.0177 -0.8617 0.5072 +vn 0.0176 -0.8617 0.5072 +vn 0.0172 -0.8616 0.5072 +vn -0.0059 -0.9699 0.2435 +vn -0.0062 -0.9699 0.2436 +vn -0.0058 -0.9699 0.2435 +vn -0.0064 -0.9699 0.2436 +vn 0.2200 -0.8488 0.4807 +vn 0.0382 -0.6942 0.7188 +vn 0.0376 -0.6942 0.7188 +vn 0.0387 -0.6942 0.7188 +vn -0.1867 -0.8382 0.5124 +vn -0.1875 -0.8380 0.5125 +vn 0.1957 -0.9554 0.2211 +vn 0.1953 -0.9555 0.2211 +vn -0.2113 -0.9433 0.2560 +vn -0.2107 -0.9435 0.2559 +vn -0.2117 -0.9432 0.2561 +vn -0.2104 -0.9435 0.2559 +vn 0.4227 -0.7971 0.4313 +vn 0.4231 -0.7970 0.4310 +vn 0.4225 -0.7971 0.4314 +vn 0.2393 -0.6838 0.6893 +vn 0.2391 -0.6839 0.6893 +vn 0.0530 -0.4909 0.8696 +vn -0.1689 -0.6750 0.7182 +vn -0.1685 -0.6751 0.7182 +vn -0.3946 -0.7745 0.4943 +vn -0.3955 -0.7743 0.4941 +vn -0.0608 -0.9404 -0.3347 +vn 0.1688 -0.9834 -0.0671 +vn 0.1690 -0.9833 -0.0671 +vn -0.2364 -0.9712 -0.0286 +vn 0.4003 -0.8970 0.1872 +vn 0.3998 -0.8972 0.1874 +vn 0.4008 -0.8969 0.1871 +vn 0.3994 -0.8974 0.1875 +vn -0.4176 -0.8715 0.2572 +vn -0.4178 -0.8714 0.2572 +vn -0.4179 -0.8713 0.2572 +vn 0.6214 -0.6987 0.3545 +vn 0.4417 -0.6420 0.6266 +vn 0.2539 -0.4835 0.8377 +vn 0.2533 -0.4836 0.8379 +vn 0.2529 -0.4836 0.8379 +vn -0.3766 -0.6240 0.6847 +vn -0.6048 -0.6597 0.4461 +vn -0.6042 -0.6601 0.4463 +vn -0.6039 -0.6603 0.4465 +vn -0.0838 -0.8086 -0.5823 +vn -0.0841 -0.8086 -0.5823 +vn -0.0841 -0.8087 -0.5822 +vn 0.1446 -0.9260 -0.3488 +vn 0.1430 -0.9263 -0.3486 +vn -0.2635 -0.9147 -0.3065 +vn -0.2626 -0.9149 -0.3067 +vn 0.3759 -0.9229 -0.0833 +vn 0.3756 -0.9230 -0.0833 +vn 0.3763 -0.9228 -0.0834 +vn -0.4434 -0.8963 -0.0052 +vn -0.4427 -0.8967 -0.0054 +vn -0.4438 -0.8961 -0.0051 +vn -0.4423 -0.8968 -0.0055 +vn 0.6009 -0.7868 0.1409 +vn 0.6015 -0.7864 0.1406 +vn 0.6008 -0.7869 0.1410 +vn 0.6017 -0.7862 0.1406 +vn -0.6234 -0.7428 0.2442 +vn 
-0.6229 -0.7432 0.2442 +vn 0.8048 -0.5410 0.2442 +vn 0.6369 -0.5631 0.5265 +vn 0.6366 -0.5633 0.5267 +vn 0.4543 -0.4540 0.7665 +vn 0.4545 -0.4539 0.7664 +vn 0.2627 -0.2653 0.9277 +vn -0.1465 -0.2619 0.9539 +vn -0.3656 -0.4409 0.8197 +vn -0.3650 -0.4410 0.8199 +vn -0.7992 -0.4818 0.3593 +vn -0.7981 -0.4833 0.3599 +vn 0.1213 -0.7965 -0.5924 +vn 0.1201 -0.7966 -0.5925 +vn 0.1220 -0.7964 -0.5923 +vn -0.2856 -0.7867 -0.5473 +vn -0.2860 -0.7866 -0.5472 +vn -0.2853 -0.7867 -0.5475 +vn 0.3506 -0.8697 -0.3474 +vn 0.3491 -0.8702 -0.3476 +vn 0.3515 -0.8694 -0.3473 +vn -0.4672 -0.8444 -0.2621 +vn -0.4677 -0.8442 -0.2619 +vn -0.4669 -0.8445 -0.2622 +vn -0.4679 -0.8441 -0.2619 +vn 0.5785 -0.8100 -0.0964 +vn 0.5780 -0.8103 -0.0964 +vn 0.5777 -0.8106 -0.0964 +vn 0.5788 -0.8097 -0.0963 +vn -0.6449 -0.7640 0.0204 +vn -0.6450 -0.7639 0.0204 +vn 0.7891 -0.6091 0.0789 +vn -0.8122 -0.5436 0.2119 +vn -0.8129 -0.5425 0.2119 +vn -0.8118 -0.5442 0.2119 +vn -0.8133 -0.5419 0.2118 +vn 0.9427 -0.3184 0.0999 +vn 0.9428 -0.3180 0.0997 +vn 0.9426 -0.3186 0.1000 +vn 0.8173 -0.4358 0.3770 +vn 0.8184 -0.4347 0.3760 +vn 0.8168 -0.4363 0.3774 +vn 0.6478 -0.3985 0.6493 +vn 0.6484 -0.3981 0.6489 +vn 0.6487 -0.3980 0.6487 +vn 0.6476 -0.3986 0.6494 +vn 0.4614 -0.2493 0.8515 +vn 0.0625 0.1898 0.9798 +vn -0.1422 -0.0391 0.9891 +vn -0.3584 -0.2418 0.9017 +vn -0.7871 -0.3891 0.4785 +vn -0.7870 -0.3893 0.4787 +vn -0.9424 -0.2421 0.2310 +vn -0.0582 0.7777 -0.6259 +vn -0.0581 0.7768 -0.6271 +vn -0.0582 0.7786 -0.6249 +vn -0.0584 0.7796 -0.6235 +vn -0.0581 0.7799 -0.6232 +vn -0.0582 0.7791 -0.6243 +vn -0.0583 0.7792 -0.6240 +vn -0.0583 0.7786 -0.6248 +vn -0.0584 0.7799 -0.6231 +vn -0.0584 0.7804 -0.6226 +vn -0.0583 0.7797 -0.6235 +vn -0.0582 0.7784 -0.6251 +vn -0.0581 0.7775 -0.6262 +vn -0.0581 0.7773 -0.6265 +vn -0.0581 0.7773 -0.6264 +vn 0.1021 -0.6169 -0.7804 +vn -0.3024 -0.6092 -0.7331 +vn -0.3028 -0.6091 -0.7330 +vn -0.3029 -0.6091 -0.7330 +vn 0.3294 -0.7479 -0.5763 +vn -0.4880 -0.7262 -0.4843 +vn -0.4877 
-0.7263 -0.4844 +vn -0.4872 -0.7265 -0.4846 +vn 0.5564 -0.7633 -0.3282 +vn 0.5578 -0.7624 -0.3279 +vn 0.7717 -0.6273 -0.1047 +vn 0.9339 -0.3574 0.0023 +vn 0.9340 -0.3573 0.0022 +vn 0.9341 -0.3571 0.0022 +vn -0.9490 -0.2734 0.1572 +vn -0.9491 -0.2730 0.1573 +vn 0.9956 -0.0476 -0.0810 +vn 0.9506 -0.2552 0.1768 +vn 0.9502 -0.2561 0.1776 +vn 0.9499 -0.2567 0.1781 +vn 0.8257 -0.3084 0.4723 +vn 0.6561 -0.2182 0.7225 +vn 0.4654 -0.0371 0.8843 +vn 0.0556 0.4149 0.9082 +vn -0.1454 0.1846 0.9720 +vn -0.3551 -0.0360 0.9342 +vn -0.3554 -0.0361 0.9340 +vn -0.5717 -0.2064 0.7940 +vn -0.9365 -0.1958 0.2910 +vn -0.9369 -0.1950 0.2903 +vn -0.9363 -0.1960 0.2913 +vn -0.3126 0.5792 0.7529 +vn -0.1023 0.6169 0.7803 +vn -0.1018 0.6170 0.7804 +vn 0.3039 0.6091 0.7325 +vn 0.5024 0.5630 0.6562 +vn 0.5027 0.5629 0.6561 +vn 0.5021 0.5632 0.6563 +vn 0.5031 0.5627 0.6559 +vn 0.8643 0.3520 0.3591 +vn 0.8645 0.3519 0.3590 +vn 0.8642 0.3523 0.3593 +vn 0.8646 0.3517 0.3588 +vn 0.9760 0.1759 0.1286 +vn 0.9908 -0.0314 -0.1313 +vn 0.8994 -0.2301 -0.3716 +vn 0.8986 -0.2314 -0.3729 +vn 0.8999 -0.2294 -0.3710 +vn 0.8981 -0.2322 -0.3736 +vn 0.5228 -0.5084 -0.6842 +vn 0.5223 -0.5086 -0.6844 +vn 0.5229 -0.5084 -0.6842 +vn 0.3134 -0.5790 -0.7527 +vn 0.3133 -0.5790 -0.7528 +vn -0.6959 -0.4796 -0.5344 +vn -0.8652 -0.3512 -0.3578 +vn -0.8655 -0.3509 -0.3576 +vn -0.9759 -0.1761 -0.1285 +vn -0.9910 0.0302 0.1304 +vn -0.9911 0.0297 0.1298 +vn -0.9918 0.0392 0.1215 +vn -0.8988 0.2309 0.3726 +vn -0.8985 0.2314 0.3731 +vn -0.7298 0.3931 0.5593 +vn -0.7298 0.3931 0.5594 +vn -0.7302 0.3928 0.5590 +vn -0.7295 0.3933 0.5595 +vn -0.5241 0.5079 0.6836 +vn 0.5369 -0.6567 -0.5295 +vn -0.6824 -0.6193 -0.3882 +vn 0.9241 -0.3673 -0.1054 +vn 0.9243 -0.3669 -0.1055 +vn -0.9567 -0.2813 0.0746 +vn 0.9956 -0.0470 -0.0813 +vn 0.9955 -0.0481 -0.0811 +vn -0.9955 0.0477 0.0813 +vn -0.9956 0.0471 0.0813 +vn 0.9425 0.2418 -0.2309 +vn 0.9426 0.2415 -0.2307 +vn 0.9984 -0.0233 -0.0524 +vn 0.9556 -0.1808 0.2325 +vn 0.9558 -0.1804 0.2320 +vn 
0.9560 -0.1801 0.2315 +vn 0.8303 -0.1695 0.5309 +vn 0.8309 -0.1691 0.5300 +vn 0.8298 -0.1697 0.5316 +vn 0.0431 0.6264 0.7783 +vn 0.0434 0.6264 0.7783 +vn 0.0429 0.6264 0.7783 +vn -0.1521 0.4034 0.9023 +vn -0.3556 0.1705 0.9190 +vn -0.7744 -0.1511 0.6144 +vn -0.9329 -0.1377 0.3328 +vn -0.9431 0.3173 -0.0992 +vn -0.9434 0.3166 -0.0989 +vn -0.9436 0.3161 -0.0987 +vn -0.1194 0.7968 0.5924 +vn -0.1200 0.7967 0.5924 +vn 0.2863 0.7866 0.5470 +vn 0.2860 0.7867 0.5471 +vn 0.2858 0.7867 0.5472 +vn 0.2864 0.7866 0.5470 +vn 0.4871 0.7266 0.4845 +vn 0.4868 0.7267 0.4846 +vn 0.4867 0.7267 0.4848 +vn 0.4873 0.7266 0.4844 +vn 0.9711 0.2267 0.0747 +vn 0.9929 -0.0465 -0.1097 +vn 0.9061 -0.2969 -0.3013 +vn -0.9711 -0.2267 -0.0749 +vn -0.9710 -0.2272 -0.0751 +vn -0.9052 0.2987 0.3024 +vn -0.9058 0.2974 0.3017 +vn -0.9061 0.2969 0.3014 +vn -0.7410 0.5076 0.4396 +vn -0.7403 0.5084 0.4399 +vn -0.5385 0.6559 0.5290 +vn -0.5388 0.6558 0.5288 +vn -0.5382 0.6561 0.5290 +vn 0.9942 -0.0497 -0.0954 +vn -0.9942 0.0498 0.0952 +vn 0.9493 0.2724 -0.1569 +vn 0.9491 0.2730 -0.1569 +vn 0.9490 0.2735 -0.1570 +vn 0.9494 0.2720 -0.1568 +vn -0.9339 0.3577 -0.0022 +vn 0.7978 0.4835 -0.3602 +vn 0.7973 0.4842 -0.3605 +vn 0.9367 0.1953 -0.2907 +vn 0.9368 0.1949 -0.2905 +vn 0.9587 -0.0992 0.2665 +vn 0.9587 -0.0993 0.2666 +vn 0.9589 -0.0990 0.2660 +vn 0.8322 -0.0252 0.5540 +vn 0.6573 0.1536 0.7378 +vn 0.4570 0.3840 0.8023 +vn 0.4565 0.3841 0.8025 +vn 0.2454 0.6169 0.7478 +vn 0.2452 0.6169 0.7478 +vn 0.0255 0.8087 0.5877 +vn -0.1619 0.6093 0.7763 +vn -0.1633 0.6092 0.7761 +vn -0.1610 0.6093 0.7764 +vn -0.1642 0.6091 0.7759 +vn -0.5699 0.1456 0.8087 +vn -0.5696 0.1456 0.8089 +vn -0.9306 -0.0756 0.3582 +vn -0.9303 -0.0759 0.3588 +vn -0.9307 -0.0755 0.3578 +vn -0.9985 0.0238 0.0494 +vn -0.9988 0.0136 0.0461 +vn -0.9508 0.2547 -0.1762 +vn -0.8045 0.5413 -0.2445 +vn -0.8044 0.5414 -0.2445 +vn -0.8048 0.5409 -0.2444 +vn -0.3497 0.8700 0.3477 +vn -0.3492 0.8701 0.3478 +vn -0.3486 0.8703 0.3479 +vn -0.1446 0.9259 0.3489 
+vn -0.1435 0.9262 0.3487 +vn -0.1451 0.9258 0.3490 +vn 0.0624 0.9403 0.3345 +vn 0.0615 0.9403 0.3347 +vn 0.0609 0.9403 0.3349 +vn 0.0629 0.9403 0.3345 +vn 0.2634 0.9146 0.3067 +vn 0.2637 0.9146 0.3066 +vn 0.2632 0.9147 0.3068 +vn 0.4660 0.8449 0.2625 +vn 0.8422 0.5274 0.1121 +vn 0.9644 0.2646 0.0057 +vn -0.9145 0.3454 0.2106 +vn -0.9146 0.3452 0.2106 +vn -0.7545 0.5913 0.2846 +vn -0.7548 0.5909 0.2846 +vn -0.7545 0.5914 0.2846 +vn 0.9566 0.2815 -0.0747 +vn 0.8112 0.5449 -0.2122 +vn 0.8116 0.5443 -0.2122 +vn -0.7893 0.6089 -0.0786 +vn -0.7894 0.6088 -0.0786 +vn 0.6049 0.6596 -0.4461 +vn 0.6042 0.6600 -0.4464 +vn 0.6053 0.6594 -0.4460 +vn 0.7872 0.3893 -0.4783 +vn 0.7872 0.3892 -0.4784 +vn 0.9328 0.1379 -0.3329 +vn 0.9600 -0.0147 0.2798 +vn 0.9600 -0.0146 0.2797 +vn 0.9598 -0.0147 0.2802 +vn 0.8321 0.1191 0.5417 +vn 0.8318 0.1193 0.5422 +vn 0.0024 0.9404 0.3401 +vn -0.1794 0.7867 0.5907 +vn -0.1785 0.7868 0.5909 +vn -0.1782 0.7868 0.5909 +vn -0.3734 0.5625 0.7377 +vn -0.3728 0.5626 0.7379 +vn -0.5757 0.3178 0.7534 +vn -0.5753 0.3179 0.7537 +vn -0.7742 0.1062 0.6239 +vn -0.9295 -0.0111 0.3685 +vn -0.9294 -0.0111 0.3688 +vn -0.9556 0.1807 -0.2328 +vn -0.8167 0.4363 -0.3777 +vn -0.8169 0.4361 -0.3775 +vn -0.6213 0.6987 -0.3546 +vn -0.6219 0.6984 -0.3543 +vn -0.6221 0.6983 -0.3541 +vn -0.3748 0.9234 0.0831 +vn -0.3738 0.9238 0.0828 +vn -0.3733 0.9240 0.0827 +vn -0.3753 0.9231 0.0832 +vn -0.1695 0.9833 0.0669 +vn -0.1706 0.9831 0.0671 +vn -0.1714 0.9829 0.0673 +vn -0.1688 0.9834 0.0667 +vn 0.0343 0.9982 0.0488 +vn 0.0356 0.9982 0.0485 +vn 0.0363 0.9982 0.0483 +vn 0.2376 0.9710 0.0282 +vn 0.2372 0.9711 0.0283 +vn 0.2369 0.9711 0.0284 +vn 0.2379 0.9709 0.0281 +vn 0.8272 0.5599 -0.0480 +vn -0.7717 0.6273 0.1047 +vn -0.7718 0.6272 0.1047 +vn -0.5807 0.8085 0.0962 +vn 0.6241 0.7422 -0.2441 +vn -0.6018 0.7862 -0.1406 +vn -0.6011 0.7867 -0.1409 +vn -0.6021 0.7860 -0.1404 +vn 0.3956 0.7741 -0.4943 +vn 0.3949 0.7743 -0.4945 +vn 0.5891 0.5318 -0.6084 +vn 0.5895 0.5316 -0.6082 +vn 
0.5887 0.5320 -0.6086 +vn 0.5897 0.5316 -0.6080 +vn 0.7793 0.2752 -0.5630 +vn 0.7792 0.2753 -0.5631 +vn 0.9593 0.0699 0.2736 +vn 0.8282 0.2607 0.4962 +vn 0.8288 0.2604 0.4953 +vn 0.8280 0.2608 0.4964 +vn 0.6418 0.5079 0.5746 +vn 0.6414 0.5081 0.5748 +vn 0.6420 0.5078 0.5745 +vn 0.4302 0.7479 0.5055 +vn 0.4301 0.7479 0.5056 +vn 0.4298 0.7481 0.5057 +vn 0.2050 0.9263 0.3161 +vn 0.2049 0.9263 0.3161 +vn 0.2048 0.9263 0.3161 +vn -0.0242 0.9982 0.0544 +vn -0.0239 0.9982 0.0543 +vn -0.0238 0.9982 0.0543 +vn -0.0243 0.9982 0.0544 +vn -0.2020 0.9147 0.3500 +vn -0.2013 0.9149 0.3500 +vn -0.2023 0.9146 0.3500 +vn -0.3898 0.7262 0.5663 +vn -0.3905 0.7260 0.5662 +vn -0.3895 0.7263 0.5664 +vn -0.9299 0.0533 0.3640 +vn -0.9298 0.0534 0.3643 +vn -0.9587 0.0994 -0.2666 +vn -0.9588 0.0992 -0.2664 +vn -0.6372 0.5629 -0.5264 +vn -0.6371 0.5630 -0.5264 +vn -0.4230 0.7970 -0.4311 +vn -0.4228 0.7971 -0.4312 +vn -0.4235 0.7969 -0.4308 +vn -0.3997 0.8973 -0.1873 +vn -0.4005 0.8970 -0.1870 +vn -0.1956 0.9555 -0.2208 +vn -0.1952 0.9555 -0.2209 +vn -0.1960 0.9554 -0.2208 +vn 0.2123 0.9431 -0.2557 +vn 0.2120 0.9432 -0.2557 +vn 0.2118 0.9433 -0.2557 +vn 0.2126 0.9430 -0.2559 +vn 0.4172 0.8717 -0.2571 +vn 0.4175 0.8716 -0.2572 +vn 0.4169 0.8719 -0.2571 +vn 0.1861 0.8383 -0.5125 +vn 0.1881 0.8377 -0.5127 +vn 0.1854 0.8385 -0.5124 +vn 0.3785 0.6236 -0.6840 +vn 0.3782 0.6236 -0.6842 +vn 0.3789 0.6235 -0.6839 +vn 0.3778 0.6237 -0.6843 +vn 0.5786 0.3758 -0.7238 +vn 0.7738 0.1512 -0.6151 +vn 0.9296 0.0114 -0.3684 +vn 0.9990 0.0097 -0.0438 +vn 0.8202 0.3935 0.4151 +vn 0.8204 0.3933 0.4150 +vn 0.6273 0.6559 0.4199 +vn 0.6277 0.6556 0.4196 +vn 0.6271 0.6560 0.4201 +vn 0.4089 0.8697 0.2765 +vn 0.1784 0.9833 0.0346 +vn -0.0519 0.9699 -0.2381 +vn -0.0527 0.9699 -0.2379 +vn -0.0530 0.9699 -0.2378 +vn -0.0515 0.9699 -0.2381 +vn -0.2277 0.9711 0.0718 +vn -0.2283 0.9709 0.0720 +vn -0.2286 0.9708 0.0721 +vn -0.4095 0.8448 0.3444 +vn -0.4101 0.8446 0.3443 +vn -0.4091 0.8450 0.3444 +vn -0.4105 0.8444 0.3441 +vn 
-0.5987 0.6194 0.5078 +vn -0.5981 0.6198 0.5081 +vn -0.5976 0.6200 0.5083 +vn -0.7837 0.3514 0.5122 +vn -0.7841 0.3510 0.5119 +vn -0.7835 0.3515 0.5123 +vn -0.9319 0.1165 0.3434 +vn -0.9318 0.1167 0.3437 +vn -0.9318 0.1169 0.3437 +vn -0.9989 -0.0091 0.0469 +vn -0.4413 0.6420 -0.6269 +vn -0.0175 0.8617 -0.5071 +vn 0.1682 0.6753 -0.7181 +vn 0.1689 0.6752 -0.7180 +vn 0.3659 0.4408 -0.8196 +vn 0.3655 0.4408 -0.8198 +vn 0.5720 0.2063 -0.7939 +vn 0.7714 0.0227 -0.6359 +vn 0.7714 0.0227 -0.6360 +vn 0.9299 -0.0533 -0.3640 +vn 0.9523 0.2309 0.1997 +vn 0.6090 0.7623 0.2191 +vn 0.6097 0.7618 0.2188 +vn 0.6100 0.7616 0.2187 +vn 0.3828 0.9237 0.0123 +vn 0.3830 0.9237 0.0123 +vn 0.1511 0.9556 -0.2532 +vn 0.1515 0.9554 -0.2533 +vn -0.0772 0.8616 -0.5017 +vn -0.0770 0.8615 -0.5018 +vn -0.0775 0.8616 -0.5016 +vn -0.2550 0.9433 -0.2125 +vn -0.2547 0.9434 -0.2126 +vn -0.2545 0.9434 -0.2126 +vn -0.2552 0.9433 -0.2124 +vn -0.4337 0.8968 0.0872 +vn -0.6168 0.7199 0.3183 +vn -0.6165 0.7201 0.3184 +vn -0.6164 0.7202 0.3185 +vn -0.7936 0.4538 0.4052 +vn -0.9355 0.1754 0.3066 +vn -0.9357 0.1752 0.3063 +vn -0.9980 -0.0293 0.0567 +vn -0.9593 -0.0700 -0.2737 +vn -0.8332 0.0252 -0.5524 +vn -0.6554 0.2185 -0.7230 +vn -0.6553 0.2185 -0.7230 +vn -0.2396 0.6839 -0.6891 +vn -0.2391 0.6840 -0.6892 +vn -0.0371 0.6942 -0.7188 +vn 0.1545 0.4774 -0.8650 +vn 0.1550 0.4774 -0.8649 +vn 0.1533 0.4775 -0.8652 +vn 0.5699 0.0308 -0.8211 +vn 0.5697 0.0308 -0.8213 +vn 0.5695 0.0308 -0.8214 +vn 0.7731 -0.1065 -0.6253 +vn 0.7734 -0.1064 -0.6250 +vn 0.7728 -0.1066 -0.6257 +vn 0.7737 -0.1063 -0.6246 +vn 0.9317 -0.1168 -0.3439 +vn 0.9318 -0.1167 -0.3438 +vn 0.9318 -0.1166 -0.3436 +vn 0.5880 0.8088 -0.0128 +vn 0.5877 0.8090 -0.0127 +vn 0.3577 0.8975 -0.2580 +vn 0.1276 0.8487 -0.5132 +vn 0.1269 0.8488 -0.5133 +vn -0.2780 0.8383 -0.4690 +vn -0.2778 0.8383 -0.4690 +vn -0.2775 0.8384 -0.4691 +vn -0.2783 0.8383 -0.4689 +vn -0.4582 0.8714 -0.1755 +vn -0.4585 0.8712 -0.1754 +vn -0.4590 0.8710 -0.1752 +vn -0.4579 0.8715 
-0.1756 +vn -0.8068 0.5274 0.2664 +vn -0.9401 0.2275 0.2538 +vn -0.9407 0.2261 0.2530 +vn -0.9399 0.2280 0.2540 +vn -0.9980 -0.0311 0.0553 +vn -0.9971 -0.0382 0.0652 +vn -0.9570 -0.1527 -0.2468 +vn -0.9571 -0.1525 -0.2465 +vn -0.8329 -0.1190 -0.5405 +vn -0.6578 0.0325 -0.7525 +vn -0.4623 0.2491 -0.8510 +vn -0.4627 0.2490 -0.8508 +vn -0.4629 0.2489 -0.8507 +vn -0.2529 0.4835 -0.8380 +vn -0.2542 0.4834 -0.8377 +vn -0.0523 0.4908 -0.8697 +vn -0.0515 0.4908 -0.8697 +vn -0.0527 0.4907 -0.8697 +vn -0.0509 0.4909 -0.8697 +vn 0.1454 0.2619 -0.9541 +vn 0.1464 0.2620 -0.9539 +vn 0.5702 -0.1455 -0.8086 +vn 0.5703 -0.1455 -0.8084 +vn 0.5701 -0.1455 -0.8086 +vn 0.5704 -0.1455 -0.8084 +vn 0.7774 -0.2326 -0.5844 +vn 0.9353 -0.1758 -0.3071 +vn 0.9353 -0.1757 -0.3070 +vn 0.9352 -0.1760 -0.3073 +vn 0.9961 0.0439 -0.0771 +vn 0.9373 0.3464 0.0379 +vn 0.7775 0.6276 -0.0400 +vn 0.7777 0.6273 -0.0400 +vn 0.7772 0.6280 -0.0400 +vn 0.5659 0.7858 -0.2495 +vn 0.5663 0.7856 -0.2494 +vn 0.3357 0.7969 -0.5022 +vn 0.3353 0.7971 -0.5022 +vn 0.3362 0.7967 -0.5022 +vn 0.3354 0.7971 -0.5022 +vn 0.1086 0.6838 -0.7216 +vn -0.1105 0.4908 -0.8643 +vn -0.1109 0.4907 -0.8642 +vn -0.2977 0.6753 -0.6748 +vn -0.4814 0.7737 -0.4118 +vn -0.6573 0.7432 -0.1246 +vn -0.6565 0.7439 -0.1250 +vn -0.6559 0.7444 -0.1253 +vn -0.8220 0.5596 0.1060 +vn -0.8217 0.5600 0.1059 +vn -0.8221 0.5594 0.1060 +vn -0.9971 -0.0377 0.0657 +vn -0.9525 -0.2306 -0.1989 +vn -0.9522 -0.2311 -0.1995 +vn -0.8287 -0.2602 -0.4956 +vn -0.6575 -0.1538 -0.7376 +vn -0.0594 0.2691 -0.9613 +vn 0.1438 0.0392 -0.9888 +vn 0.3577 -0.1704 -0.9181 +vn 0.3578 -0.1704 -0.9181 +vn 0.5748 -0.3181 -0.7539 +vn 0.7837 -0.3513 -0.5122 +vn 0.7836 -0.3514 -0.5124 +vn 0.9408 -0.2259 -0.2529 +vn 0.9409 -0.2257 -0.2527 +vn 0.9960 0.0443 -0.0771 +vn 0.9948 0.0468 -0.0905 +vn 0.9275 0.3678 -0.0675 +vn 0.9272 0.3683 -0.0675 +vn 0.7610 0.6091 -0.2235 +vn 0.7611 0.6089 -0.2235 +vn 0.7614 0.6086 -0.2235 +vn 0.7607 0.6093 -0.2236 +vn 0.5451 0.6985 -0.4636 +vn 0.5457 0.6982 
-0.4634 +vn 0.5447 0.6987 -0.4638 +vn 0.5460 0.6980 -0.4633 +vn 0.3164 0.6424 -0.6980 +vn 0.3168 0.6423 -0.6979 +vn 0.3171 0.6423 -0.6978 +vn -0.3119 0.4774 -0.8215 +vn -0.3122 0.4772 -0.8214 +vn -0.6755 0.6608 -0.3272 +vn -0.6749 0.6612 -0.3275 +vn -0.6758 0.6605 -0.3270 +vn -0.6746 0.6615 -0.3276 +vn -0.8375 0.5434 -0.0577 +vn -0.9541 0.2811 0.1036 +vn -0.9454 -0.2989 -0.1298 +vn -0.9456 -0.2985 -0.1295 +vn -0.9457 -0.2981 -0.1292 +vn -0.9452 -0.2993 -0.1301 +vn -0.8206 -0.3932 -0.4146 +vn -0.8211 -0.3928 -0.4142 +vn -0.6526 -0.3362 -0.6791 +vn -0.6525 -0.3363 -0.6791 +vn -0.6526 -0.3362 -0.6790 +vn -0.6521 -0.3364 -0.6794 +vn -0.4616 -0.1758 -0.8695 +vn -0.4625 -0.1757 -0.8691 +vn -0.4613 -0.1759 -0.8696 +vn 0.1437 -0.1847 -0.9722 +vn 0.3633 -0.3726 -0.8539 +vn 0.3624 -0.3727 -0.8543 +vn 0.3638 -0.3725 -0.8537 +vn 0.5848 -0.4797 -0.6541 +vn 0.7936 -0.4538 -0.4053 +vn 0.7936 -0.4538 -0.4052 +vn 0.7936 -0.4537 -0.4053 +vn 0.9473 -0.2625 -0.1835 +vn 0.9475 -0.2620 -0.1834 +vn 0.9937 0.0530 -0.0988 +vn 0.9170 0.3583 -0.1754 +vn 0.9170 0.3584 -0.1754 +vn 0.9169 0.3584 -0.1754 +vn 0.7455 0.5411 -0.3891 +vn 0.7451 0.5415 -0.3893 +vn 0.7449 0.5418 -0.3894 +vn 0.5291 0.5629 -0.6350 +vn 0.3026 0.4543 -0.8379 +vn 0.3030 0.4543 -0.8378 +vn 0.0868 0.2653 -0.9603 +vn 0.0869 0.2653 -0.9602 +vn -0.3199 0.2620 -0.9105 +vn -0.5117 0.4407 -0.7375 +vn -0.6918 0.5316 -0.4887 +vn -0.6916 0.5317 -0.4888 +vn -0.6920 0.5314 -0.4885 +vn -0.9618 0.2729 0.0217 +vn -0.9947 -0.0489 0.0905 +vn -0.9948 -0.0470 0.0903 +vn -0.9372 -0.3467 -0.0382 +vn -0.9369 -0.3475 -0.0384 +vn -0.9367 -0.3481 -0.0385 +vn -0.6419 -0.5078 -0.5746 +vn -0.6426 -0.5073 -0.5741 +vn -0.6431 -0.5071 -0.5739 +vn -0.6415 -0.5080 -0.5748 +vn -0.4557 -0.3841 -0.8030 +vn -0.4552 -0.3843 -0.8032 +vn -0.4548 -0.3844 -0.8034 +vn -0.2650 -0.1869 -0.9460 +vn -0.2642 -0.1871 -0.9461 +vn 0.5993 -0.6191 -0.5075 +vn 0.6004 -0.6184 -0.5071 +vn 0.8067 -0.5276 -0.2663 +vn 0.8068 -0.5275 -0.2663 +vn 0.9549 -0.2783 -0.1037 +vn 0.9548 
-0.2784 -0.1036 +vn 0.9549 -0.2784 -0.1038 +vn 0.9923 0.0410 -0.1169 +vn 0.9082 0.3178 -0.2725 +vn 0.9080 0.3180 -0.2726 +vn 0.7326 0.4363 -0.5224 +vn 0.7325 0.4364 -0.5224 +vn 0.5186 0.3976 -0.7569 +vn 0.5187 0.3976 -0.7569 +vn 0.2947 0.2492 -0.9225 +vn -0.0420 0.9991 -0.0075 +vn -0.0403 0.9992 -0.0080 +vn -0.0383 0.9993 0.0035 +vn -0.0422 0.9991 -0.0078 +vn -0.5188 0.2419 -0.8200 +vn -0.5190 0.2418 -0.8198 +vn -0.7021 0.3762 -0.6046 +vn -0.7017 0.3765 -0.6049 +vn -0.7028 0.3759 -0.6039 +vn -0.8622 0.3891 -0.3244 +vn -0.8623 0.3889 -0.3242 +vn -0.8624 0.3888 -0.3241 +vn -0.9685 0.2433 -0.0531 +vn -0.9921 -0.0436 0.1178 +vn -0.9935 -0.0457 0.1045 +vn -0.9268 -0.3696 0.0675 +vn -0.7956 -0.5896 -0.1392 +vn -0.6274 -0.6559 -0.4198 +vn -0.6272 -0.6560 -0.4199 +vn -0.6270 -0.6561 -0.4201 +vn -0.4443 -0.5798 -0.6829 +vn -0.4457 -0.5793 -0.6824 +vn -0.4466 -0.5791 -0.6821 +vn -0.4434 -0.5802 -0.6833 +vn -0.2575 -0.4086 -0.8756 +vn 0.3890 -0.7266 -0.5664 +vn 0.3882 -0.7268 -0.5666 +vn 0.3877 -0.7270 -0.5667 +vn 0.3896 -0.7264 -0.5661 +vn 0.6172 -0.7196 -0.3181 +vn 0.6160 -0.7205 -0.3185 +vn 0.6154 -0.7209 -0.3187 +vn 0.6178 -0.7192 -0.3179 +vn 0.8213 -0.5606 -0.1061 +vn 0.8213 -0.5606 -0.1060 +vn 0.8210 -0.5610 -0.1060 +vn 0.9624 -0.2707 -0.0221 +vn 0.9922 0.0416 -0.1174 +vn 0.9913 0.0339 -0.1275 +vn 0.9010 0.2559 -0.3504 +vn 0.7236 0.3085 -0.6174 +vn 0.5113 0.2183 -0.8313 +vn 0.1637 0.9862 -0.0247 +vn 0.1642 0.9861 -0.0248 +vn 0.1653 0.9861 -0.0152 +vn -0.2451 0.9695 0.0093 +vn -0.2447 0.9693 0.0225 +vn -0.7089 0.2063 -0.6744 +vn -0.7083 0.2065 -0.6750 +vn -0.8697 0.2756 -0.4094 +vn -0.8700 0.2754 -0.4090 +vn -0.8698 0.2754 -0.4093 +vn -0.9743 0.1954 -0.1123 +vn -0.9741 0.1959 -0.1128 +vn -0.9744 0.1951 -0.1122 +vn -0.9922 -0.0415 0.1171 +vn -0.9168 -0.3587 0.1756 +vn -0.6084 -0.7627 -0.2194 +vn -0.6083 -0.7628 -0.2195 +vn -0.4289 -0.7484 -0.5059 +vn -0.4294 -0.7483 -0.5057 +vn -0.4286 -0.7485 -0.5061 +vn -0.4298 -0.7482 -0.5055 +vn -0.0434 -0.6263 -0.7783 +vn -0.0431 
-0.6264 -0.7783 +vn -0.0429 -0.6264 -0.7783 +vn 0.1800 -0.7865 -0.5908 +vn 0.4086 -0.8452 -0.3444 +vn 0.6363 -0.7650 -0.0994 +vn 0.6361 -0.7651 -0.0994 +vn 0.6367 -0.7646 -0.0995 +vn 0.6358 -0.7655 -0.0993 +vn 0.9693 -0.2405 0.0514 +vn 0.9914 0.0331 -0.1269 +vn 0.9904 0.0243 -0.1359 +vn 0.8966 0.1799 -0.4046 +vn 0.8958 0.1809 -0.4059 +vn 0.7191 0.1690 -0.6740 +vn 0.5078 0.0326 -0.8608 +vn 0.5075 0.0325 -0.8610 +vn 0.5073 0.0325 -0.8612 +vn 0.3691 0.9285 -0.0411 +vn 0.3705 0.9279 -0.0412 +vn -0.4487 0.8933 0.0270 +vn -0.4477 0.8938 0.0268 +vn -0.4472 0.8941 0.0268 +vn -0.8740 0.1513 -0.4617 +vn -0.9781 0.1385 -0.1551 +vn -0.9783 0.1382 -0.1546 +vn -0.9783 0.1380 -0.1543 +vn -0.9910 -0.0350 0.1293 +vn -0.9904 -0.0245 0.1361 +vn -0.9082 -0.3178 0.2723 +vn -0.9093 -0.3155 0.2714 +vn -0.7609 -0.6091 0.2237 +vn -0.7607 -0.6093 0.2237 +vn -0.5874 -0.8092 0.0126 +vn -0.5877 -0.8089 0.0127 +vn -0.5871 -0.8094 0.0125 +vn -0.4075 -0.8702 -0.2770 +vn -0.4074 -0.8702 -0.2771 +vn -0.4079 -0.8700 -0.2769 +vn -0.4070 -0.8703 -0.2772 +vn -0.2278 -0.7966 -0.5600 +vn -0.2280 -0.7965 -0.5599 +vn -0.2275 -0.7966 -0.5601 +vn -0.0253 -0.8086 -0.5878 +vn 0.4339 -0.8967 -0.0872 +vn 0.4343 -0.8965 -0.0873 +vn 0.6571 -0.7434 0.1248 +vn 0.8503 -0.4841 0.2065 +vn 0.8504 -0.4839 0.2065 +vn 0.9899 0.0135 -0.1409 +vn 0.8933 0.0989 -0.4385 +vn 0.8931 0.0990 -0.4387 +vn 0.7167 0.0253 -0.6969 +vn 0.7164 0.0253 -0.6972 +vn 0.5745 0.8165 -0.0567 +vn 0.5742 0.8167 -0.0567 +vn 0.5732 0.8177 -0.0532 +vn 0.3693 0.9287 -0.0340 +vn 0.3699 0.9285 -0.0341 +vn 0.0033 0.9695 0.2449 +vn -0.4460 0.8941 0.0410 +vn -0.4459 0.8941 0.0411 +vn -0.6492 0.7593 0.0451 +vn -0.6500 0.7586 0.0452 +vn -0.6506 0.7581 0.0452 +vn -0.9806 0.0758 -0.1807 +vn -0.9811 0.0751 -0.1786 +vn -0.9804 0.0761 -0.1815 +vn -0.9018 -0.2546 0.3492 +vn -0.5656 -0.7859 0.2497 +vn -0.5656 -0.7860 0.2498 +vn -0.3826 -0.9238 -0.0126 +vn -0.0024 -0.9403 -0.3402 +vn 0.2278 -0.9711 -0.0719 +vn 0.2286 -0.9708 -0.0721 +vn 0.4580 -0.8715 0.1754 +vn 
0.6764 -0.6602 0.3267 +vn 0.6768 -0.6598 0.3265 +vn 0.6771 -0.6595 0.3264 +vn 0.8619 -0.3896 0.3246 +vn 0.8621 -0.3893 0.3245 +vn 0.9779 -0.1389 0.1562 +vn 0.8917 0.0148 -0.4524 +vn 0.7679 0.6366 -0.0707 +vn 0.7681 0.6364 -0.0707 +vn 0.5738 0.8173 -0.0529 +vn 0.5731 0.8178 -0.0528 +vn -0.3819 0.9176 0.1100 +vn -0.0000 0.9993 0.0383 +vn -0.0003 0.9697 0.2443 +vn -0.8321 0.5511 0.0625 +vn -0.8315 0.5520 0.0624 +vn -0.9820 0.0113 -0.1887 +vn -0.8959 -0.1808 0.4059 +vn -0.8963 -0.1802 0.4052 +vn -0.8958 -0.1809 0.4061 +vn -0.7321 -0.4368 0.5228 +vn -0.7321 -0.4368 0.5227 +vn -0.5458 -0.6981 0.4633 +vn -0.3575 -0.8974 0.2585 +vn -0.3576 -0.8974 0.2584 +vn -0.3580 -0.8973 0.2583 +vn 0.0242 -0.9982 -0.0545 +vn 0.0239 -0.9982 -0.0543 +vn 0.4806 -0.7740 0.4123 +vn 0.6926 -0.5312 0.4880 +vn 0.6931 -0.5308 0.4877 +vn 0.6924 -0.5313 0.4882 +vn 0.8704 -0.2749 0.4086 +vn 0.9805 -0.0759 0.1812 +vn 0.9223 0.3777 -0.0812 +vn 0.9232 0.3758 -0.0805 +vn 0.9223 0.3772 -0.0842 +vn 0.9216 0.3796 -0.0805 +vn -0.0000 0.8169 -0.5767 +vn -0.0000 0.8170 -0.5767 +vn -0.0001 0.9692 0.2461 +vn -0.0032 0.9699 0.2434 +vn 0.2084 0.9315 0.2980 +vn -0.0000 0.7581 0.6522 +vn -0.8306 0.5517 0.0765 +vn -0.8310 0.5510 0.0766 +vn -0.9593 0.2718 0.0762 +vn -0.9602 0.2669 0.0824 +vn -0.9601 0.2674 0.0820 +vn -0.8920 -0.0997 0.4409 +vn -0.8926 -0.0993 0.4397 +vn -0.8916 -0.1000 0.4416 +vn -0.8930 -0.0990 0.4390 +vn -0.7232 -0.3087 0.6178 +vn -0.7230 -0.3088 0.6180 +vn -0.7230 -0.3089 0.6180 +vn -0.5306 -0.5622 0.6344 +vn -0.5302 -0.5623 0.6345 +vn -0.3346 -0.7975 0.5020 +vn -0.3337 -0.7978 0.5022 +vn -0.3332 -0.7979 0.5023 +vn -0.3351 -0.7974 0.5018 +vn -0.1518 -0.9553 0.2536 +vn 0.0519 -0.9699 0.2381 +vn 0.0513 -0.9698 0.2382 +vn 0.2795 -0.8379 0.4688 +vn 0.2794 -0.8380 0.4688 +vn 0.4970 -0.6240 0.6030 +vn 0.4972 -0.6240 0.6029 +vn 0.4967 -0.6240 0.6032 +vn 0.7036 -0.3755 0.6033 +vn 0.7028 -0.3760 0.6039 +vn 0.7022 -0.3763 0.6044 +vn 0.7041 -0.3753 0.6028 +vn 0.8749 -0.1509 0.4602 +vn 0.8748 -0.1510 0.4604 
+vn 0.8753 -0.1507 0.4596 +vn 0.9948 0.0581 -0.0838 +vn 0.9947 0.0604 -0.0829 +vn 0.9942 0.0585 -0.0908 +vn 0.9949 0.0575 -0.0829 +vn 0.9229 0.3755 -0.0851 +vn 0.9217 0.3786 -0.0849 +vn -0.7254 0.6861 -0.0552 +vn 0.3825 0.9174 0.1098 +vn -0.0032 0.9695 0.2451 +vn -0.9581 0.2726 0.0882 +vn -0.9584 0.2714 0.0883 +vn -0.9947 -0.0541 0.0873 +vn -0.9946 -0.0543 0.0883 +vn -0.7202 -0.1686 0.6730 +vn -0.7187 -0.1692 0.6744 +vn -0.5188 -0.3975 0.7568 +vn -0.5185 -0.3976 0.7570 +vn -0.3153 -0.6426 0.6983 +vn -0.3170 -0.6423 0.6979 +vn -0.3146 -0.6427 0.6985 +vn -0.1282 -0.8488 0.5130 +vn -0.1287 -0.8487 0.5130 +vn -0.1293 -0.8487 0.5129 +vn -0.1277 -0.8488 0.5131 +vn 0.0764 -0.8616 0.5018 +vn 0.0761 -0.8616 0.5018 +vn 0.2984 -0.6751 0.6747 +vn 0.2982 -0.6751 0.6748 +vn 0.5100 -0.4414 0.7383 +vn 0.5109 -0.4410 0.7379 +vn 0.5112 -0.4409 0.7378 +vn 0.5097 -0.4415 0.7385 +vn 0.7088 -0.2063 0.6745 +vn 0.7090 -0.2062 0.6743 +vn 0.7094 -0.2061 0.6740 +vn 0.7086 -0.2064 0.6747 +vn 0.8770 -0.0225 0.4800 +vn 0.9595 -0.2713 -0.0762 +vn 0.9593 -0.2719 -0.0762 +vn 0.9940 0.0606 -0.0915 +vn 0.9941 0.0578 -0.0916 +vn -0.0000 0.3760 -0.9266 +vn -0.8639 0.4809 -0.1498 +vn 0.0386 0.9993 0.0035 +vn 0.0384 0.9993 0.0035 +vn 0.6460 0.6682 0.3689 +vn 0.6464 0.6679 0.3690 +vn 0.6466 0.6677 0.3690 +vn -0.0000 0.2710 0.9626 +vn -0.0000 0.2699 0.9629 +vn -0.0000 0.2719 0.9623 +vn -0.9222 -0.3783 0.0805 +vn -0.5108 -0.2181 0.8316 +vn -0.5106 -0.2181 0.8317 +vn -0.5110 -0.2181 0.8314 +vn -0.3041 -0.4541 0.8375 +vn -0.1082 -0.6836 0.7217 +vn -0.1075 -0.6837 0.7218 +vn 0.0961 -0.6941 0.7134 +vn 0.0955 -0.6942 0.7134 +vn 0.0951 -0.6942 0.7134 +vn 0.0966 -0.6941 0.7134 +vn 0.3117 -0.4775 0.8215 +vn 0.3115 -0.4775 0.8215 +vn 0.3115 -0.4776 0.8215 +vn 0.5191 -0.2417 0.8198 +vn 0.8314 -0.5522 -0.0624 +vn 0.7245 0.6871 -0.0547 +vn 0.7249 0.6866 -0.0549 +vn 0.7242 0.6874 -0.0545 +vn -0.1649 0.9862 -0.0152 +vn 0.1296 0.0401 -0.9907 +vn 0.2439 0.9695 0.0225 +vn -0.4291 0.8356 0.3430 +vn -0.4294 0.8354 0.3430 +vn 
0.8301 0.4217 0.3648 +vn 0.8304 0.4212 0.3648 +vn 0.8300 0.4219 0.3649 +vn 0.8305 0.4209 0.3648 +vn -0.7683 -0.6362 0.0707 +vn -0.5075 -0.0326 0.8611 +vn -0.2962 -0.2491 0.9221 +vn -0.2955 -0.2491 0.9223 +vn -0.2950 -0.2491 0.9224 +vn -0.0944 -0.4834 0.8703 +vn -0.0937 -0.4834 0.8704 +vn 0.1097 -0.4909 0.8643 +vn 0.1094 -0.4909 0.8643 +vn 0.3198 -0.2618 0.9106 +vn 0.3195 -0.2619 0.9107 +vn 0.3200 -0.2618 0.9105 +vn 0.5223 -0.0362 0.8520 +vn 0.6493 -0.7592 -0.0450 +vn 0.6498 -0.7588 -0.0451 +vn 0.6490 -0.7595 -0.0450 +vn 0.8312 -0.5506 -0.0766 +vn -0.0000 0.8176 -0.5758 +vn -0.0000 0.8173 -0.5762 +vn -0.0000 0.8178 -0.5755 +vn -0.3690 0.9288 -0.0340 +vn -0.0736 0.0397 -0.9965 +vn 0.3328 0.0388 -0.9422 +vn 0.4467 0.8938 0.0411 +vn -0.0000 0.7590 0.6511 +vn -0.0000 0.7584 0.6517 +vn -0.6466 0.6677 0.3690 +vn 0.9406 0.1096 0.3214 +vn -0.5748 -0.8163 0.0565 +vn -0.2922 -0.0372 0.9556 +vn -0.0869 -0.2652 0.9603 +vn -0.0862 -0.2652 0.9603 +vn 0.1193 -0.2692 0.9557 +vn 0.3228 -0.0392 0.9456 +vn 0.6481 -0.7592 -0.0596 +vn 0.6478 -0.7594 -0.0596 +vn 0.6476 -0.7596 -0.0597 +vn 0.9474 0.2068 -0.2443 +vn -0.0000 0.6358 -0.7718 +vn -0.0000 0.6371 -0.7707 +vn -0.5740 0.8171 -0.0529 +vn -0.5744 0.8169 -0.0529 +vn -0.2851 0.0374 -0.9578 +vn -0.0779 0.2659 -0.9609 +vn 0.1197 0.4907 -0.8631 +vn 0.1200 0.4906 -0.8631 +vn -0.8300 0.4219 0.3649 +vn 0.9472 -0.2077 0.2441 +vn -0.3691 -0.9285 0.0411 +vn -0.3702 -0.9281 0.0409 +vn -0.3705 -0.9279 0.0412 +vn -0.0832 -0.0397 0.9957 +vn -0.0823 -0.0396 0.9958 +vn -0.0819 -0.0396 0.9959 +vn 0.1225 -0.0402 0.9916 +vn 0.2432 -0.9699 -0.0091 +vn -0.6466 -0.6677 -0.3690 +vn 0.9406 -0.1096 -0.3214 +vn -0.7684 0.6360 -0.0707 +vn -0.5001 0.0330 -0.8653 +vn -0.0862 0.4845 -0.8705 +vn 0.1063 0.6940 -0.7121 +vn 0.1054 0.6941 -0.7122 +vn 0.1048 0.6941 -0.7122 +vn 0.5272 0.2400 -0.8151 +vn 0.5268 0.2402 -0.8154 +vn 0.7179 0.0305 -0.6955 +vn 0.8298 0.5528 0.0764 +vn 0.8301 0.5524 0.0765 +vn 0.0001 0.2719 0.9623 +vn -0.1633 -0.9863 0.0247 +vn -0.1615 -0.9866 
0.0240 +vn -0.1610 -0.9866 0.0245 +vn 0.0399 -0.9992 0.0079 +vn 0.0407 -0.9992 -0.0038 +vn 0.2422 -0.9700 -0.0223 +vn -0.0001 0.0589 -0.9983 +vn -0.9226 0.3763 -0.0850 +vn -0.9224 0.3768 -0.0850 +vn -0.9223 0.3771 -0.0850 +vn -0.9227 0.3760 -0.0850 +vn -0.7109 0.0258 -0.7029 +vn -0.7108 0.0258 -0.7029 +vn -0.5032 0.2204 -0.8356 +vn -0.5024 0.2205 -0.8360 +vn -0.5020 0.2205 -0.8363 +vn -0.2951 0.4565 -0.8394 +vn -0.2956 0.4565 -0.8392 +vn -0.0983 0.6855 -0.7214 +vn -0.0972 0.6855 -0.7216 +vn -0.0964 0.6856 -0.7216 +vn -0.0990 0.6855 -0.7213 +vn 0.0869 0.8612 -0.5008 +vn 0.0876 0.8612 -0.5007 +vn 0.0864 0.8612 -0.5009 +vn 0.3082 0.6729 -0.6725 +vn 0.3080 0.6729 -0.6726 +vn 0.3085 0.6728 -0.6724 +vn 0.5188 0.4379 -0.7342 +vn 0.7153 0.2038 -0.6684 +vn 0.8809 0.0221 -0.4727 +vn 0.9586 0.2709 0.0883 +vn 0.9584 0.2714 0.0883 +vn 0.9589 0.2695 0.0883 +vn 0.7253 -0.6863 0.0551 +vn -0.1650 -0.9862 0.0152 +vn -0.0003 -0.9701 -0.2429 +vn -0.2108 -0.9308 -0.2986 +vn 0.6466 -0.6677 -0.3690 +vn -0.9940 0.0600 -0.0916 +vn -0.9941 0.0588 -0.0915 +vn -0.9941 0.0579 -0.0915 +vn -0.9939 0.0610 -0.0916 +vn -0.8874 0.0153 -0.4607 +vn -0.8877 0.0152 -0.4602 +vn -0.5090 0.4019 -0.7611 +vn -0.3076 0.6457 -0.6989 +vn -0.3076 0.6458 -0.6989 +vn -0.1158 0.8508 -0.5125 +vn -0.1153 0.8509 -0.5125 +vn -0.1151 0.8509 -0.5126 +vn -0.1160 0.8508 -0.5126 +vn 0.0648 0.9694 -0.2367 +vn 0.0641 0.9694 -0.2369 +vn 0.2890 0.8354 -0.4675 +vn 0.2889 0.8355 -0.4675 +vn 0.5062 0.6197 -0.5998 +vn 0.5065 0.6196 -0.5997 +vn 0.5068 0.6196 -0.5994 +vn 0.7097 0.3714 -0.5986 +vn 0.8793 0.1478 -0.4528 +vn -0.8640 -0.4806 0.1499 +vn -0.0000 -0.8168 0.5769 +vn -0.0007 -0.9701 -0.2426 +vn -0.9889 0.0024 -0.1489 +vn -0.7181 0.3132 -0.6215 +vn -0.7184 0.3129 -0.6213 +vn -0.7177 0.3134 -0.6219 +vn -0.3261 0.8012 -0.5018 +vn -0.1400 0.9576 -0.2519 +vn -0.1403 0.9575 -0.2519 +vn -0.1394 0.9577 -0.2517 +vn -0.1407 0.9574 -0.2520 +vn 0.0380 0.9977 0.0553 +vn 0.0373 0.9978 0.0552 +vn 0.0368 0.9978 0.0551 +vn 0.2663 0.9403 
-0.2117 +vn 0.4905 0.7687 -0.4104 +vn 0.4901 0.7690 -0.4105 +vn 0.4906 0.7687 -0.4103 +vn 0.4901 0.7689 -0.4105 +vn 0.6996 0.5253 -0.4843 +vn 0.7000 0.5250 -0.4841 +vn 0.6994 0.5255 -0.4845 +vn 0.8743 0.2700 -0.4034 +vn 0.9823 0.0724 -0.1726 +vn 0.9891 -0.0162 0.1464 +vn 0.9214 -0.3793 0.0848 +vn 0.9212 -0.3796 0.0848 +vn -0.8301 -0.5523 -0.0765 +vn -0.8298 -0.5528 -0.0764 +vn -0.9890 0.0163 -0.1467 +vn -0.8915 0.1870 -0.4126 +vn -0.7271 0.4426 -0.5249 +vn -0.7266 0.4430 -0.5251 +vn -0.5350 0.7060 -0.4641 +vn -0.3458 0.9026 -0.2565 +vn -0.3472 0.9021 -0.2564 +vn -0.3481 0.9018 -0.2563 +vn -0.3449 0.9029 -0.2566 +vn -0.1653 0.9856 0.0366 +vn -0.1642 0.9857 0.0368 +vn -0.1660 0.9855 0.0364 +vn -0.1636 0.9858 0.0370 +vn 0.0120 0.9400 0.3410 +vn 0.0126 0.9399 0.3412 +vn 0.0114 0.9400 0.3409 +vn 0.2418 0.9677 0.0718 +vn 0.2423 0.9675 0.0719 +vn 0.4701 0.8651 -0.1748 +vn 0.6851 0.6522 -0.3244 +vn 0.6851 0.6522 -0.3243 +vn 0.6848 0.6526 -0.3244 +vn 0.6852 0.6522 -0.3243 +vn 0.9804 0.1312 -0.1468 +vn 0.9807 0.1306 -0.1458 +vn 0.9803 0.1315 -0.1473 +vn 0.8872 -0.0155 0.4612 +vn 0.7685 -0.6359 0.0707 +vn 0.7684 -0.6360 0.0707 +vn 0.0003 -0.9701 -0.2429 +vn -0.6470 -0.7601 -0.0596 +vn -0.8798 -0.0222 0.4747 +vn -0.8800 -0.0221 0.4744 +vn -0.9821 -0.0727 0.1735 +vn -0.7386 0.5498 -0.3900 +vn -0.7383 0.5502 -0.3902 +vn -0.5543 0.7946 -0.2480 +vn -0.3702 0.9288 0.0157 +vn -0.3715 0.9283 0.0154 +vn -0.3694 0.9291 0.0158 +vn -0.3722 0.9280 0.0152 +vn -0.1891 0.9286 0.3192 +vn -0.1899 0.9285 0.3190 +vn -0.1885 0.9287 0.3193 +vn -0.1904 0.9285 0.3189 +vn -0.0087 0.8082 0.5888 +vn -0.0087 0.8082 0.5889 +vn 0.4471 0.8903 0.0861 +vn 0.4466 0.8906 0.0860 +vn 0.6673 0.7343 -0.1246 +vn 0.6666 0.7349 -0.1250 +vn 0.6677 0.7339 -0.1244 +vn 0.6661 0.7353 -0.1252 +vn 0.8570 0.4733 -0.2035 +vn 0.8567 0.4739 -0.2037 +vn 0.8567 0.4739 -0.2038 +vn 0.9896 -0.0298 0.1405 +vn 0.8890 -0.1023 0.4463 +vn 0.8886 -0.1026 0.4471 +vn 0.7107 -0.0258 0.7031 +vn 0.7109 -0.0257 0.7028 +vn 0.7111 -0.0257 0.7026 
+vn 0.5757 -0.8160 0.0530 +vn 0.5758 -0.8159 0.0530 +vn -0.3820 -0.9176 -0.1100 +vn -0.3817 -0.9177 -0.1101 +vn -0.1943 -0.9642 -0.1803 +vn -0.1939 -0.9643 -0.1804 +vn -0.4488 -0.8927 -0.0413 +vn -0.4484 -0.8929 -0.0413 +vn -0.7184 -0.0305 0.6950 +vn -0.8786 -0.1481 0.4540 +vn -0.8791 -0.1478 0.4532 +vn -0.8784 -0.1483 0.4543 +vn -0.8793 -0.1476 0.4527 +vn -0.9903 0.0426 -0.1321 +vn -0.9036 0.3291 -0.2743 +vn -0.9034 0.3295 -0.2745 +vn -0.9032 0.3298 -0.2746 +vn -0.7539 0.6185 -0.2216 +vn -0.5755 0.8178 -0.0083 +vn -0.5752 0.8179 -0.0082 +vn -0.5751 0.8181 -0.0082 +vn -0.3958 0.8742 0.2813 +vn -0.3963 0.8740 0.2812 +vn -0.3955 0.8743 0.2815 +vn -0.2124 0.7983 0.5636 +vn -0.2129 0.7982 0.5634 +vn -0.2122 0.7982 0.5638 +vn -0.2133 0.7982 0.5634 +vn -0.0255 0.6261 0.7793 +vn -0.0259 0.6260 0.7794 +vn -0.0259 0.6259 0.7794 +vn -0.0255 0.6260 0.7794 +vn 0.4241 0.8389 0.3412 +vn 0.4238 0.8390 0.3413 +vn 0.4245 0.8388 0.3411 +vn 0.6473 0.7561 0.0968 +vn 0.8442 0.5329 -0.0587 +vn 0.8438 0.5334 -0.0588 +vn 0.8436 0.5337 -0.0588 +vn 0.8441 0.5329 -0.0587 +vn 0.9902 -0.0434 0.1328 +vn 0.8917 -0.1869 0.4122 +vn 0.8920 -0.1866 0.4117 +vn 0.8916 -0.1872 0.4124 +vn 0.7122 -0.1720 0.6806 +vn 0.7125 -0.1719 0.6803 +vn 0.7127 -0.1718 0.6801 +vn 0.7120 -0.1721 0.6808 +vn 0.4998 -0.0330 0.8655 +vn -0.2416 -0.9701 -0.0223 +vn -0.7152 -0.2039 0.6685 +vn -0.8749 -0.2695 0.4024 +vn -0.8750 -0.2693 0.4022 +vn -0.9916 0.0515 -0.1182 +vn -0.9122 0.3711 -0.1736 +vn -0.9124 0.3707 -0.1735 +vn -0.5960 0.7705 0.2261 +vn -0.5957 0.7707 0.2263 +vn -0.4160 0.7519 0.5115 +vn -0.4144 0.7523 0.5122 +vn -0.4172 0.7516 0.5109 +vn -0.4132 0.7526 0.5126 +vn -0.2294 0.6182 0.7518 +vn -0.2287 0.6184 0.7519 +vn -0.2282 0.6186 0.7519 +vn -0.2299 0.6182 0.7516 +vn 0.1802 0.6070 0.7740 +vn 0.1804 0.6070 0.7740 +vn 0.1805 0.6070 0.7740 +vn 0.4035 0.7218 0.5623 +vn 0.4041 0.7216 0.5621 +vn 0.4046 0.7214 0.5620 +vn 0.6288 0.7117 0.3133 +vn 0.6291 0.7115 0.3131 +vn 0.6294 0.7112 0.3130 +vn 0.6284 0.7120 0.3134 +vn 
0.8293 0.5494 0.1022 +vn 0.8297 0.5487 0.1023 +vn 0.8290 0.5499 0.1021 +vn 0.8300 0.5483 0.1023 +vn 0.9653 0.2604 0.0203 +vn 0.9914 -0.0540 0.1196 +vn 0.7171 -0.3138 0.6223 +vn 0.5016 -0.2206 0.8365 +vn 0.5012 -0.2206 0.8367 +vn -0.5264 -0.2403 0.8156 +vn -0.5265 -0.2402 0.8155 +vn -0.7093 -0.3717 0.5990 +vn -0.7100 -0.3713 0.5983 +vn -0.7088 -0.3720 0.5994 +vn -0.7105 -0.3710 0.5979 +vn -0.9718 -0.2305 0.0498 +vn -0.9721 -0.2293 0.0491 +vn -0.9716 -0.2312 0.0501 +vn -0.9931 0.0575 -0.1026 +vn -0.9226 0.3807 -0.0621 +vn -0.9232 0.3792 -0.0620 +vn -0.7855 0.6009 0.1479 +vn -0.6135 0.6630 0.4291 +vn -0.6145 0.6624 0.4285 +vn -0.4294 0.5830 0.6897 +vn -0.4298 0.5828 0.6896 +vn -0.4290 0.5831 0.6899 +vn -0.2400 0.4097 0.8801 +vn 0.1690 0.4022 0.8998 +vn 0.1693 0.4022 0.8998 +vn 0.1686 0.4022 0.8999 +vn 0.3881 0.5591 0.7327 +vn 0.3883 0.5591 0.7326 +vn 0.3878 0.5592 0.7328 +vn 0.6129 0.6115 0.5004 +vn 0.6123 0.6119 0.5007 +vn 0.6118 0.6122 0.5010 +vn 0.8152 0.5178 0.2595 +vn 0.8151 0.5179 0.2596 +vn 0.8153 0.5176 0.2595 +vn 0.8149 0.5183 0.2596 +vn 0.9584 0.2678 0.0989 +vn 0.9928 -0.0608 0.1032 +vn 0.9035 -0.3293 0.2744 +vn 0.9036 -0.3290 0.2743 +vn 0.9038 -0.3287 0.2741 +vn 0.9033 -0.3296 0.2745 +vn 0.7272 -0.4426 0.5247 +vn 0.2885 -0.2504 0.9242 +vn 0.2879 -0.2504 0.9243 +vn 0.0747 -0.0398 0.9964 +vn 0.0742 -0.0397 0.9965 +vn -0.3293 -0.2608 0.9075 +vn -0.3276 -0.2611 0.9080 +vn -0.3299 -0.2607 0.9073 +vn -0.5192 -0.4380 0.7338 +vn -0.5196 -0.4379 0.7337 +vn -0.5190 -0.4381 0.7340 +vn -0.5197 -0.4379 0.7336 +vn -0.7002 -0.5248 0.4841 +vn -0.6996 -0.5252 0.4844 +vn -0.6995 -0.5253 0.4845 +vn -0.8569 -0.4736 0.2036 +vn -0.8567 -0.4740 0.2037 +vn -0.8564 -0.4744 0.2039 +vn -0.9327 0.3575 0.0468 +vn -0.9327 0.3576 0.0469 +vn -0.7998 0.5165 0.3058 +vn -0.8001 0.5162 0.3057 +vn -0.6283 0.5130 0.5849 +vn -0.4406 0.3860 0.8105 +vn -0.2468 0.1873 0.9508 +vn 0.6001 0.4738 0.6445 +vn 0.5991 0.4742 0.6452 +vn 0.6008 0.4735 0.6441 +vn 0.8028 0.4455 0.3963 +vn 0.8031 0.4451 0.3961 
+vn 0.8026 0.4458 0.3963 +vn 0.9519 0.2514 0.1753 +vn 0.9945 -0.0621 0.0849 +vn 0.9124 -0.3707 0.1736 +vn 0.7393 -0.5491 0.3897 +vn 0.7397 -0.5487 0.3896 +vn 0.7385 -0.5499 0.3901 +vn 0.5196 -0.5688 0.6375 +vn 0.5187 -0.5692 0.6380 +vn 0.5202 -0.5686 0.6372 +vn 0.2961 -0.4564 0.8391 +vn 0.2966 -0.4563 0.8389 +vn 0.0777 -0.2660 0.9609 +vn -0.1293 -0.2689 0.9544 +vn -0.1282 -0.2691 0.9546 +vn -0.3198 -0.4761 0.8192 +vn -0.3196 -0.4761 0.8192 +vn -0.3201 -0.4760 0.8191 +vn -0.3194 -0.4761 0.8193 +vn -0.5076 -0.6191 0.5991 +vn -0.6852 -0.6522 0.3243 +vn -0.6849 -0.6525 0.3244 +vn -0.8437 -0.5335 0.0587 +vn -0.8441 -0.5329 0.0586 +vn -0.8436 -0.5338 0.0588 +vn -0.9583 -0.2681 -0.0986 +vn -0.9409 0.3078 0.1410 +vn -0.8111 0.3996 0.4272 +vn -0.6397 0.3392 0.6897 +vn -0.4468 0.1766 0.8770 +vn -0.4465 0.1767 0.8772 +vn 0.3733 0.1694 0.9121 +vn 0.3730 0.1694 0.9122 +vn 0.7944 0.3444 0.5003 +vn 0.7937 0.3451 0.5010 +vn 0.7934 0.3454 0.5012 +vn 0.9462 0.2158 0.2413 +vn 0.9961 -0.0574 0.0674 +vn 0.9222 -0.3819 0.0618 +vn 0.7539 -0.6185 0.2216 +vn 0.7534 -0.6191 0.2216 +vn 0.7531 -0.6195 0.2216 +vn 0.5350 -0.7059 0.4642 +vn 0.5365 -0.7051 0.4637 +vn 0.3082 -0.6456 0.6987 +vn 0.0846 -0.4845 0.8707 +vn -0.1191 -0.4907 0.8631 +vn -0.1208 -0.4905 0.8630 +vn -0.1182 -0.4909 0.8632 +vn -0.3080 -0.6729 0.6726 +vn -0.3073 -0.6729 0.6728 +vn -0.3068 -0.6730 0.6730 +vn -0.3085 -0.6729 0.6723 +vn -0.4907 -0.7687 0.4103 +vn -0.4909 -0.7686 0.4102 +vn -0.4900 -0.7689 0.4107 +vn -0.6664 -0.7351 0.1249 +vn -0.8300 -0.5483 -0.1019 +vn -0.8174 0.2654 0.5113 +vn -0.6444 0.1553 0.7488 +vn -0.6445 0.1552 0.7487 +vn -0.6443 0.1553 0.7489 +vn -0.4488 -0.0374 0.8928 +vn 0.7886 0.2278 0.5711 +vn 0.9318 -0.3598 -0.0476 +vn 0.5557 -0.7936 0.2477 +vn 0.3247 -0.8017 0.5019 +vn 0.3242 -0.8018 0.5020 +vn 0.0980 -0.6854 0.7215 +vn 0.0975 -0.6854 0.7216 +vn -0.1053 -0.6939 0.7123 +vn -0.1051 -0.6939 0.7123 +vn -0.2909 -0.8349 0.4672 +vn -0.4701 -0.8651 0.1747 +vn -0.4696 -0.8654 0.1749 +vn -0.4705 -0.8650 
0.1745 +vn -0.8162 -0.5163 -0.2593 +vn -0.9463 -0.2155 -0.2410 +vn -0.9514 0.1583 0.2641 +vn -0.9513 0.1586 0.2644 +vn -0.9512 0.1587 0.2647 +vn -0.8210 0.1215 0.5578 +vn -0.8210 0.1216 0.5579 +vn -0.4457 -0.2503 0.8595 +vn -0.4450 -0.2505 0.8598 +vn -0.4459 -0.2503 0.8594 +vn -0.4448 -0.2505 0.8599 +vn -0.2361 -0.4847 0.8423 +vn -0.2361 -0.4846 0.8422 +vn -0.2357 -0.4847 0.8423 +vn -0.0200 -0.6937 0.7199 +vn -0.0199 -0.6937 0.7199 +vn -0.0198 -0.6937 0.7200 +vn 0.3738 -0.2403 0.8959 +vn 0.3746 -0.2403 0.8955 +vn 0.3733 -0.2403 0.8961 +vn 0.5856 -0.0305 0.8100 +vn 0.7838 0.1047 0.6122 +vn 0.7847 0.1043 0.6111 +vn 0.7832 0.1049 0.6129 +vn 0.7852 0.1041 0.6104 +vn 0.9409 -0.3078 -0.1411 +vn 0.7859 -0.6004 -0.1478 +vn 0.7842 -0.6026 -0.1484 +vn 0.5758 -0.8176 0.0084 +vn 0.3464 -0.9023 0.2565 +vn 0.3472 -0.9021 0.2564 +vn 0.3459 -0.9025 0.2565 +vn 0.3477 -0.9019 0.2563 +vn 0.1168 -0.8507 0.5126 +vn 0.1163 -0.8507 0.5126 +vn 0.1170 -0.8506 0.5125 +vn -0.0862 -0.8612 0.5009 +vn -0.0864 -0.8612 0.5008 +vn -0.4481 -0.8898 -0.0862 +vn -0.4475 -0.8901 -0.0861 +vn -0.4484 -0.8897 -0.0862 +vn -0.6282 -0.7121 -0.3135 +vn -0.6285 -0.7119 -0.3134 +vn -0.8037 -0.4446 -0.3955 +vn -0.8030 -0.4454 -0.3960 +vn -0.8026 -0.4459 -0.3962 +vn -0.8041 -0.4441 -0.3953 +vn -0.9418 -0.1668 -0.2920 +vn -0.9421 -0.1662 -0.2912 +vn -0.9538 0.0725 0.2915 +vn -0.9539 0.0725 0.2914 +vn -0.9539 0.0725 0.2913 +vn -0.6419 -0.2204 0.7345 +vn -0.6422 -0.2202 0.7342 +vn -0.4380 -0.4567 0.7743 +vn -0.4386 -0.4566 0.7740 +vn -0.4377 -0.4568 0.7745 +vn -0.4390 -0.4565 0.7739 +vn -0.0015 -0.8613 0.5082 +vn 0.1835 -0.6731 0.7164 +vn 0.1833 -0.6731 0.7165 +vn 0.3818 -0.4379 0.8139 +vn 0.3818 -0.4380 0.8139 +vn 0.9369 0.0507 0.3460 +vn 0.9991 -0.0252 0.0327 +vn 0.9474 -0.2383 -0.2135 +vn 0.9473 -0.2387 -0.2138 +vn 0.7994 -0.5169 -0.3063 +vn 0.7987 -0.5176 -0.3068 +vn 0.7999 -0.5163 -0.3060 +vn 0.5972 -0.7697 -0.2256 +vn 0.5973 -0.7696 -0.2254 +vn 0.5976 -0.7695 -0.2253 +vn 0.3698 -0.9290 -0.0156 +vn 0.3710 
-0.9285 -0.0153 +vn 0.3716 -0.9283 -0.0152 +vn 0.1386 -0.9578 0.2517 +vn 0.1391 -0.9577 0.2518 +vn 0.1385 -0.9578 0.2517 +vn -0.0632 -0.9694 0.2372 +vn -0.0645 -0.9694 0.2368 +vn -0.2403 -0.9680 -0.0718 +vn -0.2405 -0.9680 -0.0718 +vn -0.2406 -0.9679 -0.0719 +vn -0.2400 -0.9681 -0.0717 +vn -0.4249 -0.8386 -0.3410 +vn -0.4239 -0.8390 -0.3412 +vn -0.4255 -0.8383 -0.3409 +vn -0.6120 -0.6123 -0.5006 +vn -0.6121 -0.6122 -0.5005 +vn -0.6118 -0.6124 -0.5007 +vn -0.6123 -0.6121 -0.5004 +vn -0.7939 -0.3448 -0.5008 +vn -0.7947 -0.3440 -0.5001 +vn -0.9377 -0.1117 -0.3291 +vn -0.9545 -0.0153 0.2978 +vn -0.9544 -0.0154 0.2980 +vn -0.8215 -0.1718 0.5438 +vn -0.8211 -0.1720 0.5442 +vn -0.8217 -0.1716 0.5435 +vn -0.6348 -0.4020 0.6599 +vn -0.6337 -0.4025 0.6606 +vn -0.6355 -0.4017 0.6593 +vn -0.6330 -0.4029 0.6611 +vn -0.2053 -0.8506 0.4840 +vn 0.0216 -0.9694 0.2447 +vn 0.0218 -0.9693 0.2447 +vn 0.0221 -0.9693 0.2447 +vn 0.2025 -0.8354 0.5110 +vn 0.2030 -0.8353 0.5109 +vn 0.5947 -0.3709 0.7133 +vn 0.9366 -0.0107 0.3501 +vn 0.9362 -0.0108 0.3514 +vn 0.9369 -0.0106 0.3494 +vn 0.9995 -0.0115 0.0289 +vn 0.9995 -0.0116 0.0293 +vn 0.9520 -0.1574 -0.2624 +vn 0.9514 -0.1583 -0.2641 +vn 0.9523 -0.1570 -0.2616 +vn 0.6142 -0.6624 -0.4290 +vn 0.6146 -0.6622 -0.4287 +vn 0.6147 -0.6621 -0.4286 +vn 0.6141 -0.6624 -0.4290 +vn 0.3951 -0.8745 -0.2814 +vn 0.3932 -0.8752 -0.2820 +vn 0.1656 -0.9855 -0.0364 +vn 0.1644 -0.9857 -0.0367 +vn 0.1637 -0.9858 -0.0369 +vn 0.1662 -0.9854 -0.0363 +vn -0.0389 -0.9977 -0.0554 +vn -0.0386 -0.9977 -0.0553 +vn -0.0383 -0.9977 -0.0554 +vn -0.2172 -0.9115 -0.3492 +vn -0.2160 -0.9119 -0.3491 +vn -0.2153 -0.9121 -0.3490 +vn -0.4045 -0.7216 -0.5619 +vn -0.7884 -0.2280 -0.5713 +vn -0.7879 -0.2284 -0.5719 +vn -0.9369 -0.0506 -0.3458 +vn -0.9373 -0.0503 -0.3450 +vn -0.9997 -0.0025 -0.0259 +vn -0.9531 -0.1028 0.2846 +vn -0.9532 -0.1027 0.2842 +vn -0.6236 -0.5687 0.5364 +vn -0.6226 -0.5693 0.5369 +vn -0.6218 -0.5697 0.5373 +vn -0.4076 -0.8018 0.4371 +vn -0.1824 -0.9575 0.2235 
+vn -0.1821 -0.9575 0.2236 +vn 0.0482 -0.9977 -0.0478 +vn 0.0480 -0.9977 -0.0478 +vn 0.0477 -0.9977 -0.0479 +vn 0.2257 -0.9402 0.2553 +vn 0.2266 -0.9399 0.2554 +vn 0.4101 -0.7691 0.4903 +vn 0.4108 -0.7688 0.4901 +vn 0.6047 -0.5246 0.5992 +vn 0.6051 -0.5244 0.5991 +vn 0.9372 -0.0720 0.3412 +vn 0.9371 -0.0721 0.3415 +vn 0.9539 -0.0724 -0.2912 +vn 0.6277 -0.5132 -0.5853 +vn 0.4152 -0.7522 -0.5117 +vn 0.1904 -0.9284 -0.3192 +vn 0.1908 -0.9283 -0.3190 +vn 0.1900 -0.9284 -0.3193 +vn -0.3903 -0.5584 -0.7320 +vn -0.3900 -0.5584 -0.7322 +vn -0.3907 -0.5582 -0.7319 +vn -0.3895 -0.5585 -0.7324 +vn -0.5903 -0.3142 -0.7435 +vn -0.5897 -0.3143 -0.7440 +vn -0.7835 -0.1046 -0.6126 +vn -0.9995 -0.0171 -0.0274 +vn -0.9500 -0.1874 0.2496 +vn -0.8080 -0.4428 0.3887 +vn -0.6093 -0.7053 0.3623 +vn -0.3856 -0.9025 0.1918 +vn -0.1574 -0.9854 -0.0653 +vn 0.0732 -0.9398 -0.3337 +vn 0.0734 -0.9398 -0.3336 +vn 0.4311 -0.8657 0.2546 +vn 0.4314 -0.8655 0.2547 +vn 0.4318 -0.8653 0.2547 +vn 0.6184 -0.6515 0.4394 +vn 0.6184 -0.6516 0.4394 +vn 0.7959 -0.3830 0.4689 +vn 0.7961 -0.3828 0.4688 +vn 0.7963 -0.3824 0.4688 +vn 0.7958 -0.3831 0.4690 +vn 0.9995 0.0168 0.0282 +vn 0.9545 0.0152 -0.2980 +vn 0.9543 0.0153 -0.2983 +vn 0.8213 -0.1216 -0.5574 +vn 0.8218 -0.1214 -0.5567 +vn 0.6374 -0.3399 -0.6915 +vn 0.6387 -0.3394 -0.6906 +vn 0.4295 -0.5828 -0.6898 +vn 0.2122 -0.7984 -0.5635 +vn 0.2138 -0.7983 -0.5631 +vn 0.0093 -0.8083 -0.5887 +vn -0.1796 -0.6069 -0.7742 +vn -0.1801 -0.6069 -0.7741 +vn -0.1791 -0.6070 -0.7743 +vn -0.3792 -0.3701 -0.8481 +vn -0.3805 -0.3700 -0.8475 +vn -0.5856 -0.1437 -0.7978 +vn -0.7841 0.0218 -0.6202 +vn -0.9370 0.0720 -0.3418 +vn -0.9365 0.0726 -0.3432 +vn -0.9989 -0.0303 -0.0347 +vn -0.9454 -0.2642 0.1907 +vn -0.9453 -0.2645 0.1910 +vn -0.9456 -0.2639 0.1905 +vn -0.9452 -0.2647 0.1912 +vn -0.7948 -0.5509 0.2545 +vn -0.7953 -0.5504 0.2543 +vn -0.7954 -0.5504 0.2539 +vn -0.3621 -0.9286 -0.0805 +vn -0.3613 -0.9290 -0.0803 +vn -0.1324 -0.9282 -0.3478 +vn -0.1318 -0.9283 -0.3477 
+vn 0.0938 -0.8083 -0.5813 +vn 0.0950 -0.8083 -0.5811 +vn 0.6344 -0.7348 0.2400 +vn 0.8074 -0.4740 0.3514 +vn 0.8065 -0.4752 0.3518 +vn 0.9428 -0.1858 0.2767 +vn 0.9427 -0.1859 0.2769 +vn 0.6449 -0.1552 -0.7483 +vn 0.6440 -0.1555 -0.7491 +vn 0.4400 -0.3861 -0.8107 +vn 0.4409 -0.3859 -0.8104 +vn 0.2287 -0.6184 -0.7518 +vn 0.2298 -0.6182 -0.7517 +vn 0.0273 -0.6260 -0.7794 +vn 0.0262 -0.6261 -0.7793 +vn -0.1680 -0.4024 -0.8999 +vn -0.1680 -0.4023 -0.8999 +vn -0.3737 -0.1694 -0.9119 +vn -0.7855 0.1481 -0.6009 +vn -0.9395 0.1309 -0.3166 +vn -0.9981 -0.0428 -0.0440 +vn -0.9981 -0.0428 -0.0441 +vn -0.9384 -0.3274 0.1102 +vn -0.9386 -0.3270 0.1100 +vn -0.9384 -0.3275 0.1102 +vn -0.7794 -0.6206 0.0863 +vn -0.3373 -0.8753 -0.3466 +vn -0.3375 -0.8752 -0.3466 +vn -0.3370 -0.8753 -0.3467 +vn -0.1097 -0.7984 -0.5921 +vn -0.1084 -0.7985 -0.5921 +vn -0.1102 -0.7983 -0.5920 +vn 0.1106 -0.6260 -0.7719 +vn 0.1099 -0.6261 -0.7719 +vn 0.2964 -0.7840 -0.5455 +vn 0.2961 -0.7840 -0.5455 +vn 0.2960 -0.7841 -0.5455 +vn 0.4760 -0.8396 -0.2617 +vn 0.6538 -0.7564 0.0186 +vn 0.6541 -0.7562 0.0186 +vn 0.6540 -0.7562 0.0185 +vn 0.8206 -0.5330 0.2062 +vn 0.9478 -0.2306 0.2201 +vn 0.9981 0.0429 0.0438 +vn 0.9981 0.0426 0.0440 +vn 0.9502 0.1871 -0.2491 +vn 0.6460 0.0330 -0.7626 +vn 0.4461 -0.1766 -0.8774 +vn 0.4456 -0.1767 -0.8776 +vn 0.0388 -0.4148 -0.9091 +vn 0.0390 -0.4148 -0.9091 +vn -0.1618 -0.1840 -0.9695 +vn -0.1618 -0.1841 -0.9695 +vn -0.3713 0.0360 -0.9278 +vn -0.3720 0.0360 -0.9275 +vn -0.5869 0.2037 -0.7836 +vn -0.5877 0.2037 -0.7830 +vn -0.7900 0.2698 -0.5505 +vn -0.9433 0.1845 -0.2758 +vn -0.9428 0.1857 -0.2768 +vn -0.9437 0.1838 -0.2752 +vn -0.9970 -0.0530 -0.0570 +vn -0.9970 -0.0532 -0.0569 +vn -0.9295 -0.3686 0.0100 +vn -0.7636 -0.6378 -0.1009 +vn -0.3175 -0.7525 -0.5770 +vn -0.0925 -0.6184 -0.7804 +vn -0.0937 -0.6184 -0.7803 +vn 0.0554 0.7784 -0.6253 +vn 0.0567 0.7781 -0.6256 +vn 0.0554 0.7773 -0.6267 +vn 0.0553 0.7808 -0.6223 +vn 0.0551 0.7795 -0.6240 +vn 0.0551 0.7787 -0.6249 +vn 
0.0553 0.7793 -0.6242 +vn 0.0551 0.7794 -0.6241 +vn 0.0552 0.7801 -0.6233 +vn 0.0549 0.7749 -0.6298 +vn 0.3123 -0.6073 -0.7305 +vn 0.4957 -0.7221 -0.4826 +vn 0.4962 -0.7219 -0.4824 +vn 0.4967 -0.7217 -0.4822 +vn 0.4953 -0.7222 -0.4828 +vn 0.6740 -0.7118 -0.1978 +vn 0.6748 -0.7111 -0.1975 +vn 0.6736 -0.7120 -0.1980 +vn 0.8348 -0.5487 0.0454 +vn 0.8351 -0.5482 0.0454 +vn 0.9542 -0.2589 0.1496 +vn 0.9540 -0.2600 0.1495 +vn 0.8158 0.3134 -0.4860 +vn 0.8161 0.3132 -0.4857 +vn 0.8164 0.3130 -0.4853 +vn 0.6426 0.2203 -0.7338 +vn 0.4480 0.0375 -0.8932 +vn 0.0448 -0.1896 -0.9808 +vn 0.0441 -0.1897 -0.9809 +vn -0.1609 0.0390 -0.9862 +vn -0.3740 0.2402 -0.8958 +vn -0.5933 0.3715 -0.7142 +vn -0.5930 0.3716 -0.7143 +vn -0.5927 0.3717 -0.7145 +vn -0.7975 0.3815 -0.4674 +vn -0.9482 0.2296 -0.2195 +vn -0.9485 0.2288 -0.2191 +vn -0.9955 -0.0599 -0.0731 +vn -0.9195 -0.3799 -0.1013 +vn -0.7475 -0.6006 -0.2836 +vn -0.7479 -0.6002 -0.2835 +vn -0.5303 -0.6618 -0.5298 +vn -0.5310 -0.6615 -0.5295 +vn -0.3026 -0.5825 -0.7544 +vn 0.3028 0.5826 0.7543 +vn 0.7228 0.3999 0.5635 +vn 0.7223 0.4003 0.5640 +vn 0.8945 0.2391 0.3778 +vn 0.8945 0.2390 0.3778 +vn 0.9899 0.0391 0.1364 +vn 0.9900 0.0384 0.1357 +vn 0.9781 -0.1678 -0.1231 +vn 0.9781 -0.1679 -0.1232 +vn 0.8700 -0.3443 -0.3530 +vn 0.8697 -0.3446 -0.3534 +vn 0.7039 -0.4736 -0.5293 +vn 0.5121 -0.5588 -0.6523 +vn 0.5129 -0.5584 -0.6519 +vn 0.5134 -0.5583 -0.6518 +vn -0.7223 -0.4002 -0.5641 +vn -0.8949 -0.2383 -0.3772 +vn -0.9901 -0.0378 -0.1352 +vn -0.9901 -0.0375 -0.1351 +vn -0.9785 0.1665 0.1216 +vn -0.9782 0.1675 0.1225 +vn -0.8698 0.3444 0.3533 +vn -0.8704 0.3437 0.3525 +vn -0.8706 0.3435 0.3522 +vn -0.8695 0.3448 0.3536 +vn -0.7036 0.4739 0.5296 +vn -0.7030 0.4743 0.5300 +vn -0.7026 0.4745 0.5303 +vn -0.7039 0.4736 0.5293 +vn -0.5118 0.5587 0.6526 +vn -0.5119 0.5587 0.6525 +vn -0.3128 0.6070 0.7306 +vn -0.3129 0.6069 0.7306 +vn -0.3127 0.6070 0.7306 +vn -0.1110 0.6259 0.7720 +vn -0.1116 0.6258 0.7720 +vn -0.1118 0.6258 0.7719 +vn -0.1107 
0.6259 0.7720 +vn 0.0926 0.6184 0.7804 +vn 0.0921 0.6183 0.7805 +vn 0.6910 -0.6119 -0.3850 +vn 0.8483 -0.5175 -0.1121 +vn 0.9380 0.3285 -0.1111 +vn 0.9377 0.3291 -0.1115 +vn 0.9376 0.3292 -0.1115 +vn 0.8073 0.4432 -0.3896 +vn 0.8067 0.4438 -0.3902 +vn 0.8078 0.4427 -0.3892 +vn 0.4448 0.2504 -0.8599 +vn 0.4441 0.2505 -0.8602 +vn 0.0456 0.0402 -0.9982 +vn 0.0455 0.0403 -0.9982 +vn -0.1649 0.2609 -0.9512 +vn -0.1640 0.2609 -0.9513 +vn -0.1636 0.2609 -0.9514 +vn -0.1653 0.2610 -0.9511 +vn -0.3818 0.4381 -0.8138 +vn -0.3821 0.4380 -0.8137 +vn -0.6032 0.5254 -0.6001 +vn -0.6037 0.5251 -0.5998 +vn -0.8081 0.4731 -0.3509 +vn -0.9543 0.2590 -0.1493 +vn -0.9940 -0.0610 -0.0912 +vn -0.9092 -0.3593 -0.2106 +vn -0.9089 -0.3598 -0.2106 +vn -0.9089 -0.3600 -0.2106 +vn 0.5299 0.6621 0.5299 +vn 0.5303 0.6620 0.5297 +vn 0.5304 0.6620 0.5296 +vn 0.7343 0.5158 0.4413 +vn 0.7337 0.5164 0.4416 +vn 0.7347 0.5154 0.4411 +vn 0.9011 0.3084 0.3048 +vn 0.9909 0.0504 0.1246 +vn 0.9911 0.0492 0.1239 +vn 0.9909 0.0507 0.1248 +vn 0.9738 -0.2158 -0.0710 +vn 0.9739 -0.2156 -0.0711 +vn 0.8607 -0.4446 -0.2480 +vn 0.8609 -0.4444 -0.2478 +vn 0.8610 -0.4442 -0.2477 +vn -0.9011 -0.3085 -0.3049 +vn -0.9008 -0.3089 -0.3051 +vn -0.9911 -0.0494 -0.1238 +vn -0.9912 -0.0485 -0.1233 +vn -0.9737 0.2163 0.0713 +vn -0.9737 0.2164 0.0714 +vn -0.9737 0.2165 0.0714 +vn -0.8610 0.4442 0.2476 +vn -0.8605 0.4450 0.2481 +vn -0.8602 0.4455 0.2483 +vn -0.6909 0.6119 0.3850 +vn -0.2956 0.7842 0.5456 +vn -0.2948 0.7843 0.5459 +vn -0.2959 0.7841 0.5455 +vn -0.2945 0.7843 0.5460 +vn 0.1088 0.7985 0.5920 +vn 0.9678 -0.2517 -0.0054 +vn 0.9680 -0.2509 -0.0052 +vn 0.9676 -0.2523 -0.0056 +vn 0.9681 -0.2503 -0.0051 +vn 0.9939 0.0619 0.0913 +vn 0.9939 0.0615 0.0913 +vn 0.9938 0.0628 0.0913 +vn 0.7947 0.5510 -0.2549 +vn 0.4371 0.4569 -0.7747 +vn 0.4367 0.4569 -0.7749 +vn 0.4366 0.4570 -0.7750 +vn 0.2469 0.2655 -0.9320 +vn 0.0422 0.2692 -0.9622 +vn 0.0419 0.2693 -0.9622 +vn -0.3932 0.6196 -0.6793 +vn -0.3934 0.6195 -0.6794 +vn -0.6159 
0.6533 -0.4402 +vn -0.6163 0.6531 -0.4401 +vn -0.6157 0.6534 -0.4403 +vn -0.8219 0.5311 -0.2057 +vn -0.8218 0.5314 -0.2058 +vn -0.8220 0.5311 -0.2057 +vn -0.9609 0.2677 -0.0713 +vn -0.9610 0.2673 -0.0713 +vn -0.9610 0.2671 -0.0712 +vn 0.3394 0.8745 0.3465 +vn 0.5470 0.7705 0.3275 +vn 0.7472 0.6011 0.2836 +vn 0.7483 0.5997 0.2834 +vn 0.7465 0.6019 0.2837 +vn 0.7490 0.5990 0.2833 +vn 0.9099 0.3578 0.2099 +vn 0.9096 0.3584 0.2100 +vn 0.9100 0.3576 0.2099 +vn 0.9095 0.3586 0.2101 +vn 0.9924 0.0578 0.1088 +vn 0.9924 0.0574 0.1086 +vn -0.9680 0.2511 0.0053 +vn -0.6740 0.7117 0.1978 +vn -0.4774 0.8389 0.2615 +vn -0.4770 0.8390 0.2616 +vn -0.0734 0.9399 0.3334 +vn -0.0729 0.9399 0.3335 +vn 0.1310 0.9285 0.3475 +vn 0.1315 0.9284 0.3476 +vn 0.1306 0.9286 0.3474 +vn 0.9196 0.3796 0.1013 +vn 0.9196 0.3795 0.1013 +vn 0.7806 0.6191 -0.0857 +vn 0.7802 0.6196 -0.0858 +vn 0.7800 0.6198 -0.0858 +vn 0.7808 0.6189 -0.0857 +vn 0.6097 0.7051 -0.3620 +vn 0.6093 0.7053 -0.3623 +vn 0.4255 0.6456 -0.6341 +vn 0.4258 0.6456 -0.6340 +vn 0.2377 0.4845 -0.8419 +vn 0.2385 0.4844 -0.8417 +vn 0.2373 0.4846 -0.8419 +vn 0.0344 0.4905 -0.8707 +vn -0.1855 0.6727 -0.7163 +vn -0.1849 0.6728 -0.7164 +vn -0.1856 0.6727 -0.7163 +vn -0.4096 0.7693 -0.4904 +vn -0.4097 0.7692 -0.4904 +vn -0.4095 0.7693 -0.4904 +vn -0.4101 0.7691 -0.4902 +vn 0.3629 0.9284 0.0805 +vn 0.3628 0.9284 0.0804 +vn 0.3632 0.9282 0.0805 +vn 0.3625 0.9285 0.0804 +vn 0.5682 0.8176 0.0928 +vn 0.7631 0.6383 0.1009 +vn 0.7630 0.6384 0.1008 +vn 0.7635 0.6379 0.1009 +vn -0.6531 0.7570 -0.0185 +vn -0.6534 0.7568 -0.0186 +vn -0.6529 0.7572 -0.0184 +vn -0.4546 0.8907 0.0060 +vn -0.2511 0.9676 0.0281 +vn -0.0479 0.9977 0.0477 +vn -0.0478 0.9977 0.0478 +vn 0.1573 0.9854 0.0654 +vn 0.1569 0.9854 0.0654 +vn 0.1570 0.9854 0.0653 +vn 0.5899 0.7940 -0.1465 +vn 0.4078 0.8017 -0.4370 +vn 0.4082 0.8016 -0.4368 +vn 0.2233 0.6853 -0.6931 +vn 0.2230 0.6854 -0.6932 +vn 0.0215 0.6938 -0.7198 +vn -0.4314 0.8654 -0.2548 +vn -0.4307 0.8658 -0.2547 +vn 0.3865 
0.9021 -0.1916 +vn 0.3857 0.9025 -0.1919 +vn -0.2252 0.9403 -0.2551 +vn -0.2251 0.9404 -0.2550 +vn -0.0221 0.9694 -0.2445 +vn -0.0221 0.9694 -0.2444 +vn -0.0215 0.9695 -0.2443 +vn 0.1822 0.9575 -0.2235 +vn 0.1819 0.9576 -0.2236 +vn 0.1824 0.9575 -0.2233 +vn 0.0018 0.8612 -0.5083 +vn 0.0027 0.8613 -0.5081 +s 1 +f 1//1 2//1 3//1 +f 2//1 1//1 4//1 +f 5//2 2//3 6//4 +f 2//3 5//2 3//5 +f 7//6 1//6 3//6 +f 1//6 7//6 8//6 +f 4//7 9//8 10//8 +f 9//8 4//7 1//7 +f 4//9 11//9 2//9 +f 11//9 4//9 12//9 +f 6//10 13//10 5//10 +f 13//10 6//10 14//10 +f 15//11 3//12 5//13 +f 3//12 15//11 7//14 +f 11//15 6//16 2//15 +f 6//16 11//15 16//16 +f 17//17 8//17 7//17 +f 8//17 17//17 18//17 +f 1//18 19//18 9//18 +f 19//18 1//18 8//18 +f 20//19 9//19 21//19 +f 9//19 20//19 10//19 +f 4//20 22//21 12//20 +f 22//21 4//20 10//21 +f 12//22 23//23 11//23 +f 23//23 12//22 24//24 +f 24//24 12//22 25//22 +f 13//25 26//26 27//26 +f 26//26 13//25 14//25 +f 15//27 13//28 28//28 +f 13//28 15//27 5//27 +f 16//29 14//30 6//29 +f 14//30 16//29 29//30 +f 30//31 7//31 15//31 +f 7//31 30//31 17//31 +f 23//32 16//33 11//32 +f 16//33 23//32 31//33 +f 32//34 18//34 17//34 +f 18//34 32//34 33//34 +f 8//35 34//35 19//35 +f 34//35 8//35 18//35 +f 21//36 19//37 35//36 +f 19//37 21//36 9//37 +f 36//38 21//39 37//40 +f 21//39 36//38 20//41 +f 38//42 10//43 20//42 +f 10//43 38//42 22//43 +f 12//44 39//45 25//46 +f 39//45 12//44 22//47 +f 23//48 40//48 41//48 +f 40//48 23//48 24//49 +f 40//48 24//49 25//48 +f 26//50 37//51 27//50 +f 37//51 26//50 36//51 +f 28//52 27//52 42//52 +f 27//52 28//52 13//52 +f 29//53 26//54 14//53 +f 26//54 29//53 43//54 +f 30//55 28//55 44//55 +f 28//55 30//55 15//55 +f 31//56 29//56 16//56 +f 29//56 31//56 45//56 +f 46//57 17//57 30//57 +f 17//57 46//57 32//57 +f 41//58 31//58 23//58 +f 31//58 41//58 47//58 +f 48//59 33//60 32//59 +f 33//60 48//59 49//60 +f 18//61 50//61 34//61 +f 50//61 18//61 33//61 +f 35//62 34//62 51//62 +f 34//62 35//62 19//62 +f 37//63 35//64 52//65 +f 35//64 37//63 
21//66 +f 53//67 20//68 36//67 +f 20//68 53//67 38//68 +f 54//69 22//69 38//69 +f 22//70 54//70 55//70 +f 55//71 54//72 56//71 +f 55//73 39//73 22//73 +f 39//71 55//71 57//71 +f 39//74 40//75 25//76 +f 40//75 39//74 58//77 +f 41//78 59//79 60//78 +f 59//79 41//78 40//79 +f 27//80 52//81 42//80 +f 52//81 27//80 37//81 +f 43//82 36//83 26//82 +f 36//83 43//82 53//83 +f 44//84 42//84 61//84 +f 42//84 44//84 28//84 +f 45//85 43//86 29//85 +f 43//86 45//85 62//86 +f 46//87 44//87 63//87 +f 44//87 46//87 30//87 +f 47//88 45//88 31//88 +f 45//88 47//88 64//88 +f 65//89 32//89 46//89 +f 32//89 65//89 48//89 +f 60//90 47//91 41//90 +f 47//91 60//90 66//91 +f 67//92 49//92 48//92 +f 49//92 67//92 68//92 +f 33//93 69//94 50//94 +f 69//94 33//93 49//93 +f 51//95 50//95 70//95 +f 50//95 51//95 34//95 +f 52//96 51//96 71//96 +f 51//96 52//96 35//96 +f 72//97 38//98 53//99 +f 38//98 72//97 54//100 +f 56//101 73//101 55//101 +f 73//101 56//101 54//101 +f 73//102 54//102 74//102 +f 73//103 57//103 55//103 +f 57//104 73//104 39//104 +f 39//102 73//102 58//102 +f 58//105 59//106 40//106 +f 59//106 58//105 75//105 +f 60//107 76//107 77//107 +f 76//107 60//107 59//107 +f 42//92 71//92 61//92 +f 71//92 42//92 52//92 +f 62//108 53//109 43//110 +f 53//109 62//108 78//111 +f 53//109 78//111 72//109 +f 63//112 61//112 79//112 +f 61//112 63//112 44//112 +f 80//113 45//114 64//114 +f 45//114 80//113 62//113 +f 65//95 63//95 81//95 +f 63//95 65//95 46//95 +f 66//115 64//116 47//115 +f 64//116 66//115 82//116 +f 83//117 48//96 65//117 +f 48//96 83//117 67//96 +f 60//118 84//119 66//120 +f 84//119 60//118 77//121 +f 85//122 68//123 67//122 +f 68//123 85//122 86//123 +f 49//84 87//124 69//124 +f 87//124 49//84 68//84 +f 70//87 69//87 88//87 +f 69//87 70//87 50//87 +f 71//89 70//89 89//89 +f 70//89 71//89 51//89 +f 90//125 54//126 72//127 +f 54//126 90//125 74//128 +f 91//129 73//130 74//131 +f 73//130 91//129 58//132 +f 58//132 91//129 75//131 +f 92//133 59//134 75//133 +f 59//134 92//133 76//134 
+f 93//135 77//135 76//135 +f 77//135 93//135 94//135 +f 61//60 89//60 79//60 +f 89//60 61//60 71//60 +f 90//136 62//136 80//136 +f 62//136 90//136 78//136 +f 78//136 90//136 72//136 +f 81//137 79//138 95//138 +f 79//138 81//137 63//137 +f 82//139 80//140 64//139 +f 80//140 82//139 96//140 +f 83//62 81//62 97//62 +f 81//62 83//62 65//62 +f 84//141 82//142 66//141 +f 82//142 84//141 98//142 +f 99//63 67//143 83//144 +f 67//143 99//63 85//66 +f 77//145 100//146 84//147 +f 100//146 77//145 94//148 +f 101//149 85//150 102//150 +f 85//150 101//149 86//149 +f 68//151 103//151 87//151 +f 103//151 68//151 86//151 +f 88//55 87//55 104//55 +f 87//55 88//55 69//55 +f 89//57 88//57 105//57 +f 88//57 89//57 70//57 +f 106//152 74//153 90//152 +f 74//153 106//152 91//153 +f 107//154 75//154 91//154 +f 75//154 107//154 92//154 +f 108//155 76//156 92//155 +f 76//156 108//155 93//156 +f 109//157 94//157 93//157 +f 94//157 109//157 110//157 +f 79//34 105//158 95//34 +f 105//158 79//34 89//158 +f 106//159 80//160 96//160 +f 80//160 106//159 90//159 +f 97//161 95//35 111//35 +f 95//35 97//161 81//161 +f 96//162 98//163 112//162 +f 98//163 96//162 82//163 +f 99//164 97//164 113//164 +f 97//164 99//164 83//164 +f 98//165 100//165 114//165 +f 100//165 98//165 84//165 +f 115//166 85//39 99//40 +f 85//39 115//166 102//41 +f 94//167 116//167 100//167 +f 116//167 94//167 110//167 +f 117//83 102//83 118//83 +f 102//83 117//83 101//83 +f 86//168 119//168 103//168 +f 119//168 86//168 101//168 +f 104//169 103//170 120//169 +f 103//170 104//169 87//170 +f 105//31 104//171 121//31 +f 104//171 105//31 88//171 +f 107//172 106//173 122//174 +f 106//173 107//172 91//175 +f 108//176 107//176 123//176 +f 107//176 108//176 92//176 +f 124//177 93//177 108//177 +f 93//177 124//177 109//177 +f 125//178 110//178 109//178 +f 110//178 125//178 126//178 +f 95//17 121//17 111//17 +f 121//17 95//17 105//17 +f 122//179 96//180 112//180 +f 96//180 122//179 106//179 +f 113//181 111//181 127//181 +f 111//181 113//181 
97//181 +f 112//182 114//183 128//182 +f 114//183 112//182 98//183 +f 129//184 99//184 113//184 +f 99//184 129//184 115//184 +f 114//185 116//185 130//185 +f 116//185 114//185 100//185 +f 131//68 102//68 115//68 +f 102//68 131//68 118//68 +f 110//186 132//186 116//186 +f 132//186 110//186 126//186 +f 133//109 118//111 134//111 +f 118//111 133//109 117//109 +f 117//53 119//53 101//53 +f 119//53 117//53 135//53 +f 119//187 120//187 103//187 +f 120//187 119//187 136//187 +f 121//188 120//189 137//13 +f 120//189 121//188 104//190 +f 123//191 122//191 138//192 +f 122//191 123//191 107//193 +f 124//194 123//194 139//194 +f 123//194 124//194 108//194 +f 140//195 109//195 124//195 +f 109//195 140//195 125//195 +f 141//196 126//196 125//196 +f 126//196 141//196 142//196 +f 111//197 137//198 127//197 +f 137//198 111//197 121//198 +f 122//199 128//200 138//199 +f 128//200 122//199 112//200 +f 129//201 127//201 143//201 +f 127//201 129//201 113//201 +f 128//202 130//202 144//202 +f 130//202 128//202 114//202 +f 145//43 115//42 129//43 +f 115//42 145//43 131//42 +f 130//203 132//203 146//203 +f 132//203 130//203 116//203 +f 147//204 118//205 131//204 +f 118//205 147//204 134//205 +f 126//206 148//207 132//207 +f 148//207 126//206 142//206 +f 149//208 133//136 134//208 +f 133//136 149//208 150//136 +f 133//209 135//210 117//85 +f 135//210 133//209 151//211 +f 135//30 136//29 119//30 +f 136//29 135//30 152//29 +f 137//212 136//3 153//4 +f 136//3 137//212 120//213 +f 139//214 138//214 154//214 +f 138//214 139//214 123//214 +f 140//215 139//215 155//215 +f 139//215 140//215 124//215 +f 156//216 125//216 140//216 +f 125//216 156//216 141//216 +f 157//217 142//217 141//217 +f 142//217 157//217 158//217 +f 127//218 153//219 143//218 +f 153//219 127//218 137//219 +f 138//217 144//217 154//217 +f 144//217 138//217 128//217 +f 129//21 159//21 145//21 +f 159//21 129//21 143//21 +f 144//216 146//216 160//216 +f 146//216 144//216 130//216 +f 161//220 131//220 145//220 +f 131//220 161//220 
147//220 +f 146//215 148//215 162//215 +f 148//215 146//215 132//215 +f 163//127 134//221 147//127 +f 134//221 163//127 149//221 +f 142//214 164//214 148//214 +f 164//214 142//214 158//214 +f 165//159 150//160 149//159 +f 150//160 165//159 166//160 +f 150//222 151//223 133//114 +f 151//223 150//222 167//224 +f 151//225 152//225 135//225 +f 152//225 151//225 168//225 +f 152//16 153//16 136//16 +f 153//16 152//16 169//16 +f 155//207 154//207 170//207 +f 154//207 155//207 139//207 +f 156//203 155//203 171//203 +f 155//203 156//203 140//203 +f 172//202 141//202 156//202 +f 141//202 172//202 157//202 +f 173//226 158//227 157//226 +f 158//227 173//226 174//227 +f 143//9 169//228 159//9 +f 169//228 143//9 153//228 +f 154//196 160//196 170//196 +f 160//196 154//196 144//196 +f 145//46 175//229 161//46 +f 175//229 145//46 159//229 +f 160//195 162//195 176//195 +f 162//195 160//195 146//195 +f 177//230 147//230 161//230 +f 147//230 177//230 163//230 +f 162//194 164//231 178//194 +f 164//231 162//194 148//231 +f 179//153 149//153 163//153 +f 149//153 179//153 165//153 +f 158//232 180//233 164//192 +f 180//233 158//232 174//193 +f 174//234 166//235 165//234 +f 166//235 174//234 173//235 +f 166//139 167//139 150//139 +f 167//139 166//139 181//139 +f 167//236 168//236 151//236 +f 168//236 167//236 182//236 +f 168//237 169//238 152//239 +f 169//238 168//237 183//33 +f 171//186 170//240 184//240 +f 170//240 171//186 155//186 +f 172//241 171//185 185//185 +f 171//185 172//241 156//241 +f 186//242 157//242 172//242 +f 157//242 186//242 173//242 +f 159//243 183//243 175//243 +f 183//243 159//243 169//243 +f 170//178 176//178 184//178 +f 176//178 170//178 160//178 +f 187//244 161//76 175//244 +f 161//76 187//244 177//76 +f 176//177 178//177 188//177 +f 178//177 176//177 162//177 +f 189//131 163//245 177//131 +f 163//245 189//131 179//245 +f 178//246 180//246 190//246 +f 180//246 178//246 164//246 +f 174//247 179//173 180//174 +f 179//173 174//247 165//175 +f 181//248 173//248 186//248 
+f 173//248 181//248 166//248 +f 181//116 182//115 167//116 +f 182//115 181//116 191//115 +f 182//249 183//250 168//251 +f 183//250 182//249 192//252 +f 185//167 184//167 193//167 +f 184//167 185//167 171//167 +f 186//253 185//253 194//253 +f 185//253 186//253 172//253 +f 183//48 187//49 175//49 +f 187//49 183//48 192//48 +f 184//157 188//157 193//157 +f 188//157 184//157 176//157 +f 187//106 189//105 177//105 +f 189//105 187//106 195//106 +f 188//254 190//254 196//254 +f 190//254 188//254 178//254 +f 190//255 179//255 189//255 +f 179//255 190//255 180//255 +f 186//256 191//256 181//256 +f 191//256 186//256 194//256 +f 191//91 192//91 182//91 +f 192//91 191//91 197//91 +f 194//257 193//146 198//147 +f 193//146 194//257 185//148 +f 192//79 195//79 187//79 +f 195//79 192//79 197//79 +f 193//258 196//259 198//258 +f 196//259 193//258 188//259 +f 196//260 189//260 195//260 +f 189//260 196//260 190//260 +f 191//261 198//119 197//120 +f 198//119 191//261 194//262 +f 197//263 196//264 195//264 +f 196//264 197//263 198//263 +f 199//265 200//265 201//265 +f 200//265 199//265 202//265 +f 203//266 200//266 204//266 +f 200//266 203//266 201//266 +f 203//267 199//267 201//267 +f 199//267 203//267 205//267 +f 202//268 205//268 206//268 +f 205//268 202//268 199//268 +f 206//269 200//269 202//269 +f 200//269 206//269 204//269 +f 206//71 203//71 204//71 +f 203//71 206//71 205//71 +f 207//266 208//266 209//266 +f 208//266 207//266 210//266 +f 211//71 207//71 209//71 +f 207//71 211//71 212//71 +f 207//267 213//267 210//267 +f 213//267 207//267 212//267 +f 213//265 208//265 210//265 +f 208//265 213//265 214//265 +f 211//270 208//270 214//270 +f 208//270 211//270 209//270 +f 214//268 212//268 211//268 +f 212//268 214//268 213//268 +f 215//267 216//267 217//267 +f 216//267 215//267 218//267 +f 215//266 219//266 220//266 +f 219//266 215//266 217//266 +f 221//71 215//71 220//71 +f 215//71 221//71 218//71 +f 222//268 218//268 221//268 +f 218//268 222//268 216//268 +f 216//265 219//265 
217//265 +f 219//265 216//265 222//265 +f 221//270 219//270 222//270 +f 219//270 221//270 220//270 +f 223//268 224//268 225//268 +f 224//268 223//268 226//268 +f 226//265 227//265 228//265 +f 227//265 226//265 223//265 +f 229//267 226//267 228//267 +f 226//267 229//267 224//267 +f 225//71 229//71 230//71 +f 229//71 225//71 224//71 +f 225//269 227//269 223//269 +f 227//269 225//269 230//269 +f 229//266 227//266 230//266 +f 227//266 229//266 228//266 +f 231//271 232//272 233//273 +f 232//272 231//271 234//274 +f 232//178 235//178 233//178 +f 235//178 232//178 236//178 +f 237//275 233//276 238//277 +f 233//276 237//275 231//278 +f 239//279 240//280 241//281 +f 240//280 239//279 242//282 +f 240//280 242//282 243//283 +f 243//283 242//282 244//284 +f 243//283 244//284 245//285 +f 245//285 244//284 246//286 +f 245//285 246//286 237//287 +f 237//287 246//286 247//288 +f 237//287 247//288 231//289 +f 231//289 247//288 248//290 +f 231//289 248//290 234//291 +f 234//291 248//290 249//291 +f 234//291 249//291 250//292 +f 234//291 250//292 251//293 +f 251//293 250//292 252//294 +f 251//293 252//294 253//295 +f 251//293 253//295 254//296 +f 254//296 253//295 255//297 +f 254//296 255//297 256//298 +f 256//298 255//297 257//299 +f 256//298 257//299 258//300 +f 258//300 257//299 259//301 +f 258//300 259//301 260//302 +f 260//302 259//301 261//303 +f 232//304 251//305 262//306 +f 251//305 232//304 234//307 +f 236//308 263//309 235//310 +f 263//309 236//308 264//311 +f 233//312 265//312 238//312 +f 265//312 233//312 235//312 +f 262//196 236//196 232//196 +f 236//196 262//196 266//196 +f 245//313 238//314 267//315 +f 238//314 245//313 237//313 +f 256//316 268//317 269//318 +f 268//317 256//316 258//316 +f 270//319 256//320 269//321 +f 256//320 270//319 254//322 +f 262//323 254//324 270//325 +f 254//324 262//323 251//326 +f 271//327 245//328 267//327 +f 245//328 271//327 243//328 +f 272//329 243//330 271//331 +f 243//330 272//329 240//332 +f 273//333 240//334 272//333 +f 240//334 
273//333 241//334 +f 274//335 241//336 273//335 +f 241//336 274//335 239//336 +f 275//337 239//338 274//337 +f 239//338 275//337 242//339 +f 276//340 242//341 275//340 +f 242//341 276//340 244//341 +f 246//342 276//343 277//344 +f 276//343 246//342 244//345 +f 247//346 277//347 278//347 +f 277//347 247//346 246//348 +f 249//349 279//349 280//349 +f 279//350 249//350 278//350 +f 278//351 249//351 248//351 +f 278//352 248//352 247//352 +f 281//353 280//353 282//353 +f 280//354 281//354 249//354 +f 249//351 281//351 250//351 +f 250//355 281//355 252//355 +f 283//356 252//357 281//356 +f 252//357 283//356 253//357 +f 284//358 253//359 283//360 +f 253//359 284//358 255//359 +f 257//361 284//362 285//362 +f 284//362 257//361 255//361 +f 259//363 285//364 286//365 +f 285//364 259//363 257//366 +f 261//367 286//368 287//368 +f 286//368 261//367 259//367 +f 260//369 287//370 288//370 +f 287//370 260//369 261//369 +f 258//371 288//372 268//372 +f 288//372 258//371 260//371 +f 289//373 290//374 291//375 +f 290//374 289//373 292//376 +f 290//374 292//376 293//377 +f 293//377 292//376 294//378 +f 293//377 294//378 295//379 +f 295//379 294//378 296//379 +f 295//379 296//379 297//380 +f 297//380 296//379 263//381 +f 297//380 263//381 298//382 +f 298//382 263//381 264//383 +f 298//382 264//383 299//384 +f 299//384 264//383 300//385 +f 300//385 264//383 301//386 +f 300//385 301//386 302//387 +f 302//387 301//386 303//388 +f 303//388 301//386 304//389 +f 303//388 304//389 305//377 +f 305//377 304//389 306//390 +f 305//377 306//390 307//390 +f 307//390 306//390 308//379 +f 307//390 308//379 309//390 +f 309//390 308//379 310//391 +f 309//390 310//391 311//392 +f 311//392 310//391 312//393 +f 235//394 296//395 265//396 +f 296//395 235//394 263//397 +f 301//398 236//399 266//400 +f 236//399 301//398 264//401 +f 238//402 313//402 267//402 +f 313//402 238//402 265//402 +f 270//403 266//403 262//403 +f 266//403 270//403 314//403 +f 315//404 268//405 316//404 +f 268//405 315//404 269//405 
+f 269//406 314//407 270//406 +f 314//407 269//406 315//407 +f 271//408 313//409 317//409 +f 313//409 271//408 267//408 +f 272//410 317//410 318//410 +f 317//410 272//410 271//410 +f 273//411 318//412 319//412 +f 318//412 273//411 272//411 +f 274//23 319//23 320//23 +f 319//23 274//23 273//23 +f 275//413 320//414 321//414 +f 320//414 275//413 274//415 +f 276//416 321//416 322//416 +f 321//416 276//416 275//416 +f 323//417 276//417 322//417 +f 276//417 323//417 277//417 +f 324//418 277//418 323//418 +f 277//418 324//418 278//418 +f 325//34 278//34 324//34 +f 278//34 325//34 279//34 +f 279//71 325//71 326//71 +f 279//71 326//71 280//71 +f 327//71 280//71 326//71 +f 280//71 327//71 282//71 +f 282//419 327//419 281//419 +f 281//419 327//419 328//419 +f 329//420 281//420 328//420 +f 281//420 329//420 283//420 +f 330//421 283//422 329//421 +f 283//422 330//421 284//422 +f 331//423 284//423 330//423 +f 284//423 331//423 285//423 +f 332//424 285//425 331//424 +f 285//425 332//424 286//425 +f 333//110 286//426 332//110 +f 286//426 333//110 287//426 +f 334//427 287//427 333//427 +f 287//427 334//427 288//427 +f 316//428 288//429 334//428 +f 288//429 316//428 268//429 +f 303//430 329//431 328//431 +f 329//431 303//430 305//430 +f 300//432 327//432 326//432 +f 327//433 300//433 328//433 +f 328//434 300//434 302//434 +f 328//435 302//435 303//435 +f 324//436 326//436 325//436 +f 326//437 324//437 300//437 +f 300//434 324//434 299//434 +f 299//438 324//438 298//438 +f 323//439 298//440 324//441 +f 298//440 323//439 297//439 +f 322//442 297//443 323//442 +f 297//443 322//442 295//443 +f 322//444 293//445 295//446 +f 293//445 322//444 321//447 +f 321//448 290//448 293//448 +f 290//448 321//448 320//448 +f 320//449 291//449 290//449 +f 291//449 320//449 319//449 +f 319//450 289//451 291//451 +f 289//451 319//450 318//450 +f 318//452 292//453 289//454 +f 292//453 318//452 317//455 +f 317//456 294//457 292//458 +f 294//457 317//456 313//459 +f 265//460 294//461 313//462 +f 294//461 
265//460 296//463 +f 304//464 266//465 314//465 +f 266//465 304//464 301//464 +f 306//466 314//467 315//468 +f 314//467 306//466 304//469 +f 306//470 316//471 308//470 +f 316//471 306//470 315//472 +f 308//473 334//474 310//475 +f 334//474 308//473 316//476 +f 310//477 333//478 312//477 +f 333//478 310//477 334//478 +f 312//479 332//479 311//479 +f 332//479 312//479 333//479 +f 311//480 331//480 309//481 +f 331//480 311//480 332//480 +f 309//482 330//483 307//484 +f 330//483 309//482 331//485 +f 305//486 330//487 329//487 +f 330//487 305//486 307//486 +f 335//488 336//489 337//490 +f 336//489 335//488 338//491 +f 336//178 339//178 337//178 +f 339//178 336//178 340//178 +f 341//492 337//493 342//493 +f 337//493 341//492 335//494 +f 343//495 344//496 345//497 +f 344//496 343//495 346//289 +f 344//496 346//289 347//498 +f 347//498 346//289 348//499 +f 347//498 348//499 349//500 +f 349//500 348//499 350//501 +f 349//500 350//501 341//502 +f 341//502 350//501 351//503 +f 341//502 351//503 335//504 +f 335//504 351//503 352//505 +f 335//504 352//505 338//506 +f 338//506 352//505 353//506 +f 338//506 353//506 354//507 +f 338//506 354//507 355//508 +f 355//508 354//507 356//288 +f 355//508 356//288 357//509 +f 355//508 357//509 358//510 +f 358//510 357//509 359//511 +f 358//510 359//511 360//512 +f 360//512 359//511 361//498 +f 360//512 361//498 362//513 +f 362//513 361//498 363//514 +f 362//513 363//514 364//515 +f 364//515 363//514 365//516 +f 336//517 355//518 366//519 +f 355//518 336//517 338//520 +f 340//308 367//521 339//310 +f 367//521 340//308 368//522 +f 337//312 369//312 342//312 +f 369//312 337//312 339//312 +f 366//196 340//196 336//196 +f 340//196 366//196 370//196 +f 349//523 342//524 371//524 +f 342//524 349//523 341//523 +f 360//525 372//526 373//526 +f 372//526 360//525 362//525 +f 374//321 360//527 373//321 +f 360//527 374//321 358//527 +f 366//528 358//529 374//528 +f 358//529 366//528 355//529 +f 375//530 349//531 371//530 +f 349//531 375//530 347//531 
+f 376//532 347//533 375//532 +f 347//533 376//532 344//533 +f 377//534 344//334 376//534 +f 344//334 377//534 345//334 +f 378//336 345//336 377//336 +f 345//336 378//336 343//336 +f 379//535 343//536 378//535 +f 343//536 379//535 346//536 +f 380//537 346//340 379//537 +f 346//340 380//537 348//340 +f 350//342 380//538 381//344 +f 380//538 350//342 348//345 +f 351//539 381//347 382//347 +f 381//347 351//539 350//539 +f 353//349 383//349 384//349 +f 383//540 353//540 382//540 +f 382//351 353//351 352//351 +f 382//541 352//541 351//541 +f 385//542 384//542 386//542 +f 384//543 385//543 353//543 +f 353//544 385//544 354//544 +f 354//545 385//545 356//545 +f 387//546 356//357 385//546 +f 356//357 387//546 357//357 +f 388//547 357//359 387//547 +f 357//359 388//547 359//359 +f 361//548 388//549 389//549 +f 388//549 361//548 359//548 +f 363//550 389//551 390//552 +f 389//551 363//550 361//363 +f 365//367 390//368 391//368 +f 390//368 365//367 363//367 +f 364//369 391//553 392//553 +f 391//553 364//369 365//369 +f 362//554 392//555 372//555 +f 392//555 362//554 364//554 +f 393//556 394//557 395//558 +f 394//557 393//556 396//559 +f 394//557 396//559 397//560 +f 397//560 396//559 398//561 +f 397//560 398//561 399//562 +f 399//562 398//561 400//563 +f 399//562 400//563 401//564 +f 401//564 400//563 367//565 +f 401//564 367//565 402//566 +f 402//566 367//565 368//379 +f 402//566 368//379 403//567 +f 403//567 368//379 404//568 +f 404//568 368//379 405//390 +f 404//568 405//390 406//569 +f 406//569 405//390 407//570 +f 407//570 405//390 408//571 +f 407//570 408//571 409//564 +f 409//564 408//571 410//572 +f 409//564 410//572 411//561 +f 411//561 410//572 412//573 +f 411//561 412//573 413//574 +f 413//574 412//573 414//575 +f 413//574 414//575 415//576 +f 415//576 414//575 416//577 +f 339//394 400//396 369//396 +f 400//396 339//394 367//578 +f 405//579 340//580 370//400 +f 340//580 405//579 368//581 +f 342//402 417//582 371//402 +f 417//582 342//402 369//582 +f 374//403 
370//583 366//403 +f 370//583 374//403 418//583 +f 419//584 372//584 420//584 +f 372//584 419//584 373//584 +f 373//407 418//406 374//407 +f 418//406 373//407 419//406 +f 375//585 417//585 421//585 +f 417//585 375//585 371//585 +f 376//586 421//587 422//587 +f 421//587 376//586 375//586 +f 377//411 422//411 423//411 +f 422//411 377//411 376//411 +f 378//23 423//243 424//23 +f 423//243 378//23 377//23 +f 379//588 424//589 425//589 +f 424//589 379//588 378//588 +f 380//590 425//591 426//591 +f 425//591 380//590 379//590 +f 427//417 380//592 426//417 +f 380//592 427//417 381//592 +f 428//418 381//418 427//418 +f 381//418 428//418 382//418 +f 429//34 382//34 428//34 +f 382//34 429//34 383//34 +f 383//71 429//71 430//71 +f 383//71 430//71 384//71 +f 431//71 384//71 430//71 +f 384//71 431//71 386//71 +f 386//419 431//419 385//419 +f 385//419 431//419 432//419 +f 433//420 385//593 432//420 +f 385//593 433//420 387//593 +f 434//422 387//421 433//422 +f 387//421 434//422 388//421 +f 435//594 388//594 434//594 +f 388//594 435//594 389//594 +f 436//595 389//596 435//595 +f 389//596 436//595 390//596 +f 437//426 390//426 436//426 +f 390//426 437//426 391//426 +f 438//427 391//427 437//427 +f 391//427 438//427 392//427 +f 420//597 392//597 438//597 +f 392//597 420//597 372//597 +f 407//430 433//431 432//431 +f 433//431 407//430 409//430 +f 404//432 431//432 430//432 +f 431//598 404//598 432//598 +f 432//434 404//434 406//434 +f 432//599 406//599 407//599 +f 428//600 430//600 429//600 +f 430//601 428//601 404//601 +f 404//602 428//602 403//602 +f 403//603 428//603 402//603 +f 427//439 402//441 428//441 +f 402//441 427//439 401//604 +f 426//605 401//606 427//442 +f 401//606 426//605 399//607 +f 426//608 397//609 399//609 +f 397//609 426//608 425//610 +f 425//611 394//612 397//612 +f 394//612 425//611 424//611 +f 424//613 395//614 394//614 +f 395//614 424//613 423//613 +f 423//450 393//450 395//450 +f 393//450 423//450 422//450 +f 422//453 396//615 393//616 +f 396//615 422//453 
421//617 +f 421//457 398//456 396//456 +f 398//456 421//457 417//457 +f 369//462 398//618 417//462 +f 398//618 369//462 400//618 +f 408//465 370//619 418//465 +f 370//619 408//465 405//620 +f 410//621 418//468 419//468 +f 418//468 410//621 408//621 +f 410//471 420//622 412//623 +f 420//622 410//471 419//622 +f 412//624 438//625 414//626 +f 438//625 412//624 420//475 +f 414//478 437//478 416//478 +f 437//478 414//478 438//478 +f 416//627 436//627 415//627 +f 436//627 416//627 437//627 +f 415//628 435//629 413//628 +f 435//629 415//628 436//629 +f 413//630 434//631 411//632 +f 434//631 413//630 435//630 +f 409//633 434//634 433//487 +f 434//634 409//633 411//635 +f 439//636 440//637 441//273 +f 440//637 439//636 442//491 +f 440//178 443//178 441//178 +f 443//178 440//178 444//178 +f 445//638 441//493 446//493 +f 441//493 445//638 439//638 +f 447//639 448//640 449//641 +f 448//640 447//639 450//513 +f 448//640 450//513 451//642 +f 451//642 450//513 452//643 +f 451//642 452//643 453//501 +f 453//501 452//643 454//644 +f 453//501 454//644 445//502 +f 445//502 454//644 455//503 +f 445//502 455//503 439//645 +f 439//645 455//503 456//645 +f 439//645 456//645 442//646 +f 442//646 456//645 457//646 +f 442//646 457//646 458//500 +f 442//646 458//500 459//508 +f 459//508 458//500 460//288 +f 459//508 460//288 461//509 +f 459//508 461//509 462//510 +f 462//510 461//509 463//647 +f 462//510 463//647 464//648 +f 464//648 463//647 465//649 +f 464//648 465//649 466//650 +f 466//650 465//649 467//651 +f 466//650 467//651 468//652 +f 468//652 467//651 469//516 +f 440//517 459//653 470//519 +f 459//653 440//517 442//654 +f 444//655 471//656 443//310 +f 471//656 444//655 472//522 +f 441//312 473//312 446//312 +f 473//312 441//312 443//312 +f 470//196 444//196 440//196 +f 444//196 470//196 474//196 +f 453//657 446//314 475//315 +f 446//314 453//657 445//657 +f 464//658 476//659 477//660 +f 476//659 464//658 466//317 +f 478//321 464//322 477//321 +f 464//322 478//321 462//322 +f 
470//528 462//661 478//528 +f 462//661 470//528 459//661 +f 479//662 453//663 475//664 +f 453//663 479//662 451//665 +f 480//331 451//533 479//532 +f 451//533 480//331 448//666 +f 481//534 448//334 480//534 +f 448//334 481//534 449//334 +f 482//336 449//336 481//336 +f 449//336 482//336 447//336 +f 483//667 447//668 482//669 +f 447//668 483//667 450//536 +f 484//537 450//670 483//537 +f 450//670 484//537 452//670 +f 454//342 484//345 485//342 +f 484//345 454//342 452//538 +f 455//348 485//347 486//347 +f 485//347 455//348 454//348 +f 457//349 487//349 488//349 +f 487//671 457//671 486//671 +f 486//351 457//351 456//351 +f 486//672 456//672 455//672 +f 489//673 488//673 490//673 +f 488//674 489//674 457//674 +f 457//544 489//544 458//544 +f 458//675 489//675 460//675 +f 491//546 460//357 489//546 +f 460//357 491//546 461//357 +f 492//676 461//677 491//676 +f 461//677 492//676 463//678 +f 465//548 492//549 493//549 +f 492//549 465//548 463//548 +f 467//363 493//552 494//552 +f 493//552 467//363 465//363 +f 469//367 494//367 495//367 +f 494//367 469//367 467//367 +f 468//369 495//553 496//553 +f 495//553 468//369 469//369 +f 466//679 496//680 476//555 +f 496//680 466//679 468//681 +f 497//682 498//683 499//684 +f 498//683 497//682 500//685 +f 498//683 500//685 501//686 +f 501//686 500//685 502//561 +f 501//686 502//561 503//687 +f 503//687 502//561 504//688 +f 503//687 504//688 505//571 +f 505//571 504//688 471//689 +f 505//571 471//689 506//566 +f 506//566 471//689 472//379 +f 506//566 472//379 507//690 +f 507//690 472//379 508//691 +f 508//691 472//379 509//692 +f 508//691 509//692 510//693 +f 510//693 509//692 511//570 +f 511//570 509//692 512//564 +f 511//570 512//564 513//688 +f 513//688 512//564 514//694 +f 513//688 514//694 515//561 +f 515//561 514//694 516//686 +f 515//561 516//686 517//695 +f 517//695 516//686 518//696 +f 517//695 518//696 519//557 +f 519//557 518//696 520//697 +f 443//394 504//396 473//698 +f 504//396 443//394 471//578 +f 509//398 444//399 
474//400 +f 444//399 509//398 472//581 +f 446//699 521//402 475//699 +f 521//402 446//699 473//402 +f 478//403 474//403 470//403 +f 474//403 478//403 522//403 +f 523//584 476//584 524//584 +f 476//584 523//584 477//584 +f 477//406 522//700 478//406 +f 522//700 477//406 523//700 +f 479//585 521//585 525//585 +f 521//585 479//585 475//585 +f 480//587 525//586 526//586 +f 525//586 480//587 479//587 +f 481//411 526//411 527//411 +f 526//411 481//411 480//411 +f 482//23 527//23 528//23 +f 527//23 482//23 481//23 +f 483//588 528//589 529//589 +f 528//589 483//588 482//588 +f 484//591 529//591 530//591 +f 529//591 484//591 483//591 +f 531//701 484//417 530//701 +f 484//417 531//701 485//417 +f 532//418 485//418 531//418 +f 485//418 532//418 486//418 +f 533//34 486//34 532//34 +f 486//34 533//34 487//34 +f 487//71 533//71 534//71 +f 487//71 534//71 488//71 +f 535//71 488//71 534//71 +f 488//71 535//71 490//71 +f 490//419 535//419 489//419 +f 489//419 535//419 536//419 +f 537//420 489//420 536//420 +f 489//420 537//420 491//420 +f 538//421 491//702 537//421 +f 491//702 538//421 492//702 +f 539//594 492//594 538//594 +f 492//594 539//594 493//594 +f 540//596 493//703 539//596 +f 493//703 540//596 494//703 +f 541//426 494//426 540//426 +f 494//426 541//426 495//426 +f 542//427 495//427 541//427 +f 495//427 542//427 496//427 +f 524//704 496//704 542//704 +f 496//704 524//704 476//704 +f 511//431 537//705 536//431 +f 537//705 511//431 513//430 +f 508//432 535//432 534//432 +f 535//598 508//598 536//598 +f 536//706 508//706 510//706 +f 536//707 510//707 511//707 +f 532//708 534//708 533//708 +f 534//709 532//709 508//709 +f 508//602 532//602 507//602 +f 507//438 532//438 506//438 +f 531//710 506//711 532//711 +f 506//711 531//710 505//439 +f 530//443 505//442 531//443 +f 505//442 530//443 503//442 +f 530//712 501//713 503//446 +f 501//713 530//712 529//610 +f 529//714 498//715 501//448 +f 498//715 529//714 528//611 +f 528//449 499//614 498//614 +f 499//614 528//449 527//449 +f 
527//716 497//450 499//450 +f 497//450 527//716 526//716 +f 526//717 500//452 497//616 +f 500//452 526//717 525//617 +f 525//456 502//457 500//458 +f 502//457 525//456 521//459 +f 473//462 502//718 521//462 +f 502//718 473//462 504//718 +f 512//465 474//619 522//465 +f 474//619 512//465 509//620 +f 514//719 522//468 523//468 +f 522//468 514//719 512//719 +f 514//720 524//471 516//721 +f 524//471 514//720 523//722 +f 516//474 542//473 518//626 +f 542//473 516//474 524//475 +f 518//478 541//723 520//478 +f 541//723 518//478 542//723 +f 520//627 540//479 519//627 +f 540//479 520//627 541//479 +f 519//724 539//725 517//480 +f 539//725 519//724 540//629 +f 517//726 538//631 515//484 +f 538//631 517//726 539//630 +f 513//487 538//487 537//487 +f 538//487 513//487 515//487 +f 543//636 544//637 545//273 +f 544//637 543//636 546//491 +f 544//178 547//178 545//178 +f 547//178 544//178 548//178 +f 549//727 545//728 550//728 +f 545//728 549//727 543//727 +f 551//495 552//496 553//641 +f 552//496 551//495 554//729 +f 552//496 554//729 555//498 +f 555//498 554//729 556//499 +f 555//498 556//499 557//500 +f 557//500 556//499 558//501 +f 557//500 558//501 549//502 +f 549//502 558//501 559//730 +f 549//502 559//730 543//505 +f 543//505 559//730 560//731 +f 543//505 560//731 546//506 +f 546//506 560//731 561//646 +f 546//506 561//646 562//732 +f 546//506 562//732 563//508 +f 563//508 562//732 564//288 +f 563//508 564//288 565//509 +f 563//508 565//509 566//510 +f 566//510 565//509 567//511 +f 566//510 567//511 568//512 +f 568//512 567//511 569//498 +f 568//512 569//498 570//513 +f 570//513 569//498 571//514 +f 570//513 571//514 572//515 +f 572//515 571//514 573//516 +f 544//733 563//734 574//653 +f 563//734 544//733 546//735 +f 548//308 575//309 547//310 +f 575//309 548//308 576//522 +f 545//736 577//736 550//736 +f 577//736 545//736 547//736 +f 574//737 548//737 544//737 +f 548//737 574//737 578//737 +f 557//314 550//524 579//524 +f 550//524 557//314 549//314 +f 568//525 580//526 
581//526 +f 580//526 568//525 570//525 +f 582//738 568//319 581//738 +f 568//319 582//738 566//319 +f 574//528 566//739 582//528 +f 566//739 574//528 563//529 +f 583//530 557//531 579//530 +f 557//531 583//530 555//531 +f 584//532 555//533 583//532 +f 555//533 584//532 552//533 +f 585//534 552//334 584//534 +f 552//334 585//534 553//334 +f 586//336 553//336 585//336 +f 553//336 586//336 551//336 +f 587//740 551//741 586//535 +f 551//741 587//740 554//536 +f 588//537 554//341 587//537 +f 554//341 588//537 556//341 +f 558//345 588//344 589//344 +f 588//344 558//345 556//345 +f 559//742 589//743 590//743 +f 589//743 559//742 558//744 +f 561//349 591//349 592//349 +f 591//745 561//745 590//745 +f 590//544 561//544 560//544 +f 590//746 560//746 559//746 +f 593//747 592//747 594//747 +f 592//748 593//748 561//748 +f 561//544 593//544 562//544 +f 562//545 593//545 564//545 +f 595//546 564//357 593//546 +f 564//357 595//546 565//357 +f 596//678 565//676 595//547 +f 565//676 596//678 567//359 +f 569//548 596//549 597//549 +f 596//549 569//548 567//548 +f 571//552 597//551 598//749 +f 597//551 571//552 569//363 +f 573//367 598//367 599//367 +f 598//367 573//367 571//367 +f 572//369 599//750 600//750 +f 599//750 572//369 573//369 +f 570//679 600//555 580//555 +f 600//555 570//679 572//679 +f 601//556 602//557 603//558 +f 602//557 601//556 604//559 +f 602//557 604//559 605//751 +f 605//751 604//559 606//752 +f 605//751 606//752 607//753 +f 607//753 606//752 608//563 +f 607//753 608//563 609//564 +f 609//564 608//563 575//689 +f 609//564 575//689 610//754 +f 610//754 575//689 576//379 +f 610//754 576//379 611//567 +f 611//567 576//379 612//568 +f 612//568 576//379 613//569 +f 612//568 613//569 614//569 +f 614//569 613//569 615//570 +f 615//570 613//569 616//564 +f 615//570 616//564 617//755 +f 617//755 616//564 618//756 +f 617//755 618//756 619//757 +f 619//757 618//756 620//573 +f 619//757 620//573 621//574 +f 621//574 620//573 622//683 +f 621//574 622//683 623//758 +f 
623//758 622//683 624//577 +f 547//759 608//760 577//759 +f 608//760 547//759 575//760 +f 613//399 548//581 578//761 +f 548//581 613//399 576//762 +f 550//699 625//402 579//699 +f 625//402 550//699 577//402 +f 582//583 578//403 574//583 +f 578//403 582//583 626//403 +f 627//584 580//584 628//584 +f 580//584 627//584 581//584 +f 581//406 626//406 582//406 +f 626//406 581//406 627//406 +f 583//585 625//585 629//585 +f 625//585 583//585 579//585 +f 584//586 629//586 630//586 +f 629//586 584//586 583//586 +f 585//411 630//411 631//411 +f 630//411 585//411 584//411 +f 586//23 631//23 632//23 +f 631//23 586//23 585//23 +f 587//589 632//589 633//589 +f 632//589 587//589 586//589 +f 588//591 633//591 634//591 +f 633//591 588//591 587//591 +f 635//701 588//417 634//701 +f 588//417 635//701 589//417 +f 636//763 589//763 635//763 +f 589//763 636//763 590//763 +f 637//764 590//764 636//764 +f 590//764 637//764 591//764 +f 591//71 637//71 638//71 +f 591//71 638//71 592//71 +f 639//71 592//71 638//71 +f 592//71 639//71 594//71 +f 594//419 639//419 593//419 +f 593//419 639//419 640//419 +f 641//420 593//420 640//420 +f 593//420 641//420 595//420 +f 642//422 595//421 641//422 +f 595//421 642//422 596//421 +f 643//765 596//594 642//765 +f 596//594 643//765 597//594 +f 644//595 597//596 643//595 +f 597//596 644//595 598//596 +f 645//426 598//426 644//426 +f 598//426 645//426 599//426 +f 646//427 599//427 645//427 +f 599//427 646//427 600//427 +f 628//704 600//704 646//704 +f 600//704 628//704 580//704 +f 615//430 641//431 640//431 +f 641//431 615//430 617//430 +f 612//432 639//432 638//432 +f 639//598 612//598 640//598 +f 640//434 612//434 614//434 +f 640//599 614//599 615//599 +f 636//766 638//766 637//766 +f 638//601 636//601 612//601 +f 612//434 636//434 611//434 +f 611//767 636//767 610//767 +f 635//768 610//769 636//768 +f 610//769 635//768 609//770 +f 634//771 609//772 635//442 +f 609//772 634//771 607//607 +f 634//610 605//609 607//609 +f 605//609 634//610 633//610 +f 
633//611 602//611 605//611 +f 602//611 633//611 632//611 +f 632//613 603//614 602//614 +f 603//614 632//613 631//613 +f 631//450 601//450 603//450 +f 601//450 631//450 630//450 +f 630//773 604//615 601//616 +f 604//615 630//773 629//617 +f 629//457 606//456 604//456 +f 606//456 629//457 625//457 +f 577//462 606//618 625//462 +f 606//618 577//462 608//618 +f 616//620 578//465 626//465 +f 578//465 616//620 613//620 +f 618//621 626//468 627//468 +f 626//468 618//621 616//621 +f 618//471 628//774 620//471 +f 628//774 618//471 627//775 +f 620//776 646//777 622//626 +f 646//777 620//776 628//475 +f 622//478 645//778 624//478 +f 645//778 622//478 646//778 +f 624//627 644//779 623//627 +f 644//779 624//627 645//779 +f 623//628 643//629 621//628 +f 643//629 623//628 644//629 +f 621//780 642//632 619//632 +f 642//632 621//780 643//630 +f 617//781 642//634 641//486 +f 642//634 617//781 619//635 +f 647//636 648//782 649//273 +f 648//782 647//636 650//491 +f 648//178 651//178 649//178 +f 651//178 648//178 652//178 +f 653//638 649//493 654//493 +f 649//493 653//638 647//638 +f 655//639 656//640 657//641 +f 656//640 655//639 658//513 +f 656//640 658//513 659//642 +f 659//642 658//513 660//643 +f 659//642 660//643 661//783 +f 661//783 660//643 662//501 +f 661//783 662//501 653//502 +f 653//502 662//501 663//503 +f 653//502 663//503 647//645 +f 647//645 663//503 664//784 +f 647//645 664//784 650//646 +f 650//646 664//784 665//646 +f 650//646 665//646 666//500 +f 650//646 666//500 667//508 +f 667//508 666//500 668//785 +f 667//508 668//785 669//509 +f 667//508 669//509 670//786 +f 670//786 669//509 671//647 +f 670//786 671//647 672//787 +f 672//787 671//647 673//642 +f 672//787 673//642 674//650 +f 674//650 673//642 675//788 +f 674//650 675//788 676//789 +f 676//789 675//788 677//516 +f 648//517 667//653 678//519 +f 667//653 648//517 650//654 +f 652//308 679//656 651//310 +f 679//656 652//308 680//522 +f 649//312 681//312 654//312 +f 681//312 649//312 651//312 +f 678//196 652//196 
648//196 +f 652//196 678//196 682//196 +f 661//657 654//315 683//315 +f 654//315 661//657 653//657 +f 672//658 684//659 685//660 +f 684//659 672//658 674//317 +f 686//321 672//322 685//321 +f 672//322 686//321 670//322 +f 678//528 670//661 686//528 +f 670//661 678//528 667//661 +f 687//790 661//663 683//664 +f 661//663 687//790 659//665 +f 688//331 659//791 687//532 +f 659//791 688//331 656//666 +f 689//534 656//792 688//534 +f 656//792 689//534 657//334 +f 690//336 657//336 689//336 +f 657//336 690//336 655//336 +f 691//669 655//741 690//669 +f 655//741 691//669 658//667 +f 692//537 658//793 691//537 +f 658//793 692//537 660//670 +f 662//538 692//342 693//342 +f 692//342 662//538 660//538 +f 663//348 693//794 694//347 +f 693//794 663//348 662//348 +f 665//349 695//349 696//349 +f 695//671 665//671 694//671 +f 694//544 665//544 664//544 +f 694//672 664//672 663//672 +f 697//795 696//795 698//795 +f 696//674 697//674 665//674 +f 665//544 697//544 666//544 +f 666//675 697//675 668//675 +f 699//546 668//357 697//546 +f 668//357 699//546 669//357 +f 700//676 669//547 699//676 +f 669//547 700//676 671//678 +f 673//548 700//549 701//549 +f 700//549 673//548 671//548 +f 675//551 701//363 702//552 +f 701//363 675//551 673//551 +f 677//367 702//367 703//367 +f 702//367 677//367 675//367 +f 676//369 703//553 704//553 +f 703//553 676//369 677//369 +f 674//554 704//680 684//555 +f 704//680 674//554 676//681 +f 705//682 706//683 707//796 +f 706//683 705//682 708//685 +f 706//683 708//685 709//686 +f 709//686 708//685 710//797 +f 709//686 710//797 711//694 +f 711//694 710//797 712//688 +f 711//694 712//688 713//571 +f 713//571 712//688 679//565 +f 713//571 679//565 714//376 +f 714//376 679//565 680//568 +f 714//376 680//568 715//690 +f 715//690 680//568 716//798 +f 716//798 680//568 717//692 +f 716//798 717//692 718//799 +f 718//799 717//692 719//570 +f 719//570 717//692 720//571 +f 719//570 720//571 721//688 +f 721//688 720//571 722//800 +f 721//688 722//800 723//801 +f 
723//801 722//800 724//802 +f 723//801 724//802 725//803 +f 725//803 724//802 726//804 +f 725//803 726//804 727//576 +f 727//576 726//804 728//577 +f 651//805 712//397 681//698 +f 712//397 651//805 679//578 +f 717//398 652//399 682//400 +f 652//399 717//398 680//581 +f 654//402 729//402 683//402 +f 729//402 654//402 681//402 +f 686//403 682//403 678//403 +f 682//403 686//403 730//403 +f 731//584 684//584 732//584 +f 684//584 731//584 685//584 +f 685//406 730//406 686//406 +f 730//406 685//406 731//406 +f 687//585 729//585 733//585 +f 729//585 687//585 683//585 +f 688//586 733//586 734//586 +f 733//586 688//586 687//586 +f 689//411 734//411 735//411 +f 734//411 689//411 688//411 +f 690//23 735//23 736//23 +f 735//23 690//23 689//23 +f 691//589 736//589 737//589 +f 736//589 691//589 690//589 +f 692//591 737//591 738//591 +f 737//591 692//591 691//591 +f 739//417 692//417 738//417 +f 692//417 739//417 693//417 +f 740//418 693//418 739//418 +f 693//418 740//418 694//418 +f 741//34 694//34 740//34 +f 694//34 741//34 695//34 +f 695//71 741//71 742//71 +f 695//71 742//71 696//71 +f 743//71 696//71 742//71 +f 696//71 743//71 698//71 +f 698//419 743//419 697//419 +f 697//419 743//419 744//419 +f 745//420 697//420 744//420 +f 697//420 745//420 699//420 +f 746//421 699//421 745//421 +f 699//421 746//421 700//421 +f 747//594 700//594 746//594 +f 700//594 747//594 701//594 +f 748//596 701//596 747//596 +f 701//596 748//596 702//596 +f 749//426 702//426 748//426 +f 702//426 749//426 703//426 +f 750//427 703//427 749//427 +f 703//427 750//427 704//427 +f 732//704 704//704 750//704 +f 704//704 732//704 684//704 +f 719//430 745//431 744//431 +f 745//431 719//430 721//430 +f 716//432 743//432 742//432 +f 743//598 716//598 744//598 +f 744//806 716//806 718//806 +f 744//599 718//599 719//599 +f 740//807 742//807 741//807 +f 742//709 740//709 716//709 +f 716//434 740//434 715//434 +f 715//438 740//438 714//438 +f 739//604 714//808 740//711 +f 714//808 739//604 713//439 +f 738//442 
713//442 739//442 +f 713//442 738//442 711//442 +f 738//608 709//713 711//446 +f 709//713 738//608 737//610 +f 737//714 706//715 709//448 +f 706//715 737//714 736//611 +f 736//613 707//449 706//614 +f 707//449 736//613 735//449 +f 735//716 705//450 707//450 +f 705//450 735//716 734//716 +f 734//773 708//615 705//616 +f 708//615 734//773 733//617 +f 733//459 710//457 708//458 +f 710//457 733//459 729//459 +f 681//462 710//718 729//462 +f 710//718 681//462 712//718 +f 720//620 682//465 730//465 +f 682//465 720//620 717//620 +f 722//719 730//468 731//468 +f 730//468 722//719 720//719 +f 722//622 732//809 724//721 +f 732//809 722//622 731//722 +f 724//624 750//810 726//626 +f 750//810 724//624 732//475 +f 726//478 749//478 728//478 +f 749//478 726//478 750//723 +f 728//479 748//479 727//627 +f 748//479 728//479 749//479 +f 727//628 747//811 725//480 +f 747//811 727//628 748//629 +f 725//726 746//631 723//484 +f 746//631 725//726 747//812 +f 721//487 746//487 745//487 +f 746//487 721//487 723//487 +f 751//813 752//813 753//813 +f 752//813 751//813 754//813 +f 753//813 755//814 756//814 +f 755//814 753//813 752//813 +f 757//815 753//816 758//816 +f 753//816 757//815 751//815 +f 759//814 754//813 751//813 +f 754//813 759//814 760//814 +f 761//817 752//817 754//817 +f 752//817 761//817 762//817 +f 755//818 763//819 756//820 +f 763//819 755//818 764//821 +f 758//816 756//822 765//822 +f 756//822 758//816 753//816 +f 762//817 755//823 752//817 +f 755//823 762//817 766//823 +f 767//824 758//824 768//824 +f 758//824 767//824 757//824 +f 769//822 751//815 757//815 +f 751//815 769//822 759//822 +f 770//825 759//826 771//825 +f 759//826 770//825 760//826 +f 772//817 754//817 760//817 +f 754//817 772//817 761//817 +f 773//827 762//827 761//827 +f 762//827 773//827 774//827 +f 775//828 776//829 78//830 +f 776//829 775//828 763//831 +f 776//829 763//831 777//832 +f 777//832 763//831 764//833 +f 777//832 764//833 778//833 +f 778//833 764//833 779//834 +f 778//833 779//834 780//834 +f 
780//834 779//834 781//835 +f 780//834 781//835 782//836 +f 782//836 781//835 783//836 +f 782//836 783//836 784//828 +f 784//828 783//836 785//837 +f 784//828 785//837 786//838 +f 786//838 785//837 787//835 +f 786//838 787//835 788//835 +f 788//835 787//835 789//833 +f 788//835 789//833 790//833 +f 790//833 789//833 791//839 +f 790//833 791//839 792//840 +f 792//840 791//839 793//841 +f 792//840 793//841 794//842 +f 794//842 793//841 795//830 +f 756//843 775//844 765//845 +f 775//844 756//843 763//844 +f 766//846 764//847 755//848 +f 764//847 766//846 779//847 +f 768//824 765//849 796//849 +f 765//849 768//824 758//824 +f 774//827 766//827 762//827 +f 766//827 774//827 797//827 +f 798//850 767//850 768//850 +f 767//850 798//850 799//850 +f 800//87 757//824 767//824 +f 757//824 800//87 769//87 +f 771//851 769//852 801//851 +f 769//852 771//851 759//852 +f 771//853 802//853 770//853 +f 802//853 771//853 803//853 +f 804//854 760//855 770//854 +f 760//855 804//854 772//855 +f 805//827 761//827 772//827 +f 761//827 805//827 773//827 +f 806//856 773//857 805//858 +f 773//857 806//856 774//859 +f 774//859 806//856 797//860 +f 797//860 806//856 807//859 +f 797//860 807//859 808//858 +f 808//861 807//861 809//861 +f 810//862 777//863 811//862 +f 777//863 810//862 776//863 +f 796//864 776//865 810//866 +f 776//865 796//864 78//867 +f 765//868 78//869 796//870 +f 78//869 765//868 775//871 +f 797//872 779//873 766//874 +f 779//873 797//872 781//875 +f 808//876 781//877 797//876 +f 781//877 808//876 783//878 +f 812//879 783//880 808//879 +f 783//880 812//879 785//880 +f 813//881 785//881 812//881 +f 785//881 813//881 787//881 +f 814//882 787//883 813//884 +f 787//883 814//882 789//885 +f 789//886 815//887 791//886 +f 815//887 789//886 814//887 +f 791//888 816//889 793//890 +f 816//889 791//888 815//891 +f 793//892 817//893 795//894 +f 817//893 793//892 816//895 +f 795//896 818//897 794//898 +f 818//897 795//896 817//899 +f 794//900 819//901 792//902 +f 819//901 794//900 
818//903 +f 792//904 820//905 790//904 +f 820//905 792//904 819//905 +f 788//906 820//907 821//908 +f 820//907 788//906 790//909 +f 786//910 821//911 822//911 +f 821//911 786//910 788//910 +f 784//912 822//913 823//913 +f 822//913 784//912 786//912 +f 782//914 823//915 824//916 +f 823//915 782//914 784//914 +f 780//917 824//918 825//919 +f 824//918 780//917 782//920 +f 778//921 825//921 826//922 +f 825//921 778//921 780//921 +f 811//923 778//924 826//925 +f 778//924 811//923 777//926 +f 810//95 768//850 796//95 +f 768//850 810//95 798//850 +f 827//927 799//927 798//927 +f 799//927 827//927 828//927 +f 799//850 800//95 767//850 +f 800//95 799//850 829//95 +f 801//928 800//929 830//928 +f 800//929 801//928 769//929 +f 801//930 803//930 771//930 +f 803//930 801//930 831//930 +f 832//931 802//932 803//932 +f 802//932 832//931 833//931 +f 834//817 770//817 802//817 +f 770//817 834//817 804//817 +f 804//933 805//933 772//933 +f 805//933 804//933 835//933 +f 835//934 806//934 805//934 +f 806//934 835//934 836//934 +f 837//935 807//936 806//937 +f 807//936 837//935 809//938 +f 809//938 837//935 808//939 +f 808//939 837//935 838//940 +f 808//939 838//940 812//937 +f 812//861 838//861 839//861 +f 811//941 798//927 810//941 +f 798//927 811//941 827//927 +f 840//942 812//942 839//942 +f 812//942 840//942 813//942 +f 841//943 813//943 840//943 +f 813//943 841//943 814//943 +f 841//944 815//945 814//945 +f 815//945 841//944 842//944 +f 842//946 816//947 815//947 +f 816//947 842//946 843//946 +f 843//948 817//203 816//203 +f 817//203 843//948 844//948 +f 817//215 845//949 818//215 +f 845//949 817//215 844//949 +f 818//950 846//951 819//950 +f 846//951 818//950 845//951 +f 819//952 847//953 820//952 +f 847//953 819//952 846//953 +f 820//954 848//954 821//954 +f 848//954 820//954 847//954 +f 821//955 849//955 822//955 +f 849//955 821//955 848//955 +f 822//956 850//957 823//958 +f 850//957 822//956 851//959 +f 850//957 851//959 852//956 +f 851//960 822//960 849//960 +f 853//961 
852//956 854//958 +f 852//956 853//961 850//957 +f 823//962 855//963 824//964 +f 855//963 823//962 853//965 +f 855//963 853//965 854//966 +f 853//967 823//967 850//967 +f 856//968 854//966 857//964 +f 854//966 856//968 855//963 +f 824//969 858//969 825//969 +f 858//969 824//969 855//969 +f 825//970 859//970 826//970 +f 859//970 825//970 858//970 +f 826//971 827//972 811//971 +f 827//972 826//971 859//972 +f 859//972 828//973 827//972 +f 828//973 859//972 860//973 +f 828//927 829//941 799//927 +f 829//941 828//927 861//974 +f 830//975 829//976 862//975 +f 829//976 830//975 800//976 +f 863//87 801//87 830//87 +f 801//87 863//87 831//87 +f 864//977 803//977 831//977 +f 803//977 864//977 832//977 +f 865//814 833//814 832//814 +f 833//814 865//814 866//814 +f 833//978 834//979 802//979 +f 834//979 833//978 867//978 +f 868//827 804//827 834//827 +f 804//827 868//827 835//827 +f 869//858 835//858 868//858 +f 835//858 869//858 836//858 +f 836//980 837//980 806//980 +f 837//980 836//980 870//980 +f 871//942 838//942 837//942 +f 838//942 871//942 872//942 +f 872//942 839//942 838//942 +f 839//942 872//942 840//942 +f 873//981 840//943 872//982 +f 840//943 873//981 841//943 +f 873//944 842//944 841//944 +f 842//944 873//944 874//983 +f 874//946 843//946 842//946 +f 843//946 874//946 875//946 +f 875//948 844//948 843//948 +f 844//948 875//948 876//984 +f 844//949 877//949 845//949 +f 877//949 844//949 876//985 +f 845//951 878//951 846//951 +f 878//951 845//951 877//951 +f 846//953 879//986 847//953 +f 879//986 846//953 878//987 +f 848//954 879//988 880//989 +f 879//988 848//954 847//954 +f 849//955 880//990 851//990 +f 880//990 849//955 848//955 +f 851//990 881//955 852//955 +f 881//955 851//990 880//990 +f 852//991 882//991 854//991 +f 882//991 852//991 883//991 +f 854//992 884//993 857//992 +f 884//993 854//992 882//993 +f 885//969 857//969 886//969 +f 857//969 885//969 856//969 +f 858//969 856//969 885//969 +f 856//969 858//969 855//969 +f 859//970 885//970 860//970 +f 
885//970 859//970 858//970 +f 860//973 861//971 828//973 +f 861//971 860//973 887//971 +f 862//994 861//994 888//994 +f 861//994 862//994 829//994 +f 889//95 830//95 862//95 +f 830//95 889//95 863//95 +f 890//995 831//996 863//996 +f 831//996 890//995 864//995 +f 891//930 832//822 864//822 +f 832//822 891//930 865//930 +f 865//814 892//814 866//814 +f 892//814 865//814 893//814 +f 894//817 833//817 866//817 +f 833//817 894//817 867//817 +f 895//997 834//998 867//997 +f 834//998 895//997 868//998 +f 896//999 868//999 895//999 +f 868//999 896//999 869//999 +f 897//937 836//937 869//937 +f 836//937 897//937 870//937 +f 870//1000 871//1001 837//1001 +f 871//1001 870//1000 898//1000 +f 899//1002 872//982 871//1003 +f 872//982 899//1002 873//981 +f 899//1004 874//983 873//944 +f 874//983 899//1004 900//1005 +f 900//947 875//946 874//946 +f 875//946 900//947 901//947 +f 901//1006 876//984 875//948 +f 876//984 901//1006 902//1007 +f 876//985 903//1008 877//949 +f 903//1008 876//985 902//1009 +f 877//951 904//950 878//951 +f 904//950 877//951 903//950 +f 878//987 905//1010 879//986 +f 905//1010 878//987 904//246 +f 880//989 905//1011 881//1012 +f 905//1011 880//989 879//988 +f 881//1013 883//1014 852//1013 +f 883//1014 881//1013 906//1014 +f 883//958 907//958 882//958 +f 907//958 883//958 908//958 +f 882//964 909//964 884//964 +f 909//964 882//964 907//964 +f 857//1015 910//1016 886//1015 +f 910//1016 857//1015 884//1016 +f 860//970 886//970 887//970 +f 886//970 860//970 885//970 +f 887//1017 888//1018 861//1017 +f 888//1018 887//1017 911//1018 +f 912//941 862//941 888//941 +f 862//941 912//941 889//941 +f 913//1019 863//1019 889//1019 +f 863//1019 913//1019 890//1019 +f 890//87 891//87 864//87 +f 891//87 890//87 914//87 +f 891//930 893//930 865//930 +f 893//930 891//930 915//930 +f 916//1020 893//1021 917//1022 +f 893//1021 916//1020 892//1023 +f 892//817 894//817 866//817 +f 894//817 892//817 918//817 +f 919//827 867//827 894//827 +f 867//827 919//827 895//827 +f 920//858 
921//1024 922//1025 +f 922//1026 895//1026 919//1026 +f 895//858 922//1025 896//1025 +f 896//1025 922//1025 921//1024 +f 923//1027 869//1028 896//1027 +f 869//1028 923//1027 897//1028 +f 924//942 870//942 897//942 +f 870//942 924//942 898//942 +f 898//1029 899//1029 871//1030 +f 899//1029 898//1029 925//1031 +f 925//1032 900//1033 899//1034 +f 900//1033 925//1032 926//1035 +f 901//1036 926//1036 927//1036 +f 926//1036 901//1036 900//1036 +f 902//1037 927//1038 928//1039 +f 927//1038 902//1037 901//1040 +f 903//1041 928//1042 929//1043 +f 928//1042 903//1041 902//1044 +f 904//1045 929//1045 930//1045 +f 929//1045 904//1045 903//1045 +f 905//1046 930//1047 931//1047 +f 930//1047 905//1046 904//1046 +f 881//1048 931//1049 906//1050 +f 931//1049 881//1048 905//1048 +f 906//990 908//990 883//990 +f 908//990 906//990 932//990 +f 907//1051 933//1051 934//1051 +f 933//1051 907//1051 908//1051 +f 909//1052 934//1053 935//1053 +f 934//1053 909//1052 907//1052 +f 884//969 936//969 910//969 +f 936//969 884//969 909//969 +f 886//1054 911//1054 887//1054 +f 911//1054 886//1054 910//1054 +f 937//1055 888//1055 911//1055 +f 888//1055 937//1055 912//1055 +f 938//1056 889//1057 912//1057 +f 889//1057 938//1056 913//1056 +f 913//95 914//95 890//95 +f 914//95 913//95 939//95 +f 914//87 915//849 891//87 +f 915//849 914//87 940//849 +f 917//1058 915//1059 941//1058 +f 915//1059 917//1058 893//1059 +f 942//1060 943//1061 944//1062 +f 943//1061 942//1060 945//1063 +f 945//1063 942//1060 946//1064 +f 945//1063 946//1064 947//1065 +f 947//1065 946//1064 948//1065 +f 947//1065 948//1065 949//1066 +f 949//1066 948//1065 950//1066 +f 949//1066 950//1066 951//1067 +f 951//1067 950//1066 952//1068 +f 951//1067 952//1068 953//1069 +f 953//1069 952//1068 954//1070 +f 953//1069 954//1070 955//1070 +f 955//1070 954//1070 956//1066 +f 955//1070 956//1066 957//1071 +f 957//1071 956//1066 958//1071 +f 957//1071 958//1071 959//1065 +f 959//1065 958//1071 916//1065 +f 959//1065 916//1065 960//1072 +f 
960//1072 916//1065 917//1073 +f 960//1072 917//1073 961//1074 +f 961//1074 917//1073 941//1069 +f 961//1074 941//1069 24//1062 +f 916//1075 918//1076 892//1076 +f 918//1076 916//1075 958//1077 +f 918//827 919//827 894//827 +f 919//827 918//827 922//827 +f 956//1078 920//1079 922//1079 +f 920//1079 956//1078 954//1078 +f 962//937 963//936 920//937 +f 920//1080 896//1080 921//1080 +f 896//937 920//937 923//937 +f 923//937 920//937 963//936 +f 964//1081 897//1082 923//1081 +f 897//1082 964//1081 924//1082 +f 965//1083 898//1083 924//1083 +f 898//1083 965//1083 925//1083 +f 925//1084 966//1084 926//1084 +f 966//1084 925//1084 965//1084 +f 926//947 967//947 927//947 +f 967//947 926//947 966//947 +f 927//203 968//203 928//203 +f 968//203 927//203 967//203 +f 928//215 969//215 929//215 +f 969//215 928//215 968//215 +f 969//950 930//950 929//950 +f 930//950 969//950 970//950 +f 970//1085 931//1085 930//1085 +f 931//1085 970//1085 971//1085 +f 931//954 932//954 906//954 +f 932//954 931//954 971//954 +f 908//1086 972//1086 933//1086 +f 972//1086 908//1086 932//1086 +f 934//1087 973//1088 974//1089 +f 973//102 934//958 933//1090 +f 973//102 933//1090 975//1091 +f 976//958 973//102 975//1091 +f 935//1092 977//1093 978//1094 +f 977//1093 935//1092 934//1087 +f 977//1093 934//1087 974//1089 +f 973//1088 977//1093 974//1089 +f 936//1095 935//1096 979//1096 +f 935//1096 936//1095 909//1095 +f 910//970 937//970 911//970 +f 937//970 910//970 936//970 +f 912//1097 980//1098 938//1098 +f 980//1098 912//1097 937//1097 +f 938//941 939//941 913//941 +f 939//941 938//941 981//941 +f 982//95 914//95 939//95 +f 914//95 982//95 940//95 +f 941//1099 940//1100 24//1101 +f 940//1100 941//1099 915//1102 +f 961//1103 983//1103 960//1104 +f 983//1103 961//1103 982//1103 +f 960//1105 984//1106 959//1107 +f 984//1106 960//1105 983//1108 +f 985//1109 959//1110 984//1111 +f 959//1110 985//1109 957//1112 +f 977//1113 957//1114 985//1113 +f 957//1114 977//1113 955//1114 +f 973//1115 955//1116 977//1115 
+f 955//1116 973//1115 953//1116 +f 976//1117 953//1118 973//1117 +f 953//1118 976//1117 951//1119 +f 986//1120 951//1120 976//1120 +f 951//1120 986//1120 949//1121 +f 987//1122 949//1123 986//1124 +f 949//1123 987//1122 947//1125 +f 988//1126 947//1127 987//1128 +f 947//1127 988//1126 945//1129 +f 989//1130 945//1131 988//1132 +f 945//1131 989//1130 943//1133 +f 990//1134 943//1135 989//1136 +f 943//1135 990//1134 944//1137 +f 991//1138 944//1139 990//1140 +f 944//1139 991//1138 942//1141 +f 992//1142 942//1143 991//1144 +f 942//1143 992//1142 946//1143 +f 993//1145 946//1146 992//1147 +f 946//1146 993//1145 948//1148 +f 950//1149 993//1150 994//1151 +f 993//1150 950//1149 948//1152 +f 952//1153 994//1153 962//1153 +f 994//1153 952//1153 950//1153 +f 954//1154 962//1154 920//1154 +f 962//1154 954//1154 952//1154 +f 958//1155 922//1155 918//1155 +f 922//1155 958//1155 956//1155 +f 24//1156 982//1157 961//1158 +f 982//1157 24//1156 940//1159 +f 962//1160 995//942 963//942 +f 995//942 962//1160 994//1160 +f 995//942 923//942 963//942 +f 923//942 995//942 964//942 +f 996//1161 924//1162 964//1161 +f 924//1162 996//1161 965//1162 +f 997//1163 965//1164 996//1163 +f 965//1164 997//1163 966//1164 +f 966//1165 998//1166 967//1165 +f 998//1166 966//1165 997//1166 +f 967//1167 999//1168 968//1167 +f 999//1168 967//1167 998//1168 +f 968//1169 1000//1169 969//1169 +f 1000//1169 968//1169 999//1169 +f 969//1170 1001//1170 970//1170 +f 1001//1170 969//1170 1000//1170 +f 970//1171 1002//1171 971//1171 +f 1002//1171 970//1171 1001//1171 +f 971//1172 972//1173 932//1172 +f 972//1173 971//1172 1002//1173 +f 933//955 1003//955 975//955 +f 1003//955 933//955 972//955 +f 1003//955 976//990 975//955 +f 976//990 1003//955 986//990 +f 978//969 985//969 1004//969 +f 985//969 978//969 977//969 +f 979//969 978//969 1004//969 +f 978//969 979//969 935//969 +f 937//1174 979//1175 980//1175 +f 979//1175 937//1174 936//1174 +f 980//971 981//971 938//971 +f 981//971 980//971 1005//971 +f 983//941 
939//941 981//941 +f 939//941 983//941 982//941 +f 984//1176 981//971 1005//971 +f 981//971 984//1176 983//1176 +f 1004//970 984//970 1005//970 +f 984//970 1004//970 985//970 +f 1006//954 986//954 1003//954 +f 986//954 1006//954 987//954 +f 988//1177 1006//1085 1007//1085 +f 1006//1085 988//1177 987//1177 +f 989//950 1007//950 1008//950 +f 1007//950 989//950 988//950 +f 990//215 1008//215 1009//215 +f 1008//215 990//215 989//215 +f 1010//1178 990//203 1009//1178 +f 990//203 1010//1178 991//203 +f 1011//947 991//947 1010//947 +f 991//947 1011//947 992//947 +f 1012//945 992//1179 1011//945 +f 992//1179 1012//945 993//1179 +f 994//943 1012//943 995//943 +f 1012//943 994//943 993//943 +f 1012//943 964//943 995//943 +f 964//943 1012//943 996//943 +f 1012//945 997//945 996//945 +f 997//945 1012//945 1011//945 +f 1011//947 998//947 997//947 +f 998//947 1011//947 1010//947 +f 1010//1178 999//203 998//203 +f 999//203 1010//1178 1009//1178 +f 1009//215 1000//1180 999//1180 +f 1000//1180 1009//215 1008//215 +f 1000//950 1007//950 1001//950 +f 1007//950 1000//950 1008//950 +f 1001//952 1006//1085 1002//952 +f 1006//1085 1001//952 1007//1085 +f 972//954 1006//954 1003//954 +f 1006//954 972//954 1002//954 +f 980//970 1004//970 1005//970 +f 1004//970 980//970 979//970 +f 1013//813 1014//1181 1015//1181 +f 1014//1181 1013//813 1016//813 +f 1015//1181 1017//814 1018//814 +f 1017//814 1015//1181 1014//1181 +f 1019//815 1015//816 1020//816 +f 1015//816 1019//815 1013//815 +f 1021//814 1016//813 1013//813 +f 1016//813 1021//814 1022//814 +f 1023//817 1014//817 1016//817 +f 1014//817 1023//817 1024//817 +f 1017//818 1025//1182 1018//1183 +f 1025//1182 1017//818 1026//1184 +f 1020//816 1018//822 1027//822 +f 1018//822 1020//816 1015//816 +f 1024//817 1017//817 1014//817 +f 1017//817 1024//817 1028//817 +f 1029//824 1020//824 1030//824 +f 1020//824 1029//824 1019//824 +f 1031//822 1013//815 1019//815 +f 1013//815 1031//822 1021//822 +f 1032//825 1021//826 1033//825 +f 1021//826 1032//825 
1022//826 +f 1034//817 1016//817 1022//817 +f 1016//817 1034//817 1023//817 +f 1035//827 1024//827 1023//827 +f 1024//827 1035//827 1036//827 +f 1037//828 1038//829 1039//1185 +f 1038//829 1037//828 1025//1186 +f 1038//829 1025//1186 1040//832 +f 1040//832 1025//1186 1026//1187 +f 1040//832 1026//1187 1041//833 +f 1041//833 1026//1187 1042//834 +f 1041//833 1042//834 1043//834 +f 1043//834 1042//834 1044//1188 +f 1043//834 1044//1188 1045//836 +f 1045//836 1044//1188 1046//836 +f 1045//836 1046//836 1047//828 +f 1047//828 1046//836 1048//837 +f 1047//828 1048//837 1049//830 +f 1049//830 1048//837 1050//835 +f 1049//830 1050//835 1051//835 +f 1051//835 1050//835 1052//833 +f 1051//835 1052//833 1053//833 +f 1053//833 1052//833 1054//839 +f 1053//833 1054//839 1055//1189 +f 1055//1189 1054//839 1056//841 +f 1055//1189 1056//841 1057//1190 +f 1057//1190 1056//841 1058//830 +f 1018//1191 1037//1192 1027//1191 +f 1037//1192 1018//1191 1025//1192 +f 1028//848 1026//1193 1017//848 +f 1026//1193 1028//848 1042//1194 +f 1030//824 1027//849 1059//849 +f 1027//849 1030//824 1020//824 +f 1036//827 1028//827 1024//827 +f 1028//827 1036//827 1060//827 +f 1061//850 1029//850 1030//850 +f 1029//850 1061//850 1062//850 +f 1063//87 1019//824 1029//824 +f 1019//824 1063//87 1031//87 +f 1033//852 1031//852 1064//852 +f 1031//852 1033//852 1021//852 +f 1033//853 1065//853 1032//853 +f 1065//853 1033//853 1066//853 +f 1067//854 1022//1195 1032//854 +f 1022//1195 1067//854 1034//1195 +f 1068//827 1023//827 1034//1196 +f 1023//827 1068//827 1035//827 +f 1069//856 1035//857 1068//858 +f 1035//857 1069//856 1036//859 +f 1036//859 1069//856 1060//860 +f 1060//860 1069//856 1070//859 +f 1060//860 1070//859 1071//858 +f 1071//1197 1070//1197 1072//1197 +f 1073//862 1040//863 1074//862 +f 1040//863 1073//862 1038//863 +f 1059//864 1038//1198 1073//1199 +f 1038//1198 1059//864 1039//867 +f 1027//1200 1039//1201 1059//870 +f 1039//1201 1027//1200 1037//871 +f 1060//1202 1042//873 1028//1203 +f 
1042//873 1060//1202 1044//1204 +f 1071//876 1044//877 1060//876 +f 1044//877 1071//876 1046//878 +f 1075//879 1046//880 1071//879 +f 1046//880 1075//879 1048//880 +f 1076//881 1048//1205 1075//881 +f 1048//1205 1076//881 1050//1205 +f 1077//1206 1050//882 1076//884 +f 1050//882 1077//1206 1052//883 +f 1052//1207 1078//886 1054//1207 +f 1078//886 1052//1207 1077//886 +f 1054//1208 1079//1209 1056//1210 +f 1079//1209 1054//1208 1078//1211 +f 1056//892 1080//893 1058//894 +f 1080//893 1056//892 1079//895 +f 1058//1212 1081//1213 1057//898 +f 1081//1213 1058//1212 1080//899 +f 1057//902 1082//1214 1055//902 +f 1082//1214 1057//902 1081//903 +f 1055//905 1083//904 1053//905 +f 1083//904 1055//905 1082//904 +f 1051//1215 1083//1216 1084//1217 +f 1083//1216 1051//1215 1053//909 +f 1049//1218 1084//1219 1085//1219 +f 1084//1219 1049//1218 1051//1218 +f 1047//1220 1085//1220 1086//1220 +f 1085//1220 1047//1220 1049//1220 +f 1045//915 1086//916 1087//916 +f 1086//916 1045//915 1047//915 +f 1043//1221 1087//1222 1088//919 +f 1087//1222 1043//1221 1045//920 +f 1041//1223 1088//921 1089//1224 +f 1088//921 1041//1223 1043//1223 +f 1074//1225 1041//924 1089//925 +f 1041//924 1074//1225 1040//926 +f 1073//95 1030//850 1059//95 +f 1030//850 1073//95 1061//850 +f 1090//927 1062//927 1061//927 +f 1062//927 1090//927 1091//1226 +f 1062//850 1063//95 1029//850 +f 1063//95 1062//850 1092//95 +f 1064//928 1063//929 1093//928 +f 1063//929 1064//928 1031//929 +f 1064//930 1066//930 1033//930 +f 1066//930 1064//930 1094//930 +f 1095//932 1065//932 1066//932 +f 1065//932 1095//932 1096//932 +f 1097//817 1032//817 1065//817 +f 1032//817 1097//817 1067//817 +f 1067//1227 1068//933 1034//933 +f 1068//933 1067//1227 1098//1227 +f 1098//934 1069//1228 1068//1228 +f 1069//1228 1098//934 1099//934 +f 1100//935 1070//936 1069//937 +f 1070//936 1100//935 1072//1229 +f 1072//1229 1100//935 1071//939 +f 1071//939 1100//935 1101//940 +f 1071//939 1101//940 1075//937 +f 1075//861 1101//861 1102//861 +f 
1074//941 1061//927 1073//941 +f 1061//927 1074//941 1090//927 +f 1103//942 1075//942 1102//942 +f 1075//942 1103//942 1076//942 +f 1104//943 1076//1083 1103//943 +f 1076//1083 1104//943 1077//1083 +f 1104//1230 1078//1084 1077//1084 +f 1078//1084 1104//1230 1105//1230 +f 1105//946 1079//947 1078//947 +f 1079//947 1105//946 1106//946 +f 1106//948 1080//203 1079//203 +f 1080//203 1106//948 1107//948 +f 1080//215 1108//949 1081//215 +f 1108//949 1080//215 1107//949 +f 1081//950 1109//951 1082//950 +f 1109//951 1081//950 1108//951 +f 1082//952 1110//953 1083//952 +f 1110//953 1082//952 1109//953 +f 1083//954 1111//954 1084//954 +f 1111//954 1083//954 1110//954 +f 1084//955 1112//955 1085//955 +f 1112//955 1084//955 1111//955 +f 1085//956 1113//1231 1086//958 +f 1113//1231 1085//956 1114//959 +f 1113//1231 1114//959 1115//956 +f 1114//960 1085//960 1112//960 +f 1116//961 1115//956 1117//958 +f 1115//956 1116//961 1113//1231 +f 1086//962 1118//963 1087//964 +f 1118//963 1086//962 1116//965 +f 1118//963 1116//965 1117//1232 +f 1116//967 1086//967 1113//967 +f 1119//968 1117//1232 1120//964 +f 1117//1232 1119//968 1118//963 +f 1087//969 1121//969 1088//969 +f 1121//969 1087//969 1118//969 +f 1088//970 1122//970 1089//970 +f 1122//970 1088//970 1121//970 +f 1089//971 1090//972 1074//971 +f 1090//972 1089//971 1122//972 +f 1122//972 1091//973 1090//972 +f 1091//973 1122//972 1123//973 +f 1091//1226 1092//974 1062//927 +f 1092//974 1091//1226 1124//974 +f 1093//975 1092//976 1125//975 +f 1092//976 1093//975 1063//976 +f 1126//87 1064//87 1093//87 +f 1064//87 1126//87 1094//87 +f 1127//1233 1066//977 1094//977 +f 1066//977 1127//1233 1095//1233 +f 1128//814 1096//853 1095//853 +f 1096//853 1128//814 1129//814 +f 1096//1234 1097//1235 1065//1235 +f 1097//1235 1096//1234 1130//1234 +f 1131//827 1067//827 1097//827 +f 1067//827 1131//827 1098//827 +f 1132//858 1098//858 1131//858 +f 1098//858 1132//858 1099//858 +f 1099//980 1100//980 1069//980 +f 1100//980 1099//980 1133//980 
+f 1134//942 1101//942 1100//942 +f 1101//942 1134//942 1135//942 +f 1135//942 1102//942 1101//942 +f 1102//942 1135//942 1103//942 +f 1136//1236 1103//943 1135//1237 +f 1103//943 1136//1236 1104//943 +f 1136//1230 1105//1230 1104//1230 +f 1105//1230 1136//1230 1137//1238 +f 1137//946 1106//946 1105//946 +f 1106//946 1137//946 1138//946 +f 1138//948 1107//948 1106//948 +f 1107//948 1138//948 1139//984 +f 1107//949 1140//949 1108//949 +f 1140//949 1107//949 1139//1239 +f 1108//951 1141//951 1109//951 +f 1141//951 1108//951 1140//951 +f 1109//953 1142//1240 1110//953 +f 1142//1240 1109//953 1141//987 +f 1111//954 1142//988 1143//1241 +f 1142//988 1111//954 1110//954 +f 1112//955 1143//990 1114//990 +f 1143//990 1112//955 1111//955 +f 1114//990 1144//955 1115//955 +f 1144//955 1114//990 1143//990 +f 1115//1242 1145//991 1117//1242 +f 1145//991 1115//1242 1146//991 +f 1117//993 1147//993 1120//993 +f 1147//993 1117//993 1145//993 +f 1148//969 1120//969 1149//969 +f 1120//969 1148//969 1119//969 +f 1121//969 1119//969 1148//969 +f 1119//969 1121//969 1118//969 +f 1122//970 1148//970 1123//970 +f 1148//970 1122//970 1121//970 +f 1123//973 1124//971 1091//973 +f 1124//971 1123//973 1150//971 +f 1125//994 1124//994 1151//994 +f 1124//994 1125//994 1092//994 +f 1152//95 1093//95 1125//95 +f 1093//95 1152//95 1126//95 +f 1153//995 1094//995 1126//995 +f 1094//995 1153//995 1127//995 +f 1154//930 1095//822 1127//822 +f 1095//822 1154//930 1128//930 +f 1128//814 1155//853 1129//814 +f 1155//853 1128//814 1156//853 +f 1157//817 1096//817 1129//817 +f 1096//817 1157//817 1130//817 +f 1158//1243 1097//1244 1130//1243 +f 1097//1244 1158//1243 1131//1244 +f 1159//999 1131//999 1158//999 +f 1131//999 1159//999 1132//999 +f 1160//937 1099//937 1132//937 +f 1099//937 1160//937 1133//937 +f 1133//1000 1134//1245 1100//1245 +f 1134//1245 1133//1000 1161//1000 +f 1162//1002 1135//1237 1134//1003 +f 1135//1237 1162//1002 1136//1236 +f 1162//1246 1137//1238 1136//1230 +f 1137//1238 
1162//1246 1163//253 +f 1163//947 1138//946 1137//946 +f 1138//946 1163//947 1164//947 +f 1164//1006 1139//984 1138//948 +f 1139//984 1164//1006 1165//1007 +f 1139//1239 1166//1008 1140//949 +f 1166//1008 1139//1239 1165//1009 +f 1140//951 1167//950 1141//951 +f 1167//950 1140//951 1166//950 +f 1141//987 1168//1010 1142//1240 +f 1168//1010 1141//987 1167//246 +f 1143//1241 1168//1247 1144//1012 +f 1168//1247 1143//1241 1142//988 +f 1144//1248 1146//1013 1115//1248 +f 1146//1013 1144//1248 1169//1013 +f 1146//958 1170//958 1145//958 +f 1170//958 1146//958 1171//958 +f 1145//964 1172//964 1147//964 +f 1172//964 1145//964 1170//964 +f 1120//1016 1173//1016 1149//1016 +f 1173//1016 1120//1016 1147//1016 +f 1123//970 1149//970 1150//970 +f 1149//970 1123//970 1148//970 +f 1150//1249 1151//1017 1124//1249 +f 1151//1017 1150//1249 1174//1017 +f 1175//941 1125//941 1151//941 +f 1125//941 1175//941 1152//941 +f 1176//1019 1126//1019 1152//1019 +f 1126//1019 1176//1019 1153//1019 +f 1153//87 1154//87 1127//87 +f 1154//87 1153//87 1177//87 +f 1154//930 1156//930 1128//930 +f 1156//930 1154//930 1178//930 +f 1179//1020 1156//1021 1180//1022 +f 1156//1021 1179//1020 1155//1023 +f 1155//817 1157//817 1129//817 +f 1157//817 1155//817 1181//817 +f 1182//827 1130//827 1157//827 +f 1130//827 1182//827 1158//827 +f 1183//858 1184//1024 1185//1025 +f 1185//1250 1158//1250 1182//1250 +f 1158//858 1185//1025 1159//1025 +f 1159//1025 1185//1025 1184//1024 +f 1186//1027 1132//1028 1159//1027 +f 1132//1028 1186//1027 1160//1028 +f 1187//942 1133//942 1160//942 +f 1133//942 1187//942 1161//942 +f 1161//1029 1162//1251 1134//1030 +f 1162//1251 1161//1029 1188//1252 +f 1188//1032 1163//1032 1162//1034 +f 1163//1032 1188//1032 1189//1253 +f 1164//1036 1189//1036 1190//1036 +f 1189//1036 1164//1036 1163//1036 +f 1165//1254 1190//1255 1191//1256 +f 1190//1255 1165//1254 1164//1257 +f 1166//1258 1191//1042 1192//1259 +f 1191//1042 1166//1258 1165//1044 +f 1167//1045 1192//1045 1193//1045 +f 
1192//1045 1167//1045 1166//1045 +f 1168//1046 1193//1047 1194//1047 +f 1193//1047 1168//1046 1167//1046 +f 1144//1048 1194//1260 1169//1260 +f 1194//1260 1144//1048 1168//1048 +f 1169//990 1171//990 1146//990 +f 1171//990 1169//990 1195//990 +f 1170//1051 1196//1051 1197//1051 +f 1196//1051 1170//1051 1171//1051 +f 1172//1052 1197//1052 1198//1052 +f 1197//1052 1172//1052 1170//1052 +f 1147//969 1199//969 1173//969 +f 1199//969 1147//969 1172//969 +f 1149//1054 1174//1054 1150//1054 +f 1174//1054 1149//1054 1173//1054 +f 1200//971 1151//971 1174//971 +f 1151//971 1200//971 1175//971 +f 1201//1056 1152//1056 1175//1056 +f 1152//1056 1201//1056 1176//1056 +f 1176//95 1177//95 1153//95 +f 1177//95 1176//95 1202//95 +f 1177//87 1178//849 1154//87 +f 1178//849 1177//87 1203//849 +f 1180//1261 1178//1059 1204//1261 +f 1178//1059 1180//1261 1156//1059 +f 1205//1060 1206//1262 1207//1062 +f 1206//1262 1205//1060 1208//1063 +f 1208//1063 1205//1060 1209//1064 +f 1208//1063 1209//1064 1210//1065 +f 1210//1065 1209//1064 1211//1065 +f 1210//1065 1211//1065 1212//1066 +f 1212//1066 1211//1065 1213//1066 +f 1212//1066 1213//1066 1214//1071 +f 1214//1071 1213//1066 1215//1068 +f 1214//1071 1215//1068 1216//1069 +f 1216//1069 1215//1068 1217//1070 +f 1216//1069 1217//1070 1218//1070 +f 1218//1070 1217//1070 1219//1263 +f 1218//1070 1219//1263 1220//1071 +f 1220//1071 1219//1263 1221//1071 +f 1220//1071 1221//1071 1222//1065 +f 1222//1065 1221//1071 1179//1264 +f 1222//1065 1179//1264 1223//1072 +f 1223//1072 1179//1264 1180//1265 +f 1223//1072 1180//1265 1224//1074 +f 1224//1074 1180//1265 1204//1266 +f 1224//1074 1204//1266 1225//1267 +f 1179//1268 1181//1269 1155//1269 +f 1181//1269 1179//1268 1221//1270 +f 1181//827 1182//827 1157//827 +f 1182//827 1181//827 1185//827 +f 1219//1078 1183//1079 1185//1079 +f 1183//1079 1219//1078 1217//1078 +f 1226//937 1227//1271 1183//1272 +f 1183//1273 1159//1273 1184//1273 +f 1159//937 1183//1272 1186//935 +f 1186//935 1183//1272 1227//1271 
+f 1228//1274 1160//1274 1186//1081 +f 1160//1274 1228//1274 1187//1274 +f 1229//1083 1161//1083 1187//1083 +f 1161//1083 1229//1083 1188//1083 +f 1188//1084 1230//1084 1189//1084 +f 1230//1084 1188//1084 1229//1084 +f 1189//947 1231//947 1190//947 +f 1231//947 1189//947 1230//947 +f 1190//203 1232//203 1191//203 +f 1232//203 1190//203 1231//203 +f 1191//215 1233//215 1192//215 +f 1233//215 1191//215 1232//215 +f 1233//950 1193//950 1192//950 +f 1193//950 1233//950 1234//950 +f 1234//1177 1194//1177 1193//1177 +f 1194//1177 1234//1177 1235//1177 +f 1194//1275 1195//1275 1169//1275 +f 1195//1275 1194//1275 1235//1275 +f 1171//1276 1236//1086 1196//1086 +f 1236//1086 1171//1276 1195//1276 +f 1197//967 1237//967 1238//967 +f 1237//1277 1197//958 1196//1278 +f 1237//1277 1196//1278 1239//956 +f 1240//958 1237//1277 1239//956 +f 1198//1092 1241//1279 1242//1094 +f 1241//1279 1198//1092 1197//962 +f 1241//1279 1197//962 1238//1280 +f 1237//964 1241//1279 1238//1280 +f 1199//1095 1198//1281 1243//1281 +f 1198//1281 1199//1095 1172//1095 +f 1173//970 1200//970 1174//970 +f 1200//970 1173//970 1199//970 +f 1175//1282 1244//1282 1201//1282 +f 1244//1282 1175//1282 1200//1282 +f 1201//941 1202//941 1176//941 +f 1202//941 1201//941 1245//941 +f 1246//95 1177//95 1202//95 +f 1177//95 1246//95 1203//95 +f 1204//1099 1203//1100 1225//1101 +f 1203//1100 1204//1099 1178//1102 +f 1224//1283 1247//1103 1223//1283 +f 1247//1103 1224//1283 1246//1103 +f 1223//1105 1248//1284 1222//1285 +f 1248//1284 1223//1105 1247//1108 +f 1249//1286 1222//1287 1248//1288 +f 1222//1287 1249//1286 1220//1286 +f 1241//1289 1220//1290 1249//1289 +f 1220//1290 1241//1289 1218//1290 +f 1237//1115 1218//1115 1241//1115 +f 1218//1115 1237//1115 1216//1115 +f 1240//1118 1216//1119 1237//1118 +f 1216//1119 1240//1118 1214//1119 +f 1250//1291 1214//1292 1240//1291 +f 1214//1292 1250//1291 1212//1292 +f 1251//1122 1212//1293 1250//1124 +f 1212//1293 1251//1122 1210//1125 +f 1252//1126 1210//1127 1251//1294 +f 
1210//1127 1252//1126 1208//1295 +f 1253//1296 1208//1297 1252//1132 +f 1208//1297 1253//1296 1206//1297 +f 1254//1298 1206//1299 1253//1136 +f 1206//1299 1254//1298 1207//1137 +f 1255//1300 1207//1138 1254//1301 +f 1207//1138 1255//1300 1205//1141 +f 1256//1302 1205//1303 1255//1304 +f 1205//1303 1256//1302 1209//1305 +f 1257//1145 1209//1146 1256//1147 +f 1209//1146 1257//1145 1211//1306 +f 1213//1149 1257//1150 1258//1307 +f 1257//1150 1213//1149 1211//1152 +f 1215//1308 1258//1153 1226//1153 +f 1258//1153 1215//1308 1213//1308 +f 1217//1154 1226//1309 1183//1309 +f 1226//1309 1217//1154 1215//1154 +f 1221//1310 1185//1155 1181//1155 +f 1185//1155 1221//1310 1219//1310 +f 1225//1156 1246//1156 1224//1158 +f 1246//1156 1225//1156 1203//1311 +f 1226//1160 1259//942 1227//942 +f 1259//942 1226//1160 1258//1160 +f 1259//942 1186//942 1227//942 +f 1186//942 1259//942 1228//942 +f 1260//1312 1187//1313 1228//1312 +f 1187//1313 1260//1312 1229//1313 +f 1261//1164 1229//1163 1260//1164 +f 1229//1163 1261//1164 1230//1163 +f 1230//1165 1262//1165 1231//1165 +f 1262//1165 1230//1165 1261//1165 +f 1231//1168 1263//1314 1232//1168 +f 1263//1314 1231//1168 1262//1314 +f 1232//1315 1264//1169 1233//1315 +f 1264//1169 1232//1315 1263//1169 +f 1233//1170 1265//1170 1234//1170 +f 1265//1170 1233//1170 1264//1170 +f 1234//1316 1266//1317 1235//1316 +f 1266//1317 1234//1316 1265//1317 +f 1235//1172 1236//1172 1195//1172 +f 1236//1172 1235//1172 1266//1172 +f 1196//955 1267//955 1239//955 +f 1267//955 1196//955 1236//955 +f 1267//955 1240//990 1239//955 +f 1240//990 1267//955 1250//990 +f 1242//969 1249//969 1268//969 +f 1249//969 1242//969 1241//969 +f 1243//969 1242//969 1268//969 +f 1242//969 1243//969 1198//969 +f 1200//1175 1243//1175 1244//1175 +f 1243//1175 1200//1175 1199//1175 +f 1244//971 1245//971 1201//971 +f 1245//971 1244//971 1269//971 +f 1247//941 1202//941 1245//941 +f 1202//941 1247//941 1246//941 +f 1248//1176 1245//971 1269//971 +f 1245//971 1248//1176 
1247//1176 +f 1268//970 1248//970 1269//970 +f 1248//970 1268//970 1249//970 +f 1270//954 1250//954 1267//954 +f 1250//954 1270//954 1251//954 +f 1252//1177 1270//1085 1271//1085 +f 1270//1085 1252//1177 1251//1177 +f 1253//950 1271//950 1272//950 +f 1271//950 1253//950 1252//950 +f 1254//215 1272//215 1273//215 +f 1272//215 1254//215 1253//215 +f 1274//1178 1254//203 1273//1178 +f 1254//203 1274//1178 1255//203 +f 1275//947 1255//947 1274//947 +f 1255//947 1275//947 1256//947 +f 1276//1084 1256//945 1275//1084 +f 1256//945 1276//1084 1257//945 +f 1258//1083 1276//1083 1259//1083 +f 1276//1083 1258//1083 1257//1083 +f 1276//1083 1228//943 1259//1083 +f 1228//943 1276//1083 1260//943 +f 1276//1084 1261//1084 1260//1084 +f 1261//1084 1276//1084 1275//1084 +f 1275//947 1262//947 1261//947 +f 1262//947 1275//947 1274//947 +f 1274//1178 1263//203 1262//203 +f 1263//203 1274//1178 1273//1178 +f 1273//215 1264//1180 1263//1180 +f 1264//1180 1273//215 1272//215 +f 1264//950 1271//950 1265//950 +f 1271//950 1264//950 1272//950 +f 1265//952 1270//1085 1266//952 +f 1270//1085 1265//952 1271//1085 +f 1236//954 1270//954 1267//954 +f 1270//954 1236//954 1266//954 +f 1244//970 1268//970 1269//970 +f 1268//970 1244//970 1243//970 +f 1277//1318 1278//1319 1279//1319 +f 1278//1319 1277//1318 1280//1318 +f 1279//1319 1281//1320 1282//1320 +f 1281//1320 1279//1319 1278//1319 +f 1283//816 1279//816 1284//816 +f 1279//816 1283//816 1277//816 +f 1285//1320 1280//1318 1277//1318 +f 1280//1318 1285//1320 1286//1320 +f 1287//1321 1278//1321 1280//1321 +f 1278//1321 1287//1321 1288//1321 +f 1281//1322 1289//1323 1282//1324 +f 1289//1323 1281//1322 1290//1325 +f 1284//816 1282//930 1291//930 +f 1282//930 1284//816 1279//816 +f 1288//1321 1281//1321 1278//1321 +f 1281//1321 1288//1321 1292//1321 +f 1293//824 1284//824 1294//824 +f 1284//824 1293//824 1283//824 +f 1295//822 1277//816 1283//816 +f 1277//816 1295//822 1285//822 +f 1296//1326 1285//1326 1297//1326 +f 1285//1326 1296//1326 
1286//1326 +f 1298//1321 1280//1321 1286//1321 +f 1280//1321 1298//1321 1287//1321 +f 1299//827 1288//827 1287//827 +f 1288//827 1299//827 1300//827 +f 1301//828 1302//829 1303//830 +f 1302//829 1301//828 1289//1186 +f 1302//829 1289//1186 1304//1327 +f 1304//1327 1289//1186 1290//1187 +f 1304//1327 1290//1187 1305//833 +f 1305//833 1290//1187 1306//834 +f 1305//833 1306//834 1307//834 +f 1307//834 1306//834 1308//1188 +f 1307//834 1308//1188 1309//1328 +f 1309//1328 1308//1188 1310//1329 +f 1309//1328 1310//1329 1311//1330 +f 1311//1330 1310//1329 1312//837 +f 1311//1330 1312//837 1313//838 +f 1313//838 1312//837 1314//835 +f 1313//838 1314//835 1315//835 +f 1315//835 1314//835 1316//833 +f 1315//835 1316//833 1317//833 +f 1317//833 1316//833 1318//1331 +f 1317//833 1318//1331 1319//1189 +f 1319//1189 1318//1331 1320//1332 +f 1319//1189 1320//1332 1321//1187 +f 1321//1187 1320//1332 1322//830 +f 1282//845 1301//1192 1291//845 +f 1301//1192 1282//845 1289//1192 +f 1292//1333 1290//1334 1281//1335 +f 1290//1334 1292//1333 1306//1336 +f 1294//824 1291//849 1323//849 +f 1291//849 1294//824 1284//824 +f 1300//827 1292//827 1288//827 +f 1292//827 1300//827 1324//827 +f 1325//850 1293//850 1294//850 +f 1293//850 1325//850 1326//850 +f 1327//87 1283//824 1293//824 +f 1283//824 1327//87 1295//87 +f 1297//852 1295//852 1328//852 +f 1295//852 1297//852 1285//852 +f 1297//1320 1329//1337 1296//1320 +f 1329//1337 1297//1320 1330//1337 +f 1331//1338 1286//1339 1296//1338 +f 1286//1339 1331//1338 1298//1339 +f 1332//827 1287//827 1298//827 +f 1287//827 1332//827 1299//827 +f 1333//1340 1299//225 1332//1341 +f 1299//225 1333//1340 1300//1342 +f 1300//1342 1333//1340 1324//1340 +f 1324//1340 1333//1340 1334//1343 +f 1324//1340 1334//1343 1335//1341 +f 1335//861 1334//861 1336//861 +f 1337//862 1304//862 1338//862 +f 1304//862 1337//862 1302//863 +f 1323//1344 1302//865 1337//1199 +f 1302//865 1323//1344 1303//867 +f 1291//1345 1303//869 1323//1346 +f 1303//869 1291//1345 1301//871 
+f 1324//1202 1306//1347 1292//1348 +f 1306//1347 1324//1202 1308//1349 +f 1335//1350 1308//1351 1324//1350 +f 1308//1351 1335//1350 1310//1351 +f 1339//1352 1310//1353 1335//1352 +f 1310//1353 1339//1352 1312//1353 +f 1340//881 1312//1205 1339//881 +f 1312//1205 1340//881 1314//1205 +f 1341//1354 1314//1355 1340//1356 +f 1314//1355 1341//1354 1316//1357 +f 1316//1358 1342//1359 1318//1358 +f 1342//1359 1316//1358 1341//1359 +f 1318//1208 1343//889 1320//1360 +f 1343//889 1318//1208 1342//891 +f 1320//1361 1344//1362 1322//894 +f 1344//1362 1320//1361 1343//895 +f 1322//1363 1345//1364 1321//898 +f 1345//1364 1322//1363 1344//899 +f 1321//900 1346//1365 1319//902 +f 1346//1365 1321//900 1345//903 +f 1319//1366 1347//1367 1317//1366 +f 1347//1367 1319//1366 1346//1367 +f 1315//1368 1347//1369 1348//1370 +f 1347//1369 1315//1368 1317//1371 +f 1313//1372 1348//1373 1349//1373 +f 1348//1373 1313//1372 1315//1372 +f 1311//1220 1349//912 1350//912 +f 1349//912 1311//1220 1313//1220 +f 1309//1374 1350//1375 1351//1375 +f 1350//1375 1309//1374 1311//915 +f 1307//1376 1351//1377 1352//1378 +f 1351//1377 1307//1376 1309//1379 +f 1305//1380 1352//1381 1353//1382 +f 1352//1381 1305//1380 1307//1383 +f 1338//1384 1305//1385 1353//1386 +f 1305//1385 1338//1384 1304//1387 +f 1337//95 1294//850 1323//95 +f 1294//850 1337//95 1325//850 +f 1354//927 1326//927 1325//927 +f 1326//927 1354//927 1355//1226 +f 1326//850 1327//95 1293//850 +f 1327//95 1326//850 1356//95 +f 1328//1388 1327//929 1357//1388 +f 1327//929 1328//1388 1295//929 +f 1328//930 1330//930 1297//930 +f 1330//930 1328//930 1358//930 +f 1359//1389 1329//1390 1330//1390 +f 1329//1390 1359//1389 1360//1389 +f 1361//1321 1296//1321 1329//1321 +f 1296//1321 1361//1321 1331//1321 +f 1331//933 1332//1227 1298//1227 +f 1332//1227 1331//933 1362//933 +f 1362//1391 1333//1391 1332//1391 +f 1333//1391 1362//1391 1363//1391 +f 1364//1392 1334//1393 1333//1394 +f 1334//1393 1364//1392 1336//1395 +f 1336//1395 1364//1392 1335//1392 
+f 1335//1392 1364//1392 1365//1396 +f 1335//1392 1365//1396 1339//1394 +f 1339//1397 1365//1397 1366//1397 +f 1338//941 1325//927 1337//941 +f 1325//927 1338//941 1354//927 +f 1367//942 1339//942 1366//942 +f 1339//942 1367//942 1340//942 +f 1368//1398 1340//1399 1367//1398 +f 1340//1399 1368//1398 1341//1399 +f 1368//1400 1342//1401 1341//1401 +f 1342//1401 1368//1400 1369//1400 +f 1369//946 1343//947 1342//947 +f 1343//947 1369//946 1370//946 +f 1370//948 1344//203 1343//203 +f 1344//203 1370//948 1371//948 +f 1344//215 1372//949 1345//215 +f 1372//949 1344//215 1371//949 +f 1345//950 1373//951 1346//950 +f 1373//951 1345//950 1372//951 +f 1346//1402 1374//1403 1347//1402 +f 1374//1403 1346//1402 1373//1403 +f 1347//1404 1375//1404 1348//1404 +f 1375//1404 1347//1404 1374//1404 +f 1348//1405 1376//1405 1349//1405 +f 1376//1405 1348//1405 1375//1405 +f 1349//956 1377//958 1350//958 +f 1377//958 1349//956 1378//959 +f 1377//958 1378//959 1379//959 +f 1378//967 1349//967 1376//967 +f 1380//961 1379//959 1381//958 +f 1379//959 1380//961 1377//958 +f 1350//962 1382//1406 1351//964 +f 1382//1406 1350//962 1380//965 +f 1382//1406 1380//965 1381//1407 +f 1380//1408 1350//1408 1377//1408 +f 1383//968 1381//1407 1384//964 +f 1381//1407 1383//968 1382//1406 +f 1351//1409 1385//1409 1352//1409 +f 1385//1409 1351//1409 1382//1409 +f 1352//1410 1386//1410 1353//1410 +f 1386//1410 1352//1410 1385//1410 +f 1353//1411 1354//1412 1338//1411 +f 1354//1412 1353//1411 1386//1412 +f 1386//1412 1355//1412 1354//1412 +f 1355//1412 1386//1412 1387//1412 +f 1355//1226 1356//974 1326//927 +f 1356//974 1355//1226 1388//974 +f 1357//976 1356//976 1389//976 +f 1356//976 1357//976 1327//976 +f 1390//87 1328//87 1357//87 +f 1328//87 1390//87 1358//87 +f 1391//977 1330//977 1358//977 +f 1330//977 1391//977 1359//977 +f 1392//1320 1360//1337 1359//1413 +f 1360//1337 1392//1320 1393//1320 +f 1360//1414 1361//1415 1329//1415 +f 1361//1415 1360//1414 1394//1414 +f 1395//827 1331//827 1361//827 +f 
1331//827 1395//827 1362//827 +f 1396//1341 1362//1341 1395//1341 +f 1362//1341 1396//1341 1363//1341 +f 1363//1416 1364//1417 1333//1417 +f 1364//1417 1363//1416 1397//1416 +f 1398//942 1365//942 1364//942 +f 1365//942 1398//942 1399//942 +f 1399//942 1366//942 1365//942 +f 1366//942 1399//942 1367//942 +f 1400//1418 1367//1398 1399//1419 +f 1367//1398 1400//1418 1368//1398 +f 1400//1420 1369//1400 1368//1400 +f 1369//1400 1400//1420 1401//1421 +f 1401//946 1370//946 1369//946 +f 1370//946 1401//946 1402//946 +f 1402//948 1371//948 1370//948 +f 1371//948 1402//948 1403//984 +f 1371//949 1404//949 1372//949 +f 1404//949 1371//949 1403//985 +f 1372//951 1405//951 1373//951 +f 1405//951 1372//951 1404//951 +f 1373//1403 1406//1422 1374//1403 +f 1406//1422 1373//1403 1405//1423 +f 1375//1404 1406//1424 1407//1425 +f 1406//1424 1375//1404 1374//1404 +f 1376//1405 1407//1405 1378//1405 +f 1407//1405 1376//1405 1375//1405 +f 1378//1405 1408//1405 1379//1405 +f 1408//1405 1378//1405 1407//1405 +f 1379//991 1409//991 1381//991 +f 1409//991 1379//991 1410//991 +f 1381//993 1411//993 1384//993 +f 1411//993 1381//993 1409//993 +f 1412//1409 1384//1409 1413//1409 +f 1384//1409 1412//1409 1383//1409 +f 1385//1409 1383//1409 1412//1409 +f 1383//1409 1385//1409 1382//1409 +f 1386//1410 1412//1426 1387//1426 +f 1412//1426 1386//1410 1385//1410 +f 1387//1412 1388//1411 1355//1412 +f 1388//1411 1387//1412 1414//1411 +f 1389//994 1388//994 1415//994 +f 1388//994 1389//994 1356//994 +f 1416//95 1357//95 1389//95 +f 1357//95 1416//95 1390//95 +f 1417//995 1358//996 1390//996 +f 1358//996 1417//995 1391//995 +f 1418//930 1359//930 1391//930 +f 1359//930 1418//930 1392//930 +f 1392//1320 1419//1337 1393//1320 +f 1419//1337 1392//1320 1420//1337 +f 1421//1321 1360//1321 1393//1321 +f 1360//1321 1421//1321 1394//1321 +f 1422//1243 1361//1243 1394//1243 +f 1361//1243 1422//1243 1395//1243 +f 1423//1427 1395//1428 1422//1427 +f 1395//1428 1423//1427 1396//1428 +f 1424//1394 1363//1394 
1396//1394 +f 1363//1394 1424//1394 1397//1394 +f 1397//1429 1398//1001 1364//1001 +f 1398//1001 1397//1429 1425//1429 +f 1426//1430 1399//1419 1398//1431 +f 1399//1419 1426//1430 1400//1418 +f 1426//1432 1401//1421 1400//1420 +f 1401//1421 1426//1432 1427//165 +f 1427//947 1402//946 1401//946 +f 1402//946 1427//947 1428//947 +f 1428//1006 1403//984 1402//948 +f 1403//984 1428//1006 1429//1007 +f 1403//985 1430//1008 1404//949 +f 1430//1008 1403//985 1429//1009 +f 1404//951 1431//950 1405//951 +f 1431//950 1404//951 1430//950 +f 1405//1423 1432//1433 1406//1422 +f 1432//1433 1405//1423 1431//1434 +f 1407//1425 1432//1435 1408//1436 +f 1432//1435 1407//1425 1406//1424 +f 1408//1437 1410//1438 1379//1437 +f 1410//1438 1408//1437 1433//1438 +f 1410//958 1434//958 1409//958 +f 1434//958 1410//958 1435//958 +f 1409//964 1436//964 1411//964 +f 1436//964 1409//964 1434//964 +f 1384//1439 1437//1440 1413//1439 +f 1437//1440 1384//1439 1411//1440 +f 1387//1426 1413//1410 1414//1410 +f 1413//1410 1387//1426 1412//1426 +f 1414//1441 1415//1442 1388//1441 +f 1415//1442 1414//1441 1438//1442 +f 1439//941 1389//941 1415//941 +f 1389//941 1439//941 1416//941 +f 1440//1443 1390//1019 1416//1019 +f 1390//1019 1440//1443 1417//1443 +f 1417//87 1418//87 1391//87 +f 1418//87 1417//87 1441//87 +f 1418//930 1420//930 1392//930 +f 1420//930 1418//930 1442//930 +f 1443//1444 1420//1445 1444//1446 +f 1420//1445 1443//1444 1419//1447 +f 1419//1321 1421//1321 1393//1321 +f 1421//1321 1419//1321 1445//1321 +f 1446//827 1394//827 1421//827 +f 1394//827 1446//827 1422//827 +f 1447//1341 1448//1448 1449//1449 +f 1449//1450 1422//1450 1446//1450 +f 1422//1341 1449//1449 1423//1451 +f 1423//1451 1449//1449 1448//1448 +f 1450//1452 1396//1453 1423//1452 +f 1396//1453 1450//1452 1424//1453 +f 1451//942 1397//942 1424//942 +f 1397//942 1451//942 1425//942 +f 1425//1454 1426//1455 1398//1456 +f 1426//1455 1425//1454 1452//1457 +f 1452//1458 1427//1459 1426//1460 +f 1427//1459 1452//1458 1453//1461 +f 
1428//1036 1453//1036 1454//1036 +f 1453//1036 1428//1036 1427//1036 +f 1429//1254 1454//1038 1455//1039 +f 1454//1038 1429//1254 1428//1040 +f 1430//1462 1455//1044 1456//1463 +f 1455//1044 1430//1462 1429//1044 +f 1431//1045 1456//1045 1457//1045 +f 1456//1045 1431//1045 1430//1045 +f 1432//1464 1457//1465 1458//1465 +f 1457//1465 1432//1464 1431//1464 +f 1408//1466 1458//1467 1433//1467 +f 1458//1467 1408//1466 1432//1466 +f 1433//1405 1435//1405 1410//1405 +f 1435//1405 1433//1405 1459//1405 +f 1434//1051 1460//1051 1461//1051 +f 1460//1051 1434//1051 1435//1051 +f 1436//1052 1461//1053 1462//1053 +f 1461//1053 1436//1052 1434//1052 +f 1411//1409 1463//1409 1437//1409 +f 1463//1409 1411//1409 1436//1409 +f 1413//1468 1438//1469 1414//1468 +f 1438//1469 1413//1468 1437//1469 +f 1464//1470 1415//1470 1438//1470 +f 1415//1470 1464//1470 1439//1470 +f 1465//1056 1416//1056 1439//1056 +f 1416//1056 1465//1056 1440//1056 +f 1440//95 1441//95 1417//95 +f 1441//95 1440//95 1466//95 +f 1441//87 1442//849 1418//87 +f 1442//849 1441//87 1467//849 +f 1444//1058 1442//1471 1468//1058 +f 1442//1471 1444//1058 1420//1471 +f 1469//1472 1470//1264 1471//1062 +f 1470//1264 1469//1472 1472//1473 +f 1472//1473 1469//1472 1473//1474 +f 1472//1473 1473//1474 1474//1065 +f 1474//1065 1473//1474 1475//1065 +f 1474//1065 1475//1065 1476//1066 +f 1476//1066 1475//1065 1477//1066 +f 1476//1066 1477//1066 1478//1067 +f 1478//1067 1477//1066 1479//1068 +f 1478//1067 1479//1068 1480//1069 +f 1480//1069 1479//1068 1481//1070 +f 1480//1069 1481//1070 1482//1475 +f 1482//1475 1481//1070 1483//1263 +f 1482//1475 1483//1263 1484//1071 +f 1484//1071 1483//1263 1485//1071 +f 1484//1071 1485//1071 1486//1065 +f 1486//1065 1485//1071 1443//1065 +f 1486//1065 1443//1065 1487//1072 +f 1487//1072 1443//1065 1444//1073 +f 1487//1072 1444//1073 1488//1074 +f 1488//1074 1444//1073 1468//1069 +f 1488//1074 1468//1069 1489//1062 +f 1443//1476 1445//1476 1419//1476 +f 1445//1476 1443//1476 1485//1477 +f 
1445//827 1446//827 1421//827 +f 1446//827 1445//827 1449//827 +f 1483//1478 1447//1479 1449//1479 +f 1447//1479 1483//1478 1481//1478 +f 1490//1394 1491//1480 1447//1481 +f 1447//1482 1423//1482 1448//1482 +f 1423//1394 1447//1481 1450//1483 +f 1450//1483 1447//1481 1491//1480 +f 1492//1484 1424//1082 1450//1484 +f 1424//1082 1492//1484 1451//1082 +f 1493//1399 1425//1399 1451//1399 +f 1425//1399 1493//1399 1452//1399 +f 1452//1485 1494//1485 1453//1485 +f 1494//1485 1452//1485 1493//1485 +f 1453//947 1495//947 1454//947 +f 1495//947 1453//947 1494//947 +f 1454//203 1496//203 1455//203 +f 1496//203 1454//203 1495//203 +f 1455//215 1497//215 1456//215 +f 1497//215 1455//215 1496//215 +f 1497//950 1457//950 1456//950 +f 1457//950 1497//950 1498//950 +f 1498//1486 1458//1486 1457//1486 +f 1458//1486 1498//1486 1499//1486 +f 1458//1487 1459//1487 1433//1487 +f 1459//1487 1458//1487 1499//1487 +f 1435//1488 1500//1489 1460//1489 +f 1500//1489 1435//1488 1459//1488 +f 1461//1490 1501//1491 1502//1492 +f 1501//1491 1461//1490 1460//1493 +f 1501//1491 1460//1493 1503//956 +f 1504//958 1501//1491 1503//956 +f 1462//1494 1505//1494 1506//1494 +f 1505//1495 1462//964 1461//1496 +f 1505//1495 1461//1496 1502//1280 +f 1501//964 1505//1495 1502//1280 +f 1463//1497 1462//1498 1507//1498 +f 1462//1498 1463//1497 1436//1497 +f 1437//1410 1464//1410 1438//1410 +f 1464//1410 1437//1410 1463//1410 +f 1439//1499 1508//1500 1465//1500 +f 1508//1500 1439//1499 1464//1499 +f 1465//941 1466//941 1440//941 +f 1466//941 1465//941 1509//941 +f 1510//95 1441//95 1466//95 +f 1441//95 1510//95 1467//95 +f 1468//1501 1467//1502 1489//1101 +f 1467//1502 1468//1501 1442//1102 +f 1488//1283 1511//1103 1487//1283 +f 1511//1103 1488//1283 1510//1103 +f 1487//1285 1512//1503 1486//1504 +f 1512//1503 1487//1285 1511//1106 +f 1513//1505 1486//1506 1512//1505 +f 1486//1506 1513//1505 1484//1507 +f 1505//1508 1484//1508 1513//1508 +f 1484//1508 1505//1508 1482//1508 +f 1501//1509 1482//1115 1505//1509 +f 
1482//1115 1501//1509 1480//1115 +f 1504//1119 1480//1118 1501//1119 +f 1480//1118 1504//1119 1478//1118 +f 1514//1510 1478//1511 1504//1510 +f 1478//1511 1514//1510 1476//1511 +f 1515//1512 1476//1513 1514//1514 +f 1476//1513 1515//1512 1474//1515 +f 1516//1516 1474//1517 1515//1518 +f 1474//1517 1516//1516 1472//1519 +f 1517//1130 1472//1133 1516//1520 +f 1472//1133 1517//1130 1470//1133 +f 1518//1298 1470//1521 1517//1136 +f 1470//1521 1518//1298 1471//1137 +f 1519//1300 1471//1138 1518//1301 +f 1471//1138 1519//1300 1469//1141 +f 1520//1142 1469//1143 1519//1304 +f 1469//1143 1520//1142 1473//1143 +f 1521//1522 1473//1523 1520//1524 +f 1473//1523 1521//1522 1475//1525 +f 1477//1526 1521//1527 1522//1528 +f 1521//1527 1477//1526 1475//1529 +f 1479//1308 1522//1153 1490//1153 +f 1522//1153 1479//1308 1477//1308 +f 1481//1530 1490//1531 1447//1531 +f 1490//1531 1481//1530 1479//1530 +f 1485//1532 1449//1533 1445//1533 +f 1449//1533 1485//1532 1483//1532 +f 1489//1534 1510//1156 1488//1158 +f 1510//1156 1489//1534 1467//1159 +f 1490//1160 1523//942 1491//942 +f 1523//942 1490//1160 1522//1160 +f 1523//942 1450//942 1491//942 +f 1450//942 1523//942 1492//942 +f 1524//1535 1451//1535 1492//1535 +f 1451//1535 1524//1535 1493//1535 +f 1525//1536 1493//1537 1524//1536 +f 1493//1537 1525//1536 1494//1537 +f 1494//1165 1526//1166 1495//1165 +f 1526//1166 1494//1165 1525//1166 +f 1495//1167 1527//1168 1496//1167 +f 1527//1168 1495//1167 1526//1168 +f 1496//1538 1528//1169 1497//1538 +f 1528//1169 1496//1538 1527//1169 +f 1497//1170 1529//1170 1498//1170 +f 1529//1170 1497//1170 1528//1170 +f 1498//1539 1530//1540 1499//1539 +f 1530//1540 1498//1539 1529//1540 +f 1499//1541 1500//1542 1459//1541 +f 1500//1542 1499//1541 1530//1542 +f 1460//1405 1531//1405 1503//1405 +f 1531//1405 1460//1405 1500//1405 +f 1531//1405 1504//1543 1503//1405 +f 1504//1543 1531//1405 1514//1543 +f 1506//1409 1513//1409 1532//1409 +f 1513//1409 1506//1409 1505//1409 +f 1507//1409 1506//1409 
1532//1409 +f 1506//1409 1507//1409 1462//1409 +f 1464//1544 1507//1545 1508//1545 +f 1507//1545 1464//1544 1463//1544 +f 1508//1411 1509//1411 1465//1411 +f 1509//1411 1508//1411 1533//1411 +f 1511//941 1466//941 1509//941 +f 1466//941 1511//941 1510//941 +f 1512//1411 1509//1411 1533//1411 +f 1509//1411 1512//1411 1511//1411 +f 1532//1410 1512//1410 1533//1410 +f 1512//1410 1532//1410 1513//1410 +f 1534//1404 1514//1487 1531//1404 +f 1514//1487 1534//1404 1515//1487 +f 1516//1546 1534//1402 1535//1402 +f 1534//1402 1516//1546 1515//1546 +f 1517//950 1535//950 1536//950 +f 1535//950 1517//950 1516//950 +f 1518//215 1536//215 1537//215 +f 1536//215 1518//215 1517//215 +f 1538//1178 1518//203 1537//1178 +f 1518//203 1538//1178 1519//203 +f 1539//947 1519//947 1538//947 +f 1519//947 1539//947 1520//947 +f 1540//1547 1520//1547 1539//1547 +f 1520//1547 1540//1547 1521//1547 +f 1522//1399 1540//1398 1523//1398 +f 1540//1398 1522//1399 1521//1399 +f 1540//1398 1492//1398 1523//1398 +f 1492//1398 1540//1398 1524//1398 +f 1540//1547 1525//1401 1524//1401 +f 1525//1401 1540//1547 1539//1547 +f 1539//947 1526//947 1525//947 +f 1526//947 1539//947 1538//947 +f 1538//1178 1527//203 1526//203 +f 1527//203 1538//1178 1537//1178 +f 1537//215 1528//1180 1527//1180 +f 1528//1180 1537//215 1536//215 +f 1528//950 1535//950 1529//950 +f 1535//950 1528//950 1536//950 +f 1529//1402 1534//1402 1530//1402 +f 1534//1402 1529//1402 1535//1402 +f 1500//1404 1534//1404 1531//1404 +f 1534//1404 1500//1404 1530//1404 +f 1508//1410 1532//1410 1533//1410 +f 1532//1410 1508//1410 1507//1410 +f 1541//1318 1542//1319 1543//1319 +f 1542//1319 1541//1318 1544//1318 +f 1543//1319 1545//1320 1546//1320 +f 1545//1320 1543//1319 1542//1319 +f 1547//816 1543//816 1548//816 +f 1543//816 1547//816 1541//816 +f 1549//1320 1544//1318 1541//1318 +f 1544//1318 1549//1320 1550//1320 +f 1551//1321 1542//1321 1544//1321 +f 1542//1321 1551//1321 1552//1321 +f 1545//1322 1553//1548 1546//1549 +f 1553//1548 
1545//1322 1554//819 +f 1548//816 1546//930 1555//930 +f 1546//930 1548//816 1543//816 +f 1552//1321 1545//1321 1542//1321 +f 1545//1321 1552//1321 1556//1321 +f 1557//824 1548//824 1558//824 +f 1548//824 1557//824 1547//824 +f 1559//822 1541//816 1547//816 +f 1541//816 1559//822 1549//822 +f 1560//1326 1549//1326 1561//1326 +f 1549//1326 1560//1326 1550//1326 +f 1562//1321 1544//1321 1550//1321 +f 1544//1321 1562//1321 1551//1321 +f 1563//827 1552//827 1551//827 +f 1552//827 1563//827 1564//827 +f 1565//828 1566//829 1567//830 +f 1566//829 1565//828 1553//1186 +f 1566//829 1553//1186 1568//1327 +f 1568//1327 1553//1186 1554//833 +f 1568//1327 1554//833 1569//833 +f 1569//833 1554//833 1570//834 +f 1569//833 1570//834 1571//834 +f 1571//834 1570//834 1572//1188 +f 1571//834 1572//1188 1573//836 +f 1573//836 1572//1188 1574//1329 +f 1573//836 1574//1329 1575//1330 +f 1575//1330 1574//1329 1576//837 +f 1575//1330 1576//837 1577//838 +f 1577//838 1576//837 1578//835 +f 1577//838 1578//835 1579//835 +f 1579//835 1578//835 1580//833 +f 1579//835 1580//833 1581//833 +f 1581//833 1580//833 1582//1331 +f 1581//833 1582//1331 1583//1189 +f 1583//1189 1582//1331 1584//1332 +f 1583//1189 1584//1332 1585//1187 +f 1585//1187 1584//1332 1586//830 +f 1546//1191 1565//1191 1555//1191 +f 1565//1191 1546//1191 1553//1192 +f 1556//1336 1554//1550 1545//1334 +f 1554//1550 1556//1336 1570//1551 +f 1558//824 1555//849 1587//849 +f 1555//849 1558//824 1548//824 +f 1564//827 1556//827 1552//827 +f 1556//827 1564//827 1588//827 +f 1589//850 1557//850 1558//850 +f 1557//850 1589//850 1590//850 +f 1591//87 1547//824 1557//824 +f 1547//824 1591//87 1559//87 +f 1561//852 1559//852 1592//852 +f 1559//852 1561//852 1549//852 +f 1561//1320 1593//1337 1560//1320 +f 1593//1337 1561//1320 1594//1337 +f 1595//1339 1550//1339 1560//1339 +f 1550//1339 1595//1339 1562//1339 +f 1596//827 1551//827 1562//1196 +f 1551//827 1596//827 1563//827 +f 1597//1340 1563//225 1596//1341 +f 1563//225 1597//1340 
1564//1552 +f 1564//1552 1597//1340 1588//1553 +f 1588//1553 1597//1340 1598//1343 +f 1588//1553 1598//1343 1599//1341 +f 1599//861 1598//861 1600//861 +f 1601//862 1568//862 1602//862 +f 1568//862 1601//862 1566//863 +f 1587//864 1566//1554 1601//1199 +f 1566//1554 1587//864 1567//867 +f 1555//868 1567//869 1587//870 +f 1567//869 1555//868 1565//871 +f 1588//1202 1570//1555 1556//874 +f 1570//1555 1588//1202 1572//1349 +f 1599//1556 1572//1351 1588//1556 +f 1572//1351 1599//1556 1574//1351 +f 1603//1352 1574//1352 1599//1352 +f 1574//1352 1603//1352 1576//1352 +f 1604//1557 1576//1558 1603//1557 +f 1576//1558 1604//1557 1578//1558 +f 1605//1559 1578//1560 1604//1356 +f 1578//1560 1605//1559 1580//1561 +f 1580//1562 1606//1358 1582//1562 +f 1606//1358 1580//1562 1605//1358 +f 1582//888 1607//889 1584//890 +f 1607//889 1582//888 1606//891 +f 1584//892 1608//893 1586//894 +f 1608//893 1584//892 1607//895 +f 1586//1212 1609//1213 1585//898 +f 1609//1213 1586//1212 1608//899 +f 1585//1563 1610//903 1583//1563 +f 1610//903 1585//1563 1609//903 +f 1583//1367 1611//1367 1581//1367 +f 1611//1367 1583//1367 1610//1367 +f 1579//1564 1611//1368 1612//1565 +f 1611//1368 1579//1564 1581//1371 +f 1577//1372 1612//1373 1613//1373 +f 1612//1373 1577//1372 1579//1372 +f 1575//913 1613//912 1614//912 +f 1613//912 1575//913 1577//913 +f 1573//914 1614//1375 1615//1375 +f 1614//1375 1573//914 1575//914 +f 1571//1566 1615//1377 1616//1567 +f 1615//1377 1571//1566 1573//1379 +f 1569//1381 1616//1382 1617//1382 +f 1616//1382 1569//1381 1571//1381 +f 1602//1568 1569//1385 1617//1569 +f 1569//1385 1602//1568 1568//1387 +f 1601//95 1558//850 1587//95 +f 1558//850 1601//95 1589//850 +f 1618//927 1590//927 1589//927 +f 1590//927 1618//927 1619//927 +f 1590//850 1591//95 1557//850 +f 1591//95 1590//850 1620//95 +f 1592//928 1591//928 1621//928 +f 1591//928 1592//928 1559//928 +f 1592//930 1594//930 1561//930 +f 1594//930 1592//930 1622//930 +f 1623//1390 1593//1570 1594//1570 +f 1593//1570 
1623//1390 1624//1390 +f 1625//1321 1560//1321 1593//1321 +f 1560//1321 1625//1321 1595//1321 +f 1595//1571 1596//933 1562//933 +f 1596//933 1595//1571 1626//1571 +f 1626//1391 1597//1572 1596//1572 +f 1597//1572 1626//1391 1627//1391 +f 1628//1392 1598//1393 1597//1394 +f 1598//1393 1628//1392 1600//1395 +f 1600//1395 1628//1392 1599//1573 +f 1599//1573 1628//1392 1629//1396 +f 1599//1573 1629//1396 1603//1394 +f 1603//861 1629//861 1630//861 +f 1602//941 1589//927 1601//941 +f 1589//927 1602//941 1618//927 +f 1631//942 1603//942 1630//942 +f 1603//942 1631//942 1604//942 +f 1632//1398 1604//1399 1631//1398 +f 1604//1399 1632//1398 1605//1399 +f 1632//1400 1606//1401 1605//1401 +f 1606//1401 1632//1400 1633//1400 +f 1633//946 1607//947 1606//947 +f 1607//947 1633//946 1634//946 +f 1634//948 1608//203 1607//203 +f 1608//203 1634//948 1635//948 +f 1608//215 1636//949 1609//215 +f 1636//949 1608//215 1635//949 +f 1609//950 1637//951 1610//950 +f 1637//951 1609//950 1636//951 +f 1610//1486 1638//1574 1611//1486 +f 1638//1574 1610//1486 1637//1574 +f 1611//1404 1639//1404 1612//1404 +f 1639//1404 1611//1404 1638//1404 +f 1612//1405 1640//1405 1613//1405 +f 1640//1405 1612//1405 1639//1405 +f 1613//956 1641//958 1614//958 +f 1641//958 1613//956 1642//959 +f 1641//958 1642//959 1643//1575 +f 1642//967 1613//967 1640//967 +f 1644//961 1643//1575 1645//958 +f 1643//1575 1644//961 1641//958 +f 1614//962 1646//1406 1615//964 +f 1646//1406 1614//962 1644//1232 +f 1646//1406 1644//1232 1645//962 +f 1644//1408 1614//1408 1641//1408 +f 1647//968 1645//962 1648//964 +f 1645//962 1647//968 1646//1406 +f 1615//1409 1649//1409 1616//1409 +f 1649//1409 1615//1409 1646//1409 +f 1616//1410 1650//1410 1617//1410 +f 1650//1410 1616//1410 1649//1410 +f 1617//1470 1618//1576 1602//1470 +f 1618//1576 1617//1470 1650//1576 +f 1650//1576 1619//1412 1618//1576 +f 1619//1412 1650//1576 1651//1412 +f 1619//927 1620//941 1590//927 +f 1620//941 1619//927 1652//974 +f 1621//976 1620//976 1653//976 
+f 1620//976 1621//976 1591//976 +f 1654//87 1592//87 1621//87 +f 1592//87 1654//87 1622//87 +f 1655//977 1594//977 1622//977 +f 1594//977 1655//977 1623//977 +f 1656//1320 1624//1337 1623//1413 +f 1624//1337 1656//1320 1657//1320 +f 1624//1414 1625//1415 1593//1415 +f 1625//1415 1624//1414 1658//1414 +f 1659//827 1595//827 1625//827 +f 1595//827 1659//827 1626//827 +f 1660//1341 1626//1341 1659//1577 +f 1626//1341 1660//1341 1627//1341 +f 1627//1417 1628//1417 1597//1417 +f 1628//1417 1627//1417 1661//1417 +f 1662//942 1629//942 1628//942 +f 1629//942 1662//942 1663//942 +f 1663//942 1630//942 1629//942 +f 1630//942 1663//942 1631//942 +f 1664//1418 1631//1398 1663//1578 +f 1631//1398 1664//1418 1632//1398 +f 1664//1420 1633//1400 1632//1400 +f 1633//1400 1664//1420 1665//1579 +f 1665//946 1634//946 1633//946 +f 1634//946 1665//946 1666//946 +f 1666//948 1635//948 1634//948 +f 1635//948 1666//948 1667//984 +f 1635//949 1668//949 1636//949 +f 1668//949 1635//949 1667//985 +f 1636//951 1669//951 1637//951 +f 1669//951 1636//951 1668//951 +f 1637//1574 1670//1580 1638//1574 +f 1670//1580 1637//1574 1669//1581 +f 1639//1404 1670//1424 1671//1425 +f 1670//1424 1639//1404 1638//1404 +f 1640//1405 1671//1405 1642//1405 +f 1671//1405 1640//1405 1639//1405 +f 1642//1405 1672//1405 1643//1405 +f 1672//1405 1642//1405 1671//1405 +f 1643//991 1673//991 1645//991 +f 1673//991 1643//991 1674//991 +f 1645//993 1675//993 1648//993 +f 1675//993 1645//993 1673//993 +f 1676//1409 1648//1409 1677//1409 +f 1648//1409 1676//1409 1647//1409 +f 1649//1409 1647//1409 1676//1409 +f 1647//1409 1649//1409 1646//1409 +f 1650//1410 1676//1426 1651//1426 +f 1676//1426 1650//1410 1649//1410 +f 1651//1412 1652//1411 1619//1412 +f 1652//1411 1651//1412 1678//1411 +f 1653//994 1652//994 1679//994 +f 1652//994 1653//994 1620//994 +f 1680//95 1621//95 1653//95 +f 1621//95 1680//95 1654//95 +f 1681//996 1622//995 1654//995 +f 1622//995 1681//996 1655//996 +f 1682//930 1623//930 1655//930 +f 1623//930 
1682//930 1656//930 +f 1656//1320 1683//1337 1657//1320 +f 1683//1337 1656//1320 1684//1337 +f 1685//1321 1624//1321 1657//1321 +f 1624//1321 1685//1321 1658//1321 +f 1686//1243 1625//1243 1658//1243 +f 1625//1243 1686//1243 1659//1243 +f 1687//1428 1659//1582 1686//1428 +f 1659//1582 1687//1428 1660//1582 +f 1688//1394 1627//1394 1660//1394 +f 1627//1394 1688//1394 1661//1394 +f 1661//1001 1662//1001 1628//1001 +f 1662//1001 1661//1001 1689//1001 +f 1690//1583 1663//1578 1662//1431 +f 1663//1578 1690//1583 1664//1418 +f 1690//1432 1665//1579 1664//1420 +f 1665//1579 1690//1432 1691//165 +f 1691//947 1666//946 1665//946 +f 1666//946 1691//947 1692//947 +f 1692//1006 1667//984 1666//948 +f 1667//984 1692//1006 1693//1007 +f 1667//985 1694//1008 1668//949 +f 1694//1008 1667//985 1693//1009 +f 1668//951 1695//950 1669//951 +f 1695//950 1668//951 1694//950 +f 1669//1581 1696//1584 1670//1580 +f 1696//1584 1669//1581 1695//176 +f 1671//1425 1696//1435 1672//1436 +f 1696//1435 1671//1425 1670//1424 +f 1672//1437 1674//1438 1643//1437 +f 1674//1438 1672//1437 1697//1438 +f 1674//958 1698//958 1673//958 +f 1698//958 1674//958 1699//958 +f 1673//964 1700//964 1675//964 +f 1700//964 1673//964 1698//964 +f 1648//1440 1701//1439 1677//1440 +f 1701//1439 1648//1440 1675//1439 +f 1651//1426 1677//1410 1678//1410 +f 1677//1410 1651//1426 1676//1426 +f 1678//1585 1679//1442 1652//1585 +f 1679//1442 1678//1585 1702//1442 +f 1703//941 1653//941 1679//941 +f 1653//941 1703//941 1680//941 +f 1704//1586 1654//1443 1680//1443 +f 1654//1443 1704//1586 1681//1586 +f 1681//87 1682//87 1655//87 +f 1682//87 1681//87 1705//87 +f 1682//930 1684//930 1656//930 +f 1684//930 1682//930 1706//930 +f 1707//1587 1684//1588 1708//1446 +f 1684//1588 1707//1587 1683//1447 +f 1683//1321 1685//1321 1657//1321 +f 1685//1321 1683//1321 1709//1321 +f 1710//827 1658//827 1685//827 +f 1658//827 1710//827 1686//827 +f 1711//1341 1712//1341 1713//1341 +f 1713//1589 1686//1589 1710//1589 +f 1686//1341 1713//1341 
1687//1341 +f 1687//1341 1713//1341 1712//1341 +f 1714//1452 1660//1590 1687//1452 +f 1660//1590 1714//1452 1688//1590 +f 1715//942 1661//942 1688//942 +f 1661//942 1715//942 1689//942 +f 1689//1455 1690//1456 1662//1456 +f 1690//1456 1689//1455 1716//1591 +f 1716//1592 1691//1593 1690//1594 +f 1691//1593 1716//1592 1717//1595 +f 1692//1036 1717//1036 1718//1036 +f 1717//1036 1692//1036 1691//1036 +f 1693//1037 1718//1596 1719//1039 +f 1718//1596 1693//1037 1692//1597 +f 1694//1258 1719//1598 1720//1043 +f 1719//1598 1694//1258 1693//1044 +f 1695//1045 1720//1045 1721//1045 +f 1720//1045 1695//1045 1694//1045 +f 1696//1599 1721//1600 1722//1601 +f 1721//1600 1696//1599 1695//1464 +f 1672//1466 1722//1467 1697//1467 +f 1722//1467 1672//1466 1696//1466 +f 1697//1405 1699//1405 1674//1405 +f 1699//1405 1697//1405 1723//1405 +f 1698//1051 1724//1051 1725//1051 +f 1724//1051 1698//1051 1699//1051 +f 1700//1053 1725//1052 1726//1052 +f 1725//1052 1700//1053 1698//1053 +f 1675//1409 1727//1409 1701//1409 +f 1727//1409 1675//1409 1700//1409 +f 1677//1468 1702//1469 1678//1468 +f 1702//1469 1677//1468 1701//1469 +f 1728//1470 1679//1470 1702//1470 +f 1679//1470 1728//1470 1703//1470 +f 1729//1057 1680//1056 1703//1056 +f 1680//1056 1729//1057 1704//1057 +f 1704//95 1705//95 1681//95 +f 1705//95 1704//95 1730//95 +f 1705//87 1706//849 1682//87 +f 1706//849 1705//87 1731//849 +f 1708//1058 1706//1059 1732//1058 +f 1706//1059 1708//1058 1684//1059 +f 1733//1472 1734//1264 1735//1062 +f 1734//1264 1733//1472 1736//1473 +f 1736//1473 1733//1472 1737//1474 +f 1736//1473 1737//1474 1738//1065 +f 1738//1065 1737//1474 1739//1065 +f 1738//1065 1739//1065 1740//1066 +f 1740//1066 1739//1065 1741//1066 +f 1740//1066 1741//1066 1742//1067 +f 1742//1067 1741//1066 1743//1068 +f 1742//1067 1743//1068 1744//1069 +f 1744//1069 1743//1068 1745//1070 +f 1744//1069 1745//1070 1746//1070 +f 1746//1070 1745//1070 1747//1263 +f 1746//1070 1747//1263 1748//1071 +f 1748//1071 1747//1263 1749//1071 
+f 1748//1071 1749//1071 1750//1065 +f 1750//1065 1749//1071 1707//1065 +f 1750//1065 1707//1065 1751//1072 +f 1751//1072 1707//1065 1708//1265 +f 1751//1072 1708//1265 1752//1074 +f 1752//1074 1708//1265 1732//1069 +f 1752//1074 1732//1069 1753//1062 +f 1707//1602 1709//1477 1683//1476 +f 1709//1477 1707//1602 1749//1477 +f 1709//827 1710//827 1685//827 +f 1710//827 1709//827 1713//827 +f 1747//1478 1711//1478 1713//1478 +f 1711//1478 1747//1478 1745//1478 +f 1754//1394 1755//1603 1711//1604 +f 1711//1605 1687//1605 1712//1605 +f 1687//1394 1711//1604 1714//1606 +f 1714//1606 1711//1604 1755//1603 +f 1756//1484 1688//1274 1714//1484 +f 1688//1274 1756//1484 1715//1274 +f 1757//1399 1689//1399 1715//1399 +f 1689//1399 1757//1399 1716//1399 +f 1716//1485 1758//1485 1717//1485 +f 1758//1485 1716//1485 1757//1485 +f 1717//947 1759//947 1718//947 +f 1759//947 1717//947 1758//947 +f 1718//203 1760//203 1719//203 +f 1760//203 1718//203 1759//203 +f 1719//215 1761//215 1720//215 +f 1761//215 1719//215 1760//215 +f 1761//950 1721//950 1720//950 +f 1721//950 1761//950 1762//950 +f 1762//1486 1722//1486 1721//1486 +f 1722//1486 1762//1486 1763//1486 +f 1722//1487 1723//1487 1697//1487 +f 1723//1487 1722//1487 1763//1487 +f 1699//1488 1764//1607 1724//1607 +f 1764//1607 1699//1488 1723//1488 +f 1725//967 1765//967 1766//967 +f 1765//958 1725//958 1724//1608 +f 1765//958 1724//1608 1767//957 +f 1768//958 1765//958 1767//957 +f 1726//967 1769//967 1770//967 +f 1769//1609 1726//964 1725//1610 +f 1769//1609 1725//1610 1766//962 +f 1765//964 1769//1609 1766//962 +f 1727//1498 1726//1497 1771//1497 +f 1726//1497 1727//1498 1700//1498 +f 1701//1410 1728//1410 1702//1410 +f 1728//1410 1701//1410 1727//1410 +f 1703//1499 1772//1499 1729//1499 +f 1772//1499 1703//1499 1728//1499 +f 1729//941 1730//941 1704//941 +f 1730//941 1729//941 1773//941 +f 1774//95 1705//95 1730//95 +f 1705//95 1774//95 1731//95 +f 1732//1611 1731//1099 1753//1101 +f 1731//1099 1732//1611 1706//1612 +f 
1752//1104 1775//1283 1751//1104 +f 1775//1283 1752//1104 1774//1613 +f 1751//1107 1776//1503 1750//1614 +f 1776//1503 1751//1107 1775//1106 +f 1777//1505 1750//1615 1776//1505 +f 1750//1615 1777//1505 1748//1615 +f 1769//1508 1748//1508 1777//1508 +f 1748//1508 1769//1508 1746//1508 +f 1765//1616 1746//1116 1769//1616 +f 1746//1116 1765//1616 1744//1116 +f 1768//1119 1744//1617 1765//1119 +f 1744//1617 1768//1119 1742//1117 +f 1778//1510 1742//1511 1768//1510 +f 1742//1511 1778//1510 1740//1511 +f 1779//1512 1740//1513 1778//1514 +f 1740//1513 1779//1512 1738//1515 +f 1780//1516 1738//1618 1779//1518 +f 1738//1618 1780//1516 1736//1127 +f 1781//1130 1736//1133 1780//1132 +f 1736//1133 1781//1130 1734//1133 +f 1782//1134 1734//1135 1781//1136 +f 1734//1135 1782//1134 1735//1137 +f 1783//1138 1735//1139 1782//1140 +f 1735//1139 1783//1138 1733//1141 +f 1784//1142 1733//1143 1783//1144 +f 1733//1143 1784//1142 1737//1143 +f 1785//1522 1737//1523 1784//1524 +f 1737//1523 1785//1522 1739//1619 +f 1741//1526 1785//1620 1786//1528 +f 1785//1620 1741//1526 1739//1529 +f 1743//1621 1786//1621 1754//1621 +f 1786//1621 1743//1621 1741//1622 +f 1745//1531 1754//1531 1711//1531 +f 1754//1531 1745//1531 1743//1531 +f 1749//1532 1713//1623 1709//1533 +f 1713//1623 1749//1532 1747//1532 +f 1753//1534 1774//1534 1752//1158 +f 1774//1534 1753//1534 1731//1159 +f 1754//1160 1787//942 1755//942 +f 1787//942 1754//1160 1786//1160 +f 1787//942 1714//942 1755//942 +f 1714//942 1787//942 1756//942 +f 1788//1535 1715//1624 1756//1535 +f 1715//1624 1788//1535 1757//1624 +f 1789//1537 1757//1536 1788//1537 +f 1757//1536 1789//1537 1758//1536 +f 1758//1166 1790//1165 1759//1166 +f 1790//1165 1758//1166 1789//1165 +f 1759//1168 1791//1314 1760//1168 +f 1791//1314 1759//1168 1790//1314 +f 1760//1169 1792//1315 1761//1169 +f 1792//1315 1760//1169 1791//1315 +f 1761//1170 1793//1170 1762//1170 +f 1793//1170 1761//1170 1792//1170 +f 1762//1540 1794//1540 1763//1540 +f 1794//1540 1762//1540 
1793//1540 +f 1763//1541 1764//1542 1723//1541 +f 1764//1542 1763//1541 1794//1542 +f 1724//1405 1795//1405 1767//1405 +f 1795//1405 1724//1405 1764//1405 +f 1795//1405 1768//1543 1767//1405 +f 1768//1543 1795//1405 1778//1543 +f 1770//1409 1777//1409 1796//1409 +f 1777//1409 1770//1409 1769//1409 +f 1771//1409 1770//1409 1796//1409 +f 1770//1409 1771//1409 1726//1409 +f 1728//1625 1771//1544 1772//1544 +f 1771//1544 1728//1625 1727//1625 +f 1772//1470 1773//1470 1729//1470 +f 1773//1470 1772//1470 1797//1470 +f 1775//941 1730//941 1773//941 +f 1730//941 1775//941 1774//941 +f 1776//1470 1773//1470 1797//1470 +f 1773//1470 1776//1470 1775//1470 +f 1796//1410 1776//1410 1797//1410 +f 1776//1410 1796//1410 1777//1410 +f 1798//1404 1778//1487 1795//1404 +f 1778//1487 1798//1404 1779//1487 +f 1780//1402 1798//1486 1799//1486 +f 1798//1486 1780//1402 1779//1402 +f 1781//950 1799//950 1800//950 +f 1799//950 1781//950 1780//950 +f 1782//215 1800//215 1801//215 +f 1800//215 1782//215 1781//215 +f 1802//1178 1782//203 1801//1178 +f 1782//203 1802//1178 1783//203 +f 1803//947 1783//947 1802//947 +f 1783//947 1803//947 1784//947 +f 1804//1547 1784//1547 1803//1547 +f 1784//1547 1804//1547 1785//1547 +f 1786//1399 1804//1398 1787//1398 +f 1804//1398 1786//1399 1785//1399 +f 1804//1398 1756//1398 1787//1398 +f 1756//1398 1804//1398 1788//1398 +f 1804//1547 1789//1401 1788//1401 +f 1789//1401 1804//1547 1803//1547 +f 1803//947 1790//947 1789//947 +f 1790//947 1803//947 1802//947 +f 1802//1178 1791//203 1790//203 +f 1791//203 1802//1178 1801//1178 +f 1801//215 1792//1180 1791//1180 +f 1792//1180 1801//215 1800//215 +f 1792//950 1799//950 1793//950 +f 1799//950 1792//950 1800//950 +f 1793//1486 1798//1486 1794//1486 +f 1798//1486 1793//1486 1799//1486 +f 1764//1404 1798//1404 1795//1404 +f 1798//1404 1764//1404 1794//1404 +f 1772//1410 1796//1410 1797//1410 +f 1796//1410 1772//1410 1771//1410 +f 1805//1318 1806//1626 1807//1626 +f 1806//1626 1805//1318 1808//1318 +f 1807//1626 
1809//1320 1810//1320 +f 1809//1320 1807//1626 1806//1626 +f 1811//815 1807//816 1812//816 +f 1807//816 1811//815 1805//815 +f 1813//1320 1808//1318 1805//1318 +f 1808//1318 1813//1320 1814//1320 +f 1815//1321 1806//1321 1808//1321 +f 1806//1321 1815//1321 1816//1321 +f 1809//1322 1817//1548 1810//1627 +f 1817//1548 1809//1322 1818//1628 +f 1812//816 1810//822 1819//822 +f 1810//822 1812//816 1807//816 +f 1816//1321 1809//1321 1806//1321 +f 1809//1321 1816//1321 1820//1321 +f 1821//824 1812//824 1822//824 +f 1812//824 1821//824 1811//824 +f 1823//822 1805//815 1811//815 +f 1805//815 1823//822 1813//822 +f 1824//1326 1813//1326 1825//1326 +f 1813//1326 1824//1326 1814//1326 +f 1826//1321 1808//1321 1814//1321 +f 1808//1321 1826//1321 1815//1321 +f 1827//1629 1816//1629 1815//1629 +f 1816//1629 1827//1629 1828//1629 +f 1829//828 1830//829 1831//1185 +f 1830//829 1829//828 1817//1186 +f 1830//829 1817//1186 1832//832 +f 1832//832 1817//1186 1818//833 +f 1832//832 1818//833 1833//833 +f 1833//833 1818//833 1834//834 +f 1833//833 1834//834 1835//834 +f 1835//834 1834//834 1836//1188 +f 1835//834 1836//1188 1837//1630 +f 1837//1630 1836//1188 1838//1329 +f 1837//1630 1838//1329 1839//1330 +f 1839//1330 1838//1329 1840//1631 +f 1839//1330 1840//1631 1841//838 +f 1841//838 1840//1631 1842//835 +f 1841//838 1842//835 1843//835 +f 1843//835 1842//835 1844//833 +f 1843//835 1844//833 1845//833 +f 1845//833 1844//833 1846//1632 +f 1845//833 1846//1632 1847//1633 +f 1847//1633 1846//1632 1848//1634 +f 1847//1633 1848//1634 1849//1187 +f 1849//1187 1848//1634 1850//830 +f 1810//844 1829//844 1819//1191 +f 1829//844 1810//844 1817//844 +f 1820//1335 1818//1336 1809//1335 +f 1818//1336 1820//1335 1834//1336 +f 1822//824 1819//87 1851//87 +f 1819//87 1822//824 1812//824 +f 1828//1629 1820//1629 1816//1629 +f 1820//1629 1828//1629 1852//1629 +f 1853//850 1821//850 1822//850 +f 1821//850 1853//850 1854//850 +f 1855//87 1811//824 1821//824 +f 1811//824 1855//87 1823//87 +f 1825//852 
1823//852 1856//852 +f 1823//852 1825//852 1813//852 +f 1825//1320 1857//1320 1824//1320 +f 1857//1320 1825//1320 1858//1320 +f 1859//1339 1814//1338 1824//1339 +f 1814//1338 1859//1339 1826//1338 +f 1860//1629 1815//1629 1826//1629 +f 1815//1629 1860//1629 1827//1629 +f 1861//860 1827//857 1860//858 +f 1827//857 1861//860 1828//1635 +f 1828//1635 1861//860 1852//1636 +f 1852//1636 1861//860 1862//1024 +f 1852//1636 1862//1024 1863//858 +f 1863//861 1862//861 1864//861 +f 1865//1637 1832//1638 1866//1637 +f 1832//1638 1865//1637 1830//1638 +f 1851//864 1830//1198 1865//1199 +f 1830//1198 1851//864 1831//867 +f 1819//1345 1831//869 1851//870 +f 1831//869 1819//1345 1829//871 +f 1852//1639 1834//1640 1820//1641 +f 1834//1640 1852//1639 1836//1642 +f 1863//876 1836//1643 1852//876 +f 1836//1643 1863//876 1838//878 +f 1867//879 1838//880 1863//879 +f 1838//880 1867//879 1840//880 +f 1868//1644 1840//1645 1867//1644 +f 1840//1645 1868//1644 1842//1645 +f 1869//1354 1842//1646 1868//1356 +f 1842//1646 1869//1354 1844//1357 +f 1844//1562 1870//1562 1846//1562 +f 1870//1562 1844//1562 1869//1562 +f 1846//888 1871//1209 1848//890 +f 1871//1209 1846//888 1870//891 +f 1848//892 1872//893 1850//894 +f 1872//893 1848//892 1871//895 +f 1850//1212 1873//897 1849//898 +f 1873//897 1850//1212 1872//899 +f 1849//1647 1874//1648 1847//1649 +f 1874//1648 1849//1647 1873//1650 +f 1847//904 1875//905 1845//904 +f 1875//905 1847//904 1874//905 +f 1843//1651 1875//1216 1876//1217 +f 1875//1216 1843//1651 1845//909 +f 1841//1218 1876//1219 1877//1219 +f 1876//1219 1841//1218 1843//1218 +f 1839//1220 1877//1220 1878//1220 +f 1877//1220 1839//1220 1841//1220 +f 1837//1652 1878//916 1879//916 +f 1878//916 1837//1652 1839//915 +f 1835//1653 1879//920 1880//1654 +f 1879//920 1835//1653 1837//920 +f 1833//1655 1880//1656 1881//1224 +f 1880//1656 1833//1655 1835//1223 +f 1866//1225 1833//924 1881//925 +f 1833//924 1866//1225 1832//1657 +f 1865//95 1822//850 1851//95 +f 1822//850 1865//95 
1853//850 +f 1882//1658 1854//1658 1853//1658 +f 1854//1658 1882//1658 1883//1658 +f 1854//850 1855//95 1821//850 +f 1855//95 1854//850 1884//95 +f 1856//928 1855//928 1885//928 +f 1855//928 1856//928 1823//928 +f 1856//930 1858//930 1825//930 +f 1858//930 1856//930 1886//930 +f 1887//1570 1857//1659 1858//1659 +f 1857//1659 1887//1570 1888//1570 +f 1889//1321 1824//1321 1857//1321 +f 1824//1321 1889//1321 1859//1321 +f 1859//1660 1860//1660 1826//1660 +f 1860//1660 1859//1660 1890//1660 +f 1890//934 1861//1228 1860//1228 +f 1861//1228 1890//934 1891//934 +f 1892//935 1862//936 1861//937 +f 1862//936 1892//935 1864//1229 +f 1864//1229 1892//935 1863//1661 +f 1863//1661 1892//935 1893//940 +f 1863//1661 1893//940 1867//937 +f 1867//1662 1893//1662 1894//1662 +f 1866//1663 1853//1658 1865//1663 +f 1853//1658 1866//1663 1882//1658 +f 1895//1664 1867//1664 1894//1664 +f 1867//1664 1895//1664 1868//1664 +f 1896//1398 1868//1399 1895//1398 +f 1868//1399 1896//1398 1869//1399 +f 1896//1400 1870//1547 1869//1547 +f 1870//1547 1896//1400 1897//1400 +f 1897//946 1871//947 1870//947 +f 1871//947 1897//946 1898//946 +f 1898//948 1872//203 1871//203 +f 1872//203 1898//948 1899//948 +f 1872//215 1900//949 1873//215 +f 1900//949 1872//215 1899//949 +f 1873//1665 1901//1666 1874//1665 +f 1901//1666 1873//1665 1900//1666 +f 1874//1085 1902//953 1875//1085 +f 1902//953 1874//1085 1901//953 +f 1875//954 1903//954 1876//954 +f 1903//954 1875//954 1902//954 +f 1876//990 1904//990 1877//990 +f 1904//990 1876//990 1903//990 +f 1877//956 1905//958 1878//958 +f 1905//958 1877//956 1906//959 +f 1905//958 1906//959 1907//1575 +f 1906//967 1877//967 1904//967 +f 1908//961 1907//1575 1909//958 +f 1907//1575 1908//961 1905//958 +f 1878//962 1910//963 1879//964 +f 1910//963 1878//962 1908//965 +f 1910//963 1908//965 1909//1232 +f 1908//1667 1878//1667 1905//1667 +f 1911//968 1909//1232 1912//964 +f 1909//1232 1911//968 1910//963 +f 1879//969 1913//969 1880//969 +f 1913//969 1879//969 1910//969 
+f 1880//970 1914//970 1881//970 +f 1914//970 1880//970 1913//970 +f 1881//971 1882//1668 1866//971 +f 1882//1668 1881//971 1914//1668 +f 1914//1668 1883//973 1882//1668 +f 1883//973 1914//1668 1915//973 +f 1883//1658 1884//1663 1854//1658 +f 1884//1663 1883//1658 1916//1663 +f 1885//976 1884//976 1917//976 +f 1884//976 1885//976 1855//976 +f 1918//87 1856//87 1885//87 +f 1856//87 1918//87 1886//87 +f 1919//977 1858//977 1886//977 +f 1858//977 1919//977 1887//977 +f 1920//1320 1888//1320 1887//1320 +f 1888//1320 1920//1320 1921//1320 +f 1888//1415 1889//1415 1857//1415 +f 1889//1415 1888//1415 1922//1415 +f 1923//1629 1859//1629 1889//1629 +f 1859//1629 1923//1629 1890//1629 +f 1924//858 1890//858 1923//858 +f 1890//858 1924//858 1891//858 +f 1891//980 1892//980 1861//980 +f 1892//980 1891//980 1925//980 +f 1926//1664 1893//1664 1892//1664 +f 1893//1664 1926//1664 1927//1664 +f 1927//1664 1894//1664 1893//1664 +f 1894//1664 1927//1664 1895//1664 +f 1928//1669 1895//1398 1927//1578 +f 1895//1398 1928//1669 1896//1398 +f 1928//1400 1897//1400 1896//1400 +f 1897//1400 1928//1400 1929//1579 +f 1929//946 1898//946 1897//946 +f 1898//946 1929//946 1930//946 +f 1930//948 1899//948 1898//948 +f 1899//948 1930//948 1931//984 +f 1899//949 1932//949 1900//949 +f 1932//949 1899//949 1931//985 +f 1900//1666 1933//1666 1901//1666 +f 1933//1666 1900//1666 1932//1666 +f 1901//953 1934//1670 1902//953 +f 1934//1670 1901//953 1933//1671 +f 1903//954 1934//988 1935//1672 +f 1934//988 1903//954 1902//954 +f 1904//990 1935//990 1906//990 +f 1935//990 1904//990 1903//990 +f 1906//990 1936//990 1907//990 +f 1936//990 1906//990 1935//990 +f 1907//991 1937//991 1909//991 +f 1937//991 1907//991 1938//991 +f 1909//993 1939//993 1912//993 +f 1939//993 1909//993 1937//993 +f 1940//969 1912//969 1941//969 +f 1912//969 1940//969 1911//969 +f 1913//969 1911//969 1940//969 +f 1911//969 1913//969 1910//969 +f 1914//970 1940//970 1915//970 +f 1940//970 1914//970 1913//970 +f 1915//973 1916//971 
1883//973 +f 1916//971 1915//973 1942//971 +f 1917//1673 1916//1673 1943//1673 +f 1916//1673 1917//1673 1884//1673 +f 1944//95 1885//95 1917//95 +f 1885//95 1944//95 1918//95 +f 1945//995 1886//996 1918//996 +f 1886//996 1945//995 1919//995 +f 1946//930 1887//822 1919//822 +f 1887//822 1946//930 1920//930 +f 1920//1320 1947//1320 1921//1320 +f 1947//1320 1920//1320 1948//1320 +f 1949//1321 1888//1321 1921//1321 +f 1888//1321 1949//1321 1922//1321 +f 1950//1674 1889//1674 1922//1674 +f 1889//1674 1950//1674 1923//1674 +f 1951//999 1923//999 1950//999 +f 1923//999 1951//999 1924//999 +f 1952//937 1891//937 1924//937 +f 1891//937 1952//937 1925//937 +f 1925//1675 1926//1675 1892//1675 +f 1926//1675 1925//1675 1953//1675 +f 1954//1430 1927//1578 1926//1431 +f 1927//1578 1954//1430 1928//1669 +f 1954//1676 1929//1579 1928//1400 +f 1929//1579 1954//1676 1955//165 +f 1955//947 1930//946 1929//946 +f 1930//946 1955//947 1956//947 +f 1956//1006 1931//984 1930//948 +f 1931//984 1956//1006 1957//1007 +f 1931//985 1958//1008 1932//949 +f 1958//1008 1931//985 1957//1009 +f 1932//1666 1959//1665 1933//1666 +f 1959//1665 1932//1666 1958//1665 +f 1933//1671 1960//1677 1934//1670 +f 1960//1677 1933//1671 1959//246 +f 1935//1672 1960//1011 1936//1012 +f 1960//1011 1935//1672 1934//988 +f 1936//1013 1938//1013 1907//1013 +f 1938//1013 1936//1013 1961//1013 +f 1938//958 1962//958 1937//958 +f 1962//958 1938//958 1963//958 +f 1937//964 1964//964 1939//964 +f 1964//964 1937//964 1962//964 +f 1912//1015 1965//1015 1941//1015 +f 1965//1015 1912//1015 1939//1015 +f 1915//970 1941//970 1942//970 +f 1941//970 1915//970 1940//970 +f 1942//1249 1943//1017 1916//1249 +f 1943//1017 1942//1249 1966//1017 +f 1967//1663 1917//1663 1943//1663 +f 1917//1663 1967//1663 1944//1663 +f 1968//1019 1918//1019 1944//1019 +f 1918//1019 1968//1019 1945//1019 +f 1945//87 1946//87 1919//87 +f 1946//87 1945//87 1969//87 +f 1946//930 1948//930 1920//930 +f 1948//930 1946//930 1970//930 +f 1971//1444 1948//1588 
1972//1678 +f 1948//1588 1971//1444 1947//1447 +f 1947//1321 1949//1321 1921//1321 +f 1949//1321 1947//1321 1973//1321 +f 1974//1629 1922//1629 1949//1629 +f 1922//1629 1974//1629 1950//1629 +f 1975//858 1976//1025 1977//860 +f 1977//1679 1950//1679 1974//1679 +f 1950//858 1977//860 1951//1680 +f 1951//1680 1977//860 1976//1025 +f 1978//1027 1924//1028 1951//1027 +f 1924//1028 1978//1027 1952//1028 +f 1979//1664 1925//1664 1952//1664 +f 1925//1664 1979//1664 1953//1664 +f 1953//1591 1954//1456 1926//1456 +f 1954//1456 1953//1591 1980//1591 +f 1980//1681 1955//1459 1954//1682 +f 1955//1459 1980//1681 1981//1595 +f 1956//1036 1981//1036 1982//1036 +f 1981//1036 1956//1036 1955//1036 +f 1957//1037 1982//1683 1983//1256 +f 1982//1683 1957//1037 1956//1257 +f 1958//1258 1983//1258 1984//1043 +f 1983//1258 1958//1258 1957//1044 +f 1959//1684 1984//1684 1985//1684 +f 1984//1684 1959//1684 1958//1684 +f 1960//1046 1985//1047 1986//1047 +f 1985//1047 1960//1046 1959//1046 +f 1936//1048 1986//1685 1961//1685 +f 1986//1685 1936//1048 1960//1048 +f 1961//990 1963//990 1938//990 +f 1963//990 1961//990 1987//990 +f 1962//1051 1988//1051 1989//1051 +f 1988//1051 1962//1051 1963//1051 +f 1964//1052 1989//1052 1990//1052 +f 1989//1052 1964//1052 1962//1052 +f 1939//969 1991//969 1965//969 +f 1991//969 1939//969 1964//969 +f 1941//1054 1966//1054 1942//1054 +f 1966//1054 1941//1054 1965//1054 +f 1992//1055 1943//1055 1966//1055 +f 1943//1055 1992//1055 1967//1055 +f 1993//1686 1944//1687 1967//1687 +f 1944//1687 1993//1686 1968//1686 +f 1968//95 1969//95 1945//95 +f 1969//95 1968//95 1994//95 +f 1969//87 1970//849 1946//87 +f 1970//849 1969//87 1995//849 +f 1972//1058 1970//1059 1996//1261 +f 1970//1059 1972//1058 1948//1059 +f 1997//1472 1998//1264 1999//1267 +f 1998//1264 1997//1472 2000//1063 +f 2000//1063 1997//1472 2001//1688 +f 2000//1063 2001//1688 2002//1065 +f 2002//1065 2001//1688 2003//1065 +f 2002//1065 2003//1065 2004//1066 +f 2004//1066 2003//1065 2005//1066 +f 
2004//1066 2005//1066 2006//1067 +f 2006//1067 2005//1066 2007//1068 +f 2006//1067 2007//1068 2008//1069 +f 2008//1069 2007//1068 2009//1070 +f 2008//1069 2009//1070 2010//1689 +f 2010//1689 2009//1070 2011//1263 +f 2010//1689 2011//1263 2012//1071 +f 2012//1071 2011//1263 2013//1071 +f 2012//1071 2013//1071 2014//1065 +f 2014//1065 2013//1071 1971//1065 +f 2014//1065 1971//1065 2015//1072 +f 2015//1072 1971//1065 1972//1265 +f 2015//1072 1972//1265 2016//1074 +f 2016//1074 1972//1265 1996//1069 +f 2016//1074 1996//1069 2017//1267 +f 1971//1690 1973//1691 1947//1692 +f 1973//1691 1971//1690 2013//1477 +f 1973//1629 1974//1629 1949//1629 +f 1974//1629 1973//1629 1977//1629 +f 2011//1078 1975//1079 1977//1079 +f 1975//1079 2011//1078 2009//1078 +f 2018//937 2019//1693 1975//937 +f 1975//1694 1951//1694 1976//1694 +f 1951//937 1975//937 1978//937 +f 1978//937 1975//937 2019//1693 +f 2020//1695 1952//1695 1978//1696 +f 1952//1695 2020//1695 1979//1695 +f 2021//1399 1953//1399 1979//1399 +f 1953//1399 2021//1399 1980//1399 +f 1980//1547 2022//1547 1981//1547 +f 2022//1547 1980//1547 2021//1547 +f 1981//947 2023//947 1982//947 +f 2023//947 1981//947 2022//947 +f 1982//203 2024//203 1983//203 +f 2024//203 1982//203 2023//203 +f 1983//215 2025//215 1984//215 +f 2025//215 1983//215 2024//215 +f 2025//1665 1985//1665 1984//1665 +f 1985//1665 2025//1665 2026//1665 +f 2026//1085 1986//1085 1985//1085 +f 1986//1085 2026//1085 2027//1085 +f 1986//954 1987//954 1961//954 +f 1987//954 1986//954 2027//954 +f 1963//1086 2028//1086 1988//1086 +f 2028//1086 1963//1086 1987//1086 +f 1989//1697 2029//1088 2030//1698 +f 2029//1493 1989//958 1988//1699 +f 2029//1493 1988//1699 2031//1700 +f 2032//958 2029//1493 2031//1700 +f 1990//967 2033//967 2034//967 +f 2033//965 1990//964 1989//1697 +f 2033//965 1989//1697 2030//1698 +f 2029//1088 2033//965 2030//1698 +f 1991//1095 1990//1281 2035//1281 +f 1990//1281 1991//1095 1964//1095 +f 1965//970 1992//970 1966//970 +f 1992//970 1965//970 
1991//970 +f 1967//1098 2036//1098 1993//1098 +f 2036//1098 1967//1098 1992//1098 +f 1993//1663 1994//1663 1968//1663 +f 1994//1663 1993//1663 2037//1663 +f 2038//95 1969//95 1994//95 +f 1969//95 2038//95 1995//95 +f 1996//1099 1995//1502 2017//1101 +f 1995//1502 1996//1099 1970//1102 +f 2016//1701 2039//1702 2015//1701 +f 2039//1702 2016//1701 2038//1702 +f 2015//1703 2040//1284 2014//1285 +f 2040//1284 2015//1703 2039//1108 +f 2041//1288 2014//1111 2040//1288 +f 2014//1111 2041//1288 2012//1109 +f 2033//1113 2012//1114 2041//1113 +f 2012//1114 2033//1113 2010//1114 +f 2029//1616 2010//1115 2033//1616 +f 2010//1115 2029//1616 2008//1115 +f 2032//1118 2008//1118 2029//1118 +f 2008//1118 2032//1118 2006//1118 +f 2042//1291 2006//1292 2032//1291 +f 2006//1292 2042//1291 2004//1292 +f 2043//1704 2004//1293 2042//1124 +f 2004//1293 2043//1704 2002//1125 +f 2044//1126 2002//1127 2043//1294 +f 2002//1127 2044//1126 2000//1295 +f 2045//1132 2000//1131 2044//1132 +f 2000//1131 2045//1132 1998//1133 +f 2046//1137 1998//1705 2045//1706 +f 1998//1705 2046//1137 1999//1707 +f 2047//1300 1999//1708 2046//1709 +f 1999//1708 2047//1300 1997//1141 +f 2048//1302 1997//1303 2047//1304 +f 1997//1303 2048//1302 2001//1305 +f 2049//1710 2001//1523 2048//1524 +f 2001//1523 2049//1710 2003//1525 +f 2005//1526 2049//1711 2050//1528 +f 2049//1711 2005//1526 2003//1529 +f 2007//1712 2050//1713 2018//1713 +f 2050//1713 2007//1712 2005//1712 +f 2009//1154 2018//1309 1975//1309 +f 2018//1309 2009//1154 2007//1154 +f 2013//1714 1977//1714 1973//1714 +f 1977//1714 2013//1714 2011//1714 +f 2017//1715 2038//1716 2016//1717 +f 2038//1716 2017//1715 1995//1718 +f 2018//1664 2051//1664 2019//1664 +f 2051//1664 2018//1664 2050//1664 +f 2051//1664 1978//1664 2019//1664 +f 1978//1664 2051//1664 2020//1664 +f 2052//1535 1979//1535 2020//1535 +f 1979//1535 2052//1535 2021//1535 +f 2053//1536 2021//1536 2052//1536 +f 2021//1536 2053//1536 2022//1536 +f 2022//1166 2054//1165 2023//1166 +f 2054//1165 
2022//1166 2053//1165 +f 2023//1314 2055//1314 2024//1314 +f 2055//1314 2023//1314 2054//1314 +f 2024//1538 2056//1719 2025//1538 +f 2056//1719 2024//1538 2055//1719 +f 2025//1720 2057//1721 2026//1720 +f 2057//1721 2025//1720 2056//1170 +f 2026//1317 2058//1316 2027//1317 +f 2058//1316 2026//1317 2057//1316 +f 2027//1173 2028//1173 1987//1173 +f 2028//1173 2027//1173 2058//1173 +f 1988//990 2059//990 2031//990 +f 2059//990 1988//990 2028//990 +f 2059//990 2032//990 2031//990 +f 2032//990 2059//990 2042//990 +f 2034//969 2041//969 2060//969 +f 2041//969 2034//969 2033//969 +f 2035//969 2034//969 2060//969 +f 2034//969 2035//969 1990//969 +f 1992//1175 2035//1175 2036//1175 +f 2035//1175 1992//1175 1991//1175 +f 2036//971 2037//971 1993//971 +f 2037//971 2036//971 2061//971 +f 2039//1663 1994//1663 2037//1663 +f 1994//1663 2039//1663 2038//1663 +f 2040//971 2037//971 2061//971 +f 2037//971 2040//971 2039//971 +f 2060//970 2040//970 2061//970 +f 2040//970 2060//970 2041//970 +f 2062//954 2042//954 2059//954 +f 2042//954 2062//954 2043//954 +f 2044//1177 2062//1177 2063//1177 +f 2062//1177 2044//1177 2043//1177 +f 2045//950 2063//950 2064//950 +f 2063//950 2045//950 2044//950 +f 2046//1722 2064//1722 2065//1722 +f 2064//1722 2046//1722 2045//1722 +f 2066//1178 2046//203 2065//1178 +f 2046//203 2066//1178 2047//203 +f 2067//947 2047//947 2066//947 +f 2047//947 2067//947 2048//947 +f 2068//1547 2048//1547 2067//1547 +f 2048//1547 2068//1547 2049//1547 +f 2050//1399 2068//1399 2051//1399 +f 2068//1399 2050//1399 2049//1399 +f 2068//1399 2020//1398 2051//1399 +f 2020//1398 2068//1399 2052//1398 +f 2068//1547 2053//1485 2052//1485 +f 2053//1485 2068//1547 2067//1547 +f 2067//947 2054//947 2053//947 +f 2054//947 2067//947 2066//947 +f 2066//1178 2055//203 2054//203 +f 2055//203 2066//1178 2065//1178 +f 2065//1722 2056//1722 2055//1722 +f 2056//1722 2065//1722 2064//1722 +f 2056//950 2063//950 2057//950 +f 2063//950 2056//950 2064//950 +f 2057//1177 2062//1177 2058//1085 +f 
2062//1177 2057//1177 2063//1177 +f 2028//954 2062//954 2059//954 +f 2062//954 2028//954 2058//954 +f 2036//970 2060//970 2061//970 +f 2060//970 2036//970 2035//970 +f 2069//1318 2070//1319 2071//1319 +f 2070//1319 2069//1318 2072//1318 +f 2071//1319 2073//1320 2074//1320 +f 2073//1320 2071//1319 2070//1319 +f 2075//815 2071//816 2076//816 +f 2071//816 2075//815 2069//815 +f 2077//1320 2072//1318 2069//1318 +f 2072//1318 2077//1320 2078//1320 +f 2079//1321 2070//1321 2072//1321 +f 2070//1321 2079//1321 2080//1321 +f 2073//1322 2081//1548 2074//1324 +f 2081//1548 2073//1322 2082//1628 +f 2076//816 2074//930 2083//930 +f 2074//930 2076//816 2071//816 +f 2080//1321 2073//1321 2070//1321 +f 2073//1321 2080//1321 2084//1321 +f 2085//824 2076//824 2086//824 +f 2076//824 2085//824 2075//824 +f 2087//822 2069//815 2075//815 +f 2069//815 2087//822 2077//822 +f 2088//1723 2077//1326 2089//1723 +f 2077//1326 2088//1723 2078//1326 +f 2090//1321 2072//1321 2078//1321 +f 2072//1321 2090//1321 2079//1321 +f 2091//1629 2080//1629 2079//1629 +f 2080//1629 2091//1629 2092//1629 +f 2093//828 2094//829 2095//1185 +f 2094//829 2093//828 2081//831 +f 2094//829 2081//831 2096//832 +f 2096//832 2081//831 2082//833 +f 2096//832 2082//833 2097//833 +f 2097//833 2082//833 2098//834 +f 2097//833 2098//834 2099//834 +f 2099//834 2098//834 2100//835 +f 2099//834 2100//835 2101//836 +f 2101//836 2100//835 2102//1630 +f 2101//836 2102//1630 2103//828 +f 2103//828 2102//1630 2104//1631 +f 2103//828 2104//1631 2105//838 +f 2105//838 2104//1631 2106//835 +f 2105//838 2106//835 2107//835 +f 2107//835 2106//835 2108//833 +f 2107//835 2108//833 2109//833 +f 2109//833 2108//833 2110//839 +f 2109//833 2110//839 2111//1633 +f 2111//1633 2110//839 2112//1634 +f 2111//1633 2112//1634 2113//1187 +f 2113//1187 2112//1634 2114//830 +f 2074//1192 2093//1192 2083//1191 +f 2093//1192 2074//1192 2081//1192 +f 2084//1335 2082//1334 2073//1335 +f 2082//1334 2084//1335 2098//1336 +f 2086//824 2083//849 2115//849 +f 
2083//849 2086//824 2076//824 +f 2092//1629 2084//1629 2080//1629 +f 2084//1629 2092//1629 2116//1629 +f 2117//850 2085//850 2086//850 +f 2085//850 2117//850 2118//850 +f 2119//87 2075//824 2085//824 +f 2075//824 2119//87 2087//87 +f 2089//852 2087//852 2120//852 +f 2087//852 2089//852 2077//852 +f 2089//1320 2121//1320 2088//1320 +f 2121//1320 2089//1320 2122//1320 +f 2123//1339 2078//1338 2088//1339 +f 2078//1338 2123//1339 2090//1338 +f 2124//1629 2079//1629 2090//1629 +f 2079//1629 2124//1629 2091//1629 +f 2125//860 2091//857 2124//858 +f 2091//857 2125//860 2092//1635 +f 2092//1635 2125//860 2116//1636 +f 2116//1636 2125//860 2126//859 +f 2116//1636 2126//859 2127//858 +f 2127//861 2126//861 2128//861 +f 2129//1637 2096//1638 2130//1637 +f 2096//1638 2129//1637 2094//1638 +f 2115//1344 2094//1554 2129//1199 +f 2094//1554 2115//1344 2095//867 +f 2083//1724 2095//1201 2115//870 +f 2095//1201 2083//1724 2093//871 +f 2116//1725 2098//1640 2084//1641 +f 2098//1640 2116//1725 2100//1726 +f 2127//1727 2100//878 2116//877 +f 2100//878 2127//1727 2102//878 +f 2131//879 2102//880 2127//879 +f 2102//880 2131//879 2104//880 +f 2132//1644 2104//1645 2131//1644 +f 2104//1645 2132//1644 2106//1645 +f 2133//1354 2106//1646 2132//1356 +f 2106//1646 2133//1354 2108//1357 +f 2108//1562 2134//1562 2110//1562 +f 2134//1562 2108//1562 2133//1562 +f 2110//1360 2135//1728 2112//1360 +f 2135//1728 2110//1360 2134//891 +f 2112//1729 2136//1730 2114//894 +f 2136//1730 2112//1729 2135//895 +f 2114//1363 2137//1213 2113//898 +f 2137//1213 2114//1363 2136//899 +f 2113//1649 2138//1731 2111//1649 +f 2138//1731 2113//1649 2137//1650 +f 2111//904 2139//905 2109//904 +f 2139//905 2111//904 2138//905 +f 2107//906 2139//1216 2140//1217 +f 2139//1216 2107//906 2109//909 +f 2105//1732 2140//1219 2141//1219 +f 2140//1219 2105//1732 2107//910 +f 2103//1220 2141//1220 2142//1220 +f 2141//1220 2103//1220 2105//1220 +f 2101//915 2142//916 2143//916 +f 2142//916 2101//915 2103//915 +f 2099//1653 
2143//918 2144//1654 +f 2143//918 2099//1653 2101//920 +f 2097//922 2144//1224 2145//1224 +f 2144//1224 2097//922 2099//1223 +f 2130//1225 2097//924 2145//925 +f 2097//924 2130//1225 2096//1733 +f 2129//95 2086//850 2115//95 +f 2086//850 2129//95 2117//850 +f 2146//1658 2118//1658 2117//1658 +f 2118//1658 2146//1658 2147//1658 +f 2118//850 2119//95 2085//850 +f 2119//95 2118//850 2148//95 +f 2120//928 2119//928 2149//928 +f 2119//928 2120//928 2087//928 +f 2120//930 2122//930 2089//930 +f 2122//930 2120//930 2150//930 +f 2151//1570 2121//1570 2122//1570 +f 2121//1570 2151//1570 2152//1570 +f 2153//1321 2088//1321 2121//1321 +f 2088//1321 2153//1321 2123//1321 +f 2123//1734 2124//1660 2090//1660 +f 2124//1660 2123//1734 2154//1734 +f 2154//934 2125//1228 2124//1228 +f 2125//1228 2154//934 2155//934 +f 2156//1735 2126//936 2125//937 +f 2126//936 2156//1735 2128//938 +f 2128//938 2156//1735 2127//935 +f 2127//935 2156//1735 2157//940 +f 2127//935 2157//940 2131//937 +f 2131//1662 2157//1662 2158//1662 +f 2130//1663 2117//1658 2129//1663 +f 2117//1658 2130//1663 2146//1658 +f 2159//1664 2131//1664 2158//1664 +f 2131//1664 2159//1664 2132//1664 +f 2160//1398 2132//1399 2159//1398 +f 2132//1399 2160//1398 2133//1399 +f 2160//1400 2134//1401 2133//1401 +f 2134//1401 2160//1400 2161//1400 +f 2161//946 2135//947 2134//947 +f 2135//947 2161//946 2162//946 +f 2162//948 2136//203 2135//203 +f 2136//203 2162//948 2163//948 +f 2136//215 2164//949 2137//215 +f 2164//949 2136//215 2163//949 +f 2137//1665 2165//1666 2138//1665 +f 2165//1666 2137//1665 2164//1666 +f 2138//1085 2166//953 2139//1085 +f 2166//953 2138//1085 2165//953 +f 2139//954 2167//954 2140//954 +f 2167//954 2139//954 2166//954 +f 2140//990 2168//990 2141//990 +f 2168//990 2140//990 2167//990 +f 2141//1700 2169//958 2142//958 +f 2169//958 2141//1700 2170//1277 +f 2169//958 2170//1277 2171//1700 +f 2170//967 2141//967 2168//967 +f 2172//961 2171//1700 2173//958 +f 2171//1700 2172//961 2169//958 +f 2142//1736 
2174//964 2143//964 +f 2174//964 2142//1736 2172//965 +f 2174//964 2172//965 2173//962 +f 2172//1737 2142//1737 2169//1737 +f 2175//968 2173//962 2176//964 +f 2173//962 2175//968 2174//964 +f 2143//969 2177//969 2144//969 +f 2177//969 2143//969 2174//969 +f 2144//970 2178//970 2145//970 +f 2178//970 2144//970 2177//970 +f 2145//971 2146//1668 2130//971 +f 2146//1668 2145//971 2178//1668 +f 2178//1668 2147//973 2146//1668 +f 2147//973 2178//1668 2179//973 +f 2147//1658 2148//1663 2118//1658 +f 2148//1663 2147//1658 2180//1663 +f 2149//976 2148//976 2181//976 +f 2148//976 2149//976 2119//976 +f 2182//87 2120//87 2149//87 +f 2120//87 2182//87 2150//87 +f 2183//977 2122//977 2150//977 +f 2122//977 2183//977 2151//977 +f 2184//1320 2152//1337 2151//1413 +f 2152//1337 2184//1320 2185//1320 +f 2152//1738 2153//1415 2121//1415 +f 2153//1415 2152//1738 2186//1738 +f 2187//1629 2123//1629 2153//1629 +f 2123//1629 2187//1629 2154//1629 +f 2188//858 2154//858 2187//858 +f 2154//858 2188//858 2155//858 +f 2155//980 2156//980 2125//980 +f 2156//980 2155//980 2189//980 +f 2190//1664 2157//1664 2156//1664 +f 2157//1664 2190//1664 2191//1664 +f 2191//1664 2158//1664 2157//1664 +f 2158//1664 2191//1664 2159//1664 +f 2192//1669 2159//1398 2191//1578 +f 2159//1398 2192//1669 2160//1398 +f 2192//1420 2161//1400 2160//1400 +f 2161//1400 2192//1420 2193//1579 +f 2193//946 2162//946 2161//946 +f 2162//946 2193//946 2194//946 +f 2194//948 2163//948 2162//948 +f 2163//948 2194//948 2195//984 +f 2163//949 2196//949 2164//949 +f 2196//949 2163//949 2195//1239 +f 2164//1666 2197//1666 2165//1666 +f 2197//1666 2164//1666 2196//1666 +f 2165//953 2198//1670 2166//953 +f 2198//1670 2165//953 2197//1671 +f 2167//954 2198//988 2199//1672 +f 2198//988 2167//954 2166//954 +f 2168//990 2199//990 2170//990 +f 2199//990 2168//990 2167//990 +f 2170//990 2200//990 2171//990 +f 2200//990 2170//990 2199//990 +f 2171//991 2201//991 2173//991 +f 2201//991 2171//991 2202//991 +f 2173//993 2203//993 2176//993 +f 
2203//993 2173//993 2201//993 +f 2204//969 2176//969 2205//969 +f 2176//969 2204//969 2175//969 +f 2177//969 2175//969 2204//969 +f 2175//969 2177//969 2174//969 +f 2178//970 2204//970 2179//970 +f 2204//970 2178//970 2177//970 +f 2179//973 2180//971 2147//973 +f 2180//971 2179//973 2206//971 +f 2181//1673 2180//1673 2207//1673 +f 2180//1673 2181//1673 2148//1673 +f 2208//95 2149//95 2181//95 +f 2149//95 2208//95 2182//95 +f 2209//996 2150//995 2182//995 +f 2150//995 2209//996 2183//996 +f 2210//930 2151//930 2183//930 +f 2151//930 2210//930 2184//930 +f 2184//1320 2211//1337 2185//1320 +f 2211//1337 2184//1320 2212//1337 +f 2213//1321 2152//1321 2185//1321 +f 2152//1321 2213//1321 2186//1321 +f 2214//1674 2153//1739 2186//1674 +f 2153//1739 2214//1674 2187//1739 +f 2215//999 2187//999 2214//999 +f 2187//999 2215//999 2188//999 +f 2216//937 2155//937 2188//937 +f 2155//937 2216//937 2189//937 +f 2189//1675 2190//1740 2156//1740 +f 2190//1740 2189//1675 2217//1675 +f 2218//1430 2191//1578 2190//1431 +f 2191//1578 2218//1430 2192//1669 +f 2218//1432 2193//1579 2192//1420 +f 2193//1579 2218//1432 2219//165 +f 2219//947 2194//946 2193//946 +f 2194//946 2219//947 2220//947 +f 2220//1006 2195//984 2194//948 +f 2195//984 2220//1006 2221//1007 +f 2195//1239 2222//1008 2196//949 +f 2222//1008 2195//1239 2221//1009 +f 2196//1666 2223//1665 2197//1666 +f 2223//1665 2196//1666 2222//1665 +f 2197//1671 2224//1741 2198//1670 +f 2224//1741 2197//1671 2223//246 +f 2199//1672 2224//1011 2200//1012 +f 2224//1011 2199//1672 2198//988 +f 2200//1013 2202//1013 2171//1013 +f 2202//1013 2200//1013 2225//1013 +f 2202//958 2226//958 2201//958 +f 2226//958 2202//958 2227//958 +f 2201//964 2228//964 2203//964 +f 2228//964 2201//964 2226//964 +f 2176//1016 2229//1015 2205//1016 +f 2229//1015 2176//1016 2203//1015 +f 2179//970 2205//970 2206//970 +f 2205//970 2179//970 2204//970 +f 2206//1017 2207//1017 2180//1017 +f 2207//1017 2206//1017 2230//1017 +f 2231//1663 2181//1663 2207//1663 +f 
2181//1663 2231//1663 2208//1663 +f 2232//1019 2182//1019 2208//1019 +f 2182//1019 2232//1019 2209//1019 +f 2209//87 2210//87 2183//87 +f 2210//87 2209//87 2233//87 +f 2210//930 2212//930 2184//930 +f 2212//930 2210//930 2234//930 +f 2235//1444 2212//1588 2236//1678 +f 2212//1588 2235//1444 2211//1742 +f 2211//1321 2213//1321 2185//1321 +f 2213//1321 2211//1321 2237//1321 +f 2238//1629 2186//1629 2213//1629 +f 2186//1629 2238//1629 2214//1629 +f 2239//858 2240//1025 2241//860 +f 2241//1743 2214//1743 2238//1743 +f 2214//858 2241//860 2215//1680 +f 2215//1680 2241//860 2240//1025 +f 2242//1027 2188//1028 2215//1027 +f 2188//1028 2242//1027 2216//1028 +f 2243//1664 2189//1664 2216//1664 +f 2189//1664 2243//1664 2217//1664 +f 2217//1744 2218//1455 2190//1456 +f 2218//1455 2217//1744 2244//1591 +f 2244//1681 2219//1459 2218//1460 +f 2219//1459 2244//1681 2245//1595 +f 2220//1036 2245//1036 2246//1036 +f 2245//1036 2220//1036 2219//1036 +f 2221//1254 2246//1038 2247//1039 +f 2246//1038 2221//1254 2220//1257 +f 2222//1041 2247//1598 2248//1043 +f 2247//1598 2222//1041 2221//1044 +f 2223//1684 2248//1684 2249//1684 +f 2248//1684 2223//1684 2222//1684 +f 2224//1046 2249//1047 2250//1047 +f 2249//1047 2224//1046 2223//1046 +f 2200//1048 2250//1260 2225//1260 +f 2250//1260 2200//1048 2224//1048 +f 2225//990 2227//990 2202//990 +f 2227//990 2225//990 2251//990 +f 2226//1051 2252//1745 2253//1745 +f 2252//1745 2226//1051 2227//1051 +f 2228//1053 2253//1053 2254//1053 +f 2253//1053 2228//1053 2226//1053 +f 2203//969 2255//969 2229//969 +f 2255//969 2203//969 2228//969 +f 2205//1054 2230//1054 2206//1054 +f 2230//1054 2205//1054 2229//1054 +f 2256//971 2207//971 2230//971 +f 2207//971 2256//971 2231//971 +f 2257//1687 2208//1687 2231//1687 +f 2208//1687 2257//1687 2232//1687 +f 2232//95 2233//95 2209//95 +f 2233//95 2232//95 2258//95 +f 2233//87 2234//849 2210//87 +f 2234//849 2233//87 2259//849 +f 2236//1058 2234//1058 2260//1261 +f 2234//1058 2236//1058 2212//1059 +f 
2261//1472 2262//1264 2263//1267 +f 2262//1264 2261//1472 2264//1063 +f 2264//1063 2261//1472 2265//1474 +f 2264//1063 2265//1474 2266//1065 +f 2266//1065 2265//1474 2267//1065 +f 2266//1065 2267//1065 2268//1066 +f 2268//1066 2267//1065 2269//1066 +f 2268//1066 2269//1066 2270//1071 +f 2270//1071 2269//1066 2271//1068 +f 2270//1071 2271//1068 2272//1266 +f 2272//1266 2271//1068 2273//1689 +f 2272//1266 2273//1689 2274//1070 +f 2274//1070 2273//1689 2275//1066 +f 2274//1070 2275//1066 2276//1071 +f 2276//1071 2275//1066 2277//1071 +f 2276//1071 2277//1071 2278//1065 +f 2278//1065 2277//1071 2235//1065 +f 2278//1065 2235//1065 2279//1746 +f 2279//1746 2235//1065 2236//1073 +f 2279//1746 2236//1073 2280//1074 +f 2280//1074 2236//1073 2260//1069 +f 2280//1074 2260//1069 2281//1267 +f 2235//1476 2237//1747 2211//1692 +f 2237//1747 2235//1476 2277//1477 +f 2237//1629 2238//1629 2213//1629 +f 2238//1629 2237//1629 2241//1629 +f 2275//1078 2239//1079 2241//1079 +f 2239//1079 2275//1078 2273//1078 +f 2282//937 2283//1271 2239//1735 +f 2239//1748 2215//1748 2240//1748 +f 2215//937 2239//1735 2242//935 +f 2242//935 2239//1735 2283//1271 +f 2284//1696 2216//1696 2242//1696 +f 2216//1696 2284//1696 2243//1695 +f 2285//1399 2217//1399 2243//1399 +f 2217//1399 2285//1399 2244//1399 +f 2244//1547 2286//1547 2245//1547 +f 2286//1547 2244//1547 2285//1547 +f 2245//947 2287//947 2246//947 +f 2287//947 2245//947 2286//947 +f 2246//203 2288//203 2247//203 +f 2288//203 2246//203 2287//203 +f 2247//215 2289//215 2248//215 +f 2289//215 2247//215 2288//215 +f 2289//1665 2249//1665 2248//1665 +f 2249//1665 2289//1665 2290//1665 +f 2290//1177 2250//1177 2249//1177 +f 2250//1177 2290//1177 2291//1177 +f 2250//954 2251//954 2225//954 +f 2251//954 2250//954 2291//954 +f 2227//1086 2292//1086 2252//1086 +f 2292//1086 2227//1086 2251//1086 +f 2253//967 2293//967 2294//967 +f 2293//1749 2253//958 2252//1750 +f 2293//1749 2252//1750 2295//956 +f 2296//958 2293//1749 2295//956 +f 2254//967 
2297//967 2298//967 +f 2297//1407 2254//964 2253//1232 +f 2297//1407 2253//1232 2294//1736 +f 2293//964 2297//1407 2294//1736 +f 2255//1281 2254//1281 2299//1281 +f 2254//1281 2255//1281 2228//1281 +f 2229//970 2256//970 2230//970 +f 2256//970 2229//970 2255//970 +f 2231//1751 2300//1098 2257//1098 +f 2300//1098 2231//1751 2256//1751 +f 2257//1663 2258//1663 2232//1663 +f 2258//1663 2257//1663 2301//1663 +f 2302//95 2233//95 2258//95 +f 2233//95 2302//95 2259//95 +f 2260//1099 2259//1100 2281//1101 +f 2259//1100 2260//1099 2234//1102 +f 2280//1701 2303//1702 2279//1701 +f 2303//1702 2280//1701 2302//1702 +f 2279//1703 2304//1284 2278//1285 +f 2304//1284 2279//1703 2303//1108 +f 2305//1288 2278//1109 2304//1288 +f 2278//1109 2305//1288 2276//1109 +f 2297//1113 2276//1114 2305//1113 +f 2276//1114 2297//1113 2274//1114 +f 2293//1115 2274//1115 2297//1115 +f 2274//1115 2293//1115 2272//1115 +f 2296//1118 2272//1118 2293//1118 +f 2272//1118 2296//1118 2270//1118 +f 2306//1291 2270//1121 2296//1291 +f 2270//1121 2306//1291 2268//1121 +f 2307//1704 2268//1293 2306//1124 +f 2268//1293 2307//1704 2266//1125 +f 2308//1752 2266//1519 2307//1128 +f 2266//1519 2308//1752 2264//1129 +f 2309//1132 2264//1297 2308//1132 +f 2264//1297 2309//1132 2262//1297 +f 2310//1753 2262//1134 2309//1706 +f 2262//1134 2310//1753 2263//1707 +f 2311//1138 2263//1708 2310//1140 +f 2263//1708 2311//1138 2261//1141 +f 2312//1302 2261//1303 2311//1304 +f 2261//1303 2312//1302 2265//1305 +f 2313//1522 2265//1523 2312//1524 +f 2265//1523 2313//1522 2267//1525 +f 2269//1526 2313//1620 2314//1528 +f 2313//1620 2269//1526 2267//1529 +f 2271//1712 2314//1713 2282//1713 +f 2314//1713 2271//1712 2269//1712 +f 2273//1154 2282//1309 2239//1309 +f 2282//1309 2273//1154 2271//1154 +f 2277//1754 2241//1714 2237//1714 +f 2241//1714 2277//1754 2275//1754 +f 2281//1715 2302//1157 2280//1717 +f 2302//1157 2281//1715 2259//1311 +f 2282//1664 2315//1664 2283//1664 +f 2315//1664 2282//1664 2314//1664 +f 2315//1664 
2242//1664 2283//1664 +f 2242//1664 2315//1664 2284//1664 +f 2316//1535 2243//1535 2284//1535 +f 2243//1535 2316//1535 2285//1535 +f 2317//1536 2285//1536 2316//1536 +f 2285//1536 2317//1536 2286//1536 +f 2286//1165 2318//1165 2287//1165 +f 2318//1165 2286//1165 2317//1165 +f 2287//1314 2319//1314 2288//1314 +f 2319//1314 2287//1314 2318//1314 +f 2288//1538 2320//1719 2289//1538 +f 2320//1719 2288//1538 2319//1719 +f 2289//1755 2321//1721 2290//1720 +f 2321//1721 2289//1755 2320//1170 +f 2290//1171 2322//1317 2291//1171 +f 2322//1317 2290//1171 2321//1317 +f 2291//1173 2292//1173 2251//1173 +f 2292//1173 2291//1173 2322//1173 +f 2252//990 2323//990 2295//990 +f 2323//990 2252//990 2292//990 +f 2323//990 2296//990 2295//990 +f 2296//990 2323//990 2306//990 +f 2298//969 2305//969 2324//969 +f 2305//969 2298//969 2297//969 +f 2299//969 2298//969 2324//969 +f 2298//969 2299//969 2254//969 +f 2256//1175 2299//1175 2300//1175 +f 2299//1175 2256//1175 2255//1175 +f 2300//971 2301//971 2257//971 +f 2301//971 2300//971 2325//971 +f 2303//1663 2258//1663 2301//1663 +f 2258//1663 2303//1663 2302//1663 +f 2304//971 2301//971 2325//971 +f 2301//971 2304//971 2303//971 +f 2324//970 2304//970 2325//970 +f 2304//970 2324//970 2305//970 +f 2326//954 2306//954 2323//954 +f 2306//954 2326//954 2307//954 +f 2308//1177 2326//1177 2327//1177 +f 2326//1177 2308//1177 2307//1177 +f 2309//950 2327//950 2328//950 +f 2327//950 2309//950 2308//950 +f 2310//1722 2328//1722 2329//1722 +f 2328//1722 2310//1722 2309//1722 +f 2330//1178 2310//203 2329//1178 +f 2310//203 2330//1178 2311//203 +f 2331//947 2311//947 2330//947 +f 2311//947 2331//947 2312//947 +f 2332//1547 2312//1547 2331//1547 +f 2312//1547 2332//1547 2313//1547 +f 2314//1399 2332//1399 2315//1399 +f 2332//1399 2314//1399 2313//1399 +f 2332//1399 2284//1398 2315//1399 +f 2284//1398 2332//1399 2316//1398 +f 2332//1547 2317//1401 2316//1401 +f 2317//1401 2332//1547 2331//1547 +f 2331//947 2318//947 2317//947 +f 2318//947 2331//947 
2330//947 +f 2330//1178 2319//203 2318//203 +f 2319//203 2330//1178 2329//1178 +f 2329//1722 2320//1722 2319//1722 +f 2320//1722 2329//1722 2328//1722 +f 2320//950 2327//950 2321//950 +f 2327//950 2320//950 2328//950 +f 2321//1177 2326//1177 2322//1085 +f 2326//1177 2321//1177 2327//1177 +f 2292//954 2326//954 2323//954 +f 2326//954 2292//954 2322//954 +f 2300//970 2324//970 2325//970 +f 2324//970 2300//970 2299//970 +f 2333//1318 2334//1319 2335//1319 +f 2334//1319 2333//1318 2336//1318 +f 2335//1319 2337//1320 2338//1320 +f 2337//1320 2335//1319 2334//1319 +f 2339//815 2335//816 2340//816 +f 2335//816 2339//815 2333//815 +f 2341//1320 2336//1318 2333//1318 +f 2336//1318 2341//1320 2342//1320 +f 2343//1321 2334//1321 2336//1321 +f 2334//1321 2343//1321 2344//1321 +f 2337//1322 2345//1548 2338//1324 +f 2345//1548 2337//1322 2346//1628 +f 2340//816 2338//930 2347//930 +f 2338//930 2340//816 2335//816 +f 2344//1321 2337//1321 2334//1321 +f 2337//1321 2344//1321 2348//1321 +f 2349//824 2340//824 2350//824 +f 2340//824 2349//824 2339//824 +f 2351//822 2333//815 2339//815 +f 2333//815 2351//822 2341//822 +f 2352//1723 2341//1326 2353//1723 +f 2341//1326 2352//1723 2342//1326 +f 2354//1321 2336//1321 2342//1321 +f 2336//1321 2354//1321 2343//1321 +f 2355//827 2344//827 2343//827 +f 2344//827 2355//827 2356//827 +f 2357//828 2358//829 2359//1185 +f 2358//829 2357//828 2345//831 +f 2358//829 2345//831 2360//832 +f 2360//832 2345//831 2346//833 +f 2360//832 2346//833 2361//833 +f 2361//833 2346//833 2362//834 +f 2361//833 2362//834 2363//834 +f 2363//834 2362//834 2364//1188 +f 2363//834 2364//1188 2365//836 +f 2365//836 2364//1188 2366//1329 +f 2365//836 2366//1329 2367//828 +f 2367//828 2366//1329 2368//828 +f 2367//828 2368//828 2369//834 +f 2369//834 2368//828 2370//835 +f 2369//834 2370//835 2371//835 +f 2371//835 2370//835 2372//833 +f 2371//835 2372//833 2373//833 +f 2373//833 2372//833 2374//839 +f 2373//833 2374//839 2375//1633 +f 2375//1633 2374//839 2376//1332 +f 
2375//1633 2376//1332 2377//1187 +f 2377//1187 2376//1332 2378//830 +f 2338//1191 2357//1192 2347//1191 +f 2357//1192 2338//1191 2345//1192 +f 2348//1336 2346//1756 2337//1334 +f 2346//1756 2348//1336 2362//1336 +f 2350//824 2347//849 2379//849 +f 2347//849 2350//824 2340//824 +f 2356//827 2348//827 2344//827 +f 2348//827 2356//827 2380//827 +f 2381//850 2349//850 2350//850 +f 2349//850 2381//850 2382//850 +f 2383//87 2339//824 2349//824 +f 2339//824 2383//87 2351//87 +f 2353//852 2351//852 2384//852 +f 2351//852 2353//852 2341//852 +f 2353//1320 2385//1320 2352//1320 +f 2385//1320 2353//1320 2386//1320 +f 2387//1338 2342//1338 2352//1338 +f 2342//1338 2387//1338 2354//1338 +f 2388//827 2343//827 2354//827 +f 2343//827 2388//827 2355//827 +f 2389//1340 2355//225 2388//1341 +f 2355//225 2389//1340 2356//1342 +f 2356//1342 2389//1340 2380//1340 +f 2380//1340 2389//1340 2390//1343 +f 2380//1340 2390//1343 2391//1341 +f 2391//1757 2390//1757 2392//1757 +f 2393//862 2360//863 2394//862 +f 2360//863 2393//862 2358//863 +f 2379//1344 2358//1554 2393//1199 +f 2358//1554 2379//1344 2359//867 +f 2347//1724 2359//1201 2379//1346 +f 2359//1201 2347//1724 2357//871 +f 2380//1758 2362//1555 2348//874 +f 2362//1555 2380//1758 2364//875 +f 2391//1556 2364//1351 2380//1556 +f 2364//1351 2391//1556 2366//1351 +f 2395//1353 2366//1352 2391//1353 +f 2366//1352 2395//1353 2368//1352 +f 2396//1759 2368//1205 2395//1759 +f 2368//1205 2396//1759 2370//1205 +f 2397//1559 2370//1760 2396//1356 +f 2370//1760 2397//1559 2372//1357 +f 2372//1562 2398//1562 2374//1562 +f 2398//1562 2372//1562 2397//1562 +f 2374//1360 2399//1728 2376//1360 +f 2399//1728 2374//1360 2398//891 +f 2376//1729 2400//1730 2378//894 +f 2400//1730 2376//1729 2399//895 +f 2378//1212 2401//1213 2377//898 +f 2401//1213 2378//1212 2400//899 +f 2377//1761 2402//901 2375//902 +f 2402//901 2377//1761 2401//903 +f 2375//1762 2403//1367 2373//1762 +f 2403//1367 2375//1762 2402//1367 +f 2371//1564 2403//1763 2404//1764 +f 
2403//1763 2371//1564 2373//1371 +f 2369//1373 2404//1373 2405//1373 +f 2404//1373 2369//1373 2371//1373 +f 2367//1220 2405//912 2406//912 +f 2405//912 2367//1220 2369//1220 +f 2365//915 2406//1375 2407//1375 +f 2406//1375 2365//915 2367//915 +f 2363//1376 2407//1377 2408//1378 +f 2407//1377 2363//1376 2365//1379 +f 2361//1765 2408//1383 2409//1382 +f 2408//1383 2361//1765 2363//1766 +f 2394//1568 2361//1385 2409//1386 +f 2361//1385 2394//1568 2360//1767 +f 2393//95 2350//850 2379//95 +f 2350//850 2393//95 2381//850 +f 2410//927 2382//927 2381//927 +f 2382//927 2410//927 2411//1226 +f 2382//850 2383//95 2349//850 +f 2383//95 2382//850 2412//95 +f 2384//928 2383//928 2413//928 +f 2383//928 2384//928 2351//928 +f 2384//930 2386//930 2353//930 +f 2386//930 2384//930 2414//930 +f 2415//1390 2385//1570 2386//1570 +f 2385//1570 2415//1390 2416//1390 +f 2417//1321 2352//1321 2385//1321 +f 2352//1321 2417//1321 2387//1321 +f 2387//933 2388//1768 2354//1768 +f 2388//1768 2387//933 2418//933 +f 2418//1391 2389//1572 2388//1572 +f 2389//1572 2418//1391 2419//1391 +f 2420//1392 2390//1393 2389//1394 +f 2390//1393 2420//1392 2392//1395 +f 2392//1395 2420//1392 2391//1392 +f 2391//1392 2420//1392 2421//1396 +f 2391//1392 2421//1396 2395//1394 +f 2395//861 2421//861 2422//861 +f 2394//941 2381//927 2393//941 +f 2381//927 2394//941 2410//927 +f 2423//942 2395//942 2422//942 +f 2395//942 2423//942 2396//942 +f 2424//1399 2396//1399 2423//1399 +f 2396//1399 2424//1399 2397//1399 +f 2424//1400 2398//1401 2397//1401 +f 2398//1401 2424//1400 2425//1400 +f 2425//946 2399//947 2398//947 +f 2399//947 2425//946 2426//946 +f 2426//948 2400//203 2399//203 +f 2400//203 2426//948 2427//948 +f 2400//215 2428//949 2401//215 +f 2428//949 2400//215 2427//949 +f 2401//950 2429//951 2402//950 +f 2429//951 2401//950 2428//951 +f 2402//1486 2430//1403 2403//1486 +f 2430//1403 2402//1486 2429//1403 +f 2403//1404 2431//1404 2404//1404 +f 2431//1404 2403//1404 2430//1404 +f 2404//1405 2432//1405 
2405//1405 +f 2432//1405 2404//1405 2431//1405 +f 2405//1700 2433//958 2406//958 +f 2433//958 2405//1700 2434//1277 +f 2433//958 2434//1277 2435//1700 +f 2434//967 2405//967 2432//967 +f 2436//961 2435//1700 2437//958 +f 2435//1700 2436//961 2433//958 +f 2406//1736 2438//964 2407//964 +f 2438//964 2406//1736 2436//965 +f 2438//964 2436//965 2437//1407 +f 2436//1408 2406//1408 2433//1408 +f 2439//968 2437//1407 2440//964 +f 2437//1407 2439//968 2438//964 +f 2407//1409 2441//1409 2408//1409 +f 2441//1409 2407//1409 2438//1409 +f 2408//1410 2442//1410 2409//1410 +f 2442//1410 2408//1410 2441//1410 +f 2409//1411 2410//1576 2394//1411 +f 2410//1576 2409//1411 2442//1576 +f 2442//1576 2411//1412 2410//1576 +f 2411//1412 2442//1576 2443//1412 +f 2411//1226 2412//974 2382//927 +f 2412//974 2411//1226 2444//974 +f 2413//976 2412//976 2445//976 +f 2412//976 2413//976 2383//976 +f 2446//87 2384//87 2413//87 +f 2384//87 2446//87 2414//87 +f 2447//977 2386//977 2414//977 +f 2386//977 2447//977 2415//977 +f 2448//1320 2416//1337 2415//1413 +f 2416//1337 2448//1320 2449//1320 +f 2416//1738 2417//1415 2385//1415 +f 2417//1415 2416//1738 2450//1414 +f 2451//827 2387//827 2417//827 +f 2387//827 2451//827 2418//827 +f 2452//1341 2418//1341 2451//1341 +f 2418//1341 2452//1341 2419//1341 +f 2419//1769 2420//1416 2389//1416 +f 2420//1416 2419//1769 2453//1769 +f 2454//942 2421//942 2420//942 +f 2421//942 2454//942 2455//942 +f 2455//942 2422//942 2421//942 +f 2422//942 2455//942 2423//942 +f 2456//1669 2423//1399 2455//1770 +f 2423//1399 2456//1669 2424//1399 +f 2456//1420 2425//1400 2424//1400 +f 2425//1400 2456//1420 2457//1579 +f 2457//946 2426//946 2425//946 +f 2426//946 2457//946 2458//946 +f 2458//948 2427//948 2426//948 +f 2427//948 2458//948 2459//984 +f 2427//949 2460//949 2428//949 +f 2460//949 2427//949 2459//1239 +f 2428//951 2461//951 2429//951 +f 2461//951 2428//951 2460//951 +f 2429//1403 2462//1422 2430//1403 +f 2462//1422 2429//1403 2461//1771 +f 2431//1404 2462//1424 
2463//1772 +f 2462//1424 2431//1404 2430//1404 +f 2432//1405 2463//1405 2434//1405 +f 2463//1405 2432//1405 2431//1405 +f 2434//1405 2464//1405 2435//1405 +f 2464//1405 2434//1405 2463//1405 +f 2435//991 2465//991 2437//991 +f 2465//991 2435//991 2466//991 +f 2437//993 2467//993 2440//993 +f 2467//993 2437//993 2465//993 +f 2468//1409 2440//1409 2469//1409 +f 2440//1409 2468//1409 2439//1409 +f 2441//1409 2439//1409 2468//1409 +f 2439//1409 2441//1409 2438//1409 +f 2442//1410 2468//1410 2443//1410 +f 2468//1410 2442//1410 2441//1410 +f 2443//1412 2444//1411 2411//1412 +f 2444//1411 2443//1412 2470//1411 +f 2445//1773 2444//994 2471//1773 +f 2444//994 2445//1773 2412//994 +f 2472//95 2413//95 2445//95 +f 2413//95 2472//95 2446//95 +f 2473//996 2414//996 2446//996 +f 2414//996 2473//996 2447//996 +f 2474//930 2415//930 2447//930 +f 2415//930 2474//930 2448//930 +f 2448//1320 2475//1337 2449//1320 +f 2475//1337 2448//1320 2476//1337 +f 2477//1321 2416//1321 2449//1321 +f 2416//1321 2477//1321 2450//1321 +f 2478//1243 2417//1243 2450//1243 +f 2417//1243 2478//1243 2451//1243 +f 2479//1428 2451//1428 2478//1428 +f 2451//1428 2479//1428 2452//1428 +f 2480//1394 2419//1394 2452//1394 +f 2419//1394 2480//1394 2453//1394 +f 2453//1001 2454//1001 2420//1001 +f 2454//1001 2453//1001 2481//1001 +f 2482//1583 2455//1770 2454//1431 +f 2455//1770 2482//1583 2456//1669 +f 2482//1432 2457//1579 2456//1420 +f 2457//1579 2482//1432 2483//165 +f 2483//947 2458//946 2457//946 +f 2458//946 2483//947 2484//947 +f 2484//1006 2459//984 2458//948 +f 2459//984 2484//1006 2485//1007 +f 2459//1239 2486//1008 2460//949 +f 2486//1008 2459//1239 2485//1009 +f 2460//951 2487//950 2461//951 +f 2487//950 2460//951 2486//950 +f 2461//1771 2488//1774 2462//1422 +f 2488//1774 2461//1771 2487//176 +f 2463//1772 2488//1775 2464//1436 +f 2488//1775 2463//1772 2462//1424 +f 2464//1437 2466//1438 2435//1437 +f 2466//1438 2464//1437 2489//1438 +f 2466//958 2490//958 2465//958 +f 2490//958 2466//958 2491//958 
+f 2465//964 2492//964 2467//964 +f 2492//964 2465//964 2490//964 +f 2440//1440 2493//1440 2469//1440 +f 2493//1440 2440//1440 2467//1440 +f 2443//1410 2469//1410 2470//1410 +f 2469//1410 2443//1410 2468//1410 +f 2470//1585 2471//1776 2444//1585 +f 2471//1776 2470//1585 2494//1776 +f 2495//941 2445//941 2471//941 +f 2445//941 2495//941 2472//941 +f 2496//1019 2446//1443 2472//1443 +f 2446//1443 2496//1019 2473//1019 +f 2473//87 2474//87 2447//87 +f 2474//87 2473//87 2497//87 +f 2474//930 2476//930 2448//930 +f 2476//930 2474//930 2498//930 +f 2499//1777 2476//1588 2500//1778 +f 2476//1588 2499//1777 2475//1447 +f 2475//1321 2477//1321 2449//1321 +f 2477//1321 2475//1321 2501//1321 +f 2502//827 2450//827 2477//827 +f 2450//827 2502//827 2478//827 +f 2503//1341 2504//1448 2505//1449 +f 2505//1779 2478//1779 2502//1779 +f 2478//1341 2505//1449 2479//1451 +f 2479//1451 2505//1449 2504//1448 +f 2506//1452 2452//1780 2479//1452 +f 2452//1780 2506//1452 2480//1780 +f 2507//942 2453//942 2480//942 +f 2453//942 2507//942 2481//942 +f 2481//1455 2482//1781 2454//1456 +f 2482//1781 2481//1455 2508//1591 +f 2508//1458 2483//1782 2482//1460 +f 2483//1782 2508//1458 2509//1595 +f 2484//1036 2509//1036 2510//1036 +f 2509//1036 2484//1036 2483//1036 +f 2485//1037 2510//1037 2511//1039 +f 2510//1037 2485//1037 2484//1257 +f 2486//1462 2511//1598 2512//1783 +f 2511//1598 2486//1462 2485//1044 +f 2487//1045 2512//1045 2513//1045 +f 2512//1045 2487//1045 2486//1045 +f 2488//1784 2513//1464 2514//1465 +f 2513//1464 2488//1784 2487//1464 +f 2464//1466 2514//1467 2489//1467 +f 2514//1467 2464//1466 2488//1466 +f 2489//1405 2491//1405 2466//1405 +f 2491//1405 2489//1405 2515//1405 +f 2490//1051 2516//1051 2517//1051 +f 2516//1051 2490//1051 2491//1051 +f 2492//1052 2517//1053 2518//1053 +f 2517//1053 2492//1052 2490//1052 +f 2467//1409 2519//1409 2493//1409 +f 2519//1409 2467//1409 2492//1785 +f 2469//1469 2494//1469 2470//1469 +f 2494//1469 2469//1469 2493//1469 +f 2520//1411 2471//1411 
2494//1411 +f 2471//1411 2520//1411 2495//1411 +f 2521//1056 2472//1057 2495//1057 +f 2472//1057 2521//1056 2496//1056 +f 2496//95 2497//95 2473//95 +f 2497//95 2496//95 2522//95 +f 2497//87 2498//849 2474//87 +f 2498//849 2497//87 2523//849 +f 2500//1471 2498//1059 2524//1058 +f 2498//1059 2500//1471 2476//1059 +f 2525//1472 2526//1264 2527//1062 +f 2526//1264 2525//1472 2528//1063 +f 2528//1063 2525//1472 2529//1064 +f 2528//1063 2529//1064 2530//1065 +f 2530//1065 2529//1064 2531//1065 +f 2530//1065 2531//1065 2532//1066 +f 2532//1066 2531//1065 2533//1066 +f 2532//1066 2533//1066 2534//1071 +f 2534//1071 2533//1066 2535//1069 +f 2534//1071 2535//1069 2536//1266 +f 2536//1266 2535//1069 2537//1070 +f 2536//1266 2537//1070 2538//1689 +f 2538//1689 2537//1070 2539//1066 +f 2538//1689 2539//1066 2540//1071 +f 2540//1071 2539//1066 2541//1071 +f 2540//1071 2541//1071 2542//1065 +f 2542//1065 2541//1071 2499//1065 +f 2542//1065 2499//1065 2543//1072 +f 2543//1072 2499//1065 2500//1265 +f 2543//1072 2500//1265 2544//1786 +f 2544//1786 2500//1265 2524//1069 +f 2544//1786 2524//1069 2545//1267 +f 2499//1477 2501//1691 2475//1476 +f 2501//1691 2499//1477 2541//1477 +f 2501//827 2502//827 2477//827 +f 2502//827 2501//827 2505//827 +f 2539//1478 2503//1478 2505//1478 +f 2503//1478 2539//1478 2537//1478 +f 2546//1394 2547//1480 2503//1481 +f 2503//1787 2479//1787 2504//1787 +f 2479//1394 2503//1481 2506//1483 +f 2506//1483 2503//1481 2547//1480 +f 2548//1274 2480//1274 2506//1081 +f 2480//1274 2548//1274 2507//1274 +f 2549//1399 2481//1399 2507//1399 +f 2481//1399 2549//1399 2508//1399 +f 2508//1547 2550//1547 2509//1547 +f 2550//1547 2508//1547 2549//1547 +f 2509//947 2551//947 2510//947 +f 2551//947 2509//947 2550//947 +f 2510//203 2552//203 2511//203 +f 2552//203 2510//203 2551//203 +f 2511//215 2553//215 2512//215 +f 2553//215 2511//215 2552//215 +f 2553//950 2513//950 2512//950 +f 2513//950 2553//950 2554//950 +f 2554//1402 2514//1402 2513//1402 +f 2514//1402 
2554//1402 2555//1402 +f 2514//1404 2515//1404 2489//1404 +f 2515//1404 2514//1404 2555//1404 +f 2491//1488 2556//1488 2516//1488 +f 2556//1488 2491//1488 2515//1488 +f 2517//1490 2557//1788 2558//1492 +f 2557//1788 2517//1490 2516//1575 +f 2557//1788 2516//1575 2559//1091 +f 2560//958 2557//1788 2559//1091 +f 2518//967 2561//967 2562//967 +f 2561//965 2518//964 2517//1789 +f 2561//965 2517//1789 2558//1736 +f 2557//964 2561//965 2558//1736 +f 2519//1497 2518//1497 2563//1497 +f 2518//1497 2519//1497 2492//1497 +f 2493//1410 2520//1410 2494//1410 +f 2520//1410 2493//1410 2519//1410 +f 2495//1499 2564//1499 2521//1499 +f 2564//1499 2495//1499 2520//1499 +f 2521//941 2522//941 2496//941 +f 2522//941 2521//941 2565//941 +f 2566//95 2497//95 2522//95 +f 2497//95 2566//95 2523//95 +f 2524//1611 2523//1100 2545//1101 +f 2523//1100 2524//1611 2498//1102 +f 2544//1283 2567//1103 2543//1283 +f 2567//1103 2544//1283 2566//1103 +f 2543//1790 2568//1503 2542//1504 +f 2568//1503 2543//1790 2567//1791 +f 2569//1792 2542//1793 2568//1505 +f 2542//1793 2569//1792 2540//1507 +f 2561//1794 2540//1508 2569//1508 +f 2540//1508 2561//1794 2538//1795 +f 2557//1616 2538//1115 2561//1616 +f 2538//1115 2557//1616 2536//1115 +f 2560//1119 2536//1118 2557//1119 +f 2536//1118 2560//1119 2534//1118 +f 2570//1510 2534//1510 2560//1510 +f 2534//1510 2570//1510 2532//1510 +f 2571//1796 2532//1797 2570//1798 +f 2532//1797 2571//1796 2530//1799 +f 2572//1516 2530//1517 2571//1518 +f 2530//1517 2572//1516 2528//1519 +f 2573//1132 2528//1131 2572//1132 +f 2528//1131 2573//1132 2526//1133 +f 2574//1134 2526//1135 2573//1800 +f 2526//1135 2574//1134 2527//1137 +f 2575//1300 2527//1139 2574//1140 +f 2527//1139 2575//1300 2525//1141 +f 2576//1142 2525//1143 2575//1142 +f 2525//1143 2576//1142 2529//1143 +f 2577//1522 2529//1523 2576//1524 +f 2529//1523 2577//1522 2531//1525 +f 2533//1801 2577//1620 2578//1802 +f 2577//1620 2533//1801 2531//1529 +f 2535//1308 2578//1621 2546//1621 +f 2578//1621 2535//1308 
2533//1308 +f 2537//1531 2546//1531 2503//1531 +f 2546//1531 2537//1531 2535//1531 +f 2541//1155 2505//1155 2501//1155 +f 2505//1155 2541//1155 2539//1155 +f 2545//1715 2566//1803 2544//1804 +f 2566//1803 2545//1715 2523//1159 +f 2546//942 2579//942 2547//942 +f 2579//942 2546//942 2578//942 +f 2579//942 2506//942 2547//942 +f 2506//942 2579//942 2548//942 +f 2580//1535 2507//1624 2548//1535 +f 2507//1624 2580//1535 2549//1624 +f 2581//1537 2549//1536 2580//1537 +f 2549//1536 2581//1537 2550//1536 +f 2550//1165 2582//1165 2551//1165 +f 2582//1165 2550//1165 2581//1165 +f 2551//1314 2583//1314 2552//1314 +f 2583//1314 2551//1314 2582//1314 +f 2552//1169 2584//1538 2553//1169 +f 2584//1538 2552//1169 2583//1538 +f 2553//1170 2585//1170 2554//1170 +f 2585//1170 2553//1170 2584//1170 +f 2554//1540 2586//1539 2555//1540 +f 2586//1539 2554//1540 2585//1539 +f 2555//1541 2556//1541 2515//1541 +f 2556//1541 2555//1541 2586//1541 +f 2516//1805 2587//1405 2559//1405 +f 2587//1405 2516//1805 2556//1405 +f 2587//1405 2560//1405 2559//1405 +f 2560//1405 2587//1405 2570//1405 +f 2562//1409 2569//1409 2588//1409 +f 2569//1409 2562//1409 2561//1409 +f 2563//1409 2562//1409 2588//1409 +f 2562//1409 2563//1409 2518//1409 +f 2520//1544 2563//1544 2564//1544 +f 2563//1544 2520//1544 2519//1544 +f 2564//1411 2565//1411 2521//1411 +f 2565//1411 2564//1411 2589//1411 +f 2567//941 2522//941 2565//941 +f 2522//941 2567//941 2566//941 +f 2568//1470 2565//1411 2589//1411 +f 2565//1411 2568//1470 2567//1470 +f 2588//1410 2568//1410 2589//1410 +f 2568//1410 2588//1410 2569//1410 +f 2590//1404 2570//1487 2587//1404 +f 2570//1487 2590//1404 2571//1487 +f 2572//1402 2590//1486 2591//1402 +f 2590//1486 2572//1402 2571//1402 +f 2573//950 2591//950 2592//950 +f 2591//950 2573//950 2572//950 +f 2574//215 2592//215 2593//215 +f 2592//215 2574//215 2573//215 +f 2594//1178 2574//203 2593//1178 +f 2574//203 2594//1178 2575//203 +f 2595//947 2575//947 2594//947 +f 2575//947 2595//947 2576//947 +f 
2596//1547 2576//1547 2595//1547 +f 2576//1547 2596//1547 2577//1547 +f 2578//1399 2596//1399 2579//1399 +f 2596//1399 2578//1399 2577//1399 +f 2596//1399 2548//1399 2579//1399 +f 2548//1399 2596//1399 2580//1399 +f 2596//1547 2581//1401 2580//1401 +f 2581//1401 2596//1547 2595//1547 +f 2595//947 2582//947 2581//947 +f 2582//947 2595//947 2594//947 +f 2594//1178 2583//203 2582//203 +f 2583//203 2594//1178 2593//1178 +f 2593//215 2584//1180 2583//1180 +f 2584//1180 2593//215 2592//215 +f 2584//950 2591//950 2585//950 +f 2591//950 2584//950 2592//950 +f 2585//1486 2590//1486 2586//1486 +f 2590//1486 2585//1486 2591//1402 +f 2556//1487 2590//1404 2587//1404 +f 2590//1404 2556//1487 2586//1487 +f 2564//1410 2588//1410 2589//1410 +f 2588//1410 2564//1410 2563//1410 +f 2597//1318 2598//1626 2599//1626 +f 2598//1626 2597//1318 2600//1318 +f 2599//1626 2601//1320 2602//1320 +f 2601//1320 2599//1626 2598//1626 +f 2603//815 2599//816 2604//816 +f 2599//816 2603//815 2597//815 +f 2605//1320 2600//1318 2597//1318 +f 2600//1318 2605//1320 2606//1320 +f 2607//1321 2598//1321 2600//1321 +f 2598//1321 2607//1321 2608//1321 +f 2601//1322 2609//1548 2602//1324 +f 2609//1548 2601//1322 2610//1628 +f 2604//816 2602//822 2611//822 +f 2602//822 2604//816 2599//816 +f 2608//1321 2601//1321 2598//1321 +f 2601//1321 2608//1321 2612//1321 +f 2613//824 2604//824 2614//824 +f 2604//824 2613//824 2603//824 +f 2615//822 2597//815 2603//815 +f 2597//815 2615//822 2605//822 +f 2616//1723 2605//1326 2617//1723 +f 2605//1326 2616//1723 2606//1326 +f 2618//1321 2600//1321 2606//1321 +f 2600//1321 2618//1321 2607//1321 +f 2619//827 2608//827 2607//827 +f 2608//827 2619//827 2620//827 +f 2621//828 2622//829 2623//1185 +f 2622//829 2621//828 2609//1186 +f 2622//829 2609//1186 2624//1327 +f 2624//1327 2609//1186 2610//833 +f 2624//1327 2610//833 2625//833 +f 2625//833 2610//833 2626//834 +f 2625//833 2626//834 2627//834 +f 2627//834 2626//834 2628//1188 +f 2627//834 2628//1188 2629//836 +f 2629//836 
2628//1188 2630//1329 +f 2629//836 2630//1329 2631//828 +f 2631//828 2630//1329 2632//1806 +f 2631//828 2632//1806 2633//838 +f 2633//838 2632//1806 2634//835 +f 2633//838 2634//835 2635//835 +f 2635//835 2634//835 2636//833 +f 2635//835 2636//833 2637//833 +f 2637//833 2636//833 2638//1632 +f 2637//833 2638//1632 2639//1633 +f 2639//1633 2638//1632 2640//1332 +f 2639//1633 2640//1332 2641//1187 +f 2641//1187 2640//1332 2642//830 +f 2602//1191 2621//1191 2611//1191 +f 2621//1191 2602//1191 2609//1192 +f 2612//1807 2610//1334 2601//1334 +f 2610//1334 2612//1807 2626//1336 +f 2614//824 2611//87 2643//87 +f 2611//87 2614//824 2604//824 +f 2620//827 2612//827 2608//827 +f 2612//827 2620//827 2644//827 +f 2645//850 2613//850 2614//850 +f 2613//850 2645//850 2646//850 +f 2647//87 2603//824 2613//824 +f 2603//824 2647//87 2615//87 +f 2617//852 2615//852 2648//852 +f 2615//852 2617//852 2605//852 +f 2617//1320 2649//1320 2616//1320 +f 2649//1320 2617//1320 2650//1320 +f 2651//1338 2606//1338 2616//1338 +f 2606//1338 2651//1338 2618//1338 +f 2652//827 2607//827 2618//827 +f 2607//827 2652//827 2619//827 +f 2653//1340 2619//225 2652//1341 +f 2619//225 2653//1340 2620//1577 +f 2620//1577 2653//1340 2644//1340 +f 2644//1340 2653//1340 2654//1343 +f 2644//1340 2654//1343 2655//1341 +f 2655//1808 2654//1808 2656//1808 +f 2657//862 2624//1809 2658//862 +f 2624//1809 2657//862 2622//863 +f 2643//1344 2622//1554 2657//1810 +f 2622//1554 2643//1344 2623//867 +f 2611//1345 2623//869 2643//870 +f 2623//869 2611//1345 2621//871 +f 2644//1202 2626//1811 2612//874 +f 2626//1811 2644//1202 2628//875 +f 2655//1556 2628//1351 2644//1556 +f 2628//1351 2655//1556 2630//1351 +f 2659//1353 2630//1352 2655//1353 +f 2630//1352 2659//1353 2632//1352 +f 2660//1759 2632//1205 2659//1759 +f 2632//1205 2660//1759 2634//1205 +f 2661//1354 2634//1646 2660//1356 +f 2634//1646 2661//1354 2636//1357 +f 2636//1358 2662//1562 2638//1358 +f 2662//1562 2636//1358 2661//1562 +f 2638//888 2663//889 2640//1360 +f 
2663//889 2638//888 2662//1209 +f 2640//1729 2664//893 2642//894 +f 2664//893 2640//1729 2663//895 +f 2642//896 2665//897 2641//898 +f 2665//897 2642//896 2664//899 +f 2641//900 2666//903 2639//1563 +f 2666//903 2641//900 2665//903 +f 2639//1762 2667//1762 2637//1762 +f 2667//1762 2639//1762 2666//1762 +f 2635//1812 2667//1368 2668//1370 +f 2667//1368 2635//1812 2637//1371 +f 2633//1372 2668//1373 2669//1373 +f 2668//1373 2633//1372 2635//1372 +f 2631//1220 2669//912 2670//912 +f 2669//912 2631//1220 2633//1220 +f 2629//915 2670//1375 2671//1375 +f 2670//1375 2629//915 2631//915 +f 2627//1376 2671//1377 2672//1567 +f 2671//1377 2627//1376 2629//1379 +f 2625//1383 2672//1813 2673//1382 +f 2672//1813 2625//1383 2627//1383 +f 2658//1568 2625//1385 2673//1386 +f 2625//1385 2658//1568 2624//1767 +f 2657//95 2614//850 2643//95 +f 2614//850 2657//95 2645//850 +f 2674//927 2646//927 2645//927 +f 2646//927 2674//927 2675//927 +f 2646//850 2647//95 2613//850 +f 2647//95 2646//850 2676//95 +f 2648//928 2647//928 2677//928 +f 2647//928 2648//928 2615//928 +f 2648//930 2650//930 2617//930 +f 2650//930 2648//930 2678//930 +f 2679//1570 2649//1570 2650//1570 +f 2649//1570 2679//1570 2680//1570 +f 2681//1321 2616//1321 2649//1321 +f 2616//1321 2681//1321 2651//1321 +f 2651//933 2652//1768 2618//1768 +f 2652//1768 2651//933 2682//933 +f 2682//1391 2653//1391 2652//1391 +f 2653//1391 2682//1391 2683//1391 +f 2684//1392 2654//1393 2653//1394 +f 2654//1393 2684//1392 2656//1395 +f 2656//1395 2684//1392 2655//1814 +f 2655//1814 2684//1392 2685//1396 +f 2655//1814 2685//1396 2659//1394 +f 2659//1815 2685//1815 2686//1815 +f 2658//941 2645//927 2657//941 +f 2645//927 2658//941 2674//927 +f 2687//942 2659//942 2686//942 +f 2659//942 2687//942 2660//942 +f 2688//1398 2660//1399 2687//1398 +f 2660//1399 2688//1398 2661//1399 +f 2688//1400 2662//1547 2661//1547 +f 2662//1547 2688//1400 2689//1400 +f 2689//946 2663//947 2662//947 +f 2663//947 2689//946 2690//946 +f 2690//948 2664//203 
2663//203 +f 2664//203 2690//948 2691//948 +f 2664//215 2692//949 2665//215 +f 2692//949 2664//215 2691//949 +f 2665//950 2693//951 2666//950 +f 2693//951 2665//950 2692//951 +f 2666//1486 2694//1403 2667//1486 +f 2694//1403 2666//1486 2693//1403 +f 2667//1404 2695//1404 2668//1404 +f 2695//1404 2667//1404 2694//1404 +f 2668//1405 2696//1405 2669//1405 +f 2696//1405 2668//1405 2695//1405 +f 2669//956 2697//958 2670//958 +f 2697//958 2669//956 2698//959 +f 2697//958 2698//959 2699//1575 +f 2698//967 2669//967 2696//967 +f 2700//961 2699//1575 2701//958 +f 2699//1575 2700//961 2697//958 +f 2670//962 2702//963 2671//964 +f 2702//963 2670//962 2700//965 +f 2702//963 2700//965 2701//1232 +f 2700//1816 2670//1816 2697//1816 +f 2703//968 2701//1232 2704//964 +f 2701//1232 2703//968 2702//963 +f 2671//1409 2705//1409 2672//1409 +f 2705//1409 2671//1409 2702//1409 +f 2672//1410 2706//1410 2673//1410 +f 2706//1410 2672//1410 2705//1410 +f 2673//1411 2674//1576 2658//1411 +f 2674//1576 2673//1411 2706//1576 +f 2706//1576 2675//1412 2674//1576 +f 2675//1412 2706//1576 2707//1412 +f 2675//927 2676//974 2646//927 +f 2676//974 2675//927 2708//974 +f 2677//976 2676//976 2709//976 +f 2676//976 2677//976 2647//976 +f 2710//87 2648//87 2677//87 +f 2648//87 2710//87 2678//87 +f 2711//977 2650//977 2678//977 +f 2650//977 2711//977 2679//977 +f 2712//1320 2680//1320 2679//1320 +f 2680//1320 2712//1320 2713//1320 +f 2680//1415 2681//1738 2649//1738 +f 2681//1738 2680//1415 2714//1415 +f 2715//827 2651//827 2681//827 +f 2651//827 2715//827 2682//827 +f 2716//1341 2682//1341 2715//1341 +f 2682//1341 2716//1341 2683//1341 +f 2683//1417 2684//1417 2653//1417 +f 2684//1417 2683//1417 2717//1417 +f 2718//942 2685//942 2684//942 +f 2685//942 2718//942 2719//942 +f 2719//942 2686//942 2685//942 +f 2686//942 2719//942 2687//942 +f 2720//1669 2687//1398 2719//1578 +f 2687//1398 2720//1669 2688//1398 +f 2720//1400 2689//1400 2688//1400 +f 2689//1400 2720//1400 2721//1579 +f 2721//946 2690//946 
2689//946 +f 2690//946 2721//946 2722//946 +f 2722//948 2691//948 2690//948 +f 2691//948 2722//948 2723//984 +f 2691//949 2724//949 2692//949 +f 2724//949 2691//949 2723//985 +f 2692//951 2725//951 2693//951 +f 2725//951 2692//951 2724//951 +f 2693//1403 2726//1422 2694//1403 +f 2726//1422 2693//1403 2725//1771 +f 2695//1404 2726//1424 2727//1817 +f 2726//1424 2695//1404 2694//1404 +f 2696//1405 2727//1405 2698//1405 +f 2727//1405 2696//1405 2695//1405 +f 2698//1405 2728//1405 2699//1405 +f 2728//1405 2698//1405 2727//1405 +f 2699//991 2729//991 2701//991 +f 2729//991 2699//991 2730//991 +f 2701//993 2731//993 2704//993 +f 2731//993 2701//993 2729//993 +f 2732//1409 2704//1409 2733//1409 +f 2704//1409 2732//1409 2703//1409 +f 2705//1409 2703//1409 2732//1409 +f 2703//1409 2705//1409 2702//1409 +f 2706//1410 2732//1426 2707//1426 +f 2732//1426 2706//1410 2705//1410 +f 2707//1412 2708//1411 2675//1412 +f 2708//1411 2707//1412 2734//1411 +f 2709//994 2708//1773 2735//994 +f 2708//1773 2709//994 2676//1773 +f 2736//95 2677//95 2709//95 +f 2677//95 2736//95 2710//95 +f 2737//996 2678//996 2710//996 +f 2678//996 2737//996 2711//996 +f 2738//930 2679//822 2711//822 +f 2679//822 2738//930 2712//930 +f 2712//1320 2739//1320 2713//1320 +f 2739//1320 2712//1320 2740//1320 +f 2741//1321 2680//1321 2713//1321 +f 2680//1321 2741//1321 2714//1321 +f 2742//1243 2681//1243 2714//1243 +f 2681//1243 2742//1243 2715//1243 +f 2743//1428 2715//1428 2742//1428 +f 2715//1428 2743//1428 2716//1428 +f 2744//1394 2683//1394 2716//1394 +f 2683//1394 2744//1394 2717//1394 +f 2717//1001 2718//1001 2684//1001 +f 2718//1001 2717//1001 2745//1001 +f 2746//1430 2719//1578 2718//1431 +f 2719//1578 2746//1430 2720//1669 +f 2746//1818 2721//1579 2720//1400 +f 2721//1579 2746//1818 2747//165 +f 2747//947 2722//946 2721//946 +f 2722//946 2747//947 2748//947 +f 2748//1006 2723//984 2722//948 +f 2723//984 2748//1006 2749//1007 +f 2723//985 2750//1008 2724//949 +f 2750//1008 2723//985 2749//1009 +f 
2724//951 2751//950 2725//951 +f 2751//950 2724//951 2750//950 +f 2725//1771 2752//1774 2726//1422 +f 2752//1774 2725//1771 2751//176 +f 2727//1817 2752//1435 2728//1436 +f 2752//1435 2727//1817 2726//1424 +f 2728//1438 2730//1437 2699//1438 +f 2730//1437 2728//1438 2753//1437 +f 2730//958 2754//958 2729//958 +f 2754//958 2730//958 2755//958 +f 2729//964 2756//964 2731//964 +f 2756//964 2729//964 2754//964 +f 2704//1439 2757//1440 2733//1439 +f 2757//1440 2704//1439 2731//1440 +f 2707//1426 2733//1410 2734//1410 +f 2733//1410 2707//1426 2732//1426 +f 2734//1585 2735//1585 2708//1585 +f 2735//1585 2734//1585 2758//1585 +f 2759//941 2709//941 2735//941 +f 2709//941 2759//941 2736//941 +f 2760//1019 2710//1019 2736//1019 +f 2710//1019 2760//1019 2737//1019 +f 2737//87 2738//87 2711//87 +f 2738//87 2737//87 2761//87 +f 2738//930 2740//930 2712//930 +f 2740//930 2738//930 2762//930 +f 2763//1444 2740//1588 2764//1678 +f 2740//1588 2763//1444 2739//1447 +f 2739//1321 2741//1321 2713//1321 +f 2741//1321 2739//1321 2765//1321 +f 2766//827 2714//827 2741//827 +f 2714//827 2766//827 2742//827 +f 2767//1341 2768//1448 2769//1340 +f 2769//1819 2742//1819 2766//1819 +f 2742//1341 2769//1340 2743//1820 +f 2743//1820 2769//1340 2768//1448 +f 2770//1452 2716//1780 2743//1452 +f 2716//1780 2770//1452 2744//1780 +f 2771//942 2717//942 2744//942 +f 2717//942 2771//942 2745//942 +f 2745//1821 2746//1744 2718//1456 +f 2746//1744 2745//1821 2772//1591 +f 2772//1458 2747//1592 2746//1460 +f 2747//1592 2772//1458 2773//1595 +f 2748//1036 2773//1036 2774//1036 +f 2773//1036 2748//1036 2747//1036 +f 2749//1037 2774//1037 2775//1039 +f 2774//1037 2749//1037 2748//1257 +f 2750//1258 2775//1042 2776//1043 +f 2775//1042 2750//1258 2749//1044 +f 2751//1045 2776//1045 2777//1045 +f 2776//1045 2751//1045 2750//1045 +f 2752//1601 2777//1465 2778//1465 +f 2777//1465 2752//1601 2751//1464 +f 2728//1466 2778//1467 2753//1467 +f 2778//1467 2728//1466 2752//1466 +f 2753//1405 2755//1405 2730//1405 +f 
2755//1405 2753//1405 2779//1405 +f 2754//1051 2780//1051 2781//1051 +f 2780//1051 2754//1051 2755//1051 +f 2756//1052 2781//1052 2782//1052 +f 2781//1052 2756//1052 2754//1052 +f 2731//1409 2783//1409 2757//1409 +f 2783//1409 2731//1409 2756//1409 +f 2733//1469 2758//1469 2734//1469 +f 2758//1469 2733//1469 2757//1469 +f 2784//1470 2735//1470 2758//1470 +f 2735//1470 2784//1470 2759//1470 +f 2785//1057 2736//1057 2759//1057 +f 2736//1057 2785//1057 2760//1057 +f 2760//95 2761//95 2737//95 +f 2761//95 2760//95 2786//95 +f 2761//87 2762//849 2738//87 +f 2762//849 2761//87 2787//849 +f 2764//1058 2762//1058 2788//1261 +f 2762//1058 2764//1058 2740//1059 +f 2789//1472 2790//1264 2791//1062 +f 2790//1264 2789//1472 2792//1063 +f 2792//1063 2789//1472 2793//1064 +f 2792//1063 2793//1064 2794//1065 +f 2794//1065 2793//1064 2795//1065 +f 2794//1065 2795//1065 2796//1066 +f 2796//1066 2795//1065 2797//1066 +f 2796//1066 2797//1066 2798//1067 +f 2798//1067 2797//1066 2799//1069 +f 2798//1067 2799//1069 2800//1266 +f 2800//1266 2799//1069 2801//1070 +f 2800//1266 2801//1070 2802//1070 +f 2802//1070 2801//1070 2803//1263 +f 2802//1070 2803//1263 2804//1071 +f 2804//1071 2803//1263 2805//1071 +f 2804//1071 2805//1071 2806//1065 +f 2806//1065 2805//1071 2763//1065 +f 2806//1065 2763//1065 2807//1072 +f 2807//1072 2763//1065 2764//1073 +f 2807//1072 2764//1073 2808//1786 +f 2808//1786 2764//1073 2788//1266 +f 2808//1786 2788//1266 2809//1267 +f 2763//1822 2765//1691 2739//1476 +f 2765//1691 2763//1822 2805//1477 +f 2765//827 2766//827 2741//827 +f 2766//827 2765//827 2769//827 +f 2803//1478 2767//1478 2769//1478 +f 2767//1478 2803//1478 2801//1478 +f 2810//1394 2811//1480 2767//1481 +f 2767//1823 2743//1823 2768//1823 +f 2743//1394 2767//1481 2770//1483 +f 2770//1483 2767//1481 2811//1480 +f 2812//1824 2744//1274 2770//1484 +f 2744//1274 2812//1824 2771//1274 +f 2813//1399 2745//1399 2771//1399 +f 2745//1399 2813//1399 2772//1399 +f 2772//1547 2814//1547 2773//1547 +f 2814//1547 
2772//1547 2813//1547 +f 2773//947 2815//947 2774//947 +f 2815//947 2773//947 2814//947 +f 2774//203 2816//203 2775//203 +f 2816//203 2774//203 2815//203 +f 2775//215 2817//215 2776//215 +f 2817//215 2775//215 2816//215 +f 2817//950 2777//950 2776//950 +f 2777//950 2817//950 2818//950 +f 2818//1486 2778//1486 2777//1486 +f 2778//1486 2818//1486 2819//1486 +f 2778//1404 2779//1404 2753//1404 +f 2779//1404 2778//1404 2819//1404 +f 2755//1488 2820//1488 2780//1488 +f 2820//1488 2755//1488 2779//1488 +f 2781//1825 2821//1825 2822//1825 +f 2821//1493 2781//958 2780//1826 +f 2821//1493 2780//1826 2823//956 +f 2824//958 2821//1493 2823//956 +f 2782//967 2825//967 2826//967 +f 2825//1827 2782//964 2781//1828 +f 2825//1827 2781//1828 2822//1280 +f 2821//964 2825//1827 2822//1280 +f 2783//1497 2782//1497 2827//1497 +f 2782//1497 2783//1497 2756//1497 +f 2757//1410 2784//1410 2758//1410 +f 2784//1410 2757//1410 2783//1410 +f 2759//1500 2828//1500 2785//1500 +f 2828//1500 2759//1500 2784//1500 +f 2785//941 2786//941 2760//941 +f 2786//941 2785//941 2829//941 +f 2830//95 2761//95 2786//95 +f 2761//95 2830//95 2787//95 +f 2788//1099 2787//1502 2809//1101 +f 2787//1502 2788//1099 2762//1102 +f 2808//1104 2831//1283 2807//1104 +f 2831//1283 2808//1104 2830//1103 +f 2807//1829 2832//1503 2806//1830 +f 2832//1503 2807//1829 2831//1791 +f 2833//1505 2806//1507 2832//1505 +f 2806//1507 2833//1505 2804//1507 +f 2825//1508 2804//1508 2833//1508 +f 2804//1508 2825//1508 2802//1508 +f 2821//1616 2802//1115 2825//1616 +f 2802//1115 2821//1616 2800//1115 +f 2824//1119 2800//1118 2821//1119 +f 2800//1118 2824//1119 2798//1118 +f 2834//1831 2798//1511 2824//1831 +f 2798//1511 2834//1831 2796//1511 +f 2835//1512 2796//1513 2834//1514 +f 2796//1513 2835//1512 2794//1515 +f 2836//1516 2794//1517 2835//1518 +f 2794//1517 2836//1516 2792//1519 +f 2837//1132 2792//1832 2836//1132 +f 2792//1832 2837//1132 2790//1133 +f 2838//1134 2790//1299 2837//1136 +f 2790//1299 2838//1134 2791//1137 +f 
2839//1139 2791//1138 2838//1140 +f 2791//1138 2839//1139 2789//1141 +f 2840//1142 2789//1303 2839//1304 +f 2789//1303 2840//1142 2793//1833 +f 2841//1710 2793//1523 2840//1834 +f 2793//1523 2841//1710 2795//1525 +f 2797//1526 2841//1527 2842//1528 +f 2841//1527 2797//1526 2795//1529 +f 2799//1308 2842//1153 2810//1153 +f 2842//1153 2799//1308 2797//1308 +f 2801//1531 2810//1530 2767//1530 +f 2810//1530 2801//1531 2799//1531 +f 2805//1155 2769//1155 2765//1155 +f 2769//1155 2805//1155 2803//1155 +f 2809//1835 2830//1716 2808//1158 +f 2830//1716 2809//1835 2787//1718 +f 2810//942 2843//942 2811//942 +f 2843//942 2810//942 2842//942 +f 2843//942 2770//942 2811//942 +f 2770//942 2843//942 2812//942 +f 2844//1535 2771//1535 2812//1535 +f 2771//1535 2844//1535 2813//1535 +f 2845//1536 2813//1536 2844//1536 +f 2813//1536 2845//1536 2814//1536 +f 2814//1166 2846//1165 2815//1166 +f 2846//1165 2814//1166 2845//1165 +f 2815//1314 2847//1314 2816//1314 +f 2847//1314 2815//1314 2846//1314 +f 2816//1169 2848//1538 2817//1169 +f 2848//1538 2816//1169 2847//1538 +f 2817//1170 2849//1170 2818//1170 +f 2849//1170 2817//1170 2848//1170 +f 2818//1540 2850//1539 2819//1540 +f 2850//1539 2818//1540 2849//1539 +f 2819//1541 2820//1541 2779//1541 +f 2820//1541 2819//1541 2850//1542 +f 2780//1405 2851//1405 2823//1405 +f 2851//1405 2780//1405 2820//1405 +f 2851//1405 2824//1543 2823//1405 +f 2824//1543 2851//1405 2834//1543 +f 2826//1409 2833//1409 2852//1409 +f 2833//1409 2826//1409 2825//1409 +f 2827//1409 2826//1409 2852//1409 +f 2826//1409 2827//1409 2782//1409 +f 2784//1544 2827//1544 2828//1544 +f 2827//1544 2784//1544 2783//1544 +f 2828//1411 2829//1411 2785//1411 +f 2829//1411 2828//1411 2853//1411 +f 2831//941 2786//941 2829//941 +f 2786//941 2831//941 2830//941 +f 2832//1470 2829//1411 2853//1411 +f 2829//1411 2832//1470 2831//1470 +f 2852//1410 2832//1410 2853//1410 +f 2832//1410 2852//1410 2833//1410 +f 2854//1404 2834//1487 2851//1404 +f 2834//1487 2854//1404 2835//1487 +f 
2836//1402 2854//1486 2855//1402 +f 2854//1486 2836//1402 2835//1402 +f 2837//950 2855//950 2856//950 +f 2855//950 2837//950 2836//950 +f 2838//215 2856//215 2857//215 +f 2856//215 2838//215 2837//215 +f 2858//1178 2838//203 2857//1178 +f 2838//203 2858//1178 2839//203 +f 2859//947 2839//947 2858//947 +f 2839//947 2859//947 2840//947 +f 2860//1547 2840//1547 2859//1547 +f 2840//1547 2860//1547 2841//1547 +f 2842//1399 2860//1399 2843//1399 +f 2860//1399 2842//1399 2841//1399 +f 2860//1399 2812//1398 2843//1399 +f 2812//1398 2860//1399 2844//1398 +f 2860//1547 2845//1485 2844//1485 +f 2845//1485 2860//1547 2859//1547 +f 2859//947 2846//947 2845//947 +f 2846//947 2859//947 2858//947 +f 2858//1178 2847//203 2846//203 +f 2847//203 2858//1178 2857//1178 +f 2857//215 2848//1180 2847//1180 +f 2848//1180 2857//215 2856//215 +f 2848//950 2855//950 2849//950 +f 2855//950 2848//950 2856//950 +f 2849//1486 2854//1486 2850//1486 +f 2854//1486 2849//1486 2855//1402 +f 2820//1404 2854//1404 2851//1404 +f 2854//1404 2820//1404 2850//1404 +f 2828//1410 2852//1410 2853//1410 +f 2852//1410 2828//1410 2827//1410 +f 2861//1318 2862//1626 2863//1626 +f 2862//1626 2861//1318 2864//1318 +f 2863//1626 2865//1320 2866//1320 +f 2865//1320 2863//1626 2862//1626 +f 2867//815 2863//816 2868//816 +f 2863//816 2867//815 2861//815 +f 2869//1320 2864//1318 2861//1318 +f 2864//1318 2869//1320 2870//1320 +f 2871//1321 2862//1321 2864//1321 +f 2862//1321 2871//1321 2872//1321 +f 2865//1322 2873//1836 2866//1324 +f 2873//1836 2865//1322 2874//1628 +f 2868//816 2866//930 2875//930 +f 2866//930 2868//816 2863//816 +f 2872//1321 2865//1321 2862//1321 +f 2865//1321 2872//1321 2876//1321 +f 2877//1837 2868//1837 2878//1837 +f 2868//1837 2877//1837 2867//1837 +f 2879//822 2861//815 2867//815 +f 2861//815 2879//822 2869//822 +f 2880//1326 2869//1326 2881//1326 +f 2869//1326 2880//1326 2870//1326 +f 2882//1321 2864//1321 2870//1321 +f 2864//1321 2882//1321 2871//1321 +f 2883//1629 2872//1629 2871//1629 +f 
2872//1629 2883//1629 2884//1629 +f 2885//1330 2886//1838 2887//1185 +f 2886//1838 2885//1330 2873//831 +f 2886//1838 2873//831 2888//832 +f 2888//832 2873//831 2874//833 +f 2888//832 2874//833 2889//833 +f 2889//833 2874//833 2890//1188 +f 2889//833 2890//1188 2891//834 +f 2891//834 2890//1188 2892//1188 +f 2891//834 2892//1188 2893//1328 +f 2893//1328 2892//1188 2894//836 +f 2893//1328 2894//836 2895//828 +f 2895//828 2894//836 2896//837 +f 2895//828 2896//837 2897//838 +f 2897//838 2896//837 2898//835 +f 2897//838 2898//835 2899//835 +f 2899//835 2898//835 2900//833 +f 2899//835 2900//833 2901//833 +f 2901//833 2900//833 2902//1632 +f 2901//833 2902//1632 2903//1189 +f 2903//1189 2902//1632 2904//1634 +f 2903//1189 2904//1634 2905//1187 +f 2905//1187 2904//1634 2906//1185 +f 2866//1191 2885//1192 2875//1191 +f 2885//1192 2866//1191 2873//1192 +f 2876//1551 2874//1334 2865//1334 +f 2874//1334 2876//1551 2890//1336 +f 2878//1837 2875//1839 2907//1839 +f 2875//1839 2878//1837 2868//1837 +f 2884//1629 2876//1629 2872//1629 +f 2876//1629 2884//1629 2908//1629 +f 2909//850 2877//850 2878//850 +f 2877//850 2909//850 2910//850 +f 2911//1839 2867//1837 2877//1837 +f 2867//1837 2911//1839 2879//1839 +f 2881//852 2879//852 2912//852 +f 2879//852 2881//852 2869//852 +f 2881//1320 2913//1320 2880//1320 +f 2913//1320 2881//1320 2914//1320 +f 2915//1339 2870//1339 2880//1339 +f 2870//1339 2915//1339 2882//1339 +f 2916//1629 2871//1629 2882//1629 +f 2871//1629 2916//1629 2883//1629 +f 2917//860 2883//857 2916//858 +f 2883//857 2917//860 2884//1635 +f 2884//1635 2917//860 2908//860 +f 2908//860 2917//860 2918//1024 +f 2908//860 2918//1024 2919//858 +f 2919//861 2918//861 2920//861 +f 2921//862 2888//863 2922//862 +f 2888//863 2921//862 2886//863 +f 2907//864 2886//1554 2921//1199 +f 2886//1554 2907//864 2887//867 +f 2875//1840 2887//1841 2907//1842 +f 2887//1841 2875//1840 2885//1843 +f 2908//1639 2890//1640 2876//1641 +f 2890//1640 2908//1639 2892//1726 +f 2919//877 2892//878 
2908//877 +f 2892//878 2919//877 2894//878 +f 2923//880 2894//880 2919//880 +f 2894//880 2923//880 2896//880 +f 2924//1644 2896//1645 2923//1644 +f 2896//1645 2924//1644 2898//1645 +f 2925//1844 2898//1560 2924//1845 +f 2898//1560 2925//1844 2900//1357 +f 2900//1562 2926//1562 2902//1562 +f 2926//1562 2900//1562 2925//1562 +f 2902//888 2927//1209 2904//1360 +f 2927//1209 2902//888 2926//891 +f 2904//895 2928//1361 2906//1730 +f 2928//1361 2904//895 2927//1846 +f 2906//1212 2929//897 2905//898 +f 2929//897 2906//1212 2928//899 +f 2905//900 2930//901 2903//902 +f 2930//901 2905//900 2929//903 +f 2903//904 2931//904 2901//904 +f 2931//904 2903//904 2930//904 +f 2899//1215 2931//907 2932//908 +f 2931//907 2899//1215 2901//909 +f 2897//1218 2932//1219 2933//1219 +f 2932//1219 2897//1218 2899//1218 +f 2895//1220 2933//1220 2934//1220 +f 2933//1220 2895//1220 2897//1220 +f 2893//915 2934//916 2935//916 +f 2934//916 2893//915 2895//915 +f 2891//1847 2935//1848 2936//1654 +f 2935//1848 2891//1847 2893//920 +f 2889//922 2936//922 2937//922 +f 2936//922 2889//922 2891//1223 +f 2922//1225 2889//924 2937//925 +f 2889//924 2922//1225 2888//1733 +f 2921//95 2878//850 2907//95 +f 2878//850 2921//95 2909//850 +f 2938//927 2910//927 2909//927 +f 2910//927 2938//927 2939//927 +f 2910//850 2911//95 2877//850 +f 2911//95 2910//850 2940//95 +f 2912//1849 2911//1849 2941//1849 +f 2911//1849 2912//1849 2879//1849 +f 2912//930 2914//930 2881//930 +f 2914//930 2912//930 2942//930 +f 2943//1570 2913//1659 2914//1659 +f 2913//1659 2943//1570 2944//1570 +f 2945//1321 2880//1321 2913//1321 +f 2880//1321 2945//1321 2915//1321 +f 2915//1660 2916//1660 2882//1660 +f 2916//1660 2915//1660 2946//1660 +f 2946//1228 2917//1228 2916//1228 +f 2917//1228 2946//1228 2947//1228 +f 2948//1735 2918//936 2917//937 +f 2918//936 2948//1735 2920//1229 +f 2920//1229 2948//1735 2919//935 +f 2919//935 2948//1735 2949//940 +f 2919//935 2949//940 2923//937 +f 2923//861 2949//861 2950//861 +f 2922//941 2909//927 
2921//941 +f 2909//927 2922//941 2938//927 +f 2951//1664 2923//1664 2950//1664 +f 2923//1664 2951//1664 2924//1664 +f 2952//1398 2924//1399 2951//1398 +f 2924//1399 2952//1398 2925//1399 +f 2952//1400 2926//1485 2925//1485 +f 2926//1485 2952//1400 2953//1400 +f 2953//946 2927//947 2926//947 +f 2927//947 2953//946 2954//946 +f 2954//1850 2928//1851 2927//1851 +f 2928//1851 2954//1850 2955//1850 +f 2928//215 2956//949 2929//215 +f 2956//949 2928//215 2955//949 +f 2929//950 2957//951 2930//950 +f 2957//951 2929//950 2956//951 +f 2930//1085 2958//953 2931//1085 +f 2958//953 2930//1085 2957//953 +f 2931//954 2959//954 2932//954 +f 2959//954 2931//954 2958//954 +f 2932//990 2960//990 2933//990 +f 2960//990 2932//990 2959//990 +f 2933//1608 2961//957 2934//958 +f 2961//957 2933//1608 2962//1277 +f 2961//957 2962//1277 2963//1575 +f 2962//1852 2933//1852 2960//1852 +f 2964//961 2963//1575 2965//958 +f 2963//1575 2964//961 2961//957 +f 2934//962 2966//963 2935//964 +f 2966//963 2934//962 2964//1232 +f 2966//963 2964//1232 2965//966 +f 2964//967 2934//967 2961//967 +f 2967//968 2965//966 2968//964 +f 2965//966 2967//968 2966//963 +f 2935//969 2969//969 2936//969 +f 2969//969 2935//969 2966//969 +f 2936//970 2970//970 2937//970 +f 2970//970 2936//970 2969//970 +f 2937//971 2938//1668 2922//971 +f 2938//1668 2937//971 2970//1668 +f 2970//1668 2939//973 2938//1668 +f 2939//973 2970//1668 2971//973 +f 2939//927 2940//974 2910//927 +f 2940//974 2939//927 2972//974 +f 2941//976 2940//976 2973//976 +f 2940//976 2941//976 2911//976 +f 2974//1839 2912//1839 2941//1839 +f 2912//1839 2974//1839 2942//1839 +f 2975//977 2914//977 2942//977 +f 2914//977 2975//977 2943//977 +f 2976//1320 2944//1337 2943//1337 +f 2944//1337 2976//1320 2977//1320 +f 2944//1415 2945//1415 2913//1415 +f 2945//1415 2944//1415 2978//1415 +f 2979//1629 2915//1629 2945//1629 +f 2915//1629 2979//1629 2946//1629 +f 2980//858 2946//858 2979//858 +f 2946//858 2980//858 2947//858 +f 2947//980 2948//980 2917//980 +f 
2948//980 2947//980 2981//980 +f 2982//1664 2949//1664 2948//1664 +f 2949//1664 2982//1664 2983//1664 +f 2983//1664 2950//1664 2949//1664 +f 2950//1664 2983//1664 2951//1664 +f 2984//1669 2951//1398 2983//1853 +f 2951//1398 2984//1669 2952//1398 +f 2984//1400 2953//1400 2952//1400 +f 2953//1400 2984//1400 2985//1579 +f 2985//946 2954//946 2953//946 +f 2954//946 2985//946 2986//946 +f 2986//1850 2955//1850 2954//1850 +f 2955//1850 2986//1850 2987//1854 +f 2955//949 2988//949 2956//949 +f 2988//949 2955//949 2987//1239 +f 2956//951 2989//951 2957//951 +f 2989//951 2956//951 2988//951 +f 2957//953 2990//1670 2958//953 +f 2990//1670 2957//953 2989//987 +f 2959//954 2990//988 2991//1855 +f 2990//988 2959//954 2958//954 +f 2960//990 2991//990 2962//990 +f 2991//990 2960//990 2959//990 +f 2962//990 2992//990 2963//990 +f 2992//990 2962//990 2991//990 +f 2963//991 2993//991 2965//991 +f 2993//991 2963//991 2994//991 +f 2965//993 2995//993 2968//993 +f 2995//993 2965//993 2993//993 +f 2996//969 2968//969 2997//969 +f 2968//969 2996//969 2967//969 +f 2969//969 2967//969 2996//969 +f 2967//969 2969//969 2966//969 +f 2970//970 2996//970 2971//970 +f 2996//970 2970//970 2969//970 +f 2971//973 2972//971 2939//973 +f 2972//971 2971//973 2998//971 +f 2973//994 2972//994 2999//994 +f 2972//994 2973//994 2940//994 +f 3000//95 2941//95 2973//95 +f 2941//95 3000//95 2974//95 +f 3001//1856 2942//1857 2974//1857 +f 2942//1857 3001//1856 2975//1856 +f 3002//930 2943//930 2975//930 +f 2943//930 3002//930 2976//930 +f 2976//1320 3003//1320 2977//1320 +f 3003//1320 2976//1320 3004//1320 +f 3005//1321 2944//1321 2977//1321 +f 2944//1321 3005//1321 2978//1321 +f 3006//1674 2945//1674 2978//1674 +f 2945//1674 3006//1674 2979//1674 +f 3007//999 2979//999 3006//999 +f 2979//999 3007//999 2980//999 +f 3008//937 2947//937 2980//937 +f 2947//937 3008//937 2981//937 +f 2981//1675 2982//1740 2948//1740 +f 2982//1740 2981//1675 3009//1675 +f 3010//1583 2983//1853 2982//1431 +f 2983//1853 3010//1583 
2984//1669 +f 3010//1432 2985//1579 2984//1400 +f 2985//1579 3010//1432 3011//165 +f 3011//947 2986//946 2985//946 +f 2986//946 3011//947 3012//947 +f 3012//1858 2987//1854 2986//1850 +f 2987//1854 3012//1858 3013//1859 +f 2987//1239 3014//1008 2988//949 +f 3014//1008 2987//1239 3013//1009 +f 2988//951 3015//950 2989//951 +f 3015//950 2988//951 3014//950 +f 2989//987 3016//1741 2990//1670 +f 3016//1741 2989//987 3015//246 +f 2991//1855 3016//1011 2992//1012 +f 3016//1011 2991//1855 2990//988 +f 2992//1013 2994//1013 2963//1013 +f 2994//1013 2992//1013 3017//1013 +f 2994//958 3018//958 2993//958 +f 3018//958 2994//958 3019//958 +f 2993//964 3020//964 2995//964 +f 3020//964 2993//964 3018//964 +f 2968//1016 3021//1016 2997//1016 +f 3021//1016 2968//1016 2995//1016 +f 2971//970 2997//970 2998//970 +f 2997//970 2971//970 2996//970 +f 2998//1017 2999//1017 2972//1017 +f 2999//1017 2998//1017 3022//1017 +f 3023//941 2973//941 2999//941 +f 2973//941 3023//941 3000//941 +f 3024//1019 2974//1019 3000//1019 +f 2974//1019 3024//1019 3001//1019 +f 3001//1839 3002//1839 2975//1839 +f 3002//1839 3001//1839 3025//1839 +f 3002//930 3004//930 2976//930 +f 3004//930 3002//930 3026//930 +f 3027//1444 3004//1588 3028//1778 +f 3004//1588 3027//1444 3003//1447 +f 3003//1321 3005//1321 2977//1321 +f 3005//1321 3003//1321 3029//1321 +f 3030//1629 2978//1629 3005//1629 +f 2978//1629 3030//1629 3006//1629 +f 3031//858 3032//1860 3033//1636 +f 3033//1861 3006//1861 3030//1861 +f 3006//858 3033//1636 3007//1862 +f 3007//1862 3033//1636 3032//1860 +f 3034//1027 2980//1028 3007//1027 +f 2980//1028 3034//1027 3008//1028 +f 3035//1664 2981//1664 3008//1664 +f 2981//1664 3035//1664 3009//1664 +f 3009//1591 3010//1456 2982//1456 +f 3010//1456 3009//1591 3036//1591 +f 3036//1458 3011//1593 3010//1460 +f 3011//1593 3036//1458 3037//1595 +f 3012//1036 3037//1036 3038//1036 +f 3037//1036 3012//1036 3011//1036 +f 3013//1863 3038//1039 3039//1864 +f 3038//1039 3013//1863 3012//1865 +f 3014//1866 
3039//1867 3040//1043 +f 3039//1867 3014//1866 3013//1044 +f 3015//1045 3040//1045 3041//1045 +f 3040//1045 3015//1045 3014//1045 +f 3016//1047 3041//1046 3042//1047 +f 3041//1046 3016//1047 3015//1046 +f 2992//1048 3042//1260 3017//1868 +f 3042//1260 2992//1048 3016//1048 +f 3017//990 3019//990 2994//990 +f 3019//990 3017//990 3043//990 +f 3018//1051 3044//1051 3045//1051 +f 3044//1051 3018//1051 3019//1051 +f 3020//1052 3045//1052 3046//1052 +f 3045//1052 3020//1052 3018//1052 +f 2995//969 3047//969 3021//969 +f 3047//969 2995//969 3020//969 +f 2997//1054 3022//1054 2998//1054 +f 3022//1054 2997//1054 3021//1054 +f 3048//971 2999//971 3022//971 +f 2999//971 3048//971 3023//971 +f 3049//1056 3000//1056 3023//1056 +f 3000//1056 3049//1056 3024//1056 +f 3024//95 3025//95 3001//95 +f 3025//95 3024//95 3050//95 +f 3025//1839 3026//1839 3002//1839 +f 3026//1839 3025//1839 3051//1839 +f 3028//1058 3026//1059 3052//1058 +f 3026//1059 3028//1058 3004//1059 +f 3053//1869 3054//1264 3055//1267 +f 3054//1264 3053//1869 3056//1063 +f 3056//1063 3053//1869 3057//1064 +f 3056//1063 3057//1064 3058//1065 +f 3058//1065 3057//1064 3059//1065 +f 3058//1065 3059//1065 3060//1066 +f 3060//1066 3059//1065 3061//1066 +f 3060//1066 3061//1066 3062//1071 +f 3062//1071 3061//1066 3063//1068 +f 3062//1071 3063//1068 3064//1069 +f 3064//1069 3063//1068 3065//1070 +f 3064//1069 3065//1070 3066//1475 +f 3066//1475 3065//1070 3067//1263 +f 3066//1475 3067//1263 3068//1071 +f 3068//1071 3067//1263 3069//1263 +f 3068//1071 3069//1263 3070//1065 +f 3070//1065 3069//1263 3027//1065 +f 3070//1065 3027//1065 3071//1072 +f 3071//1072 3027//1065 3028//1265 +f 3071//1072 3028//1265 3072//1074 +f 3072//1074 3028//1265 3052//1069 +f 3072//1074 3052//1069 3073//1267 +f 3027//1822 3029//1476 3003//1476 +f 3029//1476 3027//1822 3069//1477 +f 3029//1629 3030//1629 3005//1629 +f 3030//1629 3029//1629 3033//1629 +f 3067//1078 3031//1078 3033//1078 +f 3031//1078 3067//1078 3065//1078 +f 3074//937 3075//1271 
3031//1870 +f 3031//1871 3007//1871 3032//1871 +f 3007//937 3031//1870 3034//1872 +f 3034//1872 3031//1870 3075//1271 +f 3076//1696 3008//1695 3034//1696 +f 3008//1695 3076//1696 3035//1695 +f 3077//1399 3009//1399 3035//1399 +f 3009//1399 3077//1399 3036//1399 +f 3036//1547 3078//1547 3037//1547 +f 3078//1547 3036//1547 3077//1547 +f 3037//947 3079//947 3038//947 +f 3079//947 3037//947 3078//947 +f 3038//1851 3080//1851 3039//1851 +f 3080//1851 3038//1851 3079//1851 +f 3039//215 3081//215 3040//215 +f 3081//215 3039//215 3080//215 +f 3081//950 3041//950 3040//950 +f 3041//950 3081//950 3082//950 +f 3082//1177 3042//1177 3041//1177 +f 3042//1177 3082//1177 3083//1177 +f 3042//954 3043//954 3017//954 +f 3043//954 3042//954 3083//954 +f 3019//1086 3084//1086 3044//1086 +f 3084//1086 3019//1086 3043//1086 +f 3045//1873 3085//1873 3086//1873 +f 3085//1826 3045//958 3044//1750 +f 3085//1826 3044//1750 3087//956 +f 3088//958 3085//1826 3087//956 +f 3046//967 3089//967 3090//967 +f 3089//1232 3046//964 3045//1874 +f 3089//1232 3045//1874 3086//1736 +f 3085//964 3089//1232 3086//1736 +f 3047//1281 3046//1281 3091//1281 +f 3046//1281 3047//1281 3020//1281 +f 3021//970 3048//970 3022//970 +f 3048//970 3021//970 3047//970 +f 3023//1751 3092//1098 3049//1098 +f 3092//1098 3023//1751 3048//1751 +f 3049//941 3050//941 3024//941 +f 3050//941 3049//941 3093//941 +f 3094//95 3025//95 3050//95 +f 3025//95 3094//95 3051//95 +f 3052//1100 3051//1875 3073//1876 +f 3051//1875 3052//1100 3026//1501 +f 3072//1104 3095//1283 3071//1104 +f 3095//1283 3072//1104 3094//1103 +f 3071//1105 3096//1284 3070//1285 +f 3096//1284 3071//1105 3095//1108 +f 3097//1286 3070//1877 3096//1111 +f 3070//1877 3097//1286 3068//1109 +f 3089//1113 3068//1114 3097//1113 +f 3068//1114 3089//1113 3066//1114 +f 3085//1115 3066//1115 3089//1115 +f 3066//1115 3085//1115 3064//1115 +f 3088//1118 3064//1118 3085//1118 +f 3064//1118 3088//1118 3062//1118 +f 3098//1291 3062//1292 3088//1291 +f 3062//1292 3098//1291 
3060//1292 +f 3099//1704 3060//1123 3098//1124 +f 3060//1123 3099//1704 3058//1125 +f 3100//1126 3058//1127 3099//1294 +f 3058//1127 3100//1126 3056//1295 +f 3101//1296 3056//1133 3100//1132 +f 3056//1133 3101//1296 3054//1133 +f 3102//1298 3054//1299 3101//1136 +f 3054//1299 3102//1298 3055//1137 +f 3103//1141 3055//1878 3102//1879 +f 3055//1878 3103//1141 3053//1880 +f 3104//1302 3053//1143 3103//1302 +f 3053//1143 3104//1302 3057//1143 +f 3105//1522 3057//1523 3104//1524 +f 3057//1523 3105//1522 3059//1525 +f 3061//1526 3105//1620 3106//1802 +f 3105//1620 3061//1526 3059//1529 +f 3063//1712 3106//1713 3074//1713 +f 3106//1713 3063//1712 3061//1712 +f 3065//1154 3074//1154 3031//1154 +f 3074//1154 3065//1154 3063//1154 +f 3069//1754 3033//1714 3029//1714 +f 3033//1714 3069//1754 3067//1754 +f 3073//1835 3094//1803 3072//1717 +f 3094//1803 3073//1835 3051//1311 +f 3074//1664 3107//1664 3075//1664 +f 3107//1664 3074//1664 3106//1664 +f 3107//1664 3034//1664 3075//1664 +f 3034//1664 3107//1664 3076//1664 +f 3108//1535 3035//1535 3076//1535 +f 3035//1535 3108//1535 3077//1535 +f 3109//1881 3077//1881 3108//1881 +f 3077//1881 3109//1881 3078//1881 +f 3078//1165 3110//1165 3079//1165 +f 3110//1165 3078//1165 3109//1165 +f 3079//1882 3111//1883 3080//1882 +f 3111//1883 3079//1882 3110//1883 +f 3080//1538 3112//1538 3081//1538 +f 3112//1538 3080//1538 3111//1538 +f 3081//1170 3113//1170 3082//1170 +f 3113//1170 3081//1170 3112//1170 +f 3082//1317 3114//1316 3083//1317 +f 3114//1316 3082//1317 3113//1316 +f 3083//1173 3084//1173 3043//1173 +f 3084//1173 3083//1173 3114//1173 +f 3044//990 3115//990 3087//990 +f 3115//990 3044//990 3084//990 +f 3115//990 3088//990 3087//990 +f 3088//990 3115//990 3098//990 +f 3090//969 3097//969 3116//969 +f 3097//969 3090//969 3089//969 +f 3091//969 3090//969 3116//969 +f 3090//969 3091//969 3046//969 +f 3048//1175 3091//1175 3092//1175 +f 3091//1175 3048//1175 3047//1175 +f 3092//971 3093//971 3049//971 +f 3093//971 3092//971 3117//971 +f 
3095//941 3050//941 3093//941 +f 3050//941 3095//941 3094//941 +f 3096//1055 3093//971 3117//971 +f 3093//971 3096//1055 3095//1055 +f 3116//970 3096//970 3117//970 +f 3096//970 3116//970 3097//970 +f 3118//954 3098//954 3115//954 +f 3098//954 3118//954 3099//954 +f 3100//1177 3118//1177 3119//1177 +f 3118//1177 3100//1177 3099//1177 +f 3101//950 3119//950 3120//950 +f 3119//950 3101//950 3100//950 +f 3102//215 3120//215 3121//215 +f 3120//215 3102//215 3101//215 +f 3122//1851 3102//1851 3121//1851 +f 3102//1851 3122//1851 3103//1851 +f 3123//947 3103//947 3122//947 +f 3103//947 3123//947 3104//947 +f 3124//1547 3104//1547 3123//1547 +f 3104//1547 3124//1547 3105//1547 +f 3106//1399 3124//1399 3107//1399 +f 3124//1399 3106//1399 3105//1399 +f 3124//1399 3076//1398 3107//1399 +f 3076//1398 3124//1399 3108//1398 +f 3124//1547 3109//1485 3108//1401 +f 3109//1485 3124//1547 3123//1547 +f 3123//947 3110//947 3109//947 +f 3110//947 3123//947 3122//947 +f 3122//1851 3111//1851 3110//1851 +f 3111//1851 3122//1851 3121//1851 +f 3121//215 3112//1180 3111//1180 +f 3112//1180 3121//215 3120//215 +f 3112//950 3119//950 3113//950 +f 3119//950 3112//950 3120//950 +f 3113//1085 3118//1177 3114//1085 +f 3118//1177 3113//1085 3119//1177 +f 3084//954 3118//954 3115//954 +f 3118//954 3084//954 3114//954 +f 3092//970 3116//970 3117//970 +f 3116//970 3092//970 3091//970 +f 3125//1318 3126//1626 3127//1626 +f 3126//1626 3125//1318 3128//1318 +f 3127//1626 3129//1320 3130//1320 +f 3129//1320 3127//1626 3126//1626 +f 3131//815 3127//816 3132//816 +f 3127//816 3131//815 3125//815 +f 3133//1320 3128//1318 3125//1318 +f 3128//1318 3133//1320 3134//1320 +f 3135//1321 3126//1321 3128//1321 +f 3126//1321 3135//1321 3136//1321 +f 3129//1322 3137//1548 3130//1324 +f 3137//1548 3129//1322 3138//1628 +f 3132//816 3130//930 3139//930 +f 3130//930 3132//816 3127//816 +f 3136//1321 3129//1321 3126//1321 +f 3129//1321 3136//1321 3140//1321 +f 3141//1837 3132//1837 3142//1837 +f 3132//1837 3141//1837 
3131//1837 +f 3143//822 3125//815 3131//815 +f 3125//815 3143//822 3133//822 +f 3144//1326 3133//1326 3145//1326 +f 3133//1326 3144//1326 3134//1326 +f 3146//1321 3128//1321 3134//1321 +f 3128//1321 3146//1321 3135//1321 +f 3147//1629 3136//1629 3135//1629 +f 3136//1629 3147//1629 3148//1629 +f 3149//828 3150//829 3151//1185 +f 3150//829 3149//828 3137//1186 +f 3150//829 3137//1186 3152//832 +f 3152//832 3137//1186 3138//1187 +f 3152//832 3138//1187 3153//833 +f 3153//833 3138//1187 3154//834 +f 3153//833 3154//834 3155//834 +f 3155//834 3154//834 3156//1188 +f 3155//834 3156//1188 3157//1328 +f 3157//1328 3156//1188 3158//836 +f 3157//1328 3158//836 3159//828 +f 3159//828 3158//836 3160//837 +f 3159//828 3160//837 3161//838 +f 3161//838 3160//837 3162//835 +f 3161//838 3162//835 3163//835 +f 3163//835 3162//835 3164//833 +f 3163//835 3164//833 3165//833 +f 3165//833 3164//833 3166//1331 +f 3165//833 3166//1331 3167//1633 +f 3167//1633 3166//1331 3168//1332 +f 3167//1633 3168//1332 3169//1884 +f 3169//1884 3168//1332 3170//1185 +f 3130//1191 3149//1192 3139//1191 +f 3149//1192 3130//1191 3137//1192 +f 3140//1551 3138//1334 3129//1334 +f 3138//1334 3140//1551 3154//1336 +f 3142//1837 3139//1839 3171//1839 +f 3139//1839 3142//1837 3132//1837 +f 3148//1629 3140//1629 3136//1629 +f 3140//1629 3148//1629 3172//1629 +f 3173//850 3141//850 3142//850 +f 3141//850 3173//850 3174//850 +f 3175//1839 3131//1837 3141//1837 +f 3131//1837 3175//1839 3143//1839 +f 3145//852 3143//852 3176//852 +f 3143//852 3145//852 3133//852 +f 3145//1320 3177//1320 3144//1320 +f 3177//1320 3145//1320 3178//1320 +f 3179//1339 3134//1339 3144//1339 +f 3134//1339 3179//1339 3146//1339 +f 3180//1629 3135//1629 3146//1629 +f 3135//1629 3180//1629 3147//1629 +f 3181//860 3147//857 3180//858 +f 3147//857 3181//860 3148//859 +f 3148//859 3181//860 3172//1636 +f 3172//1636 3181//860 3182//1024 +f 3172//1636 3182//1024 3183//858 +f 3183//861 3182//861 3184//861 +f 3185//862 3152//863 3186//862 +f 
3152//863 3185//862 3150//863 +f 3171//864 3150//1198 3185//1199 +f 3150//1198 3171//864 3151//867 +f 3139//871 3151//1841 3171//1842 +f 3151//1841 3139//871 3149//1843 +f 3172//1639 3154//1640 3140//1641 +f 3154//1640 3172//1639 3156//1726 +f 3183//877 3156//878 3172//877 +f 3156//878 3183//877 3158//878 +f 3187//880 3158//880 3183//880 +f 3158//880 3187//880 3160//880 +f 3188//1644 3160//1645 3187//1644 +f 3160//1645 3188//1644 3162//1645 +f 3189//1844 3162//1646 3188//1356 +f 3162//1646 3189//1844 3164//1357 +f 3164//1562 3190//1562 3166//1562 +f 3190//1562 3164//1562 3189//1562 +f 3166//888 3191//891 3168//1360 +f 3191//891 3166//888 3190//889 +f 3168//895 3192//1361 3170//1730 +f 3192//1361 3168//895 3191//1846 +f 3170//1212 3193//897 3169//898 +f 3193//897 3170//1212 3192//899 +f 3169//902 3194//903 3167//902 +f 3194//903 3169//902 3193//903 +f 3167//904 3195//904 3165//904 +f 3195//904 3167//904 3194//904 +f 3163//906 3195//907 3196//908 +f 3195//907 3163//906 3165//909 +f 3161//1218 3196//1219 3197//1219 +f 3196//1219 3161//1218 3163//1218 +f 3159//1220 3197//1220 3198//1220 +f 3197//1220 3159//1220 3161//1220 +f 3157//915 3198//916 3199//916 +f 3198//916 3157//915 3159//915 +f 3155//1653 3199//1885 3200//1654 +f 3199//1885 3155//1653 3157//920 +f 3153//1886 3200//1887 3201//922 +f 3200//1887 3153//1886 3155//1223 +f 3186//923 3153//1888 3201//925 +f 3153//1888 3186//923 3152//1733 +f 3185//95 3142//850 3171//95 +f 3142//850 3185//95 3173//850 +f 3202//927 3174//927 3173//927 +f 3174//927 3202//927 3203//927 +f 3174//850 3175//95 3141//850 +f 3175//95 3174//850 3204//95 +f 3176//1849 3175//1849 3205//1849 +f 3175//1849 3176//1849 3143//1849 +f 3176//930 3178//930 3145//930 +f 3178//930 3176//930 3206//930 +f 3207//1570 3177//1570 3178//1570 +f 3177//1570 3207//1570 3208//1570 +f 3209//1321 3144//1321 3177//1321 +f 3144//1321 3209//1321 3179//1321 +f 3179//1660 3180//1660 3146//1660 +f 3180//1660 3179//1660 3210//1660 +f 3210//1228 3181//1228 3180//1228 +f 
3181//1228 3210//1228 3211//1228 +f 3212//1735 3182//936 3181//937 +f 3182//936 3212//1735 3184//1693 +f 3184//1693 3212//1735 3183//1870 +f 3183//1870 3212//1735 3213//940 +f 3183//1870 3213//940 3187//937 +f 3187//861 3213//861 3214//861 +f 3186//941 3173//927 3185//941 +f 3173//927 3186//941 3202//927 +f 3215//1664 3187//1664 3214//1664 +f 3187//1664 3215//1664 3188//1664 +f 3216//1398 3188//1399 3215//1398 +f 3188//1399 3216//1398 3189//1399 +f 3216//1400 3190//1485 3189//1485 +f 3190//1485 3216//1400 3217//1400 +f 3217//946 3191//947 3190//947 +f 3191//947 3217//946 3218//946 +f 3218//1850 3192//1851 3191//1851 +f 3192//1851 3218//1850 3219//1850 +f 3192//215 3220//949 3193//215 +f 3220//949 3192//215 3219//949 +f 3193//950 3221//951 3194//950 +f 3221//951 3193//950 3220//951 +f 3194//1085 3222//953 3195//1085 +f 3222//953 3194//1085 3221//953 +f 3195//954 3223//954 3196//954 +f 3223//954 3195//954 3222//954 +f 3196//990 3224//990 3197//990 +f 3224//990 3196//990 3223//990 +f 3197//1608 3225//1231 3198//958 +f 3225//1231 3197//1608 3226//1277 +f 3225//1231 3226//1277 3227//1575 +f 3226//967 3197//967 3224//967 +f 3228//961 3227//1575 3229//958 +f 3227//1575 3228//961 3225//1231 +f 3198//962 3230//963 3199//964 +f 3230//963 3198//962 3228//1232 +f 3230//963 3228//1232 3229//966 +f 3228//967 3198//967 3225//967 +f 3231//968 3229//966 3232//964 +f 3229//966 3231//968 3230//963 +f 3199//969 3233//969 3200//969 +f 3233//969 3199//969 3230//969 +f 3200//970 3234//970 3201//970 +f 3234//970 3200//970 3233//970 +f 3201//971 3202//1668 3186//971 +f 3202//1668 3201//971 3234//1668 +f 3234//1668 3203//973 3202//1668 +f 3203//973 3234//1668 3235//973 +f 3203//927 3204//974 3174//927 +f 3204//974 3203//927 3236//974 +f 3205//976 3204//976 3237//976 +f 3204//976 3205//976 3175//976 +f 3238//1839 3176//1839 3205//1839 +f 3176//1839 3238//1839 3206//1839 +f 3239//977 3178//977 3206//977 +f 3178//977 3239//977 3207//977 +f 3240//1320 3208//1337 3207//1337 +f 3208//1337 
3240//1320 3241//1320 +f 3208//1415 3209//1415 3177//1415 +f 3209//1415 3208//1415 3242//1415 +f 3243//1629 3179//1629 3209//1629 +f 3179//1629 3243//1629 3210//1629 +f 3244//858 3210//858 3243//858 +f 3210//858 3244//858 3211//858 +f 3211//980 3212//980 3181//980 +f 3212//980 3211//980 3245//980 +f 3246//1664 3213//1664 3212//1664 +f 3213//1664 3246//1664 3247//1664 +f 3247//1664 3214//1664 3213//1664 +f 3214//1664 3247//1664 3215//1664 +f 3248//1669 3215//1398 3247//1853 +f 3215//1398 3248//1669 3216//1398 +f 3248//1400 3217//1400 3216//1400 +f 3217//1400 3248//1400 3249//1579 +f 3249//946 3218//946 3217//946 +f 3218//946 3249//946 3250//946 +f 3250//1850 3219//1850 3218//1850 +f 3219//1850 3250//1850 3251//1854 +f 3219//949 3252//949 3220//949 +f 3252//949 3219//949 3251//985 +f 3220//951 3253//951 3221//951 +f 3253//951 3220//951 3252//951 +f 3221//953 3254//1670 3222//953 +f 3254//1670 3221//953 3253//987 +f 3223//954 3254//988 3255//1855 +f 3254//988 3223//954 3222//954 +f 3224//990 3255//990 3226//990 +f 3255//990 3224//990 3223//990 +f 3226//990 3256//990 3227//990 +f 3256//990 3226//990 3255//990 +f 3227//991 3257//991 3229//991 +f 3257//991 3227//991 3258//991 +f 3229//993 3259//993 3232//993 +f 3259//993 3229//993 3257//993 +f 3260//969 3232//969 3261//969 +f 3232//969 3260//969 3231//969 +f 3233//969 3231//969 3260//969 +f 3231//969 3233//969 3230//969 +f 3234//970 3260//970 3235//970 +f 3260//970 3234//970 3233//970 +f 3235//973 3236//971 3203//973 +f 3236//971 3235//973 3262//971 +f 3237//994 3236//994 3263//994 +f 3236//994 3237//994 3204//994 +f 3264//95 3205//95 3237//95 +f 3205//95 3264//95 3238//95 +f 3265//1856 3206//1856 3238//1856 +f 3206//1856 3265//1856 3239//1856 +f 3266//930 3207//930 3239//930 +f 3207//930 3266//930 3240//930 +f 3240//1320 3267//1320 3241//1320 +f 3267//1320 3240//1320 3268//1320 +f 3269//1321 3208//1321 3241//1321 +f 3208//1321 3269//1321 3242//1321 +f 3270//1674 3209//1674 3242//1674 +f 3209//1674 3270//1674 3243//1674 
+f 3271//999 3243//999 3270//999 +f 3243//999 3271//999 3244//999 +f 3272//937 3211//937 3244//937 +f 3211//937 3272//937 3245//937 +f 3245//1740 3246//1675 3212//1675 +f 3246//1675 3245//1740 3273//1740 +f 3274//1583 3247//1853 3246//1431 +f 3247//1853 3274//1583 3248//1669 +f 3274//1432 3249//1579 3248//1400 +f 3249//1579 3274//1432 3275//165 +f 3275//947 3250//946 3249//946 +f 3250//946 3275//947 3276//947 +f 3276//1889 3251//1854 3250//1850 +f 3251//1854 3276//1889 3277//1859 +f 3251//985 3278//1008 3252//949 +f 3278//1008 3251//985 3277//1009 +f 3252//951 3279//950 3253//951 +f 3279//950 3252//951 3278//950 +f 3253//987 3280//1890 3254//1670 +f 3280//1890 3253//987 3279//246 +f 3255//1855 3280//1011 3256//1012 +f 3280//1011 3255//1855 3254//988 +f 3256//1013 3258//1013 3227//1013 +f 3258//1013 3256//1013 3281//1013 +f 3258//958 3282//958 3257//958 +f 3282//958 3258//958 3283//958 +f 3257//964 3284//964 3259//964 +f 3284//964 3257//964 3282//964 +f 3232//1016 3285//1016 3261//1016 +f 3285//1016 3232//1016 3259//1016 +f 3235//970 3261//970 3262//970 +f 3261//970 3235//970 3260//970 +f 3262//1017 3263//1017 3236//1017 +f 3263//1017 3262//1017 3286//1017 +f 3287//941 3237//941 3263//941 +f 3237//941 3287//941 3264//941 +f 3288//1019 3238//1019 3264//1019 +f 3238//1019 3288//1019 3265//1019 +f 3265//1839 3266//1839 3239//1839 +f 3266//1839 3265//1839 3289//1839 +f 3266//930 3268//930 3240//930 +f 3268//930 3266//930 3290//930 +f 3291//1587 3268//1588 3292//1778 +f 3268//1588 3291//1587 3267//1447 +f 3267//1321 3269//1321 3241//1321 +f 3269//1321 3267//1321 3293//1321 +f 3294//1629 3242//1629 3269//1629 +f 3242//1629 3294//1629 3270//1629 +f 3295//858 3296//1860 3297//860 +f 3297//1891 3270//1891 3294//1891 +f 3270//858 3297//860 3271//1862 +f 3271//1862 3297//860 3296//1860 +f 3298//1027 3244//1028 3271//1027 +f 3244//1028 3298//1027 3272//1028 +f 3299//1664 3245//1664 3272//1664 +f 3245//1664 3299//1664 3273//1664 +f 3273//1591 3274//1456 3246//1456 +f 3274//1456 
3273//1591 3300//1591 +f 3300//1458 3275//1459 3274//1460 +f 3275//1459 3300//1458 3301//1595 +f 3276//1036 3301//1036 3302//1036 +f 3301//1036 3276//1036 3275//1036 +f 3277//1863 3302//1892 3303//1864 +f 3302//1892 3277//1863 3276//1865 +f 3278//1043 3303//1044 3304//1043 +f 3303//1044 3278//1043 3277//1044 +f 3279//1045 3304//1045 3305//1045 +f 3304//1045 3279//1045 3278//1045 +f 3280//1046 3305//1047 3306//1047 +f 3305//1047 3280//1046 3279//1046 +f 3256//1048 3306//1868 3281//1868 +f 3306//1868 3256//1048 3280//1048 +f 3281//990 3283//990 3258//990 +f 3283//990 3281//990 3307//990 +f 3282//1051 3308//1051 3309//1051 +f 3308//1051 3282//1051 3283//1051 +f 3284//1052 3309//1052 3310//1052 +f 3309//1052 3284//1052 3282//1052 +f 3259//969 3311//969 3285//969 +f 3311//969 3259//969 3284//969 +f 3261//1054 3286//1054 3262//1054 +f 3286//1054 3261//1054 3285//1054 +f 3312//971 3263//971 3286//971 +f 3263//971 3312//971 3287//971 +f 3313//1056 3264//1056 3287//1056 +f 3264//1056 3313//1056 3288//1056 +f 3288//95 3289//95 3265//95 +f 3289//95 3288//95 3314//95 +f 3289//1839 3290//1839 3266//1839 +f 3290//1839 3289//1839 3315//1839 +f 3292//1058 3290//1059 3316//1058 +f 3290//1059 3292//1058 3268//1059 +f 3317//1472 3318//1893 3319//1267 +f 3318//1893 3317//1472 3320//1473 +f 3320//1473 3317//1472 3321//1474 +f 3320//1473 3321//1474 3322//1065 +f 3322//1065 3321//1474 3323//1065 +f 3322//1065 3323//1065 3324//1066 +f 3324//1066 3323//1065 3325//1066 +f 3324//1066 3325//1066 3326//1067 +f 3326//1067 3325//1066 3327//1068 +f 3326//1067 3327//1068 3328//1069 +f 3328//1069 3327//1068 3329//1070 +f 3328//1069 3329//1070 3330//1475 +f 3330//1475 3329//1070 3331//1263 +f 3330//1475 3331//1263 3332//1071 +f 3332//1071 3331//1263 3333//1071 +f 3332//1071 3333//1071 3334//1065 +f 3334//1065 3333//1071 3291//1065 +f 3334//1065 3291//1065 3335//1072 +f 3335//1072 3291//1065 3292//1265 +f 3335//1072 3292//1265 3336//1074 +f 3336//1074 3292//1265 3316//1069 +f 3336//1074 3316//1069 
3337//1267 +f 3291//1476 3293//1691 3267//1476 +f 3293//1691 3291//1476 3333//1477 +f 3293//1629 3294//1629 3269//1629 +f 3294//1629 3293//1629 3297//1629 +f 3331//1078 3295//1078 3297//1078 +f 3295//1078 3331//1078 3329//1078 +f 3338//937 3339//1271 3295//1894 +f 3295//1895 3271//1895 3296//1895 +f 3271//937 3295//1894 3298//1896 +f 3298//1896 3295//1894 3339//1271 +f 3340//1696 3272//1695 3298//1696 +f 3272//1695 3340//1696 3299//1695 +f 3341//1399 3273//1399 3299//1399 +f 3273//1399 3341//1399 3300//1399 +f 3300//1547 3342//1547 3301//1547 +f 3342//1547 3300//1547 3341//1547 +f 3301//947 3343//947 3302//947 +f 3343//947 3301//947 3342//947 +f 3302//1851 3344//1851 3303//1851 +f 3344//1851 3302//1851 3343//1851 +f 3303//215 3345//215 3304//215 +f 3345//215 3303//215 3344//215 +f 3345//950 3305//950 3304//950 +f 3305//950 3345//950 3346//950 +f 3346//1177 3306//1177 3305//1177 +f 3306//1177 3346//1177 3347//1177 +f 3306//954 3307//954 3281//954 +f 3307//954 3306//954 3347//954 +f 3283//1086 3348//1086 3308//1086 +f 3348//1086 3283//1086 3307//1086 +f 3309//967 3349//967 3350//967 +f 3349//1749 3309//958 3308//1277 +f 3349//1749 3308//1277 3351//956 +f 3352//958 3349//1749 3351//956 +f 3310//1897 3353//1897 3354//1897 +f 3353//1898 3310//964 3309//965 +f 3353//1898 3309//965 3350//1736 +f 3349//964 3353//1898 3350//1736 +f 3311//1281 3310//1095 3355//1095 +f 3310//1095 3311//1281 3284//1281 +f 3285//970 3312//970 3286//970 +f 3312//970 3285//970 3311//970 +f 3287//1098 3356//1098 3313//1098 +f 3356//1098 3287//1098 3312//1098 +f 3313//941 3314//941 3288//941 +f 3314//941 3313//941 3357//941 +f 3358//95 3289//95 3314//95 +f 3289//95 3358//95 3315//95 +f 3316//1502 3315//1875 3337//1876 +f 3315//1875 3316//1502 3290//1501 +f 3336//1104 3359//1283 3335//1104 +f 3359//1283 3336//1104 3358//1103 +f 3335//1105 3360//1284 3334//1285 +f 3360//1284 3335//1105 3359//1108 +f 3361//1111 3334//1109 3360//1111 +f 3334//1109 3361//1111 3332//1109 +f 3353//1113 3332//1114 
3361//1113 +f 3332//1114 3353//1113 3330//1114 +f 3349//1115 3330//1115 3353//1115 +f 3330//1115 3349//1115 3328//1115 +f 3352//1118 3328//1118 3349//1118 +f 3328//1118 3352//1118 3326//1118 +f 3362//1291 3326//1292 3352//1291 +f 3326//1292 3362//1291 3324//1292 +f 3363//1122 3324//1293 3362//1899 +f 3324//1293 3363//1122 3322//1125 +f 3364//1126 3322//1127 3363//1294 +f 3322//1127 3364//1126 3320//1295 +f 3365//1296 3320//1131 3364//1132 +f 3320//1131 3365//1296 3318//1133 +f 3366//1134 3318//1299 3365//1136 +f 3318//1299 3366//1134 3319//1137 +f 3367//1141 3319//1878 3366//1879 +f 3319//1878 3367//1141 3317//1880 +f 3368//1142 3317//1143 3367//1304 +f 3317//1143 3368//1142 3321//1143 +f 3369//1710 3321//1523 3368//1524 +f 3321//1523 3369//1710 3323//1525 +f 3325//1526 3369//1620 3370//1528 +f 3369//1620 3325//1526 3323//1529 +f 3327//1712 3370//1713 3338//1713 +f 3370//1713 3327//1712 3325//1712 +f 3329//1154 3338//1154 3295//1154 +f 3338//1154 3329//1154 3327//1154 +f 3333//1754 3297//1714 3293//1714 +f 3297//1714 3333//1754 3331//1754 +f 3337//1716 3358//1803 3336//1158 +f 3358//1803 3337//1716 3315//1311 +f 3338//1664 3371//1664 3339//1664 +f 3371//1664 3338//1664 3370//1664 +f 3371//1664 3298//1664 3339//1664 +f 3298//1664 3371//1664 3340//1664 +f 3372//1535 3299//1535 3340//1535 +f 3299//1535 3372//1535 3341//1535 +f 3373//1881 3341//1881 3372//1881 +f 3341//1881 3373//1881 3342//1881 +f 3342//1165 3374//1165 3343//1165 +f 3374//1165 3342//1165 3373//1165 +f 3343//1882 3375//1882 3344//1882 +f 3375//1882 3343//1882 3374//1882 +f 3344//1538 3376//1538 3345//1538 +f 3376//1538 3344//1538 3375//1538 +f 3345//1170 3377//1170 3346//1170 +f 3377//1170 3345//1170 3376//1170 +f 3346//1317 3378//1317 3347//1317 +f 3378//1317 3346//1317 3377//1317 +f 3347//1173 3348//1173 3307//1173 +f 3348//1173 3347//1173 3378//1173 +f 3308//990 3379//990 3351//990 +f 3379//990 3308//990 3348//990 +f 3379//990 3352//990 3351//990 +f 3352//990 3379//990 3362//990 +f 3354//969 
3361//969 3380//969 +f 3361//969 3354//969 3353//969 +f 3355//969 3354//969 3380//969 +f 3354//969 3355//969 3310//969 +f 3312//1175 3355//1175 3356//1175 +f 3355//1175 3312//1175 3311//1175 +f 3356//971 3357//971 3313//971 +f 3357//971 3356//971 3381//971 +f 3359//941 3314//941 3357//941 +f 3314//941 3359//941 3358//941 +f 3360//1055 3357//971 3381//971 +f 3357//971 3360//1055 3359//1055 +f 3380//970 3360//970 3381//970 +f 3360//970 3380//970 3361//970 +f 3382//954 3362//954 3379//954 +f 3362//954 3382//954 3363//954 +f 3364//1177 3382//1177 3383//1177 +f 3382//1177 3364//1177 3363//1177 +f 3365//950 3383//950 3384//950 +f 3383//950 3365//950 3364//950 +f 3366//215 3384//215 3385//215 +f 3384//215 3366//215 3365//215 +f 3386//1851 3366//1851 3385//1851 +f 3366//1851 3386//1851 3367//1851 +f 3387//947 3367//947 3386//947 +f 3367//947 3387//947 3368//947 +f 3388//1547 3368//1547 3387//1547 +f 3368//1547 3388//1547 3369//1547 +f 3370//1399 3388//1399 3371//1399 +f 3388//1399 3370//1399 3369//1399 +f 3388//1399 3340//1398 3371//1399 +f 3340//1398 3388//1399 3372//1398 +f 3388//1547 3373//1485 3372//1401 +f 3373//1485 3388//1547 3387//1547 +f 3387//947 3374//947 3373//947 +f 3374//947 3387//947 3386//947 +f 3386//1851 3375//1851 3374//1851 +f 3375//1851 3386//1851 3385//1851 +f 3385//215 3376//1180 3375//1180 +f 3376//1180 3385//215 3384//215 +f 3376//950 3383//950 3377//950 +f 3383//950 3376//950 3384//950 +f 3377//1085 3382//1177 3378//1085 +f 3382//1177 3377//1085 3383//1177 +f 3348//954 3382//954 3379//954 +f 3382//954 3348//954 3378//954 +f 3356//970 3380//970 3381//970 +f 3380//970 3356//970 3355//970 +f 3389//1900 3390//1901 3391//1901 +f 3390//1901 3389//1900 3392//1900 +f 3391//1902 3393//1903 3394//1904 +f 3393//1903 3391//1902 3390//1905 +f 3395//1906 3391//1907 3396//1908 +f 3391//1907 3395//1906 3389//1909 +f 3389//1910 3397//1911 3392//1910 +f 3397//1911 3389//1910 3398//1911 +f 3392//1912 3399//1913 3390//1914 +f 3399//1913 3392//1912 3400//1915 +f 
3394//1916 3401//1917 3402//1918 +f 3401//1917 3394//1916 3393//1919 +f 3396//1920 3394//1921 3403//1922 +f 3394//1921 3396//1920 3391//1923 +f 3390//1924 3404//1925 3393//1926 +f 3404//1925 3390//1924 3399//1927 +f 3405//1928 3396//1929 3406//1930 +f 3396//1929 3405//1928 3395//1931 +f 3395//1932 3398//1933 3389//1934 +f 3398//1933 3395//1932 3407//1935 +f 3398//1936 3408//1937 3397//1938 +f 3408//1937 3398//1936 3409//1939 +f 3392//1940 3410//1941 3400//1942 +f 3410//1941 3392//1940 3397//1943 +f 3399//1944 3411//1945 3412//1946 +f 3411//1945 3399//1944 3400//1947 +f 3402//1948 3413//1949 3414//1950 +f 3413//1949 3402//1948 3401//1951 +f 3403//1952 3402//1953 3415//1954 +f 3402//1953 3403//1952 3394//1955 +f 3393//1956 3416//1957 3401//1958 +f 3416//1957 3393//1956 3404//1959 +f 3406//1960 3403//1961 3417//1962 +f 3403//1961 3406//1960 3396//1963 +f 3399//1964 3418//1965 3404//1966 +f 3418//1965 3399//1964 3412//1967 +f 3419//1968 3406//1969 3420//1970 +f 3406//1969 3419//1968 3405//1971 +f 3421//1972 3395//1973 3405//1974 +f 3395//1973 3421//1972 3407//1975 +f 3407//1976 3409//1977 3398//1976 +f 3409//1977 3407//1976 3422//1977 +f 3409//1978 3423//1979 3408//1980 +f 3423//1979 3409//1978 3424//1981 +f 3397//1982 3425//1983 3410//1984 +f 3425//1983 3397//1982 3408//1985 +f 3400//1986 3426//1987 3411//1986 +f 3426//1987 3400//1986 3410//1987 +f 3412//1988 3427//1989 3428//1990 +f 3427//1989 3412//1988 3411//1991 +f 3414//1992 3429//1993 3430//1994 +f 3429//1993 3414//1992 3413//1995 +f 3415//1996 3414//1997 3431//1998 +f 3414//1997 3415//1996 3402//1999 +f 3401//2000 3432//2001 3413//2002 +f 3432//2001 3401//2000 3416//2003 +f 3417//2004 3415//2005 3433//2006 +f 3415//2005 3417//2004 3403//2007 +f 3404//2008 3434//2009 3416//2009 +f 3434//2009 3404//2008 3418//2010 +f 3420//2011 3417//2012 3435//2013 +f 3417//2012 3420//2011 3406//2014 +f 3412//2015 3436//2016 3418//2017 +f 3436//2016 3412//2015 3428//2018 +f 3437//2019 3419//2020 3420//2021 +f 3419//2020 
3437//2019 3438//2022 +f 3419//2023 3421//2024 3405//2025 +f 3421//2024 3419//2023 3439//2026 +f 3421//2027 3422//2028 3407//2029 +f 3422//2028 3421//2027 3440//2028 +f 3422//2030 3424//2031 3409//2031 +f 3424//2031 3422//2030 3441//2032 +f 3424//2033 3442//2034 3423//2035 +f 3442//2034 3424//2033 3443//2036 +f 3408//2037 3444//2038 3425//2039 +f 3444//2038 3408//2037 3423//2040 +f 3410//2041 3445//2042 3426//2043 +f 3445//2042 3410//2041 3425//2042 +f 3411//2044 3446//2045 3427//2044 +f 3446//2045 3411//2044 3426//2046 +f 3427//2047 3447//2048 3428//2049 +f 3447//2048 3427//2047 3448//2050 +f 3449//2051 3429//2052 3450//2051 +f 3429//2052 3449//2051 3430//2053 +f 3451//2054 3414//2055 3430//2056 +f 3414//2055 3451//2054 3431//2057 +f 3413//2058 3452//2059 3429//2060 +f 3452//2059 3413//2058 3432//2061 +f 3453//2062 3415//2063 3431//2064 +f 3415//2063 3453//2062 3433//2065 +f 3416//2066 3454//2067 3432//2068 +f 3454//2067 3416//2066 3434//2069 +f 3455//2070 3417//2071 3433//2070 +f 3417//2071 3455//2070 3435//2072 +f 3418//2073 3456//2074 3434//2075 +f 3456//2074 3418//2073 3436//2076 +f 3457//2077 3420//2078 3435//2079 +f 3420//2078 3457//2077 3437//2080 +f 3428//2081 3458//2082 3436//2082 +f 3458//2082 3428//2081 3447//2081 +f 3459//2083 3438//2084 3437//2083 +f 3438//2084 3459//2083 3460//2084 +f 3438//2085 3439//2086 3419//2087 +f 3439//2086 3438//2085 3461//2088 +f 3439//2089 3440//2090 3421//2091 +f 3440//2090 3439//2089 3462//2092 +f 3440//2093 3441//2093 3422//2094 +f 3441//2093 3440//2093 3463//2095 +f 3441//2096 3443//2097 3424//2098 +f 3443//2097 3441//2096 3464//2099 +f 3442//2100 3465//2101 3466//2102 +f 3465//2101 3442//2100 3443//2103 +f 3423//2104 3467//2105 3444//2106 +f 3467//2105 3423//2104 3442//2107 +f 3425//2108 3468//2109 3445//2108 +f 3468//2109 3425//2108 3444//2109 +f 3426//2110 3469//2111 3446//2110 +f 3469//2111 3426//2110 3445//2112 +f 3446//2113 3448//2114 3427//2114 +f 3448//2114 3446//2113 3470//2113 +f 3448//2115 3471//2116 
3447//2117 +f 3471//2116 3448//2115 3472//2118 +f 3450//2119 3473//2120 3449//2121 +f 3473//2120 3450//2119 3474//2122 +f 3474//2122 3450//2119 3475//2123 +f 3474//2122 3475//2123 3476//2124 +f 3476//2124 3475//2123 3477//2125 +f 3476//2124 3477//2125 3478//2126 +f 3478//2126 3477//2125 3479//2127 +f 3478//2126 3479//2127 3480//2128 +f 3480//2128 3479//2127 3481//2129 +f 3480//2128 3481//2129 3482//2130 +f 3482//2130 3481//2129 3483//2130 +f 3482//2130 3483//2130 3484//2131 +f 3484//2131 3483//2130 3485//2132 +f 3484//2131 3485//2132 3486//2125 +f 3486//2125 3485//2132 3487//2133 +f 3486//2125 3487//2133 3488//2134 +f 3488//2134 3487//2133 3489//2135 +f 3488//2134 3489//2135 3490//2136 +f 3490//2136 3489//2135 3491//2137 +f 3490//2136 3491//2137 3492//2138 +f 3492//2138 3491//2137 3493//2121 +f 3492//2138 3493//2121 3494//2139 +f 3473//2140 3430//2141 3449//2142 +f 3430//2141 3473//2140 3451//2143 +f 3450//2144 3452//2145 3475//2146 +f 3452//2145 3450//2144 3429//2147 +f 3495//2148 3431//2149 3451//2148 +f 3431//2149 3495//2148 3453//2149 +f 3432//2150 3496//2151 3452//2152 +f 3496//2151 3432//2150 3454//2153 +f 3497//2154 3433//2155 3453//2156 +f 3433//2155 3497//2154 3455//2157 +f 3434//2158 3498//2159 3454//2160 +f 3498//2159 3434//2158 3456//2161 +f 3455//2162 3457//2163 3435//2164 +f 3457//2163 3455//2162 3499//2165 +f 3458//2166 3456//2167 3436//2168 +f 3456//2167 3458//2166 3500//2169 +f 3501//2170 3437//2171 3457//2172 +f 3437//2171 3501//2170 3459//2173 +f 3447//2174 3502//2175 3458//2176 +f 3502//2175 3447//2174 3471//2177 +f 3503//2178 3460//2179 3459//2180 +f 3460//2179 3503//2178 3504//2181 +f 3460//2182 3461//2183 3438//2184 +f 3461//2183 3460//2182 3505//2185 +f 3461//2186 3462//2187 3439//2186 +f 3462//2187 3461//2186 3506//2187 +f 3462//2188 3463//2189 3440//2190 +f 3463//2189 3462//2188 3507//2191 +f 3463//2192 3464//2193 3441//2194 +f 3464//2193 3463//2192 3508//2195 +f 3443//2196 3509//2197 3465//2198 +f 3509//2197 3443//2196 3464//2199 +f 
3466//2200 3510//2201 3511//2202 +f 3510//2201 3466//2200 3465//2203 +f 3467//2204 3466//2205 3512//2206 +f 3466//2205 3467//2204 3442//2207 +f 3444//2208 3513//2209 3468//2210 +f 3513//2209 3444//2208 3467//2211 +f 3445//2212 3514//2213 3469//2214 +f 3514//2213 3445//2212 3468//2215 +f 3469//2216 3470//2217 3446//2218 +f 3470//2217 3469//2216 3515//2219 +f 3470//2220 3472//2221 3448//2222 +f 3472//2221 3470//2220 3516//2223 +f 3472//2224 3517//2225 3471//2226 +f 3517//2225 3472//2224 3518//2227 +f 3519//2228 3493//2229 3491//2230 +f 3493//2229 3519//2228 3520//2228 +f 3520//2231 3494//2232 3493//2233 +f 3494//2232 3520//2231 3521//2234 +f 3521//2235 3492//2236 3494//2237 +f 3492//2236 3521//2235 3522//2238 +f 3522//2239 3490//2240 3492//2241 +f 3490//2240 3522//2239 3523//2242 +f 3523//2243 3488//2244 3490//2245 +f 3488//2244 3523//2243 3524//2246 +f 3486//2247 3524//2248 3525//2249 +f 3524//2248 3486//2247 3488//2250 +f 3484//2251 3525//2252 3526//2253 +f 3525//2252 3484//2251 3486//2254 +f 3482//2255 3526//2256 3527//2257 +f 3526//2256 3482//2255 3484//2258 +f 3480//2259 3527//2260 3528//2261 +f 3527//2260 3480//2259 3482//2262 +f 3478//2263 3528//2264 3529//2265 +f 3528//2264 3478//2263 3480//2266 +f 3476//2267 3529//2268 3530//2269 +f 3529//2268 3476//2267 3478//2270 +f 3476//2271 3495//2272 3474//2273 +f 3495//2272 3476//2271 3530//2274 +f 3474//2275 3451//2276 3473//2277 +f 3451//2276 3474//2275 3495//2275 +f 3475//2278 3496//2279 3477//2280 +f 3496//2279 3475//2278 3452//2279 +f 3531//2281 3477//2282 3496//2283 +f 3477//2282 3531//2281 3479//2284 +f 3532//2285 3479//2286 3531//2287 +f 3479//2286 3532//2285 3481//2288 +f 3533//2289 3481//2290 3532//2291 +f 3481//2290 3533//2289 3483//2292 +f 3534//2293 3483//2294 3533//2295 +f 3483//2294 3534//2293 3485//2296 +f 3535//2297 3485//2297 3534//2298 +f 3485//2297 3535//2297 3487//2299 +f 3536//2300 3487//2301 3535//2302 +f 3487//2301 3536//2300 3489//2303 +f 3536//2304 3491//2305 3489//2305 +f 3491//2305 
3536//2304 3519//2304 +f 3530//2306 3453//2307 3495//2306 +f 3453//2307 3530//2306 3497//2308 +f 3498//2309 3496//2310 3454//2311 +f 3496//2310 3498//2309 3531//2312 +f 3497//2313 3499//2314 3455//2315 +f 3499//2314 3497//2313 3537//2316 +f 3500//2317 3498//2318 3456//2319 +f 3498//2318 3500//2317 3538//2318 +f 3499//2320 3501//2321 3457//2321 +f 3501//2321 3499//2320 3539//2322 +f 3502//2323 3500//2324 3458//2325 +f 3500//2324 3502//2323 3540//2326 +f 3541//2327 3459//2327 3501//2327 +f 3459//2180 3541//2328 3503//2178 +f 3471//2329 3542//2330 3502//2331 +f 3542//2330 3471//2329 3517//2332 +f 3543//2333 3504//2334 3503//2333 +f 3504//2334 3543//2333 3544//2335 +f 3504//2181 3505//2336 3460//2179 +f 3505//2336 3504//2181 3545//2337 +f 3505//2338 3506//2339 3461//2340 +f 3506//2339 3505//2338 3546//2341 +f 3506//2342 3507//2343 3462//2344 +f 3507//2343 3506//2342 3547//2345 +f 3507//2346 3508//2347 3463//2346 +f 3508//2347 3507//2346 3548//2347 +f 3464//2348 3549//2349 3509//2350 +f 3549//2349 3464//2348 3508//2351 +f 3465//2352 3550//2353 3510//2353 +f 3550//2353 3465//2352 3509//2352 +f 3511//2354 3551//2354 3552//2355 +f 3551//2354 3511//2354 3510//2356 +f 3512//2357 3511//2358 3553//2359 +f 3511//2358 3512//2357 3466//2360 +f 3513//2361 3512//2362 3554//2363 +f 3512//2362 3513//2361 3467//2364 +f 3468//2365 3555//2365 3514//2365 +f 3555//2365 3468//2365 3513//2365 +f 3514//2366 3515//2367 3469//2368 +f 3515//2367 3514//2366 3556//2369 +f 3515//2370 3516//2371 3470//2372 +f 3516//2371 3515//2370 3557//2373 +f 3516//2374 3518//2227 3472//2224 +f 3518//2227 3516//2374 3558//2375 +f 3518//2376 3559//2377 3517//2378 +f 3559//2377 3518//2376 3560//2379 +f 3561//2380 3520//2381 3519//2382 +f 3520//2381 3561//2380 3562//2383 +f 3562//2384 3521//2385 3520//2386 +f 3521//2385 3562//2384 3563//2387 +f 3521//2388 3564//2389 3522//2390 +f 3564//2389 3521//2388 3563//2391 +f 3522//2392 3565//2393 3523//2394 +f 3565//2393 3522//2392 3564//2392 +f 3523//2395 3566//2396 
3524//2397 +f 3566//2396 3523//2395 3565//2398 +f 3525//2399 3566//2400 3567//2401 +f 3566//2400 3525//2399 3524//2402 +f 3526//2403 3567//2404 3568//2405 +f 3567//2404 3526//2403 3525//2406 +f 3527//2407 3568//2408 3569//2409 +f 3568//2408 3527//2407 3526//2410 +f 3528//2261 3569//2411 3570//2412 +f 3569//2411 3528//2261 3527//2260 +f 3529//2413 3570//2414 3537//2415 +f 3570//2414 3529//2413 3528//2416 +f 3530//2417 3537//2418 3497//2419 +f 3537//2418 3530//2417 3529//2420 +f 3538//2421 3531//2422 3498//2423 +f 3531//2422 3538//2421 3532//2424 +f 3571//2425 3532//2426 3538//2427 +f 3532//2426 3571//2425 3533//2428 +f 3572//2429 3533//2295 3571//2430 +f 3533//2295 3572//2429 3534//2293 +f 3573//2431 3534//2432 3572//2433 +f 3534//2432 3573//2431 3535//2434 +f 3574//2435 3535//2436 3573//2437 +f 3535//2436 3574//2435 3536//2438 +f 3574//2439 3519//2440 3536//2441 +f 3519//2440 3574//2439 3561//2442 +f 3537//2443 3539//2443 3499//2443 +f 3539//2443 3537//2443 3570//2443 +f 3540//2444 3538//2445 3500//2444 +f 3538//2445 3540//2444 3571//2445 +f 3539//2446 3541//2447 3501//2447 +f 3541//2447 3539//2446 3575//2448 +f 3542//2449 3540//2450 3502//2449 +f 3540//2450 3542//2449 3576//2450 +f 3577//2451 3503//2452 3541//2453 +f 3503//2452 3577//2451 3543//2454 +f 3517//2455 3578//2456 3542//2457 +f 3578//2456 3517//2455 3559//2458 +f 3579//2459 3544//2460 3543//2461 +f 3544//2460 3579//2459 3580//2462 +f 3544//2463 3545//2464 3504//2465 +f 3545//2464 3544//2463 3581//2466 +f 3545//2337 3546//2467 3505//2336 +f 3546//2467 3545//2337 3582//2468 +f 3546//2469 3547//2470 3506//2471 +f 3547//2470 3546//2469 3583//2472 +f 3547//2473 3548//2474 3507//2475 +f 3548//2474 3547//2473 3584//2476 +f 3508//2477 3585//2478 3549//2479 +f 3585//2478 3508//2477 3548//2480 +f 3509//2481 3586//2482 3550//2483 +f 3586//2482 3509//2481 3549//2484 +f 3510//2485 3587//2486 3551//2487 +f 3587//2486 3510//2485 3550//2488 +f 3588//2489 3551//2490 3589//2491 +f 3551//2490 3588//2489 3552//2492 +f 
3553//2493 3552//2494 3590//2494 +f 3552//2494 3553//2493 3511//2493 +f 3554//2495 3553//2496 3591//2497 +f 3553//2496 3554//2495 3512//2498 +f 3555//2499 3554//2500 3592//2501 +f 3554//2500 3555//2499 3513//2502 +f 3555//2503 3556//2504 3514//2505 +f 3556//2504 3555//2503 3593//2506 +f 3556//2507 3557//2508 3515//2509 +f 3557//2508 3556//2507 3594//2510 +f 3557//2511 3558//2375 3516//2374 +f 3558//2375 3557//2511 3595//2512 +f 3558//2513 3560//2514 3518//2515 +f 3560//2514 3558//2513 3596//2516 +f 3560//2517 3597//2518 3559//2519 +f 3597//2518 3560//2517 3598//2520 +f 3599//2521 3562//2522 3561//2523 +f 3562//2522 3599//2521 3600//2524 +f 3562//2525 3601//2526 3563//2525 +f 3601//2526 3562//2525 3600//2526 +f 3563//2527 3602//2528 3564//2527 +f 3602//2528 3563//2527 3601//2528 +f 3564//2529 3603//2530 3565//2531 +f 3603//2530 3564//2529 3602//2530 +f 3565//2532 3604//2533 3566//2534 +f 3604//2533 3565//2532 3603//2535 +f 3566//2536 3605//2537 3567//2538 +f 3605//2537 3566//2536 3604//2539 +f 3568//2540 3605//2541 3606//2542 +f 3605//2541 3568//2540 3567//2543 +f 3569//2544 3606//2544 3575//2544 +f 3606//2544 3569//2544 3568//2544 +f 3570//2412 3575//2545 3539//2546 +f 3575//2545 3570//2412 3569//2411 +f 3576//2547 3571//2430 3540//2548 +f 3571//2430 3576//2547 3572//2429 +f 3607//2549 3572//2550 3576//2551 +f 3572//2550 3607//2549 3573//2552 +f 3608//2553 3573//2554 3607//2553 +f 3573//2554 3608//2553 3574//2554 +f 3608//2555 3561//2556 3574//2557 +f 3561//2556 3608//2555 3599//2558 +f 3575//2559 3577//2560 3541//2561 +f 3577//2560 3575//2559 3606//2562 +f 3578//2563 3576//2564 3542//2565 +f 3576//2564 3578//2563 3607//2566 +f 3609//2567 3543//2568 3577//2569 +f 3543//2568 3609//2567 3579//2570 +f 3559//2571 3610//2572 3578//2573 +f 3610//2572 3559//2571 3597//2574 +f 3611//2575 3579//2576 3612//2576 +f 3579//2576 3611//2575 3580//2575 +f 3580//2577 3581//2578 3544//2577 +f 3581//2578 3580//2577 3613//2578 +f 3581//2579 3582//2580 3545//2581 +f 3582//2580 
3581//2579 3614//2582 +f 3582//2468 3583//2583 3546//2467 +f 3583//2583 3582//2468 3615//2584 +f 3583//2585 3584//2586 3547//2587 +f 3584//2586 3583//2585 3616//2588 +f 3617//2589 3548//2590 3584//2591 +f 3548//2590 3617//2589 3585//2592 +f 3549//2593 3618//2594 3586//2595 +f 3618//2594 3549//2593 3585//2596 +f 3550//2597 3619//2598 3587//2599 +f 3619//2598 3550//2597 3586//2600 +f 3589//2601 3587//2602 3620//2601 +f 3587//2602 3589//2601 3551//2602 +f 3621//2603 3589//2604 3622//2605 +f 3589//2604 3621//2603 3588//2606 +f 3590//2607 3588//2608 3623//2609 +f 3588//2608 3590//2607 3552//2610 +f 3591//2611 3590//2612 3624//2613 +f 3590//2612 3591//2611 3553//2614 +f 3592//2615 3591//2616 3625//2617 +f 3591//2616 3592//2615 3554//2618 +f 3555//2619 3626//2620 3593//2621 +f 3626//2620 3555//2619 3592//2622 +f 3593//2623 3594//2624 3556//2625 +f 3594//2624 3593//2623 3627//2626 +f 3594//2627 3595//2512 3557//2511 +f 3595//2512 3594//2627 3628//2628 +f 3595//2629 3596//2630 3558//2631 +f 3596//2630 3595//2629 3629//2632 +f 3596//2633 3598//2634 3560//2635 +f 3598//2634 3596//2633 3630//2636 +f 3597//2637 3631//2638 3632//2639 +f 3631//2638 3597//2637 3598//2640 +f 3599//2641 3633//2642 3600//2643 +f 3633//2642 3599//2641 3634//2644 +f 3600//2645 3635//2646 3601//2645 +f 3635//2646 3600//2645 3633//2646 +f 3601//2647 3636//2648 3602//2649 +f 3636//2648 3601//2647 3635//2648 +f 3602//2650 3637//2651 3603//2652 +f 3637//2651 3602//2650 3636//2653 +f 3603//2654 3638//2655 3604//2656 +f 3638//2655 3603//2654 3637//2657 +f 3604//2658 3609//2659 3605//2658 +f 3609//2659 3604//2658 3638//2660 +f 3606//2661 3609//2662 3577//2663 +f 3609//2662 3606//2661 3605//2664 +f 3610//2665 3607//2666 3578//2667 +f 3607//2666 3610//2665 3608//2668 +f 3610//2669 3599//2670 3608//2671 +f 3599//2670 3610//2669 3634//2672 +f 3638//2673 3579//2674 3609//2675 +f 3579//2674 3638//2673 3612//2676 +f 3610//2677 3632//2678 3634//2677 +f 3632//2678 3610//2677 3597//2678 +f 3639//2679 3612//2680 
3640//2681 +f 3612//2680 3639//2679 3611//2682 +f 3641//2683 3580//2684 3611//2684 +f 3580//2684 3641//2683 3613//2683 +f 3613//2685 3614//2686 3581//2687 +f 3614//2686 3613//2685 3642//2688 +f 3614//2689 3615//2690 3582//2691 +f 3615//2690 3614//2689 3643//2692 +f 3615//2584 3616//2693 3583//2583 +f 3616//2693 3615//2584 3644//2694 +f 3645//2695 3584//2696 3616//2697 +f 3584//2696 3645//2695 3617//2698 +f 3646//2699 3585//2700 3617//2701 +f 3585//2700 3646//2699 3618//2702 +f 3647//2703 3586//2704 3618//2705 +f 3586//2704 3647//2703 3619//2706 +f 3620//2707 3619//2708 3648//2709 +f 3619//2708 3620//2707 3587//2710 +f 3622//2711 3620//2712 3649//2711 +f 3620//2712 3622//2711 3589//2713 +f 3650//2714 3622//2715 3651//2716 +f 3622//2715 3650//2714 3621//2717 +f 3652//2718 3588//2719 3621//2720 +f 3588//2719 3652//2718 3623//2721 +f 3624//2722 3623//2723 3653//2724 +f 3623//2723 3624//2722 3590//2725 +f 3625//2726 3624//2727 3654//2728 +f 3624//2727 3625//2726 3591//2729 +f 3592//2730 3655//2731 3626//2730 +f 3655//2731 3592//2730 3625//2731 +f 3593//2732 3656//2733 3627//2734 +f 3656//2733 3593//2732 3626//2735 +f 3627//2736 3628//2628 3594//2627 +f 3628//2628 3627//2736 3657//2737 +f 3628//2738 3629//2739 3595//2740 +f 3629//2739 3628//2738 3658//2741 +f 3629//2742 3630//2743 3596//2744 +f 3630//2743 3629//2742 3659//2745 +f 3630//2746 3631//2747 3598//2748 +f 3631//2747 3630//2746 3660//2749 +f 3632//2750 3661//2751 3662//2752 +f 3661//2751 3632//2750 3631//2753 +f 3634//2754 3662//2755 3633//2756 +f 3662//2755 3634//2754 3632//2757 +f 3633//2758 3663//2759 3635//2760 +f 3663//2759 3633//2758 3662//2761 +f 3635//2762 3664//2763 3636//2764 +f 3664//2763 3635//2762 3663//2765 +f 3636//2766 3640//2767 3637//2768 +f 3640//2767 3636//2766 3664//2769 +f 3637//2770 3612//2771 3638//2772 +f 3612//2771 3637//2770 3640//2773 +f 3664//2774 3639//2775 3640//2776 +f 3639//2775 3664//2774 3665//2777 +f 3666//2778 3611//2779 3639//2780 +f 3611//2779 3666//2778 3641//2781 +f 
3667//2782 3613//2783 3641//2784 +f 3613//2783 3667//2782 3642//2785 +f 3642//2786 3643//2787 3614//2788 +f 3643//2787 3642//2786 3668//2789 +f 3643//2790 3644//2791 3615//2790 +f 3644//2791 3643//2790 3669//2792 +f 3670//2793 3616//2693 3644//2694 +f 3616//2693 3670//2793 3645//2794 +f 3671//2795 3617//2795 3645//2796 +f 3617//2795 3671//2795 3646//2795 +f 3672//2797 3618//2798 3646//2798 +f 3618//2798 3672//2797 3647//2797 +f 3648//2799 3647//2800 3673//2801 +f 3647//2800 3648//2799 3619//2802 +f 3649//2803 3648//2804 3674//2805 +f 3648//2804 3649//2803 3620//2806 +f 3651//2807 3649//2808 3675//2809 +f 3649//2808 3651//2807 3622//2810 +f 3676//2811 3651//2812 3677//2813 +f 3651//2812 3676//2811 3650//2814 +f 3678//2815 3621//2816 3650//2817 +f 3621//2816 3678//2815 3652//2818 +f 3653//2819 3652//2820 3679//2821 +f 3652//2820 3653//2819 3623//2820 +f 3654//2822 3653//2823 3680//2824 +f 3653//2823 3654//2822 3624//2825 +f 3625//2826 3681//2827 3655//2826 +f 3681//2827 3625//2826 3654//2827 +f 3626//2828 3682//2829 3656//2830 +f 3682//2829 3626//2828 3655//2831 +f 3627//2736 3683//2832 3657//2737 +f 3683//2832 3627//2736 3656//2833 +f 3657//2834 3658//2835 3628//2835 +f 3658//2835 3657//2834 3684//2836 +f 3658//2837 3659//2838 3629//2839 +f 3659//2838 3658//2837 3685//2840 +f 3659//2841 3660//2842 3630//2843 +f 3660//2842 3659//2841 3686//2844 +f 3631//2845 3687//2846 3661//2847 +f 3687//2846 3631//2845 3660//2848 +f 3662//2849 3688//2850 3663//2851 +f 3688//2850 3662//2849 3661//2852 +f 3663//2853 3665//2854 3664//2855 +f 3665//2854 3663//2853 3688//2856 +f 3689//2857 3639//2858 3665//2858 +f 3639//2858 3689//2857 3666//2859 +f 3690//2860 3641//2861 3666//2862 +f 3641//2861 3690//2860 3667//2863 +f 3691//2864 3642//2865 3667//2866 +f 3642//2865 3691//2864 3668//2867 +f 3668//2868 3669//2869 3643//2870 +f 3669//2869 3668//2868 3692//2871 +f 3693//2872 3644//2873 3669//2874 +f 3644//2873 3693//2872 3670//2875 +f 3694//2876 3645//2794 3670//2793 +f 3645//2794 
3694//2876 3671//2877 +f 3695//2878 3646//2879 3671//2880 +f 3646//2879 3695//2878 3672//2881 +f 3696//2882 3647//2883 3672//2884 +f 3647//2883 3696//2882 3673//2885 +f 3674//2886 3673//2887 3697//2886 +f 3673//2887 3674//2886 3648//2888 +f 3675//2889 3674//2890 3698//2891 +f 3674//2890 3675//2889 3649//2892 +f 3677//2893 3675//2894 3699//2895 +f 3675//2894 3677//2893 3651//2896 +f 3700//2897 3677//2898 3701//2899 +f 3677//2898 3700//2897 3676//2900 +f 3702//2901 3650//2902 3676//2903 +f 3650//2902 3702//2901 3678//2904 +f 3703//2905 3652//2906 3678//2905 +f 3652//2906 3703//2905 3679//2906 +f 3680//2907 3679//2908 3704//2909 +f 3679//2908 3680//2907 3653//2910 +f 3654//2911 3705//2912 3681//2913 +f 3705//2912 3654//2911 3680//2914 +f 3655//2915 3706//2916 3682//2917 +f 3706//2916 3655//2915 3681//2918 +f 3656//2833 3707//2919 3683//2832 +f 3707//2919 3656//2833 3682//2920 +f 3657//2921 3708//2922 3684//2923 +f 3708//2922 3657//2921 3683//2924 +f 3684//2925 3685//2926 3658//2927 +f 3685//2926 3684//2925 3709//2928 +f 3685//2929 3686//2930 3659//2931 +f 3686//2930 3685//2929 3710//2932 +f 3686//2933 3687//2934 3660//2933 +f 3687//2934 3686//2933 3711//2935 +f 3687//2936 3688//2937 3661//2938 +f 3688//2937 3687//2936 3712//2939 +f 3712//2940 3665//2941 3688//2942 +f 3665//2941 3712//2940 3689//2943 +f 3713//2944 3666//2944 3689//2945 +f 3666//2944 3713//2944 3690//2946 +f 3714//2947 3667//2948 3690//2949 +f 3667//2948 3714//2947 3691//2950 +f 3715//2951 3668//2952 3691//2953 +f 3668//2952 3715//2951 3692//2954 +f 3716//2955 3669//2956 3692//2957 +f 3669//2956 3716//2955 3693//2958 +f 3717//2959 3670//2960 3693//2961 +f 3670//2960 3717//2959 3694//2962 +f 3718//2963 3671//2877 3694//2876 +f 3671//2877 3718//2963 3695//2964 +f 3719//2965 3672//2966 3695//2967 +f 3672//2966 3719//2965 3696//2968 +f 3720//2969 3673//2970 3696//2971 +f 3673//2970 3720//2969 3697//2969 +f 3698//2972 3697//2973 3721//2974 +f 3697//2973 3698//2972 3674//2975 +f 3675//2976 3722//2977 
3699//2978 +f 3722//2977 3675//2976 3698//2979 +f 3677//2980 3723//2981 3701//2982 +f 3723//2981 3677//2980 3699//2983 +f 3700//2984 3724//2985 3725//2986 +f 3724//2985 3700//2984 3701//2984 +f 3726//2987 3676//2988 3700//2989 +f 3676//2988 3726//2987 3702//2990 +f 3727//2991 3678//2992 3702//2993 +f 3678//2992 3727//2991 3703//2994 +f 3728//2995 3679//2996 3703//2997 +f 3679//2996 3728//2995 3704//2998 +f 3680//2999 3729//3000 3705//2999 +f 3729//3000 3680//2999 3704//3000 +f 3681//3001 3730//3002 3706//3003 +f 3730//3002 3681//3001 3705//3004 +f 3682//2920 3731//3005 3707//2919 +f 3731//3005 3682//2920 3706//3006 +f 3683//3007 3732//3008 3708//3007 +f 3732//3008 3683//3007 3707//3008 +f 3684//3009 3733//3010 3709//3009 +f 3733//3010 3684//3009 3708//3010 +f 3709//3011 3710//3012 3685//3013 +f 3710//3012 3709//3011 3734//3014 +f 3710//3015 3711//3016 3686//3017 +f 3711//3016 3710//3015 3735//3018 +f 3711//3019 3712//3020 3687//3021 +f 3712//3020 3711//3019 3736//3022 +f 3736//3023 3689//3024 3712//3025 +f 3689//3024 3736//3023 3713//3026 +f 3737//3027 3690//3028 3713//3029 +f 3690//3028 3737//3027 3714//3030 +f 3738//3031 3691//3032 3714//3033 +f 3691//3032 3738//3031 3715//3034 +f 3716//3035 3715//3036 3739//3035 +f 3715//3036 3716//3035 3692//3036 +f 3740//3037 3693//3038 3716//3038 +f 3693//3038 3740//3037 3717//3037 +f 3741//3039 3694//3040 3717//3041 +f 3694//3040 3741//3039 3718//3042 +f 3742//3043 3695//2964 3718//2963 +f 3695//2964 3742//3043 3719//3044 +f 3743//3045 3696//3046 3719//3047 +f 3696//3046 3743//3045 3720//3048 +f 3744//3049 3697//3050 3720//3051 +f 3697//3050 3744//3049 3721//3052 +f 3698//3053 3745//3054 3722//3055 +f 3745//3054 3698//3053 3721//3056 +f 3699//3057 3746//3058 3723//3057 +f 3746//3058 3699//3057 3722//3059 +f 3701//3060 3747//3061 3724//3062 +f 3747//3061 3701//3060 3723//3063 +f 3725//3064 3748//3065 3749//3066 +f 3748//3065 3725//3064 3724//3064 +f 3750//3067 3700//3068 3725//3069 +f 3700//3068 3750//3067 3726//3070 +f 
3751//3071 3702//3072 3726//3073 +f 3702//3072 3751//3071 3727//3074 +f 3752//3075 3703//3076 3727//3077 +f 3703//3076 3752//3075 3728//3078 +f 3704//3079 3753//3080 3729//3081 +f 3753//3080 3704//3079 3728//3082 +f 3705//3083 3754//3084 3730//3083 +f 3754//3084 3705//3083 3729//3084 +f 3706//3006 3755//3085 3731//3005 +f 3755//3085 3706//3006 3730//3086 +f 3707//3087 3756//3088 3732//3089 +f 3756//3088 3707//3087 3731//3090 +f 3708//3091 3757//3092 3733//3093 +f 3757//3092 3708//3091 3732//3094 +f 3758//3095 3709//3096 3733//3097 +f 3709//3096 3758//3095 3734//3098 +f 3734//3099 3735//3100 3710//3101 +f 3735//3100 3734//3099 3759//3102 +f 3735//3103 3736//3104 3711//3105 +f 3736//3104 3735//3103 3760//3106 +f 3760//3107 3713//3108 3736//3109 +f 3713//3108 3760//3107 3737//3110 +f 3761//3111 3714//3112 3737//3113 +f 3714//3112 3761//3111 3738//3114 +f 3739//3115 3738//3115 3762//3115 +f 3738//3115 3739//3115 3715//3115 +f 3740//3116 3739//3117 3763//3118 +f 3739//3117 3740//3116 3716//3119 +f 3764//3120 3717//3120 3740//3121 +f 3717//3120 3764//3120 3741//3120 +f 3765//3122 3718//3123 3741//3124 +f 3718//3123 3765//3122 3742//3125 +f 3766//3126 3719//3044 3742//3043 +f 3719//3044 3766//3126 3743//3127 +f 3767//3128 3720//3129 3743//3130 +f 3720//3129 3767//3128 3744//3131 +f 3745//3132 3744//3133 3768//3132 +f 3744//3133 3745//3132 3721//3133 +f 3722//3134 3769//3135 3746//3135 +f 3769//3135 3722//3134 3745//3136 +f 3723//3137 3770//3138 3747//3139 +f 3770//3138 3723//3137 3746//3140 +f 3724//3141 3771//3142 3748//3143 +f 3771//3142 3724//3141 3747//3144 +f 3749//3145 3772//3146 3773//3147 +f 3772//3146 3749//3145 3748//3148 +f 3750//3149 3749//3150 3774//3151 +f 3749//3150 3750//3149 3725//3152 +f 3775//3153 3726//3153 3750//3153 +f 3726//3153 3775//3153 3751//3154 +f 3752//3155 3751//3156 3776//3157 +f 3751//3156 3752//3155 3727//3158 +f 3753//3159 3752//3160 3777//3161 +f 3752//3160 3753//3159 3728//3162 +f 3729//3163 3778//3164 3754//3165 +f 3778//3164 
3729//3163 3753//3166 +f 3730//3086 3779//3167 3755//3085 +f 3779//3167 3730//3086 3754//3168 +f 3731//3169 3780//3170 3756//3169 +f 3780//3170 3731//3169 3755//3171 +f 3732//3172 3781//3173 3757//3174 +f 3781//3173 3732//3172 3756//3175 +f 3782//3176 3733//3177 3757//3178 +f 3733//3177 3782//3176 3758//3177 +f 3783//3179 3734//3180 3758//3181 +f 3734//3180 3783//3179 3759//3182 +f 3759//3183 3760//3184 3735//3184 +f 3760//3184 3759//3183 3784//3185 +f 3784//3186 3737//3187 3760//3188 +f 3737//3187 3784//3186 3761//3187 +f 3762//3189 3761//3190 3785//3191 +f 3761//3190 3762//3189 3738//3192 +f 3763//3193 3762//3194 3786//3195 +f 3762//3194 3763//3193 3739//3196 +f 3764//3197 3763//3198 3787//3199 +f 3763//3198 3764//3197 3740//3200 +f 3788//3201 3741//3202 3764//3203 +f 3741//3202 3788//3201 3765//3204 +f 3789//3205 3742//3206 3765//3206 +f 3742//3206 3789//3205 3766//3205 +f 3790//3207 3743//3127 3766//3126 +f 3743//3127 3790//3207 3767//3208 +f 3768//3209 3767//3210 3791//3211 +f 3767//3210 3768//3209 3744//3212 +f 3769//3213 3768//3213 3792//3214 +f 3768//3213 3769//3213 3745//3213 +f 3746//3215 3793//3216 3770//3216 +f 3793//3216 3746//3215 3769//3215 +f 3747//3217 3794//3217 3771//3218 +f 3794//3217 3747//3217 3770//3219 +f 3748//3220 3795//3221 3772//3222 +f 3795//3221 3748//3220 3771//3223 +f 3773//3224 3796//3225 3797//3226 +f 3796//3225 3773//3224 3772//3227 +f 3774//3228 3773//3228 3798//3228 +f 3773//3228 3774//3228 3749//3228 +f 3775//3229 3774//3230 3799//3230 +f 3774//3230 3775//3229 3750//3229 +f 3776//3231 3775//3232 3800//3231 +f 3775//3232 3776//3231 3751//3233 +f 3777//3234 3776//3235 3801//3236 +f 3776//3235 3777//3234 3752//3237 +f 3778//3238 3777//3239 3802//3240 +f 3777//3239 3778//3238 3753//3241 +f 3754//3242 3803//3243 3779//3244 +f 3803//3243 3754//3242 3778//3245 +f 3755//3246 3804//3247 3780//3248 +f 3804//3247 3755//3246 3779//3249 +f 3756//3250 3805//3251 3781//3252 +f 3805//3251 3756//3250 3780//3253 +f 3757//3254 3806//3255 
3782//3256 +f 3806//3255 3757//3254 3781//3257 +f 3807//3258 3758//3259 3782//3258 +f 3758//3259 3807//3258 3783//3260 +f 3808//3261 3759//3262 3783//3263 +f 3759//3262 3808//3261 3784//3264 +f 3785//3265 3784//3266 3808//3267 +f 3784//3266 3785//3265 3761//3268 +f 3786//3269 3785//3270 3809//3271 +f 3785//3270 3786//3269 3762//3272 +f 3787//3273 3786//3274 3810//3274 +f 3786//3274 3787//3273 3763//3275 +f 3788//3276 3787//3277 3811//3278 +f 3787//3277 3788//3276 3764//3279 +f 3812//3280 3765//3281 3788//3281 +f 3765//3281 3812//3280 3789//3280 +f 3813//3282 3766//3283 3789//3284 +f 3766//3283 3813//3282 3790//3285 +f 3791//3286 3790//3207 3814//3287 +f 3790//3207 3791//3286 3767//3208 +f 3792//3288 3791//3289 3815//3290 +f 3791//3289 3792//3288 3768//3289 +f 3793//3291 3792//3292 3816//3293 +f 3792//3292 3793//3291 3769//3291 +f 3770//3294 3817//3295 3794//3296 +f 3817//3295 3770//3294 3793//3297 +f 3771//3298 3818//3299 3795//3299 +f 3818//3299 3771//3298 3794//3298 +f 3772//3300 3819//3300 3796//3301 +f 3819//3300 3772//3300 3795//3302 +f 3820//3303 3796//3304 3821//3305 +f 3796//3304 3820//3303 3797//3306 +f 3798//3307 3797//3308 3822//3308 +f 3797//3308 3798//3307 3773//3309 +f 3799//3310 3798//3311 3823//3312 +f 3798//3311 3799//3310 3774//3313 +f 3800//3314 3799//3315 3824//3316 +f 3799//3315 3800//3314 3775//3314 +f 3801//3317 3800//3318 3825//3318 +f 3800//3318 3801//3317 3776//3317 +f 3802//3319 3801//3320 3826//3321 +f 3801//3320 3802//3319 3777//3319 +f 3803//3322 3802//3323 3827//3324 +f 3802//3323 3803//3322 3778//3323 +f 3779//3325 3828//3326 3804//3325 +f 3828//3326 3779//3325 3803//3326 +f 3780//3327 3829//3328 3805//3329 +f 3829//3328 3780//3327 3804//3328 +f 3806//3330 3805//3331 3830//3332 +f 3805//3331 3806//3330 3781//3333 +f 3831//3334 3782//3335 3806//3336 +f 3782//3335 3831//3334 3807//3337 +f 3832//3338 3783//3339 3807//3338 +f 3783//3339 3832//3338 3808//3339 +f 3809//3340 3808//3341 3832//3342 +f 3808//3341 3809//3340 3785//3343 +f 
3810//3344 3809//3345 3833//3344 +f 3809//3345 3810//3344 3786//3345 +f 3811//3346 3810//3347 3834//3348 +f 3810//3347 3811//3346 3787//3347 +f 3812//3349 3811//3350 3835//3351 +f 3811//3350 3812//3349 3788//3352 +f 3836//3353 3789//3354 3812//3355 +f 3789//3354 3836//3353 3813//3356 +f 3814//3357 3813//3358 3837//3359 +f 3813//3358 3814//3357 3790//3360 +f 3815//3361 3814//3287 3838//3362 +f 3814//3287 3815//3361 3791//3286 +f 3816//3363 3815//3364 3839//3363 +f 3815//3364 3816//3363 3792//3364 +f 3817//3365 3816//3365 3840//3365 +f 3816//3365 3817//3365 3793//3365 +f 3794//3366 3841//3367 3818//3368 +f 3841//3367 3794//3366 3817//3369 +f 3795//3370 3842//3370 3819//3370 +f 3842//3370 3795//3370 3818//3371 +f 3821//3372 3819//3373 3843//3374 +f 3819//3373 3821//3372 3796//3372 +f 3844//3305 3821//3305 3845//3305 +f 3821//3305 3844//3305 3820//3303 +f 3846//3375 3797//3375 3820//3376 +f 3797//3375 3846//3375 3822//3375 +f 3823//3377 3822//3378 3847//3377 +f 3822//3378 3823//3377 3798//3379 +f 3824//3380 3823//3381 3848//3382 +f 3823//3381 3824//3380 3799//3383 +f 3825//3384 3824//3385 3849//3386 +f 3824//3385 3825//3384 3800//3384 +f 3826//3387 3825//3388 3850//3389 +f 3825//3388 3826//3387 3801//3390 +f 3827//3324 3826//3391 3851//3392 +f 3826//3391 3827//3324 3802//3323 +f 3828//3393 3827//3394 3852//3394 +f 3827//3394 3828//3393 3803//3393 +f 3804//3395 3853//3396 3829//3397 +f 3853//3396 3804//3395 3828//3398 +f 3830//3399 3829//3400 3854//3401 +f 3829//3400 3830//3399 3805//3402 +f 3831//3403 3830//3404 3855//3405 +f 3830//3404 3831//3403 3806//3406 +f 3856//3407 3807//3408 3831//3409 +f 3807//3408 3856//3407 3832//3410 +f 3833//3411 3832//3412 3856//3413 +f 3832//3412 3833//3411 3809//3414 +f 3834//3415 3833//3416 3857//3417 +f 3833//3416 3834//3415 3810//3418 +f 3835//3419 3834//3420 3858//3421 +f 3834//3420 3835//3419 3811//3422 +f 3812//3423 3859//3424 3836//3425 +f 3859//3424 3812//3423 3835//3426 +f 3837//3427 3836//3428 3860//3429 +f 3836//3428 
3837//3427 3813//3430 +f 3838//3431 3837//3432 3861//3433 +f 3837//3432 3838//3431 3814//3432 +f 3839//3434 3838//3362 3862//3435 +f 3838//3362 3839//3434 3815//3361 +f 3840//3436 3839//3437 3863//3438 +f 3839//3437 3840//3436 3816//3439 +f 3841//3440 3840//3441 3864//3442 +f 3840//3441 3841//3440 3817//3443 +f 3818//3444 3865//3445 3842//3446 +f 3865//3445 3818//3444 3841//3447 +f 3843//3448 3842//3449 3866//3450 +f 3842//3449 3843//3448 3819//3449 +f 3845//3451 3843//3374 3867//3451 +f 3843//3374 3845//3451 3821//3372 +f 3868//3452 3845//3452 3869//3452 +f 3845//3452 3868//3452 3844//3452 +f 3870//3453 3820//3376 3844//3453 +f 3820//3376 3870//3453 3846//3375 +f 3871//3454 3822//3455 3846//3454 +f 3822//3455 3871//3454 3847//3455 +f 3848//3456 3847//3457 3872//3458 +f 3847//3457 3848//3456 3823//3459 +f 3849//3460 3848//3461 3873//3462 +f 3848//3461 3849//3460 3824//3463 +f 3850//3464 3849//3465 3874//3466 +f 3849//3465 3850//3464 3825//3467 +f 3851//3392 3850//3468 3875//3469 +f 3850//3468 3851//3392 3826//3391 +f 3852//3470 3851//3471 3876//3472 +f 3851//3471 3852//3470 3827//3473 +f 3853//3474 3852//3475 3877//3476 +f 3852//3475 3853//3474 3828//3477 +f 3854//3478 3853//3479 3878//3480 +f 3853//3479 3854//3478 3829//3481 +f 3855//3482 3854//3483 3879//3484 +f 3854//3483 3855//3482 3830//3485 +f 3856//3486 3855//3487 3880//3488 +f 3855//3487 3856//3486 3831//3489 +f 3833//3490 3880//3491 3857//3492 +f 3880//3491 3833//3490 3856//3493 +f 3834//3494 3881//3495 3858//3496 +f 3881//3495 3834//3494 3857//3497 +f 3835//3498 3882//3499 3859//3500 +f 3882//3499 3835//3498 3858//3501 +f 3836//3502 3883//3503 3860//3504 +f 3883//3503 3836//3502 3859//3505 +f 3861//3506 3860//3507 3884//3508 +f 3860//3507 3861//3506 3837//3509 +f 3862//3510 3861//3511 3885//3512 +f 3861//3511 3862//3510 3838//3513 +f 3863//3514 3862//3435 3886//3515 +f 3862//3435 3863//3514 3839//3434 +f 3864//3516 3863//3517 3887//3518 +f 3863//3517 3864//3516 3840//3519 +f 3865//3520 3864//3521 
3888//3522 +f 3864//3521 3865//3520 3841//3523 +f 3866//3524 3865//3525 3889//3526 +f 3865//3525 3866//3524 3842//3527 +f 3867//3528 3866//3450 3890//3529 +f 3866//3450 3867//3528 3843//3448 +f 3869//3530 3867//3530 3891//3530 +f 3867//3530 3869//3530 3845//3530 +f 3892//3531 3868//3532 3869//3532 +f 3868//3532 3892//3531 3893//3531 +f 3894//3533 3844//3534 3868//3535 +f 3844//3534 3894//3533 3870//3534 +f 3895//3536 3846//3537 3870//3536 +f 3846//3537 3895//3536 3871//3537 +f 3896//3538 3847//3539 3871//3538 +f 3847//3539 3896//3538 3872//3539 +f 3873//3540 3872//3541 3897//3542 +f 3872//3541 3873//3540 3848//3543 +f 3874//3544 3873//3545 3898//3546 +f 3873//3545 3874//3544 3849//3547 +f 3875//3469 3874//3548 3899//3549 +f 3874//3548 3875//3469 3850//3468 +f 3876//3550 3875//3551 3900//3552 +f 3875//3551 3876//3550 3851//3553 +f 3877//3554 3876//3555 3901//3556 +f 3876//3555 3877//3554 3852//3557 +f 3902//3558 3853//3559 3877//3560 +f 3853//3559 3902//3558 3878//3561 +f 3879//3562 3878//3563 3903//3564 +f 3878//3563 3879//3562 3854//3565 +f 3880//3566 3879//3567 3904//3568 +f 3879//3567 3880//3566 3855//3569 +f 3857//3570 3904//3571 3881//3572 +f 3904//3571 3857//3570 3880//3573 +f 3858//3574 3905//3575 3882//3576 +f 3905//3575 3858//3574 3881//3577 +f 3859//3578 3906//3579 3883//3580 +f 3906//3579 3859//3578 3882//3581 +f 3884//3582 3883//3583 3907//3584 +f 3883//3583 3884//3582 3860//3585 +f 3885//3586 3884//3587 3908//3588 +f 3884//3587 3885//3586 3861//3589 +f 3886//3590 3885//3591 3909//3592 +f 3885//3591 3886//3590 3862//3593 +f 3887//3594 3886//3515 3910//3595 +f 3886//3515 3887//3594 3863//3514 +f 3888//3596 3887//3597 3911//3598 +f 3887//3597 3888//3596 3864//3599 +f 3889//3600 3888//3601 3912//3602 +f 3888//3601 3889//3600 3865//3603 +f 3890//3604 3889//3526 3913//3605 +f 3889//3526 3890//3604 3866//3524 +f 3891//3606 3890//3607 3914//3606 +f 3890//3607 3891//3606 3867//3607 +f 3915//3608 3869//3608 3891//3608 +f 3869//3608 3915//3608 3892//3608 +f 
3916//3452 3892//3452 3917//3452 +f 3892//3452 3916//3452 3893//3452 +f 3893//3609 3894//3533 3868//3535 +f 3894//3533 3893//3609 3918//3610 +f 3919//3611 3870//3611 3894//3611 +f 3870//3611 3919//3611 3895//3611 +f 3920//3612 3871//3613 3895//3614 +f 3871//3613 3920//3612 3896//3615 +f 3897//3616 3896//3617 3921//3618 +f 3896//3617 3897//3616 3872//3619 +f 3898//3620 3897//3621 3922//3621 +f 3897//3621 3898//3620 3873//3622 +f 3899//3549 3898//3623 3923//3624 +f 3898//3623 3899//3549 3874//3548 +f 3900//3625 3899//3626 3924//3627 +f 3899//3626 3900//3625 3875//3628 +f 3901//3629 3900//3630 3925//3631 +f 3900//3630 3901//3629 3876//3632 +f 3926//3633 3877//3634 3901//3635 +f 3877//3634 3926//3633 3902//3636 +f 3927//3637 3878//3638 3902//3637 +f 3878//3638 3927//3637 3903//3639 +f 3904//3640 3903//3641 3928//3641 +f 3903//3641 3904//3640 3879//3640 +f 3881//3642 3928//3643 3905//3644 +f 3928//3643 3881//3642 3904//3645 +f 3882//3646 3929//3647 3906//3647 +f 3929//3647 3882//3646 3905//3648 +f 3883//3649 3930//3650 3907//3651 +f 3930//3650 3883//3649 3906//3652 +f 3908//3653 3907//3654 3931//3655 +f 3907//3654 3908//3653 3884//3656 +f 3909//3657 3908//3658 3932//3659 +f 3908//3658 3909//3657 3885//3660 +f 3910//3661 3909//3662 3933//3661 +f 3909//3662 3910//3661 3886//3662 +f 3911//3663 3910//3595 3934//3664 +f 3910//3595 3911//3663 3887//3594 +f 3912//3665 3911//3666 3935//3667 +f 3911//3666 3912//3665 3888//3668 +f 3913//3669 3912//3602 3936//3670 +f 3912//3602 3913//3669 3889//3600 +f 3914//3671 3913//3672 3937//3671 +f 3913//3672 3914//3671 3890//3672 +f 3938//3673 3891//3673 3914//3673 +f 3891//3673 3938//3673 3915//3673 +f 3917//3530 3915//3530 3939//3530 +f 3915//3530 3917//3530 3892//3530 +f 3940//3674 3917//3674 3941//3674 +f 3917//3674 3940//3674 3916//3674 +f 3942//3675 3893//3609 3916//3676 +f 3893//3609 3942//3675 3918//3610 +f 3918//3677 3919//3677 3894//3678 +f 3919//3677 3918//3677 3943//3679 +f 3944//3680 3895//3681 3919//3680 +f 3895//3681 
3944//3680 3920//3681 +f 3896//3682 3945//3683 3921//3684 +f 3945//3683 3896//3682 3920//3685 +f 3922//3686 3921//3687 3946//3688 +f 3921//3687 3922//3686 3897//3689 +f 3923//3624 3922//3690 3947//3690 +f 3922//3690 3923//3624 3898//3623 +f 3924//3691 3923//3692 3948//3693 +f 3923//3692 3924//3691 3899//3694 +f 3925//3695 3924//3696 3949//3697 +f 3924//3696 3925//3695 3900//3698 +f 3950//3699 3901//3700 3925//3701 +f 3901//3700 3950//3699 3926//3702 +f 3951//3703 3902//3704 3926//3703 +f 3902//3704 3951//3703 3927//3705 +f 3928//3706 3927//3706 3952//3706 +f 3927//3706 3928//3706 3903//3706 +f 3905//3707 3952//3708 3929//3709 +f 3952//3708 3905//3707 3928//3710 +f 3906//3711 3953//3712 3930//3713 +f 3953//3712 3906//3711 3929//3714 +f 3907//3715 3954//3716 3931//3717 +f 3954//3716 3907//3715 3930//3718 +f 3932//3719 3931//3720 3955//3721 +f 3931//3720 3932//3719 3908//3722 +f 3933//3723 3932//3724 3956//3725 +f 3932//3724 3933//3723 3909//3724 +f 3934//3726 3933//3727 3957//3726 +f 3933//3727 3934//3726 3910//3727 +f 3935//3728 3934//3729 3958//3730 +f 3934//3729 3935//3728 3911//3731 +f 3936//3732 3935//3667 3959//3733 +f 3935//3667 3936//3732 3912//3665 +f 3937//3734 3936//3734 3960//3734 +f 3936//3734 3937//3734 3913//3734 +f 3938//3735 3937//3736 3961//3735 +f 3937//3736 3938//3735 3914//3736 +f 3939//3737 3938//3737 3962//3737 +f 3938//3737 3939//3737 3915//3737 +f 3941//3738 3939//3739 3963//3738 +f 3939//3739 3941//3738 3917//3739 +f 3964//3452 3940//3452 3941//3452 +f 3940//3452 3964//3452 3965//3452 +f 3966//3740 3916//3676 3940//3741 +f 3916//3676 3966//3740 3942//3675 +f 3967//3742 3918//3742 3942//3742 +f 3918//3742 3967//3742 3943//3742 +f 3943//3743 3944//3744 3919//3744 +f 3944//3744 3943//3743 3968//3743 +f 3969//3745 3920//3746 3944//3747 +f 3920//3746 3969//3745 3945//3748 +f 3921//3687 3970//3749 3946//3688 +f 3970//3749 3921//3687 3945//3750 +f 3947//3751 3946//3752 3971//3753 +f 3946//3752 3947//3751 3922//3754 +f 3948//3755 3947//3755 
3972//3756 +f 3947//3755 3948//3755 3923//3755 +f 3949//3757 3948//3758 3973//3759 +f 3948//3758 3949//3757 3924//3760 +f 3974//3761 3925//3762 3949//3761 +f 3925//3762 3974//3761 3950//3762 +f 3975//3763 3926//3764 3950//3764 +f 3926//3764 3975//3763 3951//3765 +f 3976//3766 3927//3767 3951//3768 +f 3927//3767 3976//3766 3952//3769 +f 3929//3770 3976//3771 3953//3772 +f 3976//3771 3929//3770 3952//3773 +f 3930//3774 3977//3775 3954//3775 +f 3977//3775 3930//3774 3953//3774 +f 3955//3776 3954//3777 3978//3778 +f 3954//3777 3955//3776 3931//3779 +f 3956//3780 3955//3781 3979//3782 +f 3955//3781 3956//3780 3932//3783 +f 3957//3784 3956//3785 3980//3784 +f 3956//3785 3957//3784 3933//3785 +f 3958//3786 3957//3787 3981//3788 +f 3957//3787 3958//3786 3934//3789 +f 3959//3790 3958//3730 3982//3791 +f 3958//3730 3959//3790 3935//3728 +f 3960//3792 3959//3792 3983//3792 +f 3959//3792 3960//3792 3936//3792 +f 3961//3793 3960//3793 3984//3793 +f 3960//3793 3961//3793 3937//3793 +f 3962//3672 3961//3672 3985//3672 +f 3961//3672 3962//3672 3938//3672 +f 3963//3794 3962//3794 3986//3794 +f 3962//3794 3963//3794 3939//3794 +f 3987//3795 3941//3796 3963//3797 +f 3941//3796 3987//3795 3964//3530 +f 3988//3798 3965//3798 3964//3798 +f 3965//3798 3988//3798 3989//3798 +f 3965//3534 3966//3740 3940//3741 +f 3966//3740 3965//3534 3990//3534 +f 3991//3799 3942//3799 3966//3800 +f 3942//3799 3991//3799 3967//3801 +f 3992//3680 3943//3680 3967//3680 +f 3943//3680 3992//3680 3968//3680 +f 3968//3802 3969//3803 3944//3804 +f 3969//3803 3968//3802 3993//3805 +f 3994//3806 3945//3807 3969//3808 +f 3945//3807 3994//3806 3970//3809 +f 3946//3752 3995//3810 3971//3753 +f 3995//3810 3946//3752 3970//3811 +f 3972//3812 3971//3813 3996//3814 +f 3971//3813 3972//3812 3947//3815 +f 3973//3816 3972//3817 3997//3818 +f 3972//3817 3973//3816 3948//3819 +f 3998//3820 3949//3821 3973//3822 +f 3949//3821 3998//3820 3974//3823 +f 3999//3824 3950//3825 3974//3826 +f 3950//3825 3999//3824 3975//3825 +f 
4000//3827 3951//3828 3975//3829 +f 3951//3828 4000//3827 3976//3830 +f 3977//3831 3976//3832 4000//3833 +f 3976//3832 3977//3831 3953//3834 +f 3978//3835 3977//3836 4001//3835 +f 3977//3836 3978//3835 3954//3836 +f 3979//3837 3978//3838 4002//3837 +f 3978//3838 3979//3837 3955//3839 +f 3980//3840 3979//3841 4003//3842 +f 3979//3841 3980//3840 3956//3841 +f 3981//3843 3980//3844 4004//3845 +f 3980//3844 3981//3843 3957//3846 +f 3982//3847 3981//3788 4005//3848 +f 3981//3788 3982//3847 3958//3786 +f 3959//3849 4006//3849 3983//3849 +f 4006//3849 3959//3849 3982//3849 +f 3984//3850 3983//3850 4007//3850 +f 3983//3850 3984//3850 3960//3850 +f 3961//3734 4008//3734 3985//3734 +f 4008//3734 3961//3734 3984//3734 +f 4009//3851 3962//3852 3985//3853 +f 3962//3852 4009//3851 3986//3854 +f 4010//3607 3963//3606 3986//3606 +f 3963//3606 4010//3607 3987//3607 +f 4011//3855 3964//3856 3987//3856 +f 3964//3856 4011//3855 3988//3855 +f 4012//3857 3988//3858 4013//3859 +f 3988//3858 4012//3857 3989//3860 +f 3989//3861 3990//3862 3965//3862 +f 3990//3862 3989//3861 4014//3861 +f 3990//3611 3991//3611 3966//3611 +f 3991//3611 3990//3611 4015//3611 +f 4016//3863 3967//3864 3991//3865 +f 3967//3864 4016//3863 3992//3866 +f 3993//3748 3992//3748 4017//3748 +f 3992//3748 3993//3748 3968//3748 +f 3994//3867 3993//3868 4018//3869 +f 3993//3868 3994//3867 3969//3870 +f 4019//3871 3970//3872 3994//3873 +f 3970//3872 4019//3871 3995//3874 +f 3971//3813 4020//3875 3996//3814 +f 4020//3875 3971//3813 3995//3876 +f 3997//3877 3996//3878 4021//3879 +f 3996//3878 3997//3877 3972//3880 +f 4022//3881 3973//3882 3997//3883 +f 3973//3882 4022//3881 3998//3884 +f 4023//3885 3974//3886 3998//3887 +f 3974//3886 4023//3885 3999//3888 +f 4024//3889 3975//3890 3999//3891 +f 3975//3890 4024//3889 4000//3892 +f 4001//3893 4000//3894 4024//3895 +f 4000//3894 4001//3893 3977//3896 +f 4002//3897 4001//3898 4025//3897 +f 4001//3898 4002//3897 3978//3898 +f 4003//3899 4002//3900 4026//3901 +f 4002//3900 
4003//3899 3979//3902 +f 3980//3903 4027//3904 4004//3904 +f 4027//3904 3980//3903 4003//3903 +f 4005//3905 4004//3906 4028//3907 +f 4004//3906 4005//3905 3981//3908 +f 3982//3909 4029//3909 4006//3909 +f 4029//3909 3982//3909 4005//3909 +f 4007//3910 4006//3910 4030//3910 +f 4006//3910 4007//3910 3983//3910 +f 3984//3792 4031//3792 4008//3792 +f 4031//3792 3984//3792 4007//3792 +f 4032//3911 3985//3912 4008//3913 +f 3985//3912 4032//3911 4009//3914 +f 4033//3915 3986//3916 4009//3917 +f 3986//3916 4033//3915 4010//3672 +f 4034//3918 3987//3919 4010//3919 +f 3987//3919 4034//3918 4011//3918 +f 4013//3920 4011//3921 4035//3922 +f 4011//3921 4013//3920 3988//3923 +f 4036//3924 4013//3924 4037//3924 +f 4013//3924 4036//3924 4012//3924 +f 4038//3925 3989//3926 4012//3927 +f 3989//3926 4038//3925 4014//3928 +f 4014//3929 4015//3930 3990//3930 +f 4015//3930 4014//3929 4039//3929 +f 4015//3931 4016//3932 3991//3933 +f 4016//3932 4015//3931 4040//3681 +f 4041//3934 3992//3934 4016//3934 +f 3992//3934 4041//3934 4017//3934 +f 4018//3935 4017//3935 4042//3935 +f 4017//3935 4018//3935 3993//3935 +f 4019//3936 4018//3937 4043//3938 +f 4018//3937 4019//3936 3994//3939 +f 4020//3940 4019//3941 4044//3942 +f 4019//3941 4020//3940 3995//3943 +f 3996//3878 4045//3944 4021//3879 +f 4045//3944 3996//3878 4020//3944 +f 4022//3945 4021//3946 4046//3947 +f 4021//3946 4022//3945 3997//3948 +f 4047//3949 3998//3949 4022//3950 +f 3998//3949 4047//3949 4023//3949 +f 4048//3951 3999//3952 4023//3953 +f 3999//3952 4048//3951 4024//3954 +f 4025//3955 4024//3956 4048//3955 +f 4024//3956 4025//3955 4001//3957 +f 4026//3958 4025//3959 4049//3960 +f 4025//3959 4026//3958 4002//3961 +f 4003//3962 4050//3962 4027//3962 +f 4050//3962 4003//3962 4026//3963 +f 4004//3964 4051//3965 4028//3966 +f 4051//3965 4004//3964 4027//3967 +f 4005//3968 4052//3968 4029//3968 +f 4052//3968 4005//3968 4028//3968 +f 4030//3969 4029//3969 4053//3969 +f 4029//3969 4030//3969 4006//3969 +f 4031//3849 4030//3849 
4054//3849 +f 4030//3849 4031//3849 4007//3849 +f 4055//3970 4008//3970 4031//3970 +f 4008//3970 4055//3970 4032//3970 +f 4033//3971 4032//3972 4056//3734 +f 4032//3972 4033//3971 4009//3973 +f 4057//3974 4010//3975 4033//3975 +f 4010//3975 4057//3974 4034//3976 +f 4035//3977 4034//3978 4058//3979 +f 4034//3978 4035//3977 4011//3980 +f 4037//3981 4035//3982 4059//3983 +f 4035//3982 4037//3981 4013//3981 +f 4060//3984 4037//3985 4061//3986 +f 4037//3985 4060//3984 4036//3987 +f 4062//3988 4012//3989 4036//3988 +f 4012//3989 4062//3988 4038//3989 +f 4063//3990 4014//3991 4038//3992 +f 4014//3991 4063//3990 4039//3993 +f 4039//3994 4040//3995 4015//3996 +f 4040//3995 4039//3994 4064//3994 +f 4065//3748 4016//3748 4040//3748 +f 4016//3748 4065//3748 4041//3748 +f 4017//3997 4066//3997 4042//3997 +f 4066//3997 4017//3997 4041//3997 +f 4043//3874 4042//3874 4067//3874 +f 4042//3874 4043//3874 4018//3874 +f 4044//3998 4043//3999 4068//4000 +f 4043//3999 4044//3998 4019//4001 +f 4045//4002 4044//4002 4069//4002 +f 4044//4002 4045//4002 4020//4002 +f 4046//3947 4045//3947 4070//3947 +f 4045//3947 4046//3947 4021//3946 +f 4047//4003 4046//4004 4071//4005 +f 4046//4004 4047//4003 4022//4006 +f 4072//4007 4023//4008 4047//4009 +f 4023//4008 4072//4007 4048//4010 +f 4049//4011 4048//4012 4072//4013 +f 4048//4012 4049//4011 4025//4014 +f 4026//4015 4073//4016 4050//4017 +f 4073//4016 4026//4015 4049//4015 +f 4027//4018 4074//4019 4051//4019 +f 4074//4019 4027//4018 4050//4018 +f 4028//4020 4075//4021 4052//4021 +f 4075//4021 4028//4020 4051//4020 +f 4052//4022 4053//4023 4029//4022 +f 4053//4023 4052//4022 4076//4023 +f 4054//3909 4053//3909 4077//3909 +f 4053//3909 4054//3909 4030//3909 +f 4078//4024 4031//4025 4054//4025 +f 4031//4025 4078//4024 4055//4024 +f 4056//3792 4055//3792 4079//3792 +f 4055//3792 4056//3792 4032//3792 +f 4057//4026 4056//4027 4080//4028 +f 4056//4027 4057//4026 4033//4029 +f 4058//4030 4057//4031 4081//4032 +f 4057//4031 4058//4030 4034//4033 +f 
4059//4034 4058//4035 4082//4036 +f 4058//4035 4059//4034 4035//4037 +f 4061//4038 4059//4039 4083//4040 +f 4059//4039 4061//4038 4037//4041 +f 4084//4042 4061//4043 4085//4044 +f 4061//4043 4084//4042 4060//4045 +f 4086//4046 4036//4047 4060//4048 +f 4036//4047 4086//4046 4062//4049 +f 4087//4050 4038//4051 4062//4050 +f 4038//4051 4087//4050 4063//4051 +f 4039//4052 4088//4053 4064//4054 +f 4088//4053 4039//4052 4063//4055 +f 4040//4056 4089//4057 4065//4058 +f 4089//4057 4040//4056 4064//4059 +f 4090//4060 4041//4061 4065//4060 +f 4041//4061 4090//4060 4066//4061 +f 4042//4062 4091//4062 4067//4062 +f 4091//4062 4042//4062 4066//4062 +f 4092//4063 4043//4063 4067//4063 +f 4043//4063 4092//4063 4068//4063 +f 4069//4064 4068//4065 4093//4066 +f 4068//4065 4069//4064 4044//4067 +f 4070//4068 4069//4069 4094//4070 +f 4069//4069 4070//4068 4045//4071 +f 4071//4005 4070//4072 4095//4072 +f 4070//4072 4071//4005 4046//4004 +f 4072//4073 4071//4074 4096//4075 +f 4071//4074 4072//4073 4047//4076 +f 4049//4077 4096//4078 4073//4079 +f 4096//4078 4049//4077 4072//4080 +f 4050//4017 4097//4017 4074//4017 +f 4097//4017 4050//4017 4073//4016 +f 4051//4081 4098//4082 4075//4082 +f 4098//4082 4051//4081 4074//4081 +f 4075//4083 4076//4084 4052//4083 +f 4076//4084 4075//4083 4099//4084 +f 4077//3968 4076//3968 4100//3968 +f 4076//3968 4077//3968 4053//3968 +f 4101//4085 4054//4086 4077//4086 +f 4054//4086 4101//4085 4078//4085 +f 4079//4087 4078//4087 4102//4087 +f 4078//4087 4079//4087 4055//4087 +f 4080//4088 4079//4089 4103//4090 +f 4079//4089 4080//4088 4056//4091 +f 4104//4092 4057//4093 4080//4094 +f 4057//4093 4104//4092 4081//4095 +f 4082//4096 4081//4097 4105//4098 +f 4081//4097 4082//4096 4058//4099 +f 4083//4100 4082//4101 4106//4102 +f 4082//4101 4083//4100 4059//4103 +f 4085//4104 4083//4105 4107//4106 +f 4083//4105 4085//4104 4061//4107 +f 4108//4108 4084//4109 4085//4110 +f 4084//4109 4108//4108 4109//4108 +f 4084//4111 4086//4112 4060//4113 +f 4086//4112 
4084//4111 4110//4114 +f 4111//4115 4062//4116 4086//4117 +f 4062//4116 4111//4115 4087//4118 +f 4063//4119 4112//4120 4088//4121 +f 4112//4120 4063//4119 4087//4122 +f 4064//4123 4113//4124 4089//4125 +f 4113//4124 4064//4123 4088//4126 +f 4065//4127 4114//4128 4090//4129 +f 4114//4128 4065//4127 4089//4130 +f 4091//4131 4090//4131 4115//4131 +f 4090//4131 4091//4131 4066//4131 +f 4067//4132 4116//4132 4092//4132 +f 4116//4132 4067//4132 4091//4132 +f 4117//4002 4068//4002 4092//4002 +f 4068//4002 4117//4002 4093//4002 +f 4094//4133 4093//4134 4118//4135 +f 4093//4134 4094//4133 4069//4136 +f 4095//4137 4094//4138 4119//4138 +f 4094//4138 4095//4137 4070//4137 +f 4096//4075 4095//4139 4120//4139 +f 4095//4139 4096//4075 4071//4074 +f 4073//4079 4120//4079 4097//4079 +f 4120//4079 4073//4079 4096//4078 +f 4074//4140 4121//4141 4098//4141 +f 4121//4141 4074//4140 4097//4140 +f 4098//4142 4099//4142 4075//4142 +f 4099//4142 4098//4142 4122//4142 +f 4099//4020 4100//4020 4076//4020 +f 4100//4020 4099//4020 4123//4020 +f 4077//4143 4124//4144 4101//4144 +f 4124//4144 4077//4143 4100//4143 +f 4078//3909 4125//3909 4102//3909 +f 4125//3909 4078//3909 4101//3909 +f 4103//4145 4102//4146 4126//4147 +f 4102//4146 4103//4145 4079//4148 +f 4127//4149 4080//4150 4103//4151 +f 4080//4150 4127//4149 4104//4152 +f 4128//4153 4081//4154 4104//4155 +f 4081//4154 4128//4153 4105//4156 +f 4106//4157 4105//4158 4129//4159 +f 4105//4158 4106//4157 4082//4158 +f 4107//4160 4106//4161 4130//4162 +f 4106//4161 4107//4160 4083//4163 +f 4107//4164 4108//4165 4085//4166 +f 4108//4165 4107//4164 4131//4167 +f 4132//4168 4109//4169 4108//4170 +f 4109//4169 4132//4168 4133//4171 +f 4109//4172 4110//4173 4084//4173 +f 4110//4173 4109//4172 4134//4172 +f 4110//4174 4111//4175 4086//4174 +f 4111//4175 4110//4174 4135//4176 +f 4087//4177 4136//4178 4112//4179 +f 4136//4178 4087//4177 4111//4180 +f 4088//4181 4137//4182 4113//4183 +f 4137//4182 4088//4181 4112//4184 +f 4089//4185 4138//4186 
4114//4187 +f 4138//4186 4089//4185 4113//4188 +f 4090//4189 4139//4190 4115//4189 +f 4139//4190 4090//4189 4114//4190 +f 4116//4191 4115//4192 4140//4192 +f 4115//4192 4116//4191 4091//4191 +f 4092//4193 4141//4194 4117//4195 +f 4141//4194 4092//4193 4116//4194 +f 4093//4196 4142//4196 4118//4196 +f 4142//4196 4093//4196 4117//4196 +f 4094//4197 4143//4198 4119//4197 +f 4143//4198 4094//4197 4118//4198 +f 4120//4199 4119//4200 4144//4199 +f 4119//4200 4120//4199 4095//4201 +f 4097//4202 4144//4202 4121//4202 +f 4144//4202 4097//4202 4120//4202 +f 4121//4141 4122//4203 4098//4141 +f 4122//4203 4121//4141 4145//4204 +f 4122//4081 4123//4081 4099//4081 +f 4123//4081 4122//4081 4146//4081 +f 4100//4205 4147//4206 4124//4206 +f 4147//4206 4100//4205 4123//4205 +f 4101//3968 4148//3968 4125//3968 +f 4148//3968 4101//3968 4124//3968 +f 4126//4207 4125//4208 4149//4209 +f 4125//4208 4126//4207 4102//4210 +f 4150//4211 4103//4212 4126//4212 +f 4103//4212 4150//4211 4127//4211 +f 4151//4213 4104//4214 4127//4215 +f 4104//4214 4151//4213 4128//4216 +f 4152//4217 4105//4218 4128//4219 +f 4105//4218 4152//4217 4129//4220 +f 4130//4221 4129//4222 4153//4223 +f 4129//4222 4130//4221 4106//4224 +f 4130//4225 4131//4226 4107//4227 +f 4131//4226 4130//4225 4154//4228 +f 4155//4229 4108//4230 4131//4231 +f 4108//4230 4155//4229 4132//4232 +f 4156//4233 4133//4234 4132//4235 +f 4133//4234 4156//4233 4157//4233 +f 4133//4236 4134//4237 4109//4237 +f 4134//4237 4133//4236 4158//4238 +f 4134//4239 4135//4240 4110//4241 +f 4135//4240 4134//4239 4159//4242 +f 4111//4243 4160//4244 4136//4243 +f 4160//4244 4111//4243 4135//4244 +f 4112//4245 4161//4246 4137//4245 +f 4161//4246 4112//4245 4136//4246 +f 4113//4247 4162//4248 4138//4249 +f 4162//4248 4113//4247 4137//4250 +f 4114//4251 4163//4252 4139//4251 +f 4163//4252 4114//4251 4138//4253 +f 4115//4254 4164//4255 4140//4256 +f 4164//4255 4115//4254 4139//4257 +f 4141//4258 4140//4259 4165//4260 +f 4140//4259 4141//4258 4116//4261 +f 
4117//4262 4166//4263 4142//4262 +f 4166//4263 4117//4262 4141//4263 +f 4118//4264 4167//4264 4143//4264 +f 4167//4264 4118//4264 4142//4264 +f 4119//4265 4168//4266 4144//4265 +f 4168//4266 4119//4265 4143//4266 +f 4144//4267 4145//4268 4121//4267 +f 4145//4268 4144//4267 4168//4269 +f 4145//4204 4146//4270 4122//4203 +f 4146//4270 4145//4204 4169//4270 +f 4123//4271 4170//4272 4147//4272 +f 4170//4272 4123//4271 4146//4271 +f 4147//4021 4148//4020 4124//4021 +f 4148//4020 4147//4021 4171//4020 +f 4149//4273 4148//4274 4172//4275 +f 4148//4274 4149//4273 4125//4276 +f 4173//4277 4126//4278 4149//4278 +f 4126//4278 4173//4277 4150//4277 +f 4174//4279 4127//4280 4150//4281 +f 4127//4280 4174//4279 4151//4282 +f 4175//4283 4128//4284 4151//4284 +f 4128//4284 4175//4283 4152//4285 +f 4176//4286 4129//4287 4152//4288 +f 4129//4287 4176//4286 4153//4289 +f 4153//4290 4154//4291 4130//4292 +f 4154//4291 4153//4290 4177//4291 +f 4154//4293 4155//4294 4131//4295 +f 4155//4294 4154//4293 4178//4296 +f 4179//4297 4132//4298 4155//4298 +f 4132//4298 4179//4297 4156//4297 +f 4180//4299 4157//4300 4156//4300 +f 4157//4300 4180//4299 4181//4301 +f 4157//4302 4158//4303 4133//4304 +f 4158//4303 4157//4302 4182//4305 +f 4158//4306 4159//4307 4134//4308 +f 4159//4307 4158//4306 4183//4309 +f 4135//4310 4184//4311 4160//4312 +f 4184//4311 4135//4310 4159//4313 +f 4136//4314 4185//4315 4161//4316 +f 4185//4315 4136//4314 4160//4317 +f 4137//4318 4186//4319 4162//4320 +f 4186//4319 4137//4318 4161//4321 +f 4138//4253 4187//4322 4163//4252 +f 4187//4322 4138//4253 4162//4323 +f 4139//4324 4188//4325 4164//4326 +f 4188//4325 4139//4324 4163//4327 +f 4140//4328 4189//4329 4165//4330 +f 4189//4329 4140//4328 4164//4331 +f 4141//4332 4190//4068 4166//4333 +f 4190//4068 4141//4332 4165//4196 +f 4167//4334 4166//4335 4191//4335 +f 4166//4335 4167//4334 4142//4334 +f 4143//4201 4192//4201 4168//4201 +f 4192//4201 4143//4201 4167//4201 +f 4168//4202 4169//4202 4145//4202 +f 4169//4202 
4168//4202 4192//4202 +f 4146//4270 4193//4270 4170//4270 +f 4193//4270 4146//4270 4169//4270 +f 4170//4082 4171//4082 4147//4082 +f 4171//4082 4170//4082 4194//4082 +f 4171//4336 4172//4337 4148//4336 +f 4172//4337 4171//4336 4195//4337 +f 4196//4338 4149//4339 4172//4340 +f 4149//4339 4196//4338 4173//4341 +f 4197//4342 4150//4343 4173//4344 +f 4150//4343 4197//4342 4174//4345 +f 4198//4346 4151//4347 4174//4348 +f 4151//4347 4198//4346 4175//4349 +f 4199//4350 4152//4351 4175//4352 +f 4152//4351 4199//4350 4176//4353 +f 4200//4354 4153//4355 4176//4356 +f 4153//4355 4200//4354 4177//4357 +f 4177//4358 4178//4359 4154//4360 +f 4178//4359 4177//4358 4201//4361 +f 4202//4362 4155//4363 4178//4364 +f 4155//4363 4202//4362 4179//4365 +f 4203//4366 4156//4367 4179//4368 +f 4156//4367 4203//4366 4180//4366 +f 4204//4369 4181//4370 4180//4370 +f 4181//4370 4204//4369 4205//4369 +f 4181//4371 4182//4372 4157//4373 +f 4182//4372 4181//4371 4206//4374 +f 4182//4375 4183//4376 4158//4377 +f 4183//4376 4182//4375 4207//4378 +f 4183//4379 4184//4380 4159//4381 +f 4184//4380 4183//4379 4208//4382 +f 4160//4383 4209//4384 4185//4385 +f 4209//4384 4160//4383 4184//4386 +f 4161//4387 4210//4388 4186//4387 +f 4210//4388 4161//4387 4185//4388 +f 4162//4389 4211//4390 4187//4391 +f 4211//4390 4162//4389 4186//4392 +f 4163//4393 4212//4394 4188//4395 +f 4212//4394 4163//4393 4187//4396 +f 4164//4397 4213//4397 4189//4398 +f 4213//4397 4164//4397 4188//4399 +f 4165//4400 4214//4401 4190//4402 +f 4214//4401 4165//4400 4189//4401 +f 4166//4138 4215//4137 4191//4138 +f 4215//4137 4166//4138 4190//4137 +f 4192//4403 4191//4404 4216//4405 +f 4191//4404 4192//4403 4167//4406 +f 4169//4407 4216//4408 4193//4409 +f 4216//4408 4169//4407 4192//4408 +f 4193//4270 4194//4140 4170//4270 +f 4194//4140 4193//4270 4217//4140 +f 4194//4410 4195//4411 4171//4410 +f 4195//4411 4194//4410 4218//4411 +f 4219//4412 4172//4413 4195//4413 +f 4172//4413 4219//4412 4196//4414 +f 4220//4415 4173//4416 
4196//4417 +f 4173//4416 4220//4415 4197//4418 +f 4221//4419 4174//4420 4197//4421 +f 4174//4420 4221//4419 4198//4422 +f 4222//4423 4175//4424 4198//4425 +f 4175//4424 4222//4423 4199//4426 +f 4223//4427 4176//4428 4199//4429 +f 4176//4428 4223//4427 4200//4430 +f 4224//4431 4177//4431 4200//4431 +f 4177//4431 4224//4431 4201//4431 +f 4225//4432 4178//4433 4201//4434 +f 4178//4433 4225//4432 4202//4435 +f 4226//4436 4179//4437 4202//4438 +f 4179//4437 4226//4436 4203//4439 +f 4227//4440 4180//4441 4203//4442 +f 4180//4441 4227//4440 4204//4443 +f 4228//4444 4204//4445 4229//4446 +f 4204//4445 4228//4444 4205//4447 +f 4181//4448 4230//4449 4206//4450 +f 4230//4449 4181//4448 4205//4451 +f 4182//4452 4231//4453 4207//4454 +f 4231//4453 4182//4452 4206//4455 +f 4207//4456 4208//4457 4183//4458 +f 4208//4457 4207//4456 4232//4459 +f 4184//4460 4233//4461 4209//4462 +f 4233//4461 4184//4460 4208//4463 +f 4185//4464 4234//4465 4210//4464 +f 4234//4465 4185//4464 4209//4465 +f 4186//4466 4235//4467 4211//4468 +f 4235//4467 4186//4466 4210//4466 +f 4187//4469 4236//4470 4212//4471 +f 4236//4470 4187//4469 4211//4472 +f 4188//4473 4237//4473 4213//4473 +f 4237//4473 4188//4473 4212//4474 +f 4214//4475 4213//4476 4238//4477 +f 4213//4476 4214//4475 4189//4478 +f 4190//4479 4239//4480 4215//4481 +f 4239//4480 4190//4479 4214//4480 +f 4191//4201 4240//4201 4216//4201 +f 4240//4201 4191//4201 4215//4201 +f 4216//4202 4217//4202 4193//4202 +f 4217//4202 4216//4202 4240//4202 +f 4217//4482 4218//4483 4194//4484 +f 4218//4483 4217//4482 4241//4485 +f 4195//4486 4242//4487 4219//4488 +f 4242//4487 4195//4486 4218//4486 +f 4243//4489 4196//4490 4219//4491 +f 4196//4490 4243//4489 4220//4492 +f 4244//4493 4197//4494 4220//4495 +f 4197//4494 4244//4493 4221//4493 +f 4245//4496 4198//4497 4221//4497 +f 4198//4497 4245//4496 4222//4496 +f 4246//4498 4199//4499 4222//4499 +f 4199//4499 4246//4498 4223//4498 +f 4247//4500 4200//4501 4223//4502 +f 4200//4501 4247//4500 4224//4503 +f 
4225//4504 4224//4505 4248//4506 +f 4224//4505 4225//4504 4201//4507 +f 4249//4508 4202//4509 4225//4510 +f 4202//4509 4249//4508 4226//4511 +f 4250//4512 4203//4513 4226//4514 +f 4203//4513 4250//4512 4227//4515 +f 4229//4516 4227//4517 4251//4518 +f 4227//4517 4229//4516 4204//4517 +f 4252//4519 4229//4520 4253//4521 +f 4229//4520 4252//4519 4228//4522 +f 4254//4523 4205//4524 4228//4523 +f 4205//4524 4254//4523 4230//4525 +f 4206//4526 4255//4527 4231//4528 +f 4255//4527 4206//4526 4230//4529 +f 4207//4530 4256//4531 4232//4532 +f 4256//4531 4207//4530 4231//4533 +f 4233//4534 4232//4535 4257//4535 +f 4232//4535 4233//4534 4208//4534 +f 4209//4536 4258//4537 4234//4538 +f 4258//4537 4209//4536 4233//4539 +f 4210//4540 4259//4541 4235//4540 +f 4259//4541 4210//4540 4234//4542 +f 4211//4543 4260//4544 4236//4543 +f 4260//4544 4211//4543 4235//4544 +f 4212//4545 4261//4546 4237//4547 +f 4261//4546 4212//4545 4236//4548 +f 4238//4549 4237//4550 4262//4550 +f 4237//4550 4238//4549 4213//4549 +f 4239//4551 4238//4552 4263//4552 +f 4238//4552 4239//4551 4214//4553 +f 4215//4554 4264//4555 4240//4554 +f 4264//4555 4215//4554 4239//4555 +f 4240//4556 4241//4557 4217//4556 +f 4241//4557 4240//4556 4264//4557 +f 4218//4558 4265//4559 4242//4560 +f 4265//4559 4218//4558 4241//4561 +f 4219//4562 4266//4563 4243//4563 +f 4266//4563 4219//4562 4242//4562 +f 4267//4564 4220//4565 4243//4565 +f 4220//4565 4267//4564 4244//4564 +f 4268//4566 4221//4567 4244//4568 +f 4221//4567 4268//4566 4245//4569 +f 4269//4570 4222//4571 4245//4572 +f 4222//4571 4269//4570 4246//4573 +f 4270//4574 4223//4575 4246//4576 +f 4223//4575 4270//4574 4247//4577 +f 4248//4578 4247//4579 4271//4580 +f 4247//4579 4248//4578 4224//4581 +f 4249//4582 4248//4583 4272//4584 +f 4248//4583 4249//4582 4225//4585 +f 4273//4586 4226//4587 4249//4588 +f 4226//4587 4273//4586 4250//4589 +f 4251//4590 4250//4591 4274//4592 +f 4250//4591 4251//4590 4227//4591 +f 4253//4593 4251//4594 4275//4595 +f 4251//4594 
4253//4593 4229//4596 +f 4276//4597 4253//4598 4277//4597 +f 4253//4598 4276//4597 4252//4598 +f 4278//4599 4228//4600 4252//4601 +f 4228//4600 4278//4599 4254//4602 +f 4279//4603 4230//4604 4254//4605 +f 4230//4604 4279//4603 4255//4606 +f 4231//4607 4280//4608 4256//4609 +f 4280//4608 4231//4607 4255//4610 +f 4257//4611 4256//4612 4281//4613 +f 4256//4612 4257//4611 4232//4611 +f 4258//4614 4257//4615 4282//4616 +f 4257//4615 4258//4614 4233//4617 +f 4234//4618 4283//4618 4259//4619 +f 4283//4618 4234//4618 4258//4618 +f 4235//4620 4284//4621 4260//4622 +f 4284//4621 4235//4620 4259//4623 +f 4236//4624 4285//4625 4261//4626 +f 4285//4625 4236//4624 4260//4627 +f 4262//4628 4261//4628 4286//4629 +f 4261//4628 4262//4628 4237//4628 +f 4263//4630 4262//4631 4287//4632 +f 4262//4631 4263//4630 4238//4633 +f 4264//4634 4263//4634 4288//4635 +f 4263//4634 4264//4634 4239//4636 +f 4241//4637 4288//4638 4265//4639 +f 4288//4638 4241//4637 4264//4640 +f 4242//4641 4289//4642 4266//4643 +f 4289//4642 4242//4641 4265//4644 +f 4243//4645 4290//4646 4267//4647 +f 4290//4646 4243//4645 4266//4645 +f 4291//4648 4244//4649 4267//4649 +f 4244//4649 4291//4648 4268//4650 +f 4292//4651 4245//4652 4268//4653 +f 4245//4652 4292//4651 4269//4654 +f 4293//4655 4246//4656 4269//4657 +f 4246//4656 4293//4655 4270//4658 +f 4271//4659 4270//4660 4294//4661 +f 4270//4660 4271//4659 4247//4662 +f 4272//4663 4271//4664 4295//4665 +f 4271//4664 4272//4663 4248//4666 +f 4273//4667 4272//4668 4296//4669 +f 4272//4668 4273//4667 4249//4670 +f 4274//4671 4273//4672 4297//4673 +f 4273//4672 4274//4671 4250//4674 +f 4275//4675 4274//4676 4298//4677 +f 4274//4676 4275//4675 4251//4678 +f 4277//4679 4275//4680 4299//4681 +f 4275//4680 4277//4679 4253//4682 +f 4300//4683 4276//4683 4277//4684 +f 4276//4683 4300//4683 4301//4683 +f 4302//4685 4252//4686 4276//4687 +f 4252//4686 4302//4685 4278//4688 +f 4303//4689 4254//4690 4278//4689 +f 4254//4690 4303//4689 4279//4691 +f 4304//4692 4255//4693 
4279//4694 +f 4255//4693 4304//4692 4280//4695 +f 4281//4696 4280//4697 4305//4697 +f 4280//4697 4281//4696 4256//4696 +f 4282//4698 4281//4699 4306//4700 +f 4281//4699 4282//4698 4257//4701 +f 4283//4702 4282//4703 4307//4704 +f 4282//4703 4283//4702 4258//4705 +f 4259//4706 4308//4707 4284//4708 +f 4308//4707 4259//4706 4283//4709 +f 4260//4710 4309//4711 4285//4712 +f 4309//4711 4260//4710 4284//4713 +f 4286//4714 4285//4715 4310//4716 +f 4285//4715 4286//4714 4261//4717 +f 4287//4718 4286//4719 4311//4720 +f 4286//4719 4287//4718 4262//4721 +f 4288//4722 4287//4723 4312//4724 +f 4287//4723 4288//4722 4263//4725 +f 4265//4726 4312//4727 4289//4728 +f 4312//4727 4265//4726 4288//4729 +f 4266//4730 4313//4730 4290//4731 +f 4313//4730 4266//4730 4289//4732 +f 4290//4733 4291//4734 4267//4735 +f 4291//4734 4290//4733 4314//4736 +f 4315//4737 4268//4738 4291//4739 +f 4268//4738 4315//4737 4292//4740 +f 4316//4741 4269//4742 4292//4742 +f 4269//4742 4316//4741 4293//4741 +f 4294//4743 4293//4744 4317//4745 +f 4293//4744 4294//4743 4270//4746 +f 4295//4747 4294//4748 4318//4749 +f 4294//4748 4295//4747 4271//4750 +f 4296//4751 4295//4752 4319//4753 +f 4295//4752 4296//4751 4272//4752 +f 4297//4754 4296//4755 4320//4754 +f 4296//4755 4297//4754 4273//4755 +f 4298//4756 4297//4757 4321//4758 +f 4297//4757 4298//4756 4274//4759 +f 4299//4760 4298//4761 4322//4762 +f 4298//4761 4299//4760 4275//4763 +f 4323//4764 4277//4765 4299//4766 +f 4277//4765 4323//4764 4300//4764 +f 4324//4767 4301//4768 4300//4769 +f 4301//4768 4324//4767 4325//4770 +f 4301//4771 4302//4772 4276//4772 +f 4302//4772 4301//4771 4326//4771 +f 4327//4773 4278//4774 4302//4775 +f 4278//4774 4327//4773 4303//4776 +f 4328//4777 4279//4778 4303//4779 +f 4279//4778 4328//4777 4304//4780 +f 4305//4781 4304//4782 4329//4782 +f 4304//4782 4305//4781 4280//4781 +f 4306//4783 4305//4784 4330//4785 +f 4305//4784 4306//4783 4281//4786 +f 4307//4787 4306//4788 4331//4789 +f 4306//4788 4307//4787 4282//4787 +f 
4308//4790 4307//4791 4332//4791 +f 4307//4791 4308//4790 4283//4790 +f 4284//4792 4333//4793 4309//4792 +f 4333//4793 4284//4792 4308//4793 +f 4309//4794 4310//4795 4285//4796 +f 4310//4795 4309//4794 4334//4797 +f 4311//4798 4310//4799 4335//4800 +f 4310//4799 4311//4798 4286//4801 +f 4312//4802 4311//4803 4336//4804 +f 4311//4803 4312//4802 4287//4805 +f 4289//4806 4336//4807 4313//4808 +f 4336//4807 4289//4806 4312//4809 +f 4313//4810 4314//4810 4290//4811 +f 4314//4810 4313//4810 4337//4810 +f 4314//4812 4315//4813 4291//4814 +f 4315//4813 4314//4812 4338//4815 +f 4315//4816 4316//4817 4292//4818 +f 4316//4817 4315//4816 4339//4819 +f 4317//4820 4316//4821 4340//4822 +f 4316//4821 4317//4820 4293//4823 +f 4318//4824 4317//4825 4341//4826 +f 4317//4825 4318//4824 4294//4827 +f 4319//4828 4318//4829 4342//4830 +f 4318//4829 4319//4828 4295//4831 +f 4320//4832 4319//4833 4343//4834 +f 4319//4833 4320//4832 4296//4835 +f 4321//4836 4320//4837 4344//4838 +f 4320//4837 4321//4836 4297//4839 +f 4322//4840 4321//4841 4345//4842 +f 4321//4841 4322//4840 4298//4843 +f 4346//4844 4299//4845 4322//4846 +f 4299//4845 4346//4844 4323//4847 +f 4347//4848 4300//4849 4323//4850 +f 4300//4849 4347//4848 4324//4851 +f 4348//4852 4325//4853 4324//4854 +f 4325//4853 4348//4852 4349//4855 +f 4325//4856 4326//4857 4301//4858 +f 4326//4857 4325//4856 4350//4859 +f 4326//4860 4327//4861 4302//4862 +f 4327//4861 4326//4860 4351//4863 +f 4352//4864 4303//4865 4327//4866 +f 4303//4865 4352//4864 4328//4867 +f 4329//4868 4328//4869 4353//4870 +f 4328//4869 4329//4868 4304//4871 +f 4330//4872 4329//4873 4354//4874 +f 4329//4873 4330//4872 4305//4875 +f 4331//4876 4330//4877 4355//4878 +f 4330//4877 4331//4876 4306//4879 +f 4332//4880 4331//4881 4356//4882 +f 4331//4881 4332//4880 4307//4883 +f 4333//4884 4332//4885 4357//4885 +f 4332//4885 4333//4884 4308//4884 +f 4333//4886 4334//4887 4309//4888 +f 4334//4887 4333//4886 4358//4886 +f 4334//4889 4335//4890 4310//4891 +f 4335//4890 
4334//4889 4359//4892 +f 4336//4893 4335//4894 4360//4895 +f 4335//4894 4336//4893 4311//4896 +f 4313//4897 4360//4898 4337//4899 +f 4360//4898 4313//4897 4336//4900 +f 4337//4901 4338//4902 4314//4903 +f 4338//4902 4337//4901 4361//4904 +f 4338//4905 4339//4906 4315//4905 +f 4339//4906 4338//4905 4362//4907 +f 4339//4908 4340//4909 4316//4910 +f 4340//4909 4339//4908 4363//4911 +f 4341//4912 4340//4913 4364//4912 +f 4340//4913 4341//4912 4317//4913 +f 4342//4914 4341//4915 4365//4916 +f 4341//4915 4342//4914 4318//4917 +f 4343//4918 4342//4919 4366//4920 +f 4342//4919 4343//4918 4319//4921 +f 4344//4922 4343//4923 4367//4922 +f 4343//4923 4344//4922 4320//4923 +f 4345//4924 4344//4925 4368//4924 +f 4344//4925 4345//4924 4321//4925 +f 4369//4926 4322//4927 4345//4928 +f 4322//4927 4369//4926 4346//4929 +f 4370//4930 4323//4931 4346//4932 +f 4323//4931 4370//4930 4347//4933 +f 4371//4934 4324//4935 4347//4936 +f 4324//4935 4371//4934 4348//4937 +f 4372//4938 4349//4939 4348//4939 +f 4349//4939 4372//4938 4373//4938 +f 4349//4940 4350//4941 4325//4942 +f 4350//4941 4349//4940 4374//4943 +f 4350//4944 4351//4945 4326//4946 +f 4351//4945 4350//4944 4375//4947 +f 4351//4948 4352//4949 4327//4950 +f 4352//4949 4351//4948 4376//4951 +f 4353//4952 4352//4953 4377//4954 +f 4352//4953 4353//4952 4328//4955 +f 4354//4956 4353//4957 4378//4957 +f 4353//4957 4354//4956 4329//4956 +f 4355//4958 4354//4959 4379//4960 +f 4354//4959 4355//4958 4330//4961 +f 4356//4962 4355//4963 4380//4962 +f 4355//4963 4356//4962 4331//4964 +f 4357//4965 4356//4966 4381//4967 +f 4356//4966 4357//4965 4332//4968 +f 4333//4969 4382//4970 4358//4970 +f 4382//4970 4333//4969 4357//4971 +f 4358//4972 4359//4973 4334//4974 +f 4359//4973 4358//4972 4383//4975 +f 4359//4976 4360//4977 4335//4978 +f 4360//4977 4359//4976 4384//4979 +f 4360//4980 4361//4981 4337//4982 +f 4361//4981 4360//4980 4384//4983 +f 4361//4984 4362//4985 4338//4986 +f 4362//4985 4361//4984 4385//4987 +f 4362//4988 4363//4989 
4339//4990 +f 4363//4989 4362//4988 4386//4991 +f 4387//4992 4340//4992 4363//4992 +f 4340//4992 4387//4992 4364//4993 +f 4365//4994 4364//4995 4388//4996 +f 4364//4995 4365//4994 4341//4995 +f 4366//4997 4365//4998 4389//4999 +f 4365//4998 4366//4997 4342//5000 +f 4367//5001 4366//5002 4390//5003 +f 4366//5002 4367//5001 4343//5002 +f 4368//5004 4367//5005 4391//5006 +f 4367//5005 4368//5004 4344//5007 +f 4345//5008 4392//5009 4369//5010 +f 4392//5009 4345//5008 4368//5011 +f 4393//5012 4346//5013 4369//5014 +f 4346//5013 4393//5012 4370//5015 +f 4394//5016 4347//5017 4370//5018 +f 4347//5017 4394//5016 4371//5019 +f 4395//5020 4348//5021 4371//5022 +f 4348//5021 4395//5020 4372//5023 +f 4396//5024 4372//5025 4397//5024 +f 4372//5025 4396//5024 4373//5025 +f 4373//5026 4374//5027 4349//5028 +f 4374//5027 4373//5026 4398//5029 +f 4374//5030 4375//5031 4350//5032 +f 4375//5031 4374//5030 4399//5033 +f 4375//5034 4376//5035 4351//5035 +f 4376//5035 4375//5034 4400//5034 +f 4401//5036 4352//5037 4376//5038 +f 4352//5037 4401//5036 4377//5039 +f 4378//5040 4377//5041 4402//5042 +f 4377//5041 4378//5040 4353//5043 +f 4379//5044 4378//5045 4403//5046 +f 4378//5045 4379//5044 4354//5047 +f 4380//5048 4379//5049 4404//5050 +f 4379//5049 4380//5048 4355//5051 +f 4381//5052 4380//5053 4405//5054 +f 4380//5053 4381//5052 4356//5055 +f 4357//5056 4406//5057 4382//5058 +f 4406//5057 4357//5056 4381//5059 +f 4358//5060 4407//5061 4383//5060 +f 4407//5061 4358//5060 4382//5061 +f 4359//5062 4408//5063 4384//5064 +f 4408//5063 4359//5062 4383//5065 +f 4384//5066 4385//5067 4361//5066 +f 4385//5067 4384//5066 4408//5067 +f 4385//5068 4386//5069 4362//5070 +f 4386//5069 4385//5068 4409//5071 +f 4410//5072 4363//5073 4386//5074 +f 4363//5073 4410//5072 4387//5075 +f 4411//5076 4364//5077 4387//5078 +f 4364//5077 4411//5076 4388//5079 +f 4389//5080 4388//5081 4412//5082 +f 4388//5081 4389//5080 4365//5083 +f 4390//5084 4389//5085 4413//5086 +f 4389//5085 4390//5084 4366//5085 +f 
4391//5087 4390//5088 4414//5089 +f 4390//5088 4391//5087 4367//5090 +f 4368//5091 4415//5092 4392//5092 +f 4415//5092 4368//5091 4391//5093 +f 4369//5094 4416//5095 4393//5096 +f 4416//5095 4369//5094 4392//5097 +f 4417//5098 4370//5099 4393//5100 +f 4370//5099 4417//5098 4394//5101 +f 4395//5102 4394//5103 4418//5104 +f 4394//5103 4395//5102 4371//5105 +f 4397//5106 4395//5107 4419//5108 +f 4395//5107 4397//5106 4372//5109 +f 4420//5110 4397//5111 4421//5112 +f 4397//5111 4420//5110 4396//5110 +f 4398//5113 4396//5114 4422//5115 +f 4396//5114 4398//5113 4373//5116 +f 4398//5117 4399//5118 4374//5119 +f 4399//5118 4398//5117 4423//5120 +f 4399//5121 4400//5122 4375//5123 +f 4400//5122 4399//5121 4424//5124 +f 4425//5125 4376//5126 4400//5125 +f 4376//5126 4425//5125 4401//5127 +f 4426//5128 4377//5129 4401//5130 +f 4377//5129 4426//5128 4402//5131 +f 4403//5132 4402//5133 4427//5134 +f 4402//5133 4403//5132 4378//5135 +f 4404//5136 4403//5137 4428//5138 +f 4403//5137 4404//5136 4379//5139 +f 4405//5140 4404//5141 4429//5141 +f 4404//5141 4405//5140 4380//5140 +f 4381//5142 4430//5143 4406//5144 +f 4430//5143 4381//5142 4405//5145 +f 4382//5146 4431//5147 4407//5148 +f 4431//5147 4382//5146 4406//5149 +f 4383//5150 4432//5151 4408//5152 +f 4432//5151 4383//5150 4407//5153 +f 4408//5154 4409//5155 4385//5156 +f 4409//5155 4408//5154 4432//5157 +f 4409//5158 4410//5159 4386//5160 +f 4410//5159 4409//5158 4433//5161 +f 4434//5162 4387//5163 4410//5164 +f 4387//5163 4434//5162 4411//5165 +f 4388//5166 4435//5167 4412//5168 +f 4435//5167 4388//5166 4411//5169 +f 4413//5170 4412//5171 4436//5172 +f 4412//5171 4413//5170 4389//5173 +f 4414//5174 4413//5175 4437//5176 +f 4413//5175 4414//5174 4390//5177 +f 4391//5178 4438//5179 4415//5180 +f 4438//5179 4391//5178 4414//5181 +f 4392//5182 4439//5183 4416//5184 +f 4439//5183 4392//5182 4415//5185 +f 4393//5186 4440//5187 4417//5188 +f 4440//5187 4393//5186 4416//5189 +f 4394//5190 4441//5191 4418//5192 +f 4441//5191 
4394//5190 4417//5193 +f 4419//5194 4418//5195 4442//5196 +f 4418//5195 4419//5194 4395//5197 +f 4421//5198 4419//5199 4443//5200 +f 4419//5199 4421//5198 4397//5201 +f 4444//5202 4421//5203 4445//5204 +f 4421//5203 4444//5202 4420//5205 +f 4446//5206 4396//5207 4420//5208 +f 4396//5207 4446//5206 4422//5209 +f 4423//5210 4422//5211 4447//5211 +f 4422//5211 4423//5210 4398//5210 +f 4448//5212 4399//5213 4423//5214 +f 4399//5213 4448//5212 4424//5215 +f 4449//5216 4400//5217 4424//5216 +f 4400//5217 4449//5216 4425//5218 +f 4450//5219 4401//5220 4425//5221 +f 4401//5220 4450//5219 4426//5222 +f 4451//5223 4402//5223 4426//5223 +f 4402//5133 4451//5224 4427//5134 +f 4428//5225 4427//5226 4452//5227 +f 4427//5226 4428//5225 4403//5228 +f 4429//5229 4428//5230 4453//5230 +f 4428//5230 4429//5229 4404//5229 +f 4430//5231 4429//5232 4454//5233 +f 4429//5232 4430//5231 4405//5234 +f 4406//5235 4455//5236 4431//5237 +f 4455//5236 4406//5235 4430//5238 +f 4407//5239 4456//5240 4432//5241 +f 4456//5240 4407//5239 4431//5242 +f 4432//5243 4433//5244 4409//5243 +f 4433//5244 4432//5243 4456//5244 +f 4457//5245 4410//5246 4433//5247 +f 4410//5246 4457//5245 4434//5248 +f 4411//5249 4458//5250 4435//5251 +f 4458//5250 4411//5249 4434//5252 +f 4412//5253 4459//5254 4436//5255 +f 4459//5254 4412//5253 4435//5256 +f 4437//5257 4436//5258 4460//5259 +f 4436//5258 4437//5257 4413//5260 +f 4414//5261 4461//5262 4438//5263 +f 4461//5262 4414//5261 4437//5264 +f 4415//5265 4462//5266 4439//5267 +f 4462//5266 4415//5265 4438//5268 +f 4416//5269 4463//5270 4440//5271 +f 4463//5270 4416//5269 4439//5272 +f 4417//5273 4464//5274 4441//5274 +f 4464//5274 4417//5273 4440//5273 +f 4442//5275 4441//5276 4465//5275 +f 4441//5276 4442//5275 4418//5276 +f 4443//5277 4442//5278 4466//5279 +f 4442//5278 4443//5277 4419//5280 +f 4445//5281 4443//5282 4467//5283 +f 4443//5282 4445//5281 4421//5284 +f 4468//5285 4445//5286 4469//5287 +f 4445//5286 4468//5285 4444//5288 +f 4470//5289 4420//5290 
4444//5291 +f 4420//5290 4470//5289 4446//5292 +f 4471//5293 4422//5294 4446//5295 +f 4422//5294 4471//5293 4447//5296 +f 4448//5297 4447//5298 4472//5299 +f 4447//5298 4448//5297 4423//5300 +f 4473//5301 4424//5302 4448//5303 +f 4424//5302 4473//5301 4449//5304 +f 4474//5305 4425//5306 4449//5307 +f 4425//5306 4474//5305 4450//5308 +f 4475//5309 4426//5310 4450//5311 +f 4426//5310 4475//5309 4451//5312 +f 4476//5313 4427//5314 4451//5315 +f 4427//5314 4476//5313 4452//5314 +f 4453//5316 4452//5317 4477//5318 +f 4452//5317 4453//5316 4428//5319 +f 4454//5320 4453//5321 4478//5322 +f 4453//5321 4454//5320 4429//5323 +f 4455//5324 4454//5325 4479//5326 +f 4454//5325 4455//5324 4430//5327 +f 4431//5328 4480//5329 4456//5328 +f 4480//5329 4431//5328 4455//5330 +f 4456//5331 4457//5332 4433//5333 +f 4457//5332 4456//5331 4480//5334 +f 4434//5335 4481//5336 4458//5336 +f 4481//5336 4434//5335 4457//5335 +f 4435//5337 4482//5338 4459//5339 +f 4482//5338 4435//5337 4458//5340 +f 4436//5341 4483//5342 4460//5343 +f 4483//5342 4436//5341 4459//5344 +f 4437//5345 4484//5346 4461//5347 +f 4484//5346 4437//5345 4460//5348 +f 4438//5349 4485//5350 4462//5351 +f 4485//5350 4438//5349 4461//5352 +f 4439//5353 4486//5354 4463//5355 +f 4486//5354 4439//5353 4462//5356 +f 4440//5357 4487//5358 4464//5359 +f 4487//5358 4440//5357 4463//5357 +f 4441//5360 4488//5361 4465//5362 +f 4488//5361 4441//5360 4464//5363 +f 4466//5364 4465//5365 4489//5366 +f 4465//5365 4466//5364 4442//5367 +f 4467//5368 4466//5369 4490//5370 +f 4466//5369 4467//5368 4443//5371 +f 4469//5372 4467//5373 4491//5374 +f 4467//5373 4469//5372 4445//5375 +f 4492//5376 4469//5377 4493//5378 +f 4469//5377 4492//5376 4468//5379 +f 4494//5380 4444//5380 4468//5380 +f 4444//5380 4494//5380 4470//5380 +f 4495//5381 4446//5382 4470//5383 +f 4446//5382 4495//5381 4471//5384 +f 4496//5385 4447//5386 4471//5387 +f 4447//5386 4496//5385 4472//5388 +f 4497//5389 4448//5390 4472//5391 +f 4448//5390 4497//5389 4473//5392 +f 
4498//5393 4449//5393 4473//5393 +f 4449//5393 4498//5393 4474//5393 +f 4499//5394 4450//5395 4474//5396 +f 4450//5395 4499//5394 4475//5397 +f 4500//5398 4451//5399 4475//5400 +f 4451//5399 4500//5398 4476//5401 +f 4501//5402 4452//5403 4476//5404 +f 4452//5403 4501//5402 4477//5405 +f 4478//5406 4477//5407 4502//5408 +f 4477//5407 4478//5406 4453//5409 +f 4479//5410 4478//5411 4503//5412 +f 4478//5411 4479//5410 4454//5413 +f 4480//5414 4479//5415 4504//5416 +f 4479//5415 4480//5414 4455//5417 +f 4457//5418 4504//5419 4481//5420 +f 4504//5419 4457//5418 4480//5421 +f 4458//5422 4505//5423 4482//5424 +f 4505//5423 4458//5422 4481//5425 +f 4459//5426 4506//5427 4483//5428 +f 4506//5427 4459//5426 4482//5427 +f 4483//5429 4484//5430 4460//5431 +f 4484//5430 4483//5429 4507//5432 +f 4461//5433 4508//5434 4485//5435 +f 4508//5434 4461//5433 4484//5436 +f 4462//5437 4509//5438 4486//5439 +f 4509//5438 4462//5437 4485//5440 +f 4463//5441 4510//5442 4487//5443 +f 4510//5442 4463//5441 4486//5444 +f 4464//5445 4511//5446 4488//5447 +f 4511//5446 4464//5445 4487//5448 +f 4465//5449 4512//5450 4489//5451 +f 4512//5450 4465//5449 4488//5452 +f 4466//5453 4513//5454 4490//5455 +f 4513//5454 4466//5453 4489//5456 +f 4467//5457 4514//5458 4491//5459 +f 4514//5458 4467//5457 4490//5460 +f 4469//5461 4515//5462 4493//5463 +f 4515//5462 4469//5461 4491//5464 +f 4516//5465 4492//5466 4493//5467 +f 4492//5466 4516//5465 4517//5465 +f 4518//5468 4468//5469 4492//5470 +f 4468//5469 4518//5468 4494//5471 +f 4519//5472 4470//5473 4494//5474 +f 4470//5473 4519//5472 4495//5473 +f 4520//5475 4471//5476 4495//5477 +f 4471//5476 4520//5475 4496//5478 +f 4521//5479 4472//5480 4496//5481 +f 4472//5480 4521//5479 4497//5482 +f 4522//5483 4473//5484 4497//5485 +f 4473//5484 4522//5483 4498//5486 +f 4523//5487 4474//5488 4498//5489 +f 4474//5488 4523//5487 4499//5488 +f 4524//5490 4475//5491 4499//5492 +f 4475//5491 4524//5490 4500//5493 +f 4525//5494 4476//5495 4500//5494 +f 4476//5495 
4525//5494 4501//5495 +f 4477//5496 4526//5497 4502//5496 +f 4526//5497 4477//5496 4501//5497 +f 4503//5498 4502//5499 4527//5500 +f 4502//5499 4503//5498 4478//5501 +f 4504//5502 4503//5503 4528//5503 +f 4503//5503 4504//5502 4479//5502 +f 4481//5504 4528//5505 4505//5506 +f 4528//5505 4481//5504 4504//5504 +f 4482//5507 4529//5508 4506//5509 +f 4529//5508 4482//5507 4505//5510 +f 4506//5511 4507//5512 4483//5513 +f 4507//5512 4506//5511 4530//5514 +f 4507//5515 4508//5516 4484//5517 +f 4508//5516 4507//5515 4531//5518 +f 4485//5519 4532//5520 4509//5521 +f 4532//5520 4485//5519 4508//5522 +f 4486//5523 4533//5524 4510//5525 +f 4533//5524 4486//5523 4509//5526 +f 4487//5527 4534//5528 4511//5529 +f 4534//5528 4487//5527 4510//5527 +f 4488//5530 4535//5531 4512//5532 +f 4535//5531 4488//5530 4511//5533 +f 4512//5534 4513//5535 4489//5536 +f 4513//5535 4512//5534 4536//5537 +f 4490//5538 4537//5539 4514//5539 +f 4537//5539 4490//5538 4513//5538 +f 4491//5540 4538//5541 4515//5541 +f 4538//5541 4491//5540 4514//5542 +f 4539//5543 4493//5544 4515//5545 +f 4493//5544 4539//5543 4516//5546 +f 4539//5547 4517//5548 4516//5549 +f 4517//5548 4539//5547 4540//5550 +f 4517//5548 4540//5550 4541//5551 +f 4541//5551 4540//5550 4542//5552 +f 4541//5551 4542//5552 4543//5553 +f 4543//5553 4542//5552 4544//5554 +f 4543//5553 4544//5554 4545//5555 +f 4545//5555 4544//5554 4546//5556 +f 4545//5555 4546//5556 4547//5557 +f 4547//5557 4546//5556 4548//5558 +f 4547//5557 4548//5558 4549//5559 +f 4549//5559 4548//5558 4550//5560 +f 4549//5559 4550//5560 4551//5561 +f 4551//5561 4550//5560 4552//5562 +f 4551//5561 4552//5562 4553//5563 +f 4553//5563 4552//5562 4554//5564 +f 4553//5563 4554//5564 4555//5565 +f 4555//5565 4554//5564 4556//5566 +f 4555//5565 4556//5566 4557//5567 +f 4557//5567 4556//5566 4558//5568 +f 4557//5567 4558//5568 4559//5569 +f 4559//5569 4558//5568 4560//5570 +f 4517//5571 4518//5572 4492//5573 +f 4518//5572 4517//5571 4541//5574 +f 4561//5575 4494//5576 
4518//5575 +f 4494//5576 4561//5575 4519//5577 +f 4562//5578 4495//5578 4519//5579 +f 4495//5578 4562//5578 4520//5578 +f 4520//5580 4521//5581 4496//5582 +f 4521//5581 4520//5580 4563//5583 +f 4564//5584 4497//5585 4521//5584 +f 4497//5585 4564//5584 4522//5585 +f 4565//5586 4498//5586 4522//5586 +f 4498//5586 4565//5586 4523//5586 +f 4566//5587 4499//5588 4523//5589 +f 4499//5588 4566//5587 4524//5590 +f 4567//5591 4500//5592 4524//5593 +f 4500//5592 4567//5591 4525//5594 +f 4501//5595 4568//5596 4526//5597 +f 4568//5596 4501//5595 4525//5598 +f 4502//5599 4569//5600 4527//5599 +f 4569//5600 4502//5599 4526//5600 +f 4528//5601 4527//5602 4570//5602 +f 4527//5602 4528//5601 4503//5601 +f 4505//5603 4570//5603 4529//5603 +f 4570//5603 4505//5603 4528//5603 +f 4529//5604 4530//5605 4506//5604 +f 4530//5605 4529//5604 4571//5605 +f 4530//5606 4531//5607 4507//5608 +f 4531//5607 4530//5606 4572//5609 +f 4531//5610 4532//5611 4508//5612 +f 4532//5611 4531//5610 4573//5613 +f 4509//5614 4574//5615 4533//5616 +f 4574//5615 4509//5614 4532//5614 +f 4510//5617 4575//5618 4534//5619 +f 4575//5618 4510//5617 4533//5620 +f 4511//5621 4576//5622 4535//5622 +f 4576//5622 4511//5621 4534//5621 +f 4535//5623 4536//5624 4512//5625 +f 4536//5624 4535//5623 4577//5626 +f 4536//5627 4537//5628 4513//5629 +f 4537//5628 4536//5627 4578//5630 +f 4514//5631 4579//5632 4538//5633 +f 4579//5632 4514//5631 4537//5634 +f 4540//5635 4515//5636 4538//5637 +f 4515//5636 4540//5635 4539//5638 +f 4580//5639 4559//5640 4581//5641 +f 4559//5640 4580//5639 4557//5642 +f 4582//5643 4557//5644 4580//5645 +f 4557//5644 4582//5643 4555//5646 +f 4553//5647 4582//5648 4583//5649 +f 4582//5648 4553//5647 4555//5647 +f 4551//5650 4583//5651 4584//5652 +f 4583//5651 4551//5650 4553//5653 +f 4549//5654 4584//5655 4585//5656 +f 4584//5655 4549//5654 4551//5657 +f 4547//5658 4585//5659 4586//5660 +f 4585//5659 4547//5658 4549//5661 +f 4545//5662 4586//5663 4587//5664 +f 4586//5663 4545//5662 4547//5665 +f 
4543//5666 4587//5667 4561//5668 +f 4587//5667 4543//5666 4545//5669 +f 4541//5670 4561//5671 4518//5672 +f 4561//5671 4541//5670 4543//5673 +f 4542//5674 4538//5675 4579//5676 +f 4538//5675 4542//5674 4540//5677 +f 4588//5678 4542//5679 4579//5680 +f 4542//5679 4588//5678 4544//5681 +f 4589//5682 4544//5683 4588//5682 +f 4544//5683 4589//5682 4546//5683 +f 4590//5684 4546//5685 4589//5686 +f 4546//5685 4590//5684 4548//5687 +f 4591//5688 4548//5689 4590//5690 +f 4548//5689 4591//5688 4550//5691 +f 4592//5692 4550//5693 4591//5694 +f 4550//5693 4592//5692 4552//5695 +f 4593//5696 4552//5697 4592//5698 +f 4552//5697 4593//5696 4554//5699 +f 4594//5700 4554//5701 4593//5700 +f 4554//5701 4594//5700 4556//5702 +f 4595//5703 4556//5704 4594//5705 +f 4556//5704 4595//5703 4558//5704 +f 4596//5706 4558//5707 4595//5708 +f 4558//5707 4596//5706 4560//5709 +f 4581//5710 4560//5711 4596//5712 +f 4560//5711 4581//5710 4559//5713 +f 4561//5714 4562//5715 4519//5716 +f 4562//5715 4561//5714 4587//5717 +f 4562//5718 4563//5719 4520//5718 +f 4563//5719 4562//5718 4597//5720 +f 4563//5721 4564//5721 4521//5722 +f 4564//5721 4563//5721 4598//5721 +f 4599//5723 4522//5724 4564//5723 +f 4522//5724 4599//5723 4565//5725 +f 4600//5726 4523//5727 4565//5728 +f 4523//5727 4600//5726 4566//5729 +f 4601//5730 4524//5731 4566//5732 +f 4524//5731 4601//5730 4567//5733 +f 4525//5734 4602//5735 4568//5736 +f 4602//5735 4525//5734 4567//5737 +f 4526//5738 4603//5739 4569//5740 +f 4603//5739 4526//5738 4568//5741 +f 4527//5742 4604//5743 4570//5742 +f 4604//5743 4527//5742 4569//5744 +f 4570//5745 4571//5746 4529//5747 +f 4571//5746 4570//5745 4604//5746 +f 4571//5748 4572//5748 4530//5749 +f 4572//5748 4571//5748 4605//5750 +f 4572//5751 4573//5752 4531//5753 +f 4573//5752 4572//5751 4606//5754 +f 4532//5755 4607//5756 4574//5757 +f 4607//5756 4532//5755 4573//5758 +f 4533//5759 4608//5760 4575//5761 +f 4608//5760 4533//5759 4574//5762 +f 4534//5763 4609//5764 4576//5765 +f 4609//5764 
4534//5763 4575//5766 +f 4576//5767 4577//5768 4535//5769 +f 4577//5768 4576//5767 4610//5770 +f 4577//5771 4578//5772 4536//5771 +f 4578//5772 4577//5771 4611//5773 +f 4578//5774 4579//5775 4537//5776 +f 4579//5775 4578//5774 4588//5777 +f 4581//5778 4612//5779 4580//5780 +f 4612//5779 4581//5778 4613//5781 +f 4580//5782 4614//5783 4582//5784 +f 4614//5783 4580//5782 4612//5785 +f 4583//5786 4614//5787 4615//5788 +f 4614//5787 4583//5786 4582//5789 +f 4584//5790 4615//5791 4616//5792 +f 4615//5791 4584//5790 4583//5793 +f 4585//5794 4616//5795 4617//5796 +f 4616//5795 4585//5794 4584//5797 +f 4586//5798 4617//5799 4597//5800 +f 4617//5799 4586//5798 4585//5801 +f 4587//5802 4597//5803 4562//5804 +f 4597//5803 4587//5802 4586//5802 +f 4611//5805 4588//5806 4578//5807 +f 4588//5806 4611//5805 4589//5808 +f 4618//5809 4589//5810 4611//5811 +f 4589//5810 4618//5809 4590//5812 +f 4619//5813 4590//5814 4618//5815 +f 4590//5814 4619//5813 4591//5816 +f 4620//5817 4591//5818 4619//5819 +f 4591//5818 4620//5817 4592//5820 +f 4621//5821 4592//5822 4620//5823 +f 4592//5822 4621//5821 4593//5824 +f 4621//5825 4594//5826 4593//5826 +f 4594//5826 4621//5825 4622//5825 +f 4622//5827 4595//5828 4594//5829 +f 4595//5828 4622//5827 4623//5830 +f 4623//5831 4596//5832 4595//5832 +f 4596//5832 4623//5831 4624//5831 +f 4596//5833 4613//5834 4581//5833 +f 4613//5834 4596//5833 4624//5834 +f 4597//5835 4598//5836 4563//5837 +f 4598//5836 4597//5835 4617//5838 +f 4598//5839 4599//5840 4564//5841 +f 4599//5840 4598//5839 4625//5842 +f 4626//5843 4565//5844 4599//5845 +f 4565//5844 4626//5843 4600//5846 +f 4627//5847 4566//5848 4600//5849 +f 4566//5848 4627//5847 4601//5850 +f 4628//5851 4567//5852 4601//5851 +f 4567//5852 4628//5851 4602//5852 +f 4568//5853 4629//5854 4603//5855 +f 4629//5854 4568//5853 4602//5856 +f 4569//5857 4630//5858 4604//5859 +f 4630//5858 4569//5857 4603//5860 +f 4604//5861 4605//5862 4571//5861 +f 4605//5862 4604//5861 4630//5862 +f 4605//5863 4606//5864 
4572//5865 +f 4606//5864 4605//5863 4631//5866 +f 4606//5867 4607//5868 4573//5869 +f 4607//5868 4606//5867 4632//5870 +f 4608//5871 4607//5872 4633//5873 +f 4607//5872 4608//5871 4574//5874 +f 4575//5875 4634//5876 4609//5876 +f 4634//5876 4575//5875 4608//5877 +f 4609//5878 4610//5879 4576//5880 +f 4610//5879 4609//5878 4635//5881 +f 4610//5882 4611//5883 4577//5884 +f 4611//5883 4610//5882 4618//5885 +f 4613//5886 4636//5887 4612//5888 +f 4636//5887 4613//5886 4637//5889 +f 4612//5890 4638//5891 4614//5892 +f 4638//5891 4612//5890 4636//5893 +f 4615//5894 4638//5895 4639//5896 +f 4638//5895 4615//5894 4614//5897 +f 4616//5898 4639//5899 4625//5900 +f 4639//5899 4616//5898 4615//5901 +f 4617//5902 4625//5903 4598//5904 +f 4625//5903 4617//5902 4616//5905 +f 4635//5906 4618//5907 4610//5908 +f 4618//5907 4635//5906 4619//5909 +f 4640//5910 4619//5911 4635//5912 +f 4619//5911 4640//5910 4620//5913 +f 4640//5914 4621//5915 4620//5915 +f 4621//5915 4640//5914 4641//5916 +f 4641//5917 4622//5918 4621//5919 +f 4622//5918 4641//5917 4642//5920 +f 4642//5921 4623//5922 4622//5923 +f 4623//5922 4642//5921 4643//5924 +f 4643//5925 4624//5926 4623//5927 +f 4624//5926 4643//5925 4644//5928 +f 4644//5929 4613//5930 4624//5931 +f 4613//5930 4644//5929 4637//5932 +f 4625//5933 4626//5934 4599//5934 +f 4626//5934 4625//5933 4639//5933 +f 4645//5935 4600//5936 4626//5937 +f 4600//5936 4645//5935 4627//5938 +f 4628//5939 4627//5940 4646//5940 +f 4627//5940 4628//5939 4601//5939 +f 4629//5941 4628//5942 4647//5943 +f 4628//5942 4629//5941 4602//5944 +f 4603//5945 4648//5946 4630//5947 +f 4648//5946 4603//5945 4629//5948 +f 4630//5949 4631//5950 4605//5949 +f 4631//5950 4630//5949 4648//5951 +f 4631//5952 4632//5953 4606//5954 +f 4632//5953 4631//5952 4649//5955 +f 4633//5956 4632//5957 4650//5958 +f 4632//5957 4633//5956 4607//5959 +f 4608//5960 4651//5961 4634//5962 +f 4651//5961 4608//5960 4633//5963 +f 4634//5964 4635//5965 4609//5966 +f 4635//5965 4634//5964 4640//5967 +f 
4652//5968 4636//5969 4637//5970 +f 4636//5969 4652//5968 4653//5971 +f 4636//5972 4645//5972 4638//5972 +f 4645//5972 4636//5972 4653//5972 +f 4639//5973 4645//5974 4626//5974 +f 4645//5974 4639//5973 4638//5975 +f 4634//5976 4641//5977 4640//5978 +f 4641//5977 4634//5976 4651//5979 +f 4651//5980 4642//5981 4641//5982 +f 4642//5981 4651//5980 4654//5983 +f 4654//5984 4643//5985 4642//5986 +f 4643//5985 4654//5984 4655//5987 +f 4655//5988 4644//5989 4643//5990 +f 4644//5989 4655//5988 4656//5991 +f 4656//5992 4637//5993 4644//5994 +f 4637//5993 4656//5992 4652//5995 +f 4646//5996 4645//5997 4653//5998 +f 4645//5997 4646//5996 4627//5999 +f 4647//6000 4646//6001 4657//6002 +f 4646//6001 4647//6000 4628//6003 +f 4629//6004 4658//6005 4648//6004 +f 4658//6005 4629//6004 4647//6006 +f 4648//6007 4649//6008 4631//6009 +f 4649//6008 4648//6007 4658//6010 +f 4650//6011 4649//6012 4659//6013 +f 4649//6012 4650//6011 4632//6014 +f 4633//6015 4654//6016 4651//6017 +f 4654//6016 4633//6015 4650//6018 +f 4657//6019 4653//6020 4652//6021 +f 4653//6020 4657//6019 4646//6022 +f 4650//6023 4655//6024 4654//6024 +f 4655//6024 4650//6023 4659//6023 +f 4659//6025 4656//6026 4655//6027 +f 4656//6026 4659//6025 4660//6028 +f 4660//6029 4652//6030 4656//6031 +f 4652//6030 4660//6029 4657//6032 +f 4658//6033 4657//6034 4660//6035 +f 4657//6034 4658//6033 4647//6036 +f 4649//6037 4660//6038 4659//6039 +f 4660//6038 4649//6037 4658//6040 +f 4661//6041 4662//6042 4663//6043 +f 4662//6042 4661//6041 4664//6044 +f 4663//6045 4665//6046 4666//6047 +f 4665//6046 4663//6045 4662//1903 +f 4667//6048 4663//6049 4668//6050 +f 4663//6049 4667//6048 4661//6051 +f 4661//6052 4669//6053 4664//6054 +f 4669//6053 4661//6052 4670//6053 +f 4664//6055 4671//6056 4662//6057 +f 4671//6056 4664//6055 4672//6058 +f 4666//6059 4673//6060 4674//6061 +f 4673//6060 4666//6059 4665//6062 +f 4668//6063 4666//6064 4675//6065 +f 4666//6064 4668//6063 4663//6066 +f 4662//6067 4676//6068 4665//6069 +f 4676//6068 
4662//6067 4671//6070 +f 4677//6071 4668//6072 4678//6073 +f 4668//6072 4677//6071 4667//6074 +f 4667//6075 4670//6076 4661//6077 +f 4670//6076 4667//6075 4679//6078 +f 4670//6079 4680//6080 4669//6081 +f 4680//6080 4670//6079 4681//6082 +f 4664//6083 4682//6084 4672//6085 +f 4682//6084 4664//6083 4669//6086 +f 4671//6087 4683//6088 4684//6089 +f 4683//6088 4671//6087 4672//6090 +f 4674//6091 4685//6092 4686//6093 +f 4685//6092 4674//6091 4673//6094 +f 4675//6095 4674//1952 4687//6096 +f 4674//1952 4675//6095 4666//6097 +f 4665//6098 4688//6099 4673//6100 +f 4688//6099 4665//6098 4676//6101 +f 4678//6102 4675//6103 4689//6104 +f 4675//6103 4678//6102 4668//6105 +f 4671//6106 4690//6107 4676//6107 +f 4690//6107 4671//6106 4684//6106 +f 4691//6108 4678//6109 4692//6110 +f 4678//6109 4691//6108 4677//6111 +f 4693//1975 4667//1973 4677//1974 +f 4667//1973 4693//1975 4679//1975 +f 4679//6112 4681//6113 4670//6112 +f 4681//6113 4679//6112 4694//6113 +f 4681//6114 4695//6115 4680//6116 +f 4695//6115 4681//6114 4696//1981 +f 4669//6117 4697//1985 4682//6117 +f 4697//1985 4669//6117 4680//1985 +f 4672//6118 4698//6119 4683//6120 +f 4698//6119 4672//6118 4682//6121 +f 4684//1990 4699//6122 4700//1990 +f 4699//6122 4684//1990 4683//6122 +f 4686//6123 4701//6124 4702//6125 +f 4701//6124 4686//6123 4685//6126 +f 4687//6127 4686//6127 4703//6127 +f 4686//6127 4687//6127 4674//6127 +f 4673//6128 4704//6129 4685//6130 +f 4704//6129 4673//6128 4688//6131 +f 4689//6132 4687//2006 4705//6133 +f 4687//2006 4689//6132 4675//6134 +f 4676//2008 4706//2009 4688//2009 +f 4706//2009 4676//2008 4690//2010 +f 4692//6135 4689//6136 4707//2013 +f 4689//6136 4692//6135 4678//6137 +f 4684//6138 4708//2016 4690//6139 +f 4708//2016 4684//6138 4700//6140 +f 4709//6141 4691//6142 4692//6143 +f 4691//6142 4709//6141 4710//6144 +f 4691//6145 4693//6146 4677//6147 +f 4693//6146 4691//6145 4711//6148 +f 4693//6149 4694//2028 4679//6150 +f 4694//2028 4693//6149 4712//6151 +f 4694//2030 4696//6152 
4681//6153 +f 4696//6152 4694//2030 4713//6154 +f 4696//2033 4714//2034 4695//2035 +f 4714//2034 4696//2033 4715//2036 +f 4680//2037 4716//2038 4697//2039 +f 4716//2038 4680//2037 4695//6155 +f 4682//6156 4717//6157 4698//6158 +f 4717//6157 4682//6156 4697//6159 +f 4683//6160 4718//6161 4699//6162 +f 4718//6161 4683//6160 4698//6163 +f 4699//6164 4719//2049 4700//2049 +f 4719//2049 4699//6164 4720//6165 +f 4721//6166 4701//6167 4722//6168 +f 4701//6167 4721//6166 4702//6169 +f 4723//6170 4686//6171 4702//6170 +f 4686//6171 4723//6170 4703//6172 +f 4685//6173 4724//6174 4701//6175 +f 4724//6174 4685//6173 4704//6176 +f 4725//6177 4687//6178 4703//6179 +f 4687//6178 4725//6177 4705//6180 +f 4688//6181 4726//6182 4704//2068 +f 4726//6182 4688//6181 4706//6183 +f 4727//6184 4689//6185 4705//6186 +f 4689//6185 4727//6184 4707//2072 +f 4690//2073 4728//2074 4706//2075 +f 4728//2074 4690//2073 4708//2076 +f 4729//6187 4692//6188 4707//6189 +f 4692//6188 4729//6187 4709//6190 +f 4700//6191 4730//6192 4708//6193 +f 4730//6192 4700//6191 4719//6194 +f 4731//2083 4710//2084 4709//2083 +f 4710//2084 4731//2083 4732//2084 +f 4710//6195 4711//6196 4691//6197 +f 4711//6196 4710//6195 4733//6198 +f 4711//2089 4712//2090 4693//6199 +f 4712//2090 4711//2089 4734//2092 +f 4712//6200 4713//6201 4694//6202 +f 4713//6201 4712//6200 4735//6203 +f 4713//6204 4715//6205 4696//6206 +f 4715//6205 4713//6204 4736//6207 +f 4714//6208 4737//6209 4738//6210 +f 4737//6209 4714//6208 4715//6211 +f 4695//6212 4739//2105 4716//2106 +f 4739//2105 4695//6212 4714//2107 +f 4697//2108 4740//2109 4717//2108 +f 4740//2109 4697//2108 4716//2109 +f 4698//6213 4741//6214 4718//6215 +f 4741//6214 4698//6213 4717//6214 +f 4718//6216 4720//6217 4699//6217 +f 4720//6217 4718//6216 4742//6218 +f 4720//6219 4743//6220 4719//6220 +f 4743//6220 4720//6219 4744//6219 +f 4722//6221 4745//6222 4721//6223 +f 4745//6222 4722//6221 4746//6224 +f 4746//6224 4722//6221 4747//6225 +f 4746//6224 4747//6225 4748//6226 +f 
4748//6226 4747//6225 4749//6227 +f 4748//6226 4749//6227 4750//2135 +f 4750//2135 4749//6227 4751//6228 +f 4750//2135 4751//6228 4752//6229 +f 4752//6229 4751//6228 4753//6230 +f 4752//6229 4753//6230 4754//6231 +f 4754//6231 4753//6230 4755//6232 +f 4754//6231 4755//6232 4756//2127 +f 4756//2127 4755//6232 4757//6233 +f 4756//2127 4757//6233 4758//6234 +f 4758//6234 4757//6233 4759//6235 +f 4758//6234 4759//6235 4760//6236 +f 4760//6236 4759//6235 4761//6237 +f 4760//6236 4761//6237 4762//6238 +f 4762//6238 4761//6237 4763//6239 +f 4762//6238 4763//6239 4764//6240 +f 4764//6240 4763//6239 4765//6241 +f 4764//6240 4765//6241 4766//6223 +f 4745//6242 4702//6243 4721//2142 +f 4702//6243 4745//6242 4723//2143 +f 4722//2144 4724//6244 4747//2146 +f 4724//6244 4722//2144 4701//2147 +f 4767//6245 4703//6246 4723//6247 +f 4703//6246 4767//6245 4725//6248 +f 4704//6249 4768//6250 4724//6251 +f 4768//6250 4704//6249 4726//6252 +f 4769//6253 4705//6254 4725//6255 +f 4705//6254 4769//6253 4727//6256 +f 4706//6257 4770//6258 4726//6259 +f 4770//6258 4706//6257 4728//6260 +f 4727//6261 4729//6262 4707//6263 +f 4729//6262 4727//6261 4771//6264 +f 4730//6265 4728//6266 4708//6267 +f 4728//6266 4730//6265 4772//6268 +f 4773//6269 4709//6270 4729//6270 +f 4709//6270 4773//6269 4731//6271 +f 4719//6272 4774//6273 4730//6274 +f 4774//6273 4719//6272 4743//6275 +f 4775//6276 4732//2179 4731//6277 +f 4732//2179 4775//6276 4776//6278 +f 4732//6279 4733//6280 4710//2184 +f 4733//6280 4732//6279 4777//6281 +f 4733//6282 4734//6283 4711//2187 +f 4734//6283 4733//6282 4778//6284 +f 4734//6285 4735//6286 4712//2190 +f 4735//6286 4734//6285 4779//6287 +f 4735//2192 4736//6288 4713//2194 +f 4736//6288 4735//2192 4780//6289 +f 4715//6290 4781//6291 4737//2198 +f 4781//6291 4715//6290 4736//6292 +f 4738//6293 4782//6294 4783//6294 +f 4782//6294 4738//6293 4737//6293 +f 4739//6295 4738//6295 4784//6295 +f 4738//6295 4739//6295 4714//2207 +f 4716//6296 4785//6297 4740//2210 +f 4785//6297 
4716//6296 4739//2211 +f 4717//6298 4786//6299 4741//6300 +f 4786//6299 4717//6298 4740//2215 +f 4741//2217 4742//6301 4718//6302 +f 4742//6301 4741//2217 4787//6303 +f 4742//6304 4744//2220 4720//6305 +f 4744//2220 4742//6304 4788//6306 +f 4744//6307 4789//6308 4743//6309 +f 4789//6308 4744//6307 4790//6310 +f 4791//6311 4765//6312 4763//6311 +f 4765//6312 4791//6311 4792//6313 +f 4792//6314 4766//6315 4765//2233 +f 4766//6315 4792//6314 4793//6316 +f 4793//6317 4764//6318 4766//6317 +f 4764//6318 4793//6317 4794//2238 +f 4794//6319 4762//6320 4764//6321 +f 4762//6320 4794//6319 4795//6322 +f 4795//6323 4760//6324 4762//2245 +f 4760//6324 4795//6323 4796//6325 +f 4758//6326 4796//6327 4797//6327 +f 4796//6327 4758//6326 4760//6328 +f 4756//6329 4797//6330 4798//6330 +f 4797//6330 4756//6329 4758//6329 +f 4754//6331 4798//6332 4799//2257 +f 4798//6332 4754//6331 4756//6333 +f 4752//6334 4799//6335 4800//2261 +f 4799//6335 4752//6334 4754//6336 +f 4750//6337 4800//6338 4801//6339 +f 4800//6338 4750//6337 4752//6340 +f 4748//6341 4801//6342 4802//6343 +f 4801//6342 4748//6341 4750//6344 +f 4748//6345 4767//6346 4746//6347 +f 4767//6346 4748//6345 4802//6348 +f 4746//6349 4723//6350 4745//6350 +f 4723//6350 4746//6349 4767//6349 +f 4747//6351 4768//6352 4749//6351 +f 4768//6352 4747//6351 4724//6352 +f 4803//6353 4749//6354 4768//6353 +f 4749//6354 4803//6353 4751//6355 +f 4804//2286 4751//6356 4803//6357 +f 4751//6356 4804//2286 4753//2288 +f 4805//2289 4753//6358 4804//2291 +f 4753//6358 4805//2289 4755//2292 +f 4806//6359 4755//2294 4805//2295 +f 4755//2294 4806//6359 4757//6360 +f 4807//6361 4757//6362 4806//6363 +f 4757//6362 4807//6361 4759//6364 +f 4808//2301 4759//6365 4807//6366 +f 4759//6365 4808//2301 4761//2303 +f 4808//6367 4763//6368 4761//6368 +f 4763//6368 4808//6367 4791//6367 +f 4802//6369 4725//6370 4767//6371 +f 4725//6370 4802//6369 4769//2308 +f 4770//6372 4768//6373 4726//6372 +f 4768//6373 4770//6372 4803//6373 +f 4769//2316 4771//6374 
4727//2315 +f 4771//6374 4769//2316 4809//2316 +f 4772//6375 4770//6376 4728//6377 +f 4770//6376 4772//6375 4810//6378 +f 4771//6379 4773//6380 4729//6381 +f 4773//6380 4771//6379 4811//6382 +f 4774//6383 4772//6384 4730//6385 +f 4772//6384 4774//6383 4812//6386 +f 4813//6387 4731//6387 4773//6387 +f 4731//6277 4813//2328 4775//6276 +f 4743//6388 4814//2330 4774//2331 +f 4814//2330 4743//6388 4789//2332 +f 4815//6389 4776//6390 4775//6389 +f 4776//6390 4815//6389 4816//6390 +f 4776//6278 4777//6391 4732//2179 +f 4777//6391 4776//6278 4817//6392 +f 4777//6393 4778//6394 4733//6395 +f 4778//6394 4777//6393 4818//6396 +f 4778//6397 4779//6398 4734//6399 +f 4779//6398 4778//6397 4819//2345 +f 4779//6400 4780//6401 4735//6400 +f 4780//6401 4779//6400 4820//2347 +f 4736//6402 4821//6403 4781//2350 +f 4821//6403 4736//6402 4780//6404 +f 4737//6405 4822//6406 4782//6407 +f 4822//6406 4737//6405 4781//6405 +f 4783//6408 4823//6409 4824//6410 +f 4823//6409 4783//6408 4782//6411 +f 4784//6412 4783//6412 4825//6412 +f 4783//6412 4784//6412 4738//6412 +f 4785//2364 4784//6413 4826//6414 +f 4784//6413 4785//2364 4739//2364 +f 4740//2365 4827//6415 4786//2365 +f 4827//6415 4740//2365 4785//6415 +f 4786//2366 4787//6416 4741//6417 +f 4787//6416 4786//2366 4828//6418 +f 4787//6419 4788//6420 4742//6421 +f 4788//6420 4787//6419 4829//6422 +f 4788//6423 4790//6310 4744//6307 +f 4790//6310 4788//6423 4830//6424 +f 4790//6425 4831//6426 4789//6427 +f 4831//6426 4790//6425 4832//2379 +f 4833//6428 4792//6429 4791//6430 +f 4792//6429 4833//6428 4834//6431 +f 4834//6432 4793//6433 4792//6434 +f 4793//6433 4834//6432 4835//6435 +f 4793//6436 4836//6437 4794//6438 +f 4836//6437 4793//6436 4835//6439 +f 4794//6440 4837//6441 4795//6442 +f 4837//6441 4794//6440 4836//6443 +f 4795//6444 4838//6445 4796//6446 +f 4838//6445 4795//6444 4837//6447 +f 4797//6448 4838//6449 4839//6449 +f 4838//6449 4797//6448 4796//6450 +f 4798//6451 4839//6452 4840//6453 +f 4839//6452 4798//6451 4797//6454 +f 
4799//6455 4840//6456 4841//6457 +f 4840//6456 4799//6455 4798//2410 +f 4800//2261 4841//2411 4842//6458 +f 4841//2411 4800//2261 4799//6335 +f 4801//6459 4842//6460 4809//6461 +f 4842//6460 4801//6459 4800//6462 +f 4802//6463 4809//6464 4769//6465 +f 4809//6464 4802//6463 4801//6466 +f 4810//2423 4803//6467 4770//2423 +f 4803//6467 4810//2423 4804//6467 +f 4843//6468 4804//6469 4810//6470 +f 4804//6469 4843//6468 4805//6471 +f 4844//6472 4805//2295 4843//6473 +f 4805//2295 4844//6472 4806//6359 +f 4845//6474 4806//6475 4844//2433 +f 4806//6475 4845//6474 4807//6476 +f 4846//6477 4807//6478 4845//6479 +f 4807//6478 4846//6477 4808//6480 +f 4846//6481 4791//6482 4808//6483 +f 4791//6482 4846//6481 4833//6484 +f 4809//6485 4811//6485 4771//6485 +f 4811//6485 4809//6485 4842//6485 +f 4812//6486 4810//6486 4772//6486 +f 4810//6486 4812//6486 4843//6486 +f 4811//2446 4813//2446 4773//6487 +f 4813//2446 4811//2446 4847//2448 +f 4814//2449 4812//2450 4774//2449 +f 4812//2450 4814//2449 4848//2450 +f 4849//6488 4775//6489 4813//2453 +f 4775//6489 4849//6488 4815//6490 +f 4789//6491 4850//6492 4814//6493 +f 4850//6492 4789//6491 4831//6494 +f 4851//6495 4816//6496 4815//6497 +f 4816//6496 4851//6495 4852//6498 +f 4816//6499 4817//2464 4776//2465 +f 4817//2464 4816//6499 4853//2466 +f 4817//6392 4818//6500 4777//6391 +f 4818//6500 4817//6392 4854//6501 +f 4818//2469 4819//6502 4778//2471 +f 4819//6502 4818//2469 4855//2472 +f 4819//6503 4820//6504 4779//6505 +f 4820//6504 4819//6503 4856//2476 +f 4780//6506 4857//6507 4821//6508 +f 4857//6507 4780//6506 4820//2477 +f 4781//6509 4858//6510 4822//6511 +f 4858//6510 4781//6509 4821//6512 +f 4782//2485 4859//6513 4823//2487 +f 4859//6513 4782//2485 4822//2488 +f 4860//6514 4823//6515 4861//6516 +f 4823//6515 4860//6514 4824//6517 +f 4825//6518 4824//6519 4862//6520 +f 4824//6519 4825//6518 4783//6521 +f 4826//2497 4825//2497 4863//2497 +f 4825//2497 4826//2497 4784//6522 +f 4827//6523 4826//6524 4864//6525 +f 4826//6524 
4827//6523 4785//2502 +f 4827//2503 4828//6526 4786//2505 +f 4828//6526 4827//2503 4865//6527 +f 4828//2507 4829//6528 4787//6529 +f 4829//6528 4828//2507 4866//6530 +f 4829//6531 4830//6424 4788//6423 +f 4830//6424 4829//6531 4867//6532 +f 4830//6533 4832//6534 4790//6535 +f 4832//6534 4830//6533 4868//2516 +f 4832//6536 4869//6537 4831//2519 +f 4869//6537 4832//6536 4870//6538 +f 4871//6539 4834//6540 4833//6541 +f 4834//6540 4871//6539 4872//6542 +f 4834//2525 4873//2525 4835//2525 +f 4873//2525 4834//2525 4872//2525 +f 4835//2527 4874//2528 4836//6543 +f 4874//2528 4835//2527 4873//2528 +f 4836//6544 4875//6545 4837//6546 +f 4875//6545 4836//6544 4874//6547 +f 4837//6548 4876//6549 4838//6550 +f 4876//6549 4837//6548 4875//6551 +f 4838//6552 4877//6553 4839//6554 +f 4877//6553 4838//6552 4876//6555 +f 4840//2542 4877//6556 4878//2542 +f 4877//6556 4840//2542 4839//2543 +f 4841//2544 4878//6557 4847//6557 +f 4878//6557 4841//2544 4840//2544 +f 4842//6458 4847//2546 4811//6558 +f 4847//2546 4842//6458 4841//2411 +f 4848//2547 4843//6473 4812//2548 +f 4843//6473 4848//2547 4844//6472 +f 4879//6559 4844//6559 4848//6559 +f 4844//6559 4879//6559 4845//6559 +f 4880//6560 4845//6561 4879//6561 +f 4845//6561 4880//6560 4846//6562 +f 4880//6563 4833//6564 4846//6565 +f 4833//6564 4880//6563 4871//6566 +f 4847//6567 4849//2560 4813//2561 +f 4849//2560 4847//6567 4878//2562 +f 4850//6568 4848//6568 4814//6568 +f 4848//6568 4850//6568 4879//6568 +f 4881//6569 4815//6570 4849//6571 +f 4815//6570 4881//6569 4851//2570 +f 4831//6572 4882//2572 4850//2573 +f 4882//2572 4831//6572 4869//6573 +f 4883//6574 4851//6575 4884//6576 +f 4851//6575 4883//6574 4852//6577 +f 4852//2577 4853//6578 4816//6579 +f 4853//6578 4852//2577 4885//6580 +f 4853//6581 4854//2580 4817//2581 +f 4854//2580 4853//6581 4886//6582 +f 4854//6501 4855//6583 4818//6500 +f 4855//6583 4854//6501 4887//6584 +f 4855//2585 4856//2585 4819//2587 +f 4856//2585 4855//2585 4888//6585 +f 4889//6586 4820//2590 
4856//2591 +f 4820//2590 4889//6586 4857//2592 +f 4821//6587 4890//6588 4858//6589 +f 4890//6588 4821//6587 4857//6590 +f 4822//6591 4891//6592 4859//6592 +f 4891//6592 4822//6591 4858//6591 +f 4861//6593 4859//6594 4892//6595 +f 4859//6594 4861//6593 4823//6596 +f 4893//6597 4861//6598 4894//6599 +f 4861//6598 4893//6597 4860//6600 +f 4862//6601 4860//6602 4895//6602 +f 4860//6602 4862//6601 4824//6601 +f 4863//6603 4862//6604 4896//6605 +f 4862//6604 4863//6603 4825//6606 +f 4864//6607 4863//6608 4897//6609 +f 4863//6608 4864//6607 4826//6610 +f 4827//6611 4898//6612 4865//6613 +f 4898//6612 4827//6611 4864//6614 +f 4865//2626 4866//6615 4828//6616 +f 4866//6615 4865//2626 4899//2626 +f 4866//6617 4867//6532 4829//6531 +f 4867//6532 4866//6617 4900//6618 +f 4867//6619 4868//6620 4830//2631 +f 4868//6620 4867//6619 4901//6621 +f 4868//6622 4870//6623 4832//6624 +f 4870//6623 4868//6622 4902//6625 +f 4869//6626 4903//6627 4904//6627 +f 4903//6627 4869//6626 4870//6628 +f 4871//6629 4905//6630 4872//6629 +f 4905//6630 4871//6629 4906//6630 +f 4872//2645 4907//2646 4873//6631 +f 4907//2646 4872//2645 4905//2646 +f 4873//2649 4908//6632 4874//6633 +f 4908//6632 4873//2649 4907//6632 +f 4874//6634 4909//6635 4875//6636 +f 4909//6635 4874//6634 4908//2653 +f 4875//6637 4910//6638 4876//6639 +f 4910//6638 4875//6637 4909//6640 +f 4876//6641 4881//6642 4877//6643 +f 4881//6642 4876//6641 4910//6644 +f 4878//6645 4881//6646 4849//6647 +f 4881//6646 4878//6645 4877//6648 +f 4882//6649 4879//6649 4850//6650 +f 4879//6649 4882//6649 4880//6651 +f 4882//6652 4871//6653 4880//6654 +f 4871//6653 4882//6652 4906//6655 +f 4910//6656 4851//6657 4881//6658 +f 4851//6657 4910//6656 4884//6659 +f 4882//2677 4904//6660 4906//2677 +f 4904//6660 4882//2677 4869//2678 +f 4911//6661 4884//6662 4912//6662 +f 4884//6662 4911//6661 4883//6661 +f 4913//6663 4852//6664 4883//6665 +f 4852//6664 4913//6663 4885//6666 +f 4885//6667 4886//6668 4853//6669 +f 4886//6668 4885//6667 4914//6670 +f 
4886//6671 4887//2689 4854//6671 +f 4887//2689 4886//6671 4915//6672 +f 4887//6584 4888//6673 4855//6583 +f 4888//6673 4887//6584 4916//6674 +f 4917//6675 4856//6675 4888//2697 +f 4856//6675 4917//6675 4889//2696 +f 4918//6676 4857//6677 4889//6678 +f 4857//6677 4918//6676 4890//6679 +f 4919//6680 4858//2703 4890//6681 +f 4858//2703 4919//6680 4891//6682 +f 4892//6683 4891//6684 4920//6683 +f 4891//6684 4892//6683 4859//6685 +f 4894//6686 4892//6687 4921//6688 +f 4892//6687 4894//6686 4861//6689 +f 4922//6690 4894//6691 4923//6692 +f 4894//6691 4922//6690 4893//6693 +f 4924//6694 4860//6695 4893//6696 +f 4860//6695 4924//6694 4895//6697 +f 4896//6698 4895//6699 4925//6700 +f 4895//6699 4896//6698 4862//6701 +f 4897//6702 4896//6703 4926//6704 +f 4896//6703 4897//6702 4863//6705 +f 4864//2730 4927//2731 4898//2730 +f 4927//2731 4864//2730 4897//2731 +f 4865//6706 4928//6707 4899//6706 +f 4928//6707 4865//6706 4898//6708 +f 4899//6709 4900//6618 4866//6617 +f 4900//6618 4899//6709 4929//2737 +f 4900//6710 4901//6711 4867//6710 +f 4901//6711 4900//6710 4930//6712 +f 4901//6713 4902//6714 4868//6714 +f 4902//6714 4901//6713 4931//6713 +f 4902//6715 4903//6716 4870//6716 +f 4903//6716 4902//6715 4932//6715 +f 4904//6717 4933//6718 4934//6719 +f 4933//6718 4904//6717 4903//6720 +f 4906//6721 4934//6722 4905//2756 +f 4934//6722 4906//6721 4904//2757 +f 4905//6723 4935//6724 4907//6725 +f 4935//6724 4905//6723 4934//6726 +f 4907//6727 4936//6728 4908//2764 +f 4936//6728 4907//6727 4935//6728 +f 4908//6729 4912//6730 4909//6731 +f 4912//6730 4908//6729 4936//2769 +f 4909//2770 4884//6732 4910//2772 +f 4884//6732 4909//2770 4912//6733 +f 4936//6734 4911//6735 4912//2776 +f 4911//6735 4936//6734 4937//6736 +f 4938//6737 4883//6738 4911//6739 +f 4883//6738 4938//6737 4913//6740 +f 4939//6741 4885//2783 4913//6742 +f 4885//2783 4939//6741 4914//6743 +f 4914//6744 4915//6745 4886//6746 +f 4915//6745 4914//6744 4940//6747 +f 4915//2791 4916//2791 4887//2790 +f 4916//2791 
4915//2791 4941//2792 +f 4942//2793 4888//6673 4916//6674 +f 4888//6673 4942//2793 4917//6748 +f 4943//6749 4889//6750 4917//6751 +f 4889//6750 4943//6749 4918//6752 +f 4944//6753 4890//6754 4918//6754 +f 4890//6754 4944//6753 4919//6755 +f 4920//6756 4919//6757 4945//6758 +f 4919//6757 4920//6756 4891//6759 +f 4921//6760 4920//6761 4946//6762 +f 4920//6761 4921//6760 4892//6763 +f 4923//6764 4921//6765 4947//6766 +f 4921//6765 4923//6764 4894//6767 +f 4948//6768 4923//6769 4949//6768 +f 4923//6769 4948//6768 4922//6770 +f 4950//6771 4893//6772 4922//6773 +f 4893//6772 4950//6771 4924//6774 +f 4925//6775 4924//6776 4951//2821 +f 4924//6776 4925//6775 4895//2820 +f 4926//6777 4925//6778 4952//6779 +f 4925//6778 4926//6777 4896//6780 +f 4897//6781 4953//6782 4927//2826 +f 4953//6782 4897//6781 4926//6783 +f 4898//6784 4954//2828 4928//6785 +f 4954//2828 4898//6784 4927//6786 +f 4899//6709 4955//2832 4929//2737 +f 4955//2832 4899//6709 4928//6787 +f 4929//6788 4930//2834 4900//6789 +f 4930//2834 4929//6788 4956//2836 +f 4930//2840 4931//2838 4901//2838 +f 4931//2838 4930//2840 4957//2840 +f 4931//2844 4932//2841 4902//6790 +f 4932//2841 4931//2844 4958//2844 +f 4903//6791 4959//6792 4933//6793 +f 4959//6792 4903//6791 4932//6794 +f 4934//6795 4960//2852 4935//6795 +f 4960//2852 4934//6795 4933//6796 +f 4935//6797 4937//6798 4936//6797 +f 4937//6798 4935//6797 4960//6799 +f 4961//6800 4911//6801 4937//6802 +f 4911//6801 4961//6800 4938//2859 +f 4962//2862 4913//6803 4938//2860 +f 4913//6803 4962//2862 4939//6804 +f 4963//6805 4914//2865 4939//6806 +f 4914//2865 4963//6805 4940//2867 +f 4940//2868 4941//2871 4915//6807 +f 4941//2871 4940//2868 4964//6808 +f 4965//6809 4916//2872 4941//2874 +f 4916//2872 4965//6809 4942//6810 +f 4966//6811 4917//6748 4942//2793 +f 4917//6748 4966//6811 4943//6812 +f 4967//6813 4918//2878 4943//6814 +f 4918//2878 4967//6813 4944//6815 +f 4968//2885 4919//6816 4944//6816 +f 4919//6816 4968//2885 4945//2885 +f 4946//6817 4945//6818 
4969//6819 +f 4945//6818 4946//6817 4920//6820 +f 4947//6821 4946//6822 4970//6823 +f 4946//6822 4947//6821 4921//6824 +f 4949//2895 4947//6825 4971//6826 +f 4947//6825 4949//2895 4923//6827 +f 4972//6828 4949//6829 4973//6830 +f 4949//6829 4972//6828 4948//6831 +f 4974//6832 4922//6833 4948//6834 +f 4922//6833 4974//6832 4950//6835 +f 4975//2905 4924//6836 4950//2905 +f 4924//6836 4975//2905 4951//6837 +f 4952//6838 4951//6839 4976//6840 +f 4951//6839 4952//6838 4925//6841 +f 4926//6842 4977//6843 4953//6844 +f 4977//6843 4926//6842 4952//2914 +f 4927//6845 4978//2918 4954//6845 +f 4978//2918 4927//6845 4953//2918 +f 4928//6787 4979//6846 4955//2832 +f 4979//6846 4928//6787 4954//6847 +f 4929//6848 4980//6849 4956//6850 +f 4980//6849 4929//6848 4955//6851 +f 4956//6852 4957//6853 4930//2927 +f 4957//6853 4956//6852 4981//2928 +f 4957//6854 4958//6855 4931//6856 +f 4958//6855 4957//6854 4982//6857 +f 4958//6858 4959//2934 4932//2933 +f 4959//2934 4958//6858 4983//2935 +f 4959//6859 4960//6860 4933//6861 +f 4960//6860 4959//6859 4984//6862 +f 4984//2941 4937//6863 4960//6864 +f 4937//6863 4984//2941 4961//6865 +f 4985//6866 4938//6867 4961//6868 +f 4938//6867 4985//6866 4962//6869 +f 4986//6870 4939//6871 4962//6872 +f 4939//6871 4986//6870 4963//6873 +f 4987//2953 4940//2952 4963//2953 +f 4940//2952 4987//2953 4964//2954 +f 4988//2957 4941//6874 4964//6875 +f 4941//6874 4988//2957 4965//6876 +f 4989//6877 4942//6878 4965//2959 +f 4942//6878 4989//6877 4966//2962 +f 4990//6879 4943//6812 4966//6811 +f 4943//6812 4990//6879 4967//6880 +f 4991//6881 4944//6882 4967//6883 +f 4944//6882 4991//6881 4968//6881 +f 4992//2969 4945//2971 4968//2971 +f 4945//2971 4992//2969 4969//6884 +f 4970//6885 4969//6886 4993//2974 +f 4969//6886 4970//6885 4946//6887 +f 4947//6888 4994//6889 4971//2978 +f 4994//6889 4947//6888 4970//6890 +f 4949//6891 4995//6892 4973//6893 +f 4995//6892 4949//6891 4971//6894 +f 4972//6895 4996//6896 4997//6896 +f 4996//6896 4972//6895 4973//6897 +f 
4998//6898 4948//6899 4972//6900 +f 4948//6899 4998//6898 4974//6901 +f 4999//6902 4950//6903 4974//6904 +f 4950//6903 4999//6902 4975//6905 +f 5000//2996 4951//2996 4975//2997 +f 4951//2996 5000//2996 4976//2998 +f 4952//6906 5001//6907 4977//6908 +f 5001//6907 4952//6906 4976//6907 +f 4953//6909 5002//6910 4978//6911 +f 5002//6910 4953//6909 4977//6912 +f 4954//6847 5003//6913 4979//6846 +f 5003//6913 4954//6847 4978//3006 +f 4955//6914 5004//6915 4980//6914 +f 5004//6915 4955//6914 4979//6916 +f 4956//6917 5005//3010 4981//3009 +f 5005//3010 4956//6917 4980//3010 +f 4981//6918 4982//6919 4957//3013 +f 4982//6919 4981//6918 5006//6920 +f 4982//6921 4983//6922 4958//6923 +f 4983//6922 4982//6921 5007//6924 +f 4983//3019 4984//6925 4959//3021 +f 4984//6925 4983//3019 5008//3022 +f 5008//6926 4961//6927 4984//6928 +f 4961//6927 5008//6926 4985//6929 +f 5009//6930 4962//6931 4985//6932 +f 4962//6931 5009//6930 4986//3030 +f 5010//3031 4963//6933 4986//6934 +f 4963//6933 5010//3031 4987//3034 +f 4988//3035 4987//3036 5011//3035 +f 4987//3036 4988//3035 4964//3036 +f 5012//6935 4965//6936 4988//6937 +f 4965//6936 5012//6935 4989//3037 +f 5013//3039 4966//6938 4989//6938 +f 4966//6938 5013//3039 4990//6939 +f 5014//3043 4967//6880 4990//6879 +f 4967//6880 5014//3043 4991//3044 +f 5015//6940 4968//6941 4991//6940 +f 4968//6941 5015//6940 4992//6942 +f 5016//6943 4969//6944 4992//6945 +f 4969//6944 5016//6943 4993//6946 +f 4970//6947 5017//6948 4994//6949 +f 5017//6948 4970//6947 4993//6950 +f 4971//6951 5018//6952 4995//6953 +f 5018//6952 4971//6951 4994//6954 +f 4973//6955 5019//6956 4996//6957 +f 5019//6956 4973//6955 4995//6958 +f 4997//6959 5020//6960 5021//6960 +f 5020//6960 4997//6959 4996//6959 +f 5022//6961 4972//6962 4997//6963 +f 4972//6962 5022//6961 4998//6964 +f 5023//6965 4974//6966 4998//6967 +f 4974//6966 5023//6965 4999//6968 +f 5024//6969 4975//6970 4999//3077 +f 4975//6970 5024//6969 5000//6971 +f 4976//6972 5025//6973 5001//6974 +f 5025//6973 
4976//6972 5000//6975 +f 4977//6976 5026//3084 5002//6976 +f 5026//3084 4977//6976 5001//6977 +f 4978//3006 5027//3085 5003//6913 +f 5027//3085 4978//3006 5002//6978 +f 4979//6979 5028//6980 5004//6981 +f 5028//6980 4979//6979 5003//6982 +f 4980//3093 5029//6983 5005//3093 +f 5029//6983 4980//3093 5004//6983 +f 5030//6984 4981//6985 5005//6986 +f 4981//6985 5030//6984 5006//6987 +f 5006//3099 5007//6988 4982//6989 +f 5007//6988 5006//3099 5031//6990 +f 5007//6991 5008//6992 4983//6993 +f 5008//6992 5007//6991 5032//6994 +f 5032//6995 4985//6996 5008//6997 +f 4985//6996 5032//6995 5009//6995 +f 5033//3111 4986//6998 5009//3113 +f 4986//6998 5033//3111 5010//6999 +f 5011//7000 5010//7001 5034//7000 +f 5010//7001 5011//7000 4987//3115 +f 5012//7002 5011//3117 5035//7003 +f 5011//3117 5012//7002 4988//3119 +f 5036//7004 4989//7005 5012//7006 +f 4989//7005 5036//7004 5013//7007 +f 5037//7008 4990//7009 5013//7010 +f 4990//7009 5037//7008 5014//3125 +f 5038//7011 4991//3044 5014//3043 +f 4991//3044 5038//7011 5015//7012 +f 5039//7013 4992//7014 5015//7014 +f 4992//7014 5039//7013 5016//7013 +f 5017//7015 5016//7016 5040//7017 +f 5016//7016 5017//7015 4993//7018 +f 4994//7019 5041//7020 5018//7020 +f 5041//7020 4994//7019 5017//7021 +f 4995//7022 5042//7023 5019//7024 +f 5042//7023 4995//7022 5018//7025 +f 4996//7026 5043//7027 5020//7028 +f 5043//7027 4996//7026 5019//7026 +f 5021//7029 5044//3145 5045//3147 +f 5044//3145 5021//7029 5020//7030 +f 5022//7031 5021//7032 5046//7033 +f 5021//7032 5022//7031 4997//7034 +f 5047//7035 4998//7036 5022//7037 +f 4998//7036 5047//7035 5023//7038 +f 5024//7039 5023//7040 5048//7041 +f 5023//7040 5024//7039 4999//7039 +f 5025//7042 5024//7043 5049//7043 +f 5024//7043 5025//7042 5000//7042 +f 5001//7044 5050//7045 5026//7046 +f 5050//7045 5001//7044 5025//7047 +f 5002//6978 5051//3167 5027//3085 +f 5051//3167 5002//6978 5026//3168 +f 5003//7048 5052//7049 5028//7050 +f 5052//7049 5003//7048 5027//7051 +f 5004//7052 5053//7053 
5029//7054 +f 5053//7053 5004//7052 5028//7055 +f 5054//7056 5005//7057 5029//7056 +f 5005//7057 5054//7056 5030//7057 +f 5055//3182 5006//7058 5030//3181 +f 5006//7058 5055//3182 5031//7059 +f 5031//3183 5032//3184 5007//3184 +f 5032//3184 5031//3183 5056//3183 +f 5056//3186 5009//3187 5032//3188 +f 5009//3187 5056//3186 5033//3187 +f 5034//7060 5033//7060 5057//7060 +f 5033//7060 5034//7060 5010//7060 +f 5035//7061 5034//7062 5058//7063 +f 5034//7062 5035//7061 5011//7064 +f 5036//7065 5035//7066 5059//3199 +f 5035//7066 5036//7065 5012//7067 +f 5060//7068 5013//7069 5036//7070 +f 5013//7069 5060//7068 5037//7071 +f 5061//3205 5014//3206 5037//3206 +f 5014//3206 5061//3205 5038//3205 +f 5062//7072 5015//7012 5038//7011 +f 5015//7012 5062//7072 5039//7073 +f 5040//7074 5039//7075 5063//7076 +f 5039//7075 5040//7074 5016//7077 +f 5041//7078 5040//7079 5064//7078 +f 5040//7079 5041//7078 5017//7079 +f 5018//7080 5065//7081 5042//7082 +f 5065//7081 5018//7080 5041//7080 +f 5019//7083 5066//7084 5043//7085 +f 5066//7084 5019//7083 5042//7086 +f 5020//7087 5067//7088 5044//7089 +f 5067//7088 5020//7087 5043//7087 +f 5045//7090 5068//7090 5069//7091 +f 5068//7090 5045//7090 5044//7092 +f 5046//7093 5045//7094 5070//7095 +f 5045//7094 5046//7093 5021//7096 +f 5047//7097 5046//7098 5071//7098 +f 5046//7098 5047//7097 5022//7099 +f 5048//7100 5047//7101 5072//7101 +f 5047//7101 5048//7100 5023//7100 +f 5049//7102 5048//7103 5073//7103 +f 5048//7103 5049//7102 5024//7102 +f 5050//7104 5049//7105 5074//7106 +f 5049//7105 5050//7104 5025//7107 +f 5026//7108 5075//3243 5051//3244 +f 5075//3243 5026//7108 5050//3245 +f 5027//7109 5076//7110 5052//7111 +f 5076//7110 5027//7109 5051//7112 +f 5028//7113 5077//7114 5053//7115 +f 5077//7114 5028//7113 5052//7116 +f 5029//7117 5078//7118 5054//7119 +f 5078//7118 5029//7117 5053//7118 +f 5079//7120 5030//7121 5054//7120 +f 5030//7121 5079//7120 5055//7122 +f 5080//7123 5031//7124 5055//3263 +f 5031//7124 5080//7123 5056//7125 +f 
5057//7126 5056//7127 5080//7128 +f 5056//7127 5057//7126 5033//3268 +f 5058//7129 5057//7129 5081//7129 +f 5057//7129 5058//7129 5034//7129 +f 5059//3274 5058//3275 5082//7130 +f 5058//3275 5059//3274 5035//3275 +f 5060//7131 5059//7132 5083//7133 +f 5059//7132 5060//7131 5036//3277 +f 5084//3280 5037//7134 5060//3281 +f 5037//7134 5084//3280 5061//3280 +f 5085//7135 5038//7136 5061//3284 +f 5038//7136 5085//7135 5062//3285 +f 5063//7137 5062//7072 5086//7138 +f 5062//7072 5063//7137 5039//7073 +f 5064//3288 5063//3288 5087//7139 +f 5063//3288 5064//3288 5040//7140 +f 5065//7141 5064//7142 5088//7143 +f 5064//7142 5065//7141 5041//7144 +f 5042//7145 5089//7146 5066//7147 +f 5089//7146 5042//7145 5065//7148 +f 5043//7149 5090//7150 5067//7151 +f 5090//7150 5043//7149 5066//3298 +f 5044//7152 5091//7153 5068//3301 +f 5091//7153 5044//7152 5067//7154 +f 5092//7155 5068//7156 5093//7157 +f 5068//7156 5092//7155 5069//7158 +f 5070//3307 5069//3308 5094//3308 +f 5069//3308 5070//3307 5045//3309 +f 5071//7159 5070//7160 5095//7161 +f 5070//7160 5071//7159 5046//7162 +f 5072//7163 5071//7164 5096//7165 +f 5071//7164 5072//7163 5047//7163 +f 5073//7166 5072//7167 5097//7168 +f 5072//7167 5073//7166 5048//7166 +f 5074//3320 5073//7169 5098//7170 +f 5073//7169 5074//3320 5049//7171 +f 5075//7172 5074//7173 5099//3324 +f 5074//7173 5075//7172 5050//3323 +f 5051//7174 5100//7175 5076//7176 +f 5100//7175 5051//7174 5075//7177 +f 5052//7178 5101//7179 5077//7178 +f 5101//7179 5052//7178 5076//7179 +f 5078//7180 5077//7181 5102//7182 +f 5077//7181 5078//7180 5053//7183 +f 5103//7184 5054//7185 5078//7186 +f 5054//7185 5103//7184 5079//3337 +f 5104//7187 5055//7188 5079//7189 +f 5055//7188 5104//7187 5080//7190 +f 5081//7191 5080//7192 5104//7193 +f 5080//7192 5081//7191 5057//7192 +f 5082//7194 5081//7195 5105//7194 +f 5081//7195 5082//7194 5058//7196 +f 5083//7197 5082//7198 5106//7199 +f 5082//7198 5083//7197 5059//7200 +f 5084//7201 5083//7202 5107//7203 +f 5083//7202 
5084//7201 5060//7204 +f 5108//7205 5061//7206 5084//7207 +f 5061//7206 5108//7205 5085//7208 +f 5086//7209 5085//7210 5109//7211 +f 5085//7210 5086//7209 5062//3360 +f 5087//7212 5086//7138 5110//7213 +f 5086//7138 5087//7212 5063//7137 +f 5088//7214 5087//7215 5111//7216 +f 5087//7215 5088//7214 5064//7217 +f 5089//7218 5088//7219 5112//7220 +f 5088//7219 5089//7218 5065//3365 +f 5066//3368 5113//7221 5090//7222 +f 5113//7221 5066//3368 5089//7223 +f 5067//3370 5114//7224 5091//7224 +f 5114//7224 5067//3370 5090//3370 +f 5093//7225 5091//7226 5115//7227 +f 5091//7226 5093//7225 5068//3372 +f 5116//7157 5093//7157 5117//7157 +f 5093//7157 5116//7157 5092//7155 +f 5118//7228 5069//7229 5092//7230 +f 5069//7229 5118//7228 5094//3375 +f 5095//3377 5094//7231 5119//7232 +f 5094//7231 5095//3377 5070//3379 +f 5096//7233 5095//7234 5120//7235 +f 5095//7234 5096//7233 5071//7236 +f 5097//7237 5096//7238 5121//3386 +f 5096//7238 5097//7237 5072//3384 +f 5098//7239 5097//7240 5122//3389 +f 5097//7240 5098//7239 5073//7241 +f 5099//3324 5098//3391 5123//3392 +f 5098//3391 5099//3324 5074//7173 +f 5100//7242 5099//7243 5124//7243 +f 5099//7243 5100//7242 5075//7242 +f 5076//7244 5125//7245 5101//7246 +f 5125//7245 5076//7244 5100//7247 +f 5102//7248 5101//7249 5126//7248 +f 5101//7249 5102//7248 5077//7250 +f 5103//7251 5102//7252 5127//3405 +f 5102//7252 5103//7251 5078//7253 +f 5128//7254 5079//7255 5103//7256 +f 5079//7255 5128//7254 5104//7255 +f 5105//7257 5104//7258 5128//7259 +f 5104//7258 5105//7257 5081//7260 +f 5106//7261 5105//7262 5129//7263 +f 5105//7262 5106//7261 5082//7264 +f 5107//7265 5106//7266 5130//7267 +f 5106//7266 5107//7265 5083//7268 +f 5084//7269 5131//7270 5108//7271 +f 5131//7270 5084//7269 5107//3426 +f 5109//3429 5108//7272 5132//3429 +f 5108//7272 5109//3429 5085//7272 +f 5110//3433 5109//7273 5133//3433 +f 5109//7273 5110//3433 5086//7273 +f 5111//7274 5110//7213 5134//7275 +f 5110//7213 5111//7274 5087//7212 +f 5112//7276 5111//7277 
5135//3438 +f 5111//7277 5112//7276 5088//7278 +f 5113//7279 5112//7280 5136//7281 +f 5112//7280 5113//7279 5089//7282 +f 5090//7283 5137//7284 5114//3446 +f 5137//7284 5090//7283 5113//7285 +f 5115//7286 5114//7287 5138//3450 +f 5114//7287 5115//7286 5091//7288 +f 5117//7227 5115//7227 5139//7227 +f 5115//7227 5117//7227 5093//7225 +f 5140//7289 5117//7289 5141//7289 +f 5117//7289 5140//7289 5116//7289 +f 5142//7230 5092//7230 5116//7230 +f 5092//7230 5142//7230 5118//7228 +f 5143//7290 5094//7291 5118//7290 +f 5094//7291 5143//7290 5119//7291 +f 5120//7292 5119//7293 5144//7294 +f 5119//7293 5120//7292 5095//7295 +f 5121//7296 5120//3461 5145//7297 +f 5120//3461 5121//7296 5096//3463 +f 5122//7298 5121//7299 5146//7300 +f 5121//7299 5122//7298 5097//3467 +f 5123//3392 5122//3468 5147//7301 +f 5122//3468 5123//3392 5098//3391 +f 5124//7302 5123//7303 5148//7304 +f 5123//7303 5124//7302 5099//7305 +f 5125//3474 5124//7306 5149//7307 +f 5124//7306 5125//3474 5100//3477 +f 5126//7308 5125//7309 5150//7310 +f 5125//7309 5126//7308 5101//7311 +f 5127//7312 5126//7313 5151//7314 +f 5126//7313 5127//7312 5102//7315 +f 5128//7316 5127//7317 5152//7318 +f 5127//7317 5128//7316 5103//7319 +f 5105//7320 5152//7321 5129//7321 +f 5152//7321 5105//7320 5128//7322 +f 5106//3497 5153//7323 5130//7323 +f 5153//7323 5106//3497 5129//3497 +f 5107//7324 5154//7325 5131//7326 +f 5154//7325 5107//7324 5130//7327 +f 5108//7328 5155//7329 5132//7330 +f 5155//7329 5108//7328 5131//7331 +f 5133//7332 5132//7333 5156//7334 +f 5132//7333 5133//7332 5109//3509 +f 5134//7335 5133//3511 5157//7336 +f 5133//3511 5134//7335 5110//7337 +f 5135//7338 5134//7275 5158//7339 +f 5134//7275 5135//7338 5111//7274 +f 5136//7340 5135//3517 5159//7341 +f 5135//3517 5136//7340 5112//3519 +f 5137//7342 5136//7343 5160//3522 +f 5136//7343 5137//7342 5113//7344 +f 5138//7345 5137//7346 5161//7347 +f 5137//7346 5138//7345 5114//7348 +f 5139//7349 5138//3450 5162//3529 +f 5138//3450 5139//7349 5115//7286 +f 
5141//7350 5139//7350 5163//7350 +f 5139//7350 5141//7350 5117//7350 +f 5164//7351 5140//7352 5141//7352 +f 5140//7352 5164//7351 5165//7351 +f 5166//7353 5116//7354 5140//7355 +f 5116//7354 5166//7353 5142//7354 +f 5167//7356 5118//7357 5142//7356 +f 5118//7357 5167//7356 5143//7357 +f 5168//7358 5119//7359 5143//7358 +f 5119//7359 5168//7358 5144//7359 +f 5145//3540 5144//3541 5169//3542 +f 5144//3541 5145//3540 5120//3543 +f 5146//7360 5145//3545 5170//7361 +f 5145//3545 5146//7360 5121//7362 +f 5147//7301 5146//3548 5171//7363 +f 5146//3548 5147//7301 5122//3468 +f 5148//7364 5147//3553 5172//3552 +f 5147//3553 5148//7364 5123//7365 +f 5149//7366 5148//7367 5173//7368 +f 5148//7367 5149//7366 5124//7369 +f 5174//3558 5125//7370 5149//7371 +f 5125//7370 5174//3558 5150//3561 +f 5151//7372 5150//7373 5175//3564 +f 5150//7373 5151//7372 5126//7374 +f 5152//3566 5151//7375 5176//7375 +f 5151//7375 5152//3566 5127//3566 +f 5129//7376 5176//3571 5153//7377 +f 5176//3571 5129//7376 5152//7378 +f 5130//7379 5177//7380 5154//7381 +f 5177//7380 5130//7379 5153//7382 +f 5131//7383 5178//7384 5155//3580 +f 5178//7384 5131//7383 5154//7385 +f 5156//7386 5155//3582 5179//7387 +f 5155//3582 5156//7386 5132//7388 +f 5157//7389 5156//7390 5180//7391 +f 5156//7390 5157//7389 5133//7392 +f 5158//7393 5157//3590 5181//7394 +f 5157//3590 5158//7393 5134//3593 +f 5159//3594 5158//7339 5182//7395 +f 5158//7339 5159//3594 5135//7338 +f 5160//3596 5159//7396 5183//3598 +f 5159//7396 5160//3596 5136//7397 +f 5161//7398 5160//7399 5184//7400 +f 5160//7399 5161//7398 5137//3601 +f 5162//7401 5161//7347 5185//7402 +f 5161//7347 5162//7401 5138//7345 +f 5163//3606 5162//3607 5186//3606 +f 5162//3607 5163//3606 5139//3607 +f 5187//7403 5141//7403 5163//7403 +f 5141//7403 5187//7403 5164//7403 +f 5188//7289 5164//7289 5189//7289 +f 5164//7289 5188//7289 5165//7289 +f 5165//7404 5166//7353 5140//7355 +f 5166//7353 5165//7404 5190//7405 +f 5191//7406 5142//7406 5166//7406 +f 5142//7406 
5191//7406 5167//7406 +f 5192//7407 5143//7408 5167//7409 +f 5143//7408 5192//7407 5168//7410 +f 5169//7411 5168//7412 5193//7413 +f 5168//7412 5169//7411 5144//7414 +f 5170//7415 5169//7416 5194//7416 +f 5169//7416 5170//7415 5145//3622 +f 5171//7363 5170//3623 5195//7417 +f 5170//3623 5171//7363 5146//3548 +f 5172//7418 5171//7419 5196//7420 +f 5171//7419 5172//7418 5147//7421 +f 5173//3632 5172//7422 5197//7422 +f 5172//7422 5173//3632 5148//3632 +f 5198//7423 5149//7424 5173//7425 +f 5149//7424 5198//7423 5174//7426 +f 5199//3637 5150//3637 5174//3637 +f 5150//3637 5199//3637 5175//3639 +f 5176//3640 5175//3641 5200//3641 +f 5175//3641 5176//3640 5151//3640 +f 5153//7427 5200//3643 5177//3644 +f 5200//3643 5153//7427 5176//3645 +f 5154//3646 5201//3647 5178//7428 +f 5201//3647 5154//3646 5177//3648 +f 5155//7429 5202//7430 5179//7431 +f 5202//7430 5155//7429 5178//3649 +f 5180//7432 5179//7433 5203//7432 +f 5179//7433 5180//7432 5156//7434 +f 5181//7435 5180//7436 5204//7437 +f 5180//7436 5181//7435 5157//7438 +f 5182//3661 5181//7439 5205//3661 +f 5181//7439 5182//3661 5158//7439 +f 5183//3663 5182//7395 5206//7440 +f 5182//7395 5183//3663 5159//3594 +f 5184//3665 5183//3666 5207//7441 +f 5183//3666 5184//3665 5160//3668 +f 5185//7442 5184//7400 5208//3670 +f 5184//7400 5185//7442 5161//7398 +f 5186//3671 5185//3672 5209//3671 +f 5185//3672 5186//3671 5162//3672 +f 5210//3673 5163//7443 5186//7443 +f 5163//7443 5210//3673 5187//3673 +f 5189//7350 5187//7350 5211//7350 +f 5187//7350 5189//7350 5164//7350 +f 5212//7444 5189//7445 5213//7444 +f 5189//7445 5212//7444 5188//7445 +f 5214//7446 5165//7404 5188//7447 +f 5165//7404 5214//7446 5190//7405 +f 5190//7448 5191//7449 5166//7450 +f 5191//7449 5190//7448 5215//7451 +f 5216//7452 5167//7452 5191//7452 +f 5167//7452 5216//7452 5192//7452 +f 5168//7453 5217//7454 5193//7455 +f 5217//7454 5168//7453 5192//7456 +f 5194//7457 5193//7458 5218//7459 +f 5193//7458 5194//7457 5169//7460 +f 5195//7417 5194//7417 
5219//7417 +f 5194//7417 5195//7417 5170//3623 +f 5196//7461 5195//7462 5220//7463 +f 5195//7462 5196//7461 5171//7464 +f 5197//7465 5196//7466 5221//7467 +f 5196//7466 5197//7465 5172//7468 +f 5222//7469 5173//7469 5197//7470 +f 5173//7469 5222//7469 5198//3702 +f 5223//7471 5174//7472 5198//7473 +f 5174//7472 5223//7471 5199//3705 +f 5200//7474 5199//7475 5224//7476 +f 5199//7475 5200//7474 5175//3706 +f 5177//7477 5224//7478 5201//7479 +f 5224//7478 5177//7477 5200//7480 +f 5178//7481 5225//7481 5202//7481 +f 5225//7481 5178//7481 5201//7481 +f 5179//7482 5226//7483 5203//7484 +f 5226//7483 5179//7482 5202//7485 +f 5204//7486 5203//7487 5227//3720 +f 5203//7487 5204//7486 5180//7488 +f 5205//3725 5204//7489 5228//3725 +f 5204//7489 5205//3725 5181//7490 +f 5206//7491 5205//7492 5229//3726 +f 5205//7492 5206//7491 5182//7492 +f 5207//7493 5206//3729 5230//7494 +f 5206//3729 5207//7493 5183//3731 +f 5208//3732 5207//7441 5231//3733 +f 5207//7441 5208//3732 5184//3665 +f 5209//3734 5208//3734 5232//3734 +f 5208//3734 5209//3734 5185//3734 +f 5210//3736 5209//3736 5233//3736 +f 5209//3736 5210//3736 5186//3736 +f 5211//3737 5210//3737 5234//3737 +f 5210//3737 5211//3737 5187//3737 +f 5213//7495 5211//7496 5235//7495 +f 5211//7496 5213//7495 5189//7496 +f 5236//7289 5212//7289 5213//7289 +f 5212//7289 5236//7289 5237//7289 +f 5238//7497 5188//7447 5212//7498 +f 5188//7447 5238//7497 5214//7446 +f 5239//7499 5190//7499 5214//7499 +f 5190//7499 5239//7499 5215//7499 +f 5215//7500 5216//7500 5191//7500 +f 5216//7500 5215//7500 5240//7500 +f 5241//7501 5192//7502 5216//7503 +f 5192//7502 5241//7501 5217//7504 +f 5193//7458 5242//3749 5218//7459 +f 5242//3749 5193//7458 5217//3750 +f 5219//7505 5218//7506 5243//7507 +f 5218//7506 5219//7505 5194//3754 +f 5220//3755 5219//3756 5244//3756 +f 5219//3756 5220//3755 5195//3755 +f 5221//7508 5220//7509 5245//7510 +f 5220//7509 5221//7508 5196//7511 +f 5246//7512 5197//7513 5221//7514 +f 5197//7513 5246//7512 5222//7515 +f 
5247//7516 5198//7517 5222//3764 +f 5198//7517 5247//7516 5223//7518 +f 5248//7519 5199//7520 5223//7521 +f 5199//7520 5248//7519 5224//7522 +f 5201//7523 5248//7524 5225//7525 +f 5248//7524 5201//7523 5224//7526 +f 5202//7527 5249//7528 5226//7529 +f 5249//7528 5202//7527 5225//7530 +f 5227//7531 5226//7532 5250//7533 +f 5226//7532 5227//7531 5203//7534 +f 5228//7535 5227//7535 5251//3782 +f 5227//7535 5228//7535 5204//7535 +f 5229//7536 5228//7537 5252//7536 +f 5228//7537 5229//7536 5205//3785 +f 5230//7538 5229//7539 5253//7540 +f 5229//7539 5230//7538 5206//7541 +f 5231//3790 5230//7494 5254//3791 +f 5230//7494 5231//3790 5207//7493 +f 5232//3792 5231//3792 5255//3792 +f 5231//3792 5232//3792 5208//3792 +f 5233//3793 5232//3793 5256//3793 +f 5232//3793 5233//3793 5209//3793 +f 5234//3671 5233//3671 5257//3671 +f 5233//3671 5234//3671 5210//3671 +f 5235//7542 5234//7542 5258//7542 +f 5234//7542 5235//7542 5211//7542 +f 5259//7543 5213//7544 5235//7544 +f 5213//7544 5259//7543 5236//7350 +f 5260//7545 5237//7545 5236//7545 +f 5237//7545 5260//7545 5261//7545 +f 5237//7354 5238//7497 5212//7498 +f 5238//7497 5237//7354 5262//7354 +f 5263//7546 5214//7546 5238//7547 +f 5214//7546 5263//7546 5239//7548 +f 5264//7452 5215//7452 5239//7452 +f 5215//7452 5264//7452 5240//7452 +f 5240//7549 5241//7550 5216//7551 +f 5241//7550 5240//7549 5265//7552 +f 5266//3806 5217//3807 5241//3808 +f 5217//3807 5266//3806 5242//3809 +f 5218//7506 5267//7553 5243//7507 +f 5267//7553 5218//7506 5242//7554 +f 5244//7555 5243//7556 5268//7557 +f 5243//7556 5244//7555 5219//7558 +f 5245//3816 5244//7559 5269//3818 +f 5244//7559 5245//3816 5220//7560 +f 5270//7561 5221//7562 5245//7563 +f 5221//7562 5270//7561 5246//3823 +f 5271//7564 5222//7565 5246//3826 +f 5222//7565 5271//7564 5247//3825 +f 5272//7566 5223//7567 5247//7568 +f 5223//7567 5272//7566 5248//7569 +f 5249//7570 5248//7571 5272//7572 +f 5248//7571 5249//7570 5225//7570 +f 5250//7573 5249//7574 5273//7575 +f 5249//7574 
5250//7573 5226//7576 +f 5251//3837 5250//7577 5274//3837 +f 5250//7577 5251//3837 5227//7577 +f 5252//3840 5251//3841 5275//3842 +f 5251//3841 5252//3840 5228//3841 +f 5253//7578 5252//7579 5276//3845 +f 5252//7579 5253//7578 5229//7580 +f 5254//7581 5253//7540 5277//7582 +f 5253//7540 5254//7581 5230//7538 +f 5231//3849 5278//3849 5255//3849 +f 5278//3849 5231//3849 5254//3849 +f 5256//7583 5255//3850 5279//7584 +f 5255//3850 5256//7583 5232//3850 +f 5233//3734 5280//3734 5257//3734 +f 5280//3734 5233//3734 5256//3734 +f 5281//7585 5234//7586 5257//7587 +f 5234//7586 5281//7585 5258//3854 +f 5282//3607 5235//3606 5258//3606 +f 5235//3606 5282//3607 5259//3607 +f 5283//7588 5236//7589 5259//7589 +f 5236//7589 5283//7588 5260//7588 +f 5284//7590 5260//7591 5285//3859 +f 5260//7591 5284//7590 5261//7592 +f 5261//7593 5262//7594 5237//7594 +f 5262//7594 5261//7593 5286//7593 +f 5262//7406 5263//7595 5238//7595 +f 5263//7595 5262//7406 5287//7406 +f 5288//7596 5239//7597 5263//7598 +f 5239//7597 5288//7596 5264//7599 +f 5265//7504 5264//7504 5289//7504 +f 5264//7504 5265//7504 5240//7504 +f 5266//3867 5265//7600 5290//3869 +f 5265//7600 5266//3867 5241//3870 +f 5291//7601 5242//7602 5266//7603 +f 5242//7602 5291//7601 5267//7604 +f 5243//7556 5292//7605 5268//7557 +f 5292//7605 5243//7556 5267//7606 +f 5269//3877 5268//7607 5293//3879 +f 5268//7607 5269//3877 5244//7608 +f 5294//7609 5245//3882 5269//7610 +f 5245//3882 5294//7609 5270//7611 +f 5295//7612 5246//7613 5270//7614 +f 5246//7613 5295//7612 5271//3888 +f 5296//7615 5247//7615 5271//7615 +f 5247//7615 5296//7615 5272//7615 +f 5273//7616 5272//7617 5296//7618 +f 5272//7617 5273//7616 5249//7619 +f 5274//7620 5273//7621 5297//7620 +f 5273//7621 5274//7620 5250//7622 +f 5275//7623 5274//3900 5298//7624 +f 5274//3900 5275//7623 5251//3902 +f 5252//7625 5299//7626 5276//7627 +f 5299//7626 5252//7625 5275//7628 +f 5277//7629 5276//7630 5300//3907 +f 5276//7630 5277//7629 5253//7631 +f 5254//7632 5301//7632 
5278//7632 +f 5301//7632 5254//7632 5277//7632 +f 5279//7633 5278//3910 5302//7633 +f 5278//3910 5279//7633 5255//3910 +f 5256//7634 5303//7634 5280//7634 +f 5303//7634 5256//7634 5279//7634 +f 5304//7635 5257//7636 5280//7637 +f 5257//7636 5304//7635 5281//3914 +f 5305//3915 5258//3916 5281//3917 +f 5258//3916 5305//3915 5282//3672 +f 5306//3918 5259//3919 5282//3919 +f 5259//3919 5306//3918 5283//3918 +f 5285//7638 5283//7639 5307//7640 +f 5283//7639 5285//7638 5260//7641 +f 5308//3924 5285//3924 5309//3924 +f 5285//3924 5308//3924 5284//3924 +f 5310//7642 5261//7643 5284//7644 +f 5261//7643 5310//7642 5286//7645 +f 5286//7646 5287//7647 5262//7647 +f 5287//7647 5286//7646 5311//7646 +f 5287//7648 5288//7649 5263//7650 +f 5288//7649 5287//7648 5312//7452 +f 5313//7651 5264//7651 5288//7651 +f 5264//7651 5313//7651 5289//7651 +f 5290//3935 5289//3935 5314//3935 +f 5289//3935 5290//3935 5265//3935 +f 5291//3938 5290//7652 5315//7653 +f 5290//7652 5291//3938 5266//7654 +f 5292//7655 5291//7656 5316//7657 +f 5291//7656 5292//7655 5267//7658 +f 5268//7607 5317//7659 5293//3879 +f 5317//7659 5268//7607 5292//7659 +f 5294//7660 5293//7661 5318//7662 +f 5293//7661 5294//7660 5269//7663 +f 5319//3949 5270//3949 5294//3949 +f 5270//3949 5319//3949 5295//3949 +f 5320//7664 5271//7665 5295//7666 +f 5271//7665 5320//7664 5296//7667 +f 5297//7668 5296//7669 5320//3955 +f 5296//7669 5297//7668 5273//7670 +f 5298//7671 5297//7672 5321//7673 +f 5297//7672 5298//7671 5274//3961 +f 5275//7674 5322//7675 5299//7675 +f 5322//7675 5275//7674 5298//7674 +f 5276//7676 5323//7677 5300//7678 +f 5323//7677 5276//7676 5299//7679 +f 5277//3968 5324//3968 5301//3968 +f 5324//3968 5277//3968 5300//3968 +f 5302//7680 5301//7681 5325//7680 +f 5301//7681 5302//7680 5278//7681 +f 5303//7682 5302//7682 5326//7682 +f 5302//7682 5303//7682 5279//7682 +f 5327//7683 5280//7684 5303//7685 +f 5280//7684 5327//7683 5304//3970 +f 5305//3971 5304//7686 5328//3734 +f 5304//7686 5305//3971 5281//3973 +f 
5329//3974 5282//7687 5305//3975 +f 5282//7687 5329//3974 5306//7688 +f 5307//7689 5306//7690 5330//3979 +f 5306//7690 5307//7689 5283//3980 +f 5309//7691 5307//7692 5331//7693 +f 5307//7692 5309//7691 5285//7694 +f 5332//7695 5309//7696 5333//7697 +f 5309//7696 5332//7695 5308//7695 +f 5334//7698 5284//7699 5308//7698 +f 5284//7699 5334//7698 5310//7699 +f 5335//7700 5286//7701 5310//3992 +f 5286//7701 5335//7700 5311//7702 +f 5311//7703 5312//7704 5287//7704 +f 5312//7704 5311//7703 5336//7705 +f 5337//7504 5288//7504 5312//7504 +f 5288//7504 5337//7504 5313//7504 +f 5289//3997 5338//3997 5314//3997 +f 5338//3997 5289//3997 5313//3997 +f 5315//7604 5314//7604 5339//7604 +f 5314//7604 5315//7604 5290//7604 +f 5316//7706 5315//7707 5340//7708 +f 5315//7707 5316//7706 5291//7706 +f 5317//4002 5316//4002 5341//4002 +f 5316//4002 5317//4002 5292//4002 +f 5318//7662 5317//7709 5342//7709 +f 5317//7709 5318//7662 5293//7661 +f 5319//7710 5318//7711 5343//7712 +f 5318//7711 5319//7710 5294//7713 +f 5344//4007 5295//4008 5319//4009 +f 5295//4008 5344//4007 5320//4010 +f 5321//4011 5320//7714 5344//4013 +f 5320//7714 5321//4011 5297//7715 +f 5298//7716 5345//7717 5322//7718 +f 5345//7717 5298//7716 5321//7719 +f 5299//7720 5346//7721 5323//7722 +f 5346//7721 5299//7720 5322//7720 +f 5300//7723 5347//7723 5324//7723 +f 5347//7723 5300//7723 5323//7723 +f 5324//4023 5325//4023 5301//4023 +f 5325//4023 5324//4023 5348//4023 +f 5326//7724 5325//7724 5349//7724 +f 5325//7724 5326//7724 5302//7724 +f 5350//4024 5303//7725 5326//7725 +f 5303//7725 5350//4024 5327//4024 +f 5328//3792 5327//3792 5351//3792 +f 5327//3792 5328//3792 5304//3792 +f 5329//7726 5328//4027 5352//7727 +f 5328//4027 5329//7726 5305//4029 +f 5330//7728 5329//7729 5353//7730 +f 5329//7729 5330//7728 5306//7731 +f 5331//7732 5330//4037 5354//7732 +f 5330//4037 5331//7732 5307//4037 +f 5333//7733 5331//7734 5355//7733 +f 5331//7734 5333//7733 5309//7735 +f 5356//7736 5333//7737 5357//7736 +f 5333//7737 
5356//7736 5332//7738 +f 5358//7739 5308//7740 5332//7739 +f 5308//7740 5358//7739 5334//7740 +f 5359//4050 5310//7741 5334//7742 +f 5310//7741 5359//4050 5335//4051 +f 5311//7743 5360//7744 5336//4054 +f 5360//7744 5311//7743 5335//7745 +f 5312//7746 5361//7747 5337//7748 +f 5361//7747 5312//7746 5336//7749 +f 5362//4060 5313//4061 5337//4060 +f 5313//4061 5362//4060 5338//4061 +f 5314//7750 5363//7751 5339//7750 +f 5363//7751 5314//7750 5338//7751 +f 5364//7658 5315//7658 5339//7658 +f 5315//7658 5364//7658 5340//7658 +f 5341//7752 5340//4065 5365//7753 +f 5340//4065 5341//7752 5316//4067 +f 5342//7754 5341//7755 5366//7756 +f 5341//7755 5342//7754 5317//7757 +f 5343//7712 5342//4072 5367//4072 +f 5342//4072 5343//7712 5318//7711 +f 5344//7758 5343//7759 5368//4139 +f 5343//7759 5344//7758 5319//4076 +f 5321//7760 5368//7761 5345//7762 +f 5368//7761 5321//7760 5344//7763 +f 5322//7718 5369//7764 5346//7765 +f 5369//7764 5322//7718 5345//7717 +f 5323//7766 5370//7766 5347//7766 +f 5370//7766 5323//7766 5346//7766 +f 5347//7767 5348//7767 5324//7767 +f 5348//7767 5347//7767 5371//7767 +f 5349//3968 5348//3968 5372//3968 +f 5348//3968 5349//3968 5325//3968 +f 5373//7768 5326//7768 5349//7768 +f 5326//7768 5373//7768 5350//7768 +f 5351//4087 5350//4087 5374//4087 +f 5350//4087 5351//4087 5327//4087 +f 5352//7769 5351//7770 5375//4090 +f 5351//7770 5352//7769 5328//4091 +f 5376//7771 5329//7772 5352//4094 +f 5329//7772 5376//7771 5353//4095 +f 5354//7773 5353//7774 5377//4098 +f 5353//7774 5354//7773 5330//4096 +f 5355//7775 5354//7776 5378//7777 +f 5354//7776 5355//7775 5331//7778 +f 5357//7779 5355//7780 5379//7781 +f 5355//7780 5357//7779 5333//4107 +f 5380//7782 5356//7783 5357//7784 +f 5356//7783 5380//7782 5381//7782 +f 5356//7785 5358//7786 5332//4113 +f 5358//7786 5356//7785 5382//7785 +f 5383//7787 5334//7788 5358//7789 +f 5334//7788 5383//7787 5359//7790 +f 5335//7791 5384//7792 5360//7793 +f 5384//7792 5335//7791 5359//7794 +f 5336//4123 5385//4124 
5361//7795 +f 5385//4124 5336//4123 5360//7796 +f 5337//7797 5386//7798 5362//4129 +f 5386//7798 5337//7797 5361//4130 +f 5363//7799 5362//7799 5387//7799 +f 5362//7799 5363//7799 5338//7799 +f 5339//7800 5388//7800 5364//7800 +f 5388//7800 5339//7800 5363//7800 +f 5389//4002 5340//4002 5364//4002 +f 5340//4002 5389//4002 5365//4002 +f 5366//7801 5365//7802 5390//7803 +f 5365//7802 5366//7801 5341//7804 +f 5367//4137 5366//4138 5391//4138 +f 5366//4138 5367//4137 5342//4137 +f 5368//4139 5367//4139 5392//4139 +f 5367//4139 5368//4139 5343//7759 +f 5345//7762 5392//7762 5369//7762 +f 5392//7762 5345//7762 5368//7761 +f 5346//7805 5393//7806 5370//7806 +f 5393//7806 5346//7805 5369//7805 +f 5370//7807 5371//7807 5347//7807 +f 5371//7807 5370//7807 5394//7807 +f 5371//7723 5372//7723 5348//7723 +f 5372//7723 5371//7723 5395//7723 +f 5349//4144 5396//4144 5373//4144 +f 5396//4144 5349//4144 5372//4144 +f 5350//7724 5397//7632 5374//7632 +f 5397//7632 5350//7724 5373//7724 +f 5375//4145 5374//4146 5398//4147 +f 5374//4146 5375//4145 5351//4148 +f 5399//4149 5352//7808 5375//7809 +f 5352//7808 5399//4149 5376//4152 +f 5400//7810 5353//4153 5376//7811 +f 5353//4153 5400//7810 5377//4156 +f 5378//7812 5377//4159 5401//7812 +f 5377//4159 5378//7812 5354//7813 +f 5379//4162 5378//7814 5402//4162 +f 5378//7814 5379//4162 5355//7814 +f 5379//7815 5380//7816 5357//7817 +f 5380//7816 5379//7815 5403//7818 +f 5404//7819 5381//7820 5380//7821 +f 5381//7820 5404//7819 5405//4171 +f 5381//7822 5382//7822 5356//7822 +f 5382//7822 5381//7822 5406//7822 +f 5382//7823 5383//7824 5358//7825 +f 5383//7824 5382//7823 5407//7823 +f 5359//7826 5408//7827 5384//7828 +f 5408//7827 5359//7826 5383//7829 +f 5360//7830 5409//7831 5385//7832 +f 5409//7831 5360//7830 5384//7833 +f 5361//7834 5410//4186 5386//7835 +f 5410//4186 5361//7834 5385//7836 +f 5362//7837 5411//7838 5387//7839 +f 5411//7838 5362//7837 5386//7838 +f 5388//7840 5387//7840 5412//7840 +f 5387//7840 5388//7840 5363//7840 +f 
5364//7841 5413//7842 5389//7843 +f 5413//7842 5364//7841 5388//7844 +f 5365//7757 5414//7757 5390//7757 +f 5414//7757 5365//7757 5389//7757 +f 5366//4198 5415//4197 5391//4198 +f 5415//4197 5366//4198 5390//4197 +f 5392//4201 5391//4200 5416//4199 +f 5391//4200 5392//4201 5367//4201 +f 5369//7845 5416//7845 5393//7845 +f 5416//7845 5369//7845 5392//7845 +f 5393//7806 5394//7806 5370//7806 +f 5394//7806 5393//7806 5417//7806 +f 5394//7766 5395//7766 5371//7766 +f 5395//7766 5394//7766 5418//7766 +f 5372//7846 5419//7846 5396//7846 +f 5419//7846 5372//7846 5395//7846 +f 5373//3968 5420//3968 5397//3968 +f 5420//3968 5373//3968 5396//3968 +f 5398//7847 5397//7848 5421//7849 +f 5397//7848 5398//7847 5374//7850 +f 5422//4211 5375//4212 5398//4212 +f 5375//4212 5422//4211 5399//4211 +f 5423//7851 5376//7852 5399//7853 +f 5376//7852 5423//7851 5400//7854 +f 5424//7855 5377//4218 5400//7856 +f 5377//4218 5424//7855 5401//7857 +f 5402//7858 5401//7859 5425//7860 +f 5401//7859 5402//7858 5378//7859 +f 5402//7861 5403//7862 5379//7861 +f 5403//7862 5402//7861 5426//7863 +f 5427//7864 5380//7865 5403//4231 +f 5380//7865 5427//7864 5404//7866 +f 5428//7867 5405//7868 5404//7869 +f 5405//7868 5428//7867 5429//7870 +f 5405//7871 5406//7872 5381//7872 +f 5406//7872 5405//7871 5430//7873 +f 5406//4239 5407//7874 5382//4241 +f 5407//7874 5406//4239 5431//4242 +f 5383//7875 5432//7876 5408//7875 +f 5432//7876 5383//7875 5407//7876 +f 5384//7877 5433//7878 5409//7879 +f 5433//7878 5384//7877 5408//4245 +f 5385//7880 5434//7881 5410//7882 +f 5434//7881 5385//7880 5409//4250 +f 5386//4251 5435//7883 5411//4251 +f 5435//7883 5386//4251 5410//4322 +f 5387//7884 5436//7885 5412//7886 +f 5436//7885 5387//7884 5411//7887 +f 5413//4258 5412//4259 5437//7888 +f 5412//4259 5413//4258 5388//4261 +f 5389//7889 5438//7890 5414//7889 +f 5438//7890 5389//7889 5413//7890 +f 5390//4264 5439//4264 5415//4264 +f 5439//4264 5390//4264 5414//4264 +f 5391//4265 5440//4266 5416//4265 +f 5440//4266 
5391//4265 5415//4266 +f 5416//7891 5417//7891 5393//7891 +f 5417//7891 5416//7891 5440//7891 +f 5417//7806 5418//7892 5394//7806 +f 5418//7892 5417//7806 5441//7892 +f 5395//7893 5442//7893 5419//7894 +f 5442//7893 5395//7893 5418//7893 +f 5419//7723 5420//7723 5396//7723 +f 5420//7723 5419//7723 5443//7723 +f 5421//7895 5420//4273 5444//7896 +f 5420//4273 5421//7895 5397//4276 +f 5445//4277 5398//7897 5421//4278 +f 5398//7897 5445//4277 5422//4277 +f 5446//7898 5399//7899 5422//7900 +f 5399//7899 5446//7898 5423//7901 +f 5447//7902 5400//4284 5423//7903 +f 5400//4284 5447//7902 5424//7904 +f 5448//7905 5401//7906 5424//4288 +f 5401//7906 5448//7905 5425//7907 +f 5425//7908 5426//7909 5402//7908 +f 5426//7909 5425//7908 5449//7910 +f 5426//7911 5427//7912 5403//7913 +f 5427//7912 5426//7911 5450//7914 +f 5451//7915 5404//7916 5427//7917 +f 5404//7916 5451//7915 5428//7918 +f 5452//4301 5429//4300 5428//7919 +f 5429//4300 5452//4301 5453//7920 +f 5429//7921 5430//7922 5405//7923 +f 5430//7922 5429//7921 5454//7924 +f 5430//7925 5431//7926 5406//4306 +f 5431//7926 5430//7925 5455//7927 +f 5407//7928 5456//4311 5432//4312 +f 5456//4311 5407//7928 5431//4313 +f 5408//4316 5457//4316 5433//4316 +f 5457//4316 5408//4316 5432//4316 +f 5409//7929 5458//7930 5434//4320 +f 5458//7930 5409//7929 5433//7931 +f 5410//4322 5459//4322 5435//7883 +f 5459//4322 5410//4322 5434//4323 +f 5411//4326 5460//7932 5436//4326 +f 5460//7932 5411//4326 5435//4327 +f 5412//7933 5461//7934 5437//4330 +f 5461//7934 5412//7933 5436//4331 +f 5413//7935 5462//7754 5438//7756 +f 5462//7754 5413//7935 5437//7757 +f 5439//4335 5438//4334 5463//4334 +f 5438//4334 5439//4335 5414//4335 +f 5415//4201 5464//4201 5440//4201 +f 5464//4201 5415//4201 5439//4201 +f 5440//7845 5441//7845 5417//7845 +f 5441//7845 5440//7845 5464//7845 +f 5418//7892 5465//7892 5442//7892 +f 5465//7892 5418//7892 5441//7892 +f 5442//7766 5443//7766 5419//7766 +f 5443//7766 5442//7766 5466//7766 +f 5443//7936 5444//7937 
5420//7936 +f 5444//7937 5443//7936 5467//7937 +f 5468//4338 5421//4340 5444//7938 +f 5421//4340 5468//4338 5445//7939 +f 5469//7940 5422//7941 5445//7942 +f 5422//7941 5469//7940 5446//7943 +f 5470//7944 5423//7945 5446//7946 +f 5423//7945 5470//7944 5447//4349 +f 5471//7947 5424//7948 5447//7948 +f 5424//7948 5471//7947 5448//7947 +f 5472//7949 5425//7950 5448//7951 +f 5425//7950 5472//7949 5449//4357 +f 5449//7952 5450//7953 5426//7954 +f 5450//7953 5449//7952 5473//7955 +f 5474//7956 5427//7957 5450//7958 +f 5427//7957 5474//7956 5451//7959 +f 5475//4366 5428//4367 5451//4367 +f 5428//4367 5475//4366 5452//4366 +f 5476//4369 5453//4370 5452//4370 +f 5453//4370 5476//4369 5477//4369 +f 5453//7960 5454//7960 5429//7960 +f 5454//7960 5453//7960 5478//7960 +f 5454//7961 5455//7962 5430//7963 +f 5455//7962 5454//7961 5479//7964 +f 5455//7965 5456//7966 5431//7966 +f 5456//7966 5455//7965 5480//7967 +f 5432//7968 5481//7969 5457//7970 +f 5481//7969 5432//7968 5456//7971 +f 5433//7972 5482//7973 5458//7972 +f 5482//7973 5433//7972 5457//7973 +f 5434//4389 5483//7974 5459//4391 +f 5483//7974 5434//4389 5458//4392 +f 5435//7975 5484//7976 5460//7977 +f 5484//7976 5435//7975 5459//7978 +f 5436//7979 5485//4398 5461//4398 +f 5485//4398 5436//7979 5460//4399 +f 5437//7980 5486//7980 5462//7980 +f 5486//7980 5437//7980 5461//7981 +f 5438//4138 5487//4137 5463//4138 +f 5487//4137 5438//4138 5462//4137 +f 5464//7982 5463//7983 5488//4405 +f 5463//7983 5464//7982 5439//4406 +f 5441//7984 5488//7984 5465//7985 +f 5488//7984 5441//7984 5464//7984 +f 5465//7892 5466//7805 5442//7892 +f 5466//7805 5465//7892 5489//7805 +f 5466//7986 5467//7987 5443//7986 +f 5467//7987 5466//7986 5490//7987 +f 5491//7988 5444//7989 5467//7990 +f 5444//7989 5491//7988 5468//4414 +f 5492//4415 5445//7991 5468//4417 +f 5445//7991 5492//4415 5469//4418 +f 5493//4419 5446//7992 5469//4421 +f 5446//7992 5493//4419 5470//4420 +f 5494//4423 5447//7993 5470//7994 +f 5447//7993 5494//4423 5471//7995 +f 
5495//7996 5448//7997 5471//7998 +f 5448//7997 5495//7996 5472//7999 +f 5496//4431 5449//4431 5472//4431 +f 5449//4431 5496//4431 5473//4431 +f 5497//8000 5450//8001 5473//4434 +f 5450//8001 5497//8000 5474//8002 +f 5498//8003 5451//8004 5474//8005 +f 5451//8004 5498//8003 5475//8006 +f 5499//8007 5452//4441 5475//8008 +f 5452//4441 5499//8007 5476//4443 +f 5500//8009 5476//4447 5501//8009 +f 5476//4447 5500//8009 5477//4447 +f 5453//8010 5502//8011 5478//8010 +f 5502//8011 5453//8010 5477//8012 +f 5454//8013 5503//8014 5479//8015 +f 5503//8014 5454//8013 5478//8016 +f 5479//4459 5480//8017 5455//8017 +f 5480//8017 5479//4459 5504//4459 +f 5456//8018 5505//4463 5481//8018 +f 5505//4463 5456//8018 5480//4463 +f 5457//8019 5506//4465 5482//8019 +f 5506//4465 5457//8019 5481//4465 +f 5458//8020 5507//4467 5483//4468 +f 5507//4467 5458//8020 5482//8021 +f 5459//8022 5508//4471 5484//8023 +f 5508//4471 5459//8022 5483//4472 +f 5460//8024 5509//8025 5485//4473 +f 5509//8025 5460//8024 5484//8025 +f 5486//8026 5485//8027 5510//8028 +f 5485//8027 5486//8026 5461//4478 +f 5462//4481 5511//8029 5487//4481 +f 5511//8029 5462//4481 5486//8029 +f 5463//4201 5512//4201 5488//4201 +f 5512//4201 5463//4201 5487//4201 +f 5488//7845 5489//7845 5465//7845 +f 5489//7845 5488//7845 5512//7845 +f 5489//8030 5490//8031 5466//8032 +f 5490//8031 5489//8030 5513//8033 +f 5467//8034 5514//8035 5491//8036 +f 5514//8035 5467//8034 5490//8037 +f 5515//8038 5468//4490 5491//8039 +f 5468//4490 5515//8038 5492//4492 +f 5516//4493 5469//4493 5492//8040 +f 5469//4493 5516//4493 5493//4493 +f 5517//8041 5470//8042 5493//8042 +f 5470//8042 5517//8041 5494//8041 +f 5518//8043 5471//8044 5494//8045 +f 5471//8044 5518//8043 5495//8043 +f 5519//8046 5472//4501 5495//4502 +f 5472//4501 5519//8046 5496//8047 +f 5497//4504 5496//8048 5520//4506 +f 5496//8048 5497//4504 5473//4507 +f 5521//4511 5474//8049 5497//8050 +f 5474//8049 5521//4511 5498//4511 +f 5522//8051 5475//8052 5498//8053 +f 5475//8052 
5522//8051 5499//4515 +f 5501//8054 5499//8055 5523//8054 +f 5499//8055 5501//8054 5476//8055 +f 5524//8056 5501//8057 5525//8058 +f 5501//8057 5524//8056 5500//8059 +f 5526//8060 5477//8061 5500//8062 +f 5477//8061 5526//8060 5502//8063 +f 5478//8064 5527//8065 5503//8066 +f 5527//8065 5478//8064 5502//8067 +f 5479//8068 5528//8069 5504//4532 +f 5528//8069 5479//8068 5503//8070 +f 5505//8071 5504//4535 5529//4535 +f 5504//4535 5505//8071 5480//4534 +f 5481//8072 5530//8073 5506//4538 +f 5530//8073 5481//8072 5505//8073 +f 5482//4540 5531//8074 5507//8075 +f 5531//8074 5482//4540 5506//8074 +f 5483//8076 5532//8077 5508//8076 +f 5532//8077 5483//8076 5507//8077 +f 5484//8078 5533//4548 5509//8079 +f 5533//4548 5484//8078 5508//4548 +f 5510//4549 5509//4550 5534//4550 +f 5509//4550 5510//4549 5485//4549 +f 5511//4551 5510//4552 5535//4552 +f 5510//4552 5511//4551 5486//4553 +f 5487//4554 5536//8080 5512//4554 +f 5536//8080 5487//4554 5511//8080 +f 5512//8081 5513//8082 5489//8081 +f 5513//8082 5512//8081 5536//8082 +f 5490//4558 5537//4559 5514//4560 +f 5537//4559 5490//4558 5513//4561 +f 5491//8083 5538//4563 5515//8084 +f 5538//4563 5491//8083 5514//8085 +f 5539//8086 5492//8087 5515//8088 +f 5492//8087 5539//8086 5516//8089 +f 5540//8090 5493//8091 5516//8091 +f 5493//8091 5540//8090 5517//8090 +f 5541//8092 5494//8093 5517//8094 +f 5494//8093 5541//8092 5518//4571 +f 5542//8095 5495//8096 5518//8097 +f 5495//8096 5542//8095 5519//8098 +f 5520//8099 5519//4579 5543//4580 +f 5519//4579 5520//8099 5496//8100 +f 5521//8101 5520//8102 5544//8103 +f 5520//8102 5521//8101 5497//8102 +f 5545//8104 5498//8105 5521//8106 +f 5498//8105 5545//8104 5522//8107 +f 5523//8108 5522//8109 5546//8108 +f 5522//8109 5523//8108 5499//4591 +f 5525//8110 5523//8111 5547//8112 +f 5523//8111 5525//8110 5501//4593 +f 5548//4597 5525//8113 5549//8114 +f 5525//8113 5548//4597 5524//8113 +f 5550//8115 5500//8116 5524//8117 +f 5500//8116 5550//8115 5526//8118 +f 5551//8119 5502//8120 
5526//8121 +f 5502//8120 5551//8119 5527//4606 +f 5503//8122 5552//8123 5528//8124 +f 5552//8123 5503//8122 5527//8125 +f 5529//8126 5528//8127 5553//8128 +f 5528//8127 5529//8126 5504//4611 +f 5530//8129 5529//4617 5554//4616 +f 5529//4617 5530//8129 5505//4617 +f 5506//8130 5555//8131 5531//8130 +f 5555//8131 5506//8130 5530//8130 +f 5507//8132 5556//4621 5532//8133 +f 5556//4621 5507//8132 5531//8134 +f 5508//8135 5557//8136 5533//8137 +f 5557//8136 5508//8135 5532//8138 +f 5534//4629 5533//8139 5558//4629 +f 5533//8139 5534//4629 5509//4628 +f 5535//8140 5534//4630 5559//8141 +f 5534//4630 5535//8140 5510//4633 +f 5536//8142 5535//4634 5560//4635 +f 5535//4634 5536//8142 5511//8143 +f 5513//8144 5560//8145 5537//4639 +f 5560//8145 5513//8144 5536//8146 +f 5514//8147 5561//8148 5538//4643 +f 5561//8148 5514//8147 5537//4644 +f 5515//8149 5562//8150 5539//8151 +f 5562//8150 5515//8149 5538//4645 +f 5563//8152 5516//8153 5539//8154 +f 5516//8153 5563//8152 5540//8155 +f 5564//4654 5517//8156 5540//8156 +f 5517//8156 5564//4654 5541//8157 +f 5565//8158 5518//8159 5541//8160 +f 5518//8159 5565//8158 5542//8161 +f 5543//8162 5542//8163 5566//8164 +f 5542//8163 5543//8162 5519//8165 +f 5544//8166 5543//4664 5567//4665 +f 5543//4664 5544//8166 5520//4666 +f 5545//4668 5544//8167 5568//8168 +f 5544//8167 5545//4668 5521//8169 +f 5546//8170 5545//8171 5569//8172 +f 5545//8171 5546//8170 5522//8173 +f 5547//8174 5546//4678 5570//8174 +f 5546//4678 5547//8174 5523//4678 +f 5549//8175 5547//8176 5571//4681 +f 5547//8176 5549//8175 5525//8177 +f 5572//4684 5548//4683 5549//8178 +f 5548//4683 5572//4684 5573//8179 +f 5574//8180 5524//8181 5548//8182 +f 5524//8181 5574//8180 5550//4688 +f 5575//8183 5526//8184 5550//8185 +f 5526//8184 5575//8183 5551//8186 +f 5576//8187 5527//8188 5551//8189 +f 5527//8188 5576//8187 5552//8190 +f 5553//8191 5552//8192 5577//8193 +f 5552//8192 5553//8191 5528//8194 +f 5554//8195 5553//8196 5578//8197 +f 5553//8196 5554//8195 5529//8198 +f 
5555//8199 5554//8200 5579//4704 +f 5554//8200 5555//8199 5530//8201 +f 5531//8202 5580//8203 5556//8204 +f 5580//8203 5531//8202 5555//8205 +f 5532//8206 5581//4713 5557//8206 +f 5581//4713 5532//8206 5556//4713 +f 5558//4714 5557//8207 5582//8208 +f 5557//8207 5558//4714 5533//8209 +f 5559//8210 5558//8211 5583//8212 +f 5558//8211 5559//8210 5534//8213 +f 5560//4723 5559//4723 5584//4723 +f 5559//4723 5560//4723 5535//8214 +f 5537//8215 5584//8216 5561//8217 +f 5584//8216 5537//8215 5560//8218 +f 5538//8219 5585//4730 5562//4731 +f 5585//4730 5538//8219 5561//4732 +f 5562//8220 5563//8221 5539//8222 +f 5563//8221 5562//8220 5586//8223 +f 5587//8224 5540//8225 5563//8226 +f 5540//8225 5587//8224 5564//8227 +f 5588//8228 5541//8229 5564//8229 +f 5541//8229 5588//8228 5565//8228 +f 5566//4746 5565//8230 5589//8231 +f 5565//8230 5566//4746 5542//4746 +f 5567//8232 5566//8233 5590//4749 +f 5566//8233 5567//8232 5543//8234 +f 5568//8235 5567//8236 5591//8237 +f 5567//8236 5568//8235 5544//4752 +f 5569//8238 5568//8239 5592//4754 +f 5568//8239 5569//8238 5545//8240 +f 5570//8241 5569//8242 5593//8243 +f 5569//8242 5570//8241 5546//8244 +f 5571//8245 5570//8246 5594//8247 +f 5570//8246 5571//8245 5547//8246 +f 5595//8248 5549//4766 5571//4766 +f 5549//4766 5595//8248 5572//8248 +f 5596//4770 5573//8249 5572//8249 +f 5573//8249 5596//4770 5597//4770 +f 5573//4771 5574//8250 5548//8251 +f 5574//8250 5573//4771 5598//4771 +f 5599//8252 5550//8253 5574//8254 +f 5550//8253 5599//8252 5575//4776 +f 5600//4779 5551//4779 5575//4779 +f 5551//4779 5600//4779 5576//4779 +f 5577//8255 5576//8256 5601//8257 +f 5576//8256 5577//8255 5552//4781 +f 5578//8258 5577//4784 5602//4785 +f 5577//4784 5578//8258 5553//8259 +f 5579//8260 5578//8261 5603//8262 +f 5578//8261 5579//8260 5554//4787 +f 5580//8263 5579//8264 5604//8265 +f 5579//8264 5580//8263 5555//8264 +f 5556//8266 5605//8267 5581//8266 +f 5605//8267 5556//8266 5580//8267 +f 5581//8268 5582//8269 5557//8270 +f 5582//8269 
5581//8268 5606//8271 +f 5583//8272 5582//8273 5607//8274 +f 5582//8273 5583//8272 5558//8272 +f 5584//8275 5583//8276 5608//8277 +f 5583//8276 5584//8275 5559//8278 +f 5561//8279 5608//8280 5585//8281 +f 5608//8280 5561//8279 5584//8282 +f 5585//8283 5586//8284 5562//8283 +f 5586//8284 5585//8283 5609//8285 +f 5586//8286 5587//8287 5563//8288 +f 5587//8287 5586//8286 5610//8289 +f 5587//8290 5588//8291 5564//8290 +f 5588//8291 5587//8290 5611//8291 +f 5589//8292 5588//8293 5612//8292 +f 5588//8293 5589//8292 5565//8293 +f 5590//8294 5589//4825 5613//4826 +f 5589//4825 5590//8294 5566//8295 +f 5591//8296 5590//8297 5614//8298 +f 5590//8297 5591//8296 5567//8299 +f 5592//8300 5591//8301 5615//8302 +f 5591//8301 5592//8300 5568//8303 +f 5593//8304 5592//4837 5616//8305 +f 5592//4837 5593//8304 5569//4839 +f 5594//8306 5593//8307 5617//4842 +f 5593//8307 5594//8306 5570//8308 +f 5618//8309 5571//8310 5594//8311 +f 5571//8310 5618//8309 5595//8312 +f 5619//4851 5572//4851 5595//4851 +f 5572//4851 5619//4851 5596//4851 +f 5620//4852 5597//8313 5596//4854 +f 5597//8313 5620//4852 5621//4855 +f 5597//8314 5598//4857 5573//4858 +f 5598//4857 5597//8314 5622//4859 +f 5598//8315 5599//8316 5574//8317 +f 5599//8316 5598//8315 5623//8318 +f 5624//4867 5575//4867 5599//8319 +f 5575//4867 5624//4867 5600//4867 +f 5601//8320 5600//8321 5625//8322 +f 5600//8321 5601//8320 5576//8320 +f 5602//8323 5601//4873 5626//8324 +f 5601//4873 5602//8323 5577//8325 +f 5603//8326 5602//8327 5627//8328 +f 5602//8327 5603//8326 5578//4879 +f 5604//8329 5603//8330 5628//8331 +f 5603//8330 5604//8329 5579//8332 +f 5605//8333 5604//8334 5629//8334 +f 5604//8334 5605//8333 5580//8333 +f 5605//8335 5606//8336 5581//8337 +f 5606//8336 5605//8335 5630//4886 +f 5606//8338 5607//8339 5582//8340 +f 5607//8339 5606//8338 5631//8341 +f 5608//8342 5607//8343 5632//8344 +f 5607//8343 5608//8342 5583//8345 +f 5585//8346 5632//8347 5609//8347 +f 5632//8347 5585//8346 5608//8346 +f 5609//8348 5610//8349 
5586//8350 +f 5610//8349 5609//8348 5633//8351 +f 5610//8352 5611//8353 5587//4905 +f 5611//8353 5610//8352 5634//8354 +f 5611//8355 5612//8356 5588//8357 +f 5612//8356 5611//8355 5635//8358 +f 5613//8359 5612//8360 5636//8361 +f 5612//8360 5613//8359 5589//8362 +f 5614//8363 5613//8364 5637//4916 +f 5613//8364 5614//8363 5590//8365 +f 5615//8366 5614//8367 5638//8368 +f 5614//8367 5615//8366 5591//8369 +f 5616//8370 5615//8371 5639//8372 +f 5615//8371 5616//8370 5592//8373 +f 5617//8374 5616//8375 5640//8376 +f 5616//8375 5617//8374 5593//8377 +f 5641//4926 5594//4927 5617//4928 +f 5594//4927 5641//4926 5618//4929 +f 5642//8378 5595//8379 5618//8380 +f 5595//8379 5642//8378 5619//4933 +f 5643//4936 5596//4936 5619//8381 +f 5596//4936 5643//4936 5620//4937 +f 5644//8382 5621//8383 5620//8383 +f 5621//8383 5644//8382 5645//8384 +f 5621//8385 5622//8386 5597//8386 +f 5622//8386 5621//8385 5646//8385 +f 5622//8387 5623//8388 5598//8389 +f 5623//8388 5622//8387 5647//4947 +f 5623//8390 5624//4949 5599//8391 +f 5624//4949 5623//8390 5648//8392 +f 5625//8393 5624//8394 5649//8395 +f 5624//8394 5625//8393 5600//8396 +f 5626//8397 5625//8398 5650//8399 +f 5625//8398 5626//8397 5601//8400 +f 5627//8401 5626//8402 5651//8403 +f 5626//8402 5627//8401 5602//8404 +f 5628//4964 5627//8405 5652//4962 +f 5627//8405 5628//4964 5603//8406 +f 5629//8407 5628//8408 5653//8409 +f 5628//8408 5629//8407 5604//8410 +f 5605//8411 5654//8412 5630//4970 +f 5654//8412 5605//8411 5629//8413 +f 5630//8414 5631//8415 5606//8415 +f 5631//8415 5630//8414 5655//8414 +f 5631//8416 5632//8417 5607//8418 +f 5632//8417 5631//8416 5656//8419 +f 5632//8420 5633//8421 5609//4982 +f 5633//8421 5632//8420 5656//8422 +f 5633//8423 5634//8424 5610//8425 +f 5634//8424 5633//8423 5657//8426 +f 5634//8427 5635//8428 5611//8429 +f 5635//8428 5634//8427 5658//8430 +f 5659//8431 5612//8432 5635//8433 +f 5612//8432 5659//8431 5636//8434 +f 5637//8435 5636//8436 5660//8437 +f 5636//8436 5637//8435 5613//8438 +f 
5638//8439 5637//8440 5661//8441 +f 5637//8440 5638//8439 5614//5000 +f 5639//8442 5638//5001 5662//5003 +f 5638//5001 5639//8442 5615//5001 +f 5640//8443 5639//8444 5663//8443 +f 5639//8444 5640//8443 5616//8444 +f 5617//5008 5664//8445 5641//8446 +f 5664//8445 5617//5008 5640//8447 +f 5665//8448 5618//8449 5641//8450 +f 5618//8449 5665//8448 5642//5015 +f 5666//5016 5619//8451 5642//5018 +f 5619//8451 5666//5016 5643//8452 +f 5667//8453 5620//8454 5643//5022 +f 5620//8454 5667//8453 5644//8455 +f 5668//8456 5644//8457 5669//8456 +f 5644//8457 5668//8456 5645//8457 +f 5645//8458 5646//5029 5621//5029 +f 5646//5029 5645//8458 5670//8458 +f 5646//8459 5647//8460 5622//8461 +f 5647//8460 5646//8459 5671//8462 +f 5647//5034 5648//8463 5623//5035 +f 5648//8463 5647//5034 5672//5034 +f 5673//8464 5624//8465 5648//8466 +f 5624//8465 5673//8464 5649//8467 +f 5650//5040 5649//8468 5674//8469 +f 5649//8468 5650//5040 5625//5043 +f 5651//5044 5650//8470 5675//5046 +f 5650//8470 5651//5044 5626//8471 +f 5652//8472 5651//8473 5676//8474 +f 5651//8473 5652//8472 5627//8475 +f 5653//8476 5652//8477 5677//8478 +f 5652//8477 5653//8476 5628//8479 +f 5629//8480 5678//8481 5654//8482 +f 5678//8481 5629//8480 5653//5059 +f 5630//8483 5679//8484 5655//8483 +f 5679//8484 5630//8483 5654//8484 +f 5631//8485 5680//8486 5656//8487 +f 5680//8486 5631//8485 5655//5065 +f 5656//8488 5657//8489 5633//8490 +f 5657//8489 5656//8488 5680//5067 +f 5657//8491 5658//8492 5634//8493 +f 5658//8492 5657//8491 5681//5071 +f 5682//8494 5635//8494 5658//8494 +f 5635//8494 5682//8494 5659//8494 +f 5683//8495 5636//8496 5659//5078 +f 5636//8496 5683//8495 5660//5079 +f 5661//8497 5660//8498 5684//8499 +f 5660//8498 5661//8497 5637//8500 +f 5662//8501 5661//8502 5685//8503 +f 5661//8502 5662//8501 5638//8504 +f 5663//8505 5662//5088 5686//8506 +f 5662//5088 5663//8505 5639//5090 +f 5640//5093 5687//8507 5664//5092 +f 5687//8507 5640//5093 5663//8508 +f 5641//8509 5688//8510 5665//8511 +f 5688//8510 
5641//8509 5664//5095 +f 5689//5098 5642//8512 5665//5100 +f 5642//8512 5689//5098 5666//5101 +f 5667//8513 5666//8514 5690//8515 +f 5666//8514 5667//8513 5643//8516 +f 5669//8517 5667//8518 5691//8519 +f 5667//8518 5669//8517 5644//8520 +f 5692//8521 5669//8522 5693//8523 +f 5669//8522 5692//8521 5668//8524 +f 5670//8525 5668//8526 5694//8527 +f 5668//8526 5670//8525 5645//8528 +f 5670//8529 5671//8530 5646//8531 +f 5671//8530 5670//8529 5695//8532 +f 5671//8533 5672//8534 5647//5123 +f 5672//8534 5671//8533 5696//5124 +f 5697//8535 5648//8536 5672//8535 +f 5648//8536 5697//8535 5673//8536 +f 5698//5128 5649//8537 5673//5130 +f 5649//8537 5698//5128 5674//5131 +f 5675//8538 5674//5133 5699//8539 +f 5674//5133 5675//8538 5650//8540 +f 5676//8541 5675//8542 5700//8543 +f 5675//8542 5676//8541 5651//8544 +f 5677//8545 5676//8546 5701//8547 +f 5676//8546 5677//8545 5652//8548 +f 5653//8549 5702//8550 5678//8551 +f 5702//8550 5653//8549 5677//8552 +f 5654//8553 5703//5149 5679//8554 +f 5703//5149 5654//8553 5678//8555 +f 5655//8556 5704//8557 5680//8556 +f 5704//8557 5655//8556 5679//8557 +f 5680//8558 5681//8559 5657//5156 +f 5681//8559 5680//8558 5704//8560 +f 5681//8561 5682//8562 5658//8563 +f 5682//8562 5681//8561 5705//8564 +f 5706//8565 5659//8566 5682//8567 +f 5659//8566 5706//8565 5683//5165 +f 5660//8568 5707//8569 5684//8570 +f 5707//8569 5660//8568 5683//8571 +f 5685//8572 5684//8573 5708//8574 +f 5684//8573 5685//8572 5661//8575 +f 5686//8576 5685//8577 5709//5176 +f 5685//8577 5686//8576 5662//8578 +f 5663//8579 5710//8580 5687//5180 +f 5710//8580 5663//8579 5686//8581 +f 5664//8582 5711//8583 5688//8584 +f 5711//8583 5664//8582 5687//5185 +f 5665//8585 5712//8586 5689//8587 +f 5712//8586 5665//8585 5688//8588 +f 5666//8589 5713//8590 5690//8591 +f 5713//8590 5666//8589 5689//8592 +f 5691//8593 5690//8594 5714//8595 +f 5690//8594 5691//8593 5667//8596 +f 5693//8597 5691//8598 5715//8599 +f 5691//8598 5693//8597 5669//5201 +f 5716//8600 5693//8601 
5717//8602 +f 5693//8601 5716//8600 5692//8603 +f 5718//8604 5668//8605 5692//8606 +f 5668//8605 5718//8604 5694//5209 +f 5695//8607 5694//8608 5719//8609 +f 5694//8608 5695//8607 5670//8610 +f 5720//8611 5671//8612 5695//8613 +f 5671//8612 5720//8611 5696//5215 +f 5721//8614 5672//8615 5696//8616 +f 5672//8615 5721//8614 5697//8615 +f 5722//8617 5673//8618 5697//8619 +f 5673//8618 5722//8617 5698//8620 +f 5723//5223 5674//5223 5698//5223 +f 5674//5133 5723//5224 5699//8539 +f 5700//8621 5699//8622 5724//8622 +f 5699//8622 5700//8621 5675//5228 +f 5701//8623 5700//5230 5725//5230 +f 5700//5230 5701//8623 5676//8623 +f 5702//8624 5701//8625 5726//5233 +f 5701//8625 5702//8624 5677//8626 +f 5678//8627 5727//8628 5703//8629 +f 5727//8628 5678//8627 5702//8630 +f 5679//8631 5728//8632 5704//8633 +f 5728//8632 5679//8631 5703//8634 +f 5704//8635 5705//8636 5681//8637 +f 5705//8636 5704//8635 5728//8638 +f 5729//8639 5682//8640 5705//8640 +f 5682//8640 5729//8639 5706//8639 +f 5683//8641 5730//8642 5707//8642 +f 5730//8642 5683//8641 5706//8641 +f 5684//5253 5731//8643 5708//5255 +f 5731//8643 5684//5253 5707//8644 +f 5709//8645 5708//8646 5732//8647 +f 5708//8646 5709//8645 5685//8648 +f 5686//5261 5733//5262 5710//5263 +f 5733//5262 5686//5261 5709//5264 +f 5687//5265 5734//5266 5711//5267 +f 5734//5266 5687//5265 5710//8649 +f 5688//8650 5735//8651 5712//5271 +f 5735//8651 5688//8650 5711//8652 +f 5689//8653 5736//8654 5713//8655 +f 5736//8654 5689//8653 5712//8656 +f 5714//8657 5713//8658 5737//8659 +f 5713//8658 5714//8657 5690//8660 +f 5715//8661 5714//8662 5738//8663 +f 5714//8662 5715//8661 5691//8664 +f 5717//8665 5715//8666 5739//8667 +f 5715//8666 5717//8665 5693//8668 +f 5740//8669 5717//8670 5741//8669 +f 5717//8670 5740//8669 5716//8671 +f 5742//8672 5692//8673 5716//8674 +f 5692//8673 5742//8672 5718//8675 +f 5743//8676 5694//8677 5718//8678 +f 5694//8677 5743//8676 5719//8679 +f 5720//8680 5719//8681 5744//8682 +f 5719//8681 5720//8680 5695//8680 +f 
5745//8683 5696//8684 5720//5303 +f 5696//8684 5745//8683 5721//8685 +f 5746//8686 5697//8687 5721//8688 +f 5697//8687 5746//8686 5722//5308 +f 5747//8689 5698//8690 5722//8691 +f 5698//8690 5747//8689 5723//5312 +f 5748//5314 5699//8692 5723//5315 +f 5699//8692 5748//5314 5724//8693 +f 5725//8694 5724//5317 5749//8695 +f 5724//5317 5725//8694 5700//5319 +f 5726//5320 5725//8696 5750//8697 +f 5725//8696 5726//5320 5701//8698 +f 5727//8699 5726//8700 5751//8701 +f 5726//8700 5727//8699 5702//8702 +f 5703//8703 5752//8704 5728//5328 +f 5752//8704 5703//8703 5727//8705 +f 5728//5331 5729//5332 5705//5333 +f 5729//5332 5728//5331 5752//8706 +f 5706//8707 5753//8708 5730//8708 +f 5753//8708 5706//8707 5729//8707 +f 5707//8709 5754//8710 5731//8711 +f 5754//8710 5707//8709 5730//8712 +f 5708//8713 5755//8714 5732//8715 +f 5755//8714 5708//8713 5731//5344 +f 5709//8716 5756//8717 5733//8718 +f 5756//8717 5709//8716 5732//5348 +f 5710//8719 5757//8720 5734//8721 +f 5757//8720 5710//8719 5733//8722 +f 5711//8723 5758//8724 5735//8725 +f 5758//8724 5711//8723 5734//8726 +f 5712//5357 5759//5359 5736//5359 +f 5759//5359 5712//5357 5735//5357 +f 5713//8727 5760//8728 5737//8729 +f 5760//8728 5713//8727 5736//8730 +f 5738//5366 5737//8731 5761//8732 +f 5737//8731 5738//5366 5714//8731 +f 5739//8733 5738//8734 5762//8735 +f 5738//8734 5739//8733 5715//8736 +f 5741//8737 5739//8738 5763//8739 +f 5739//8738 5741//8737 5717//8740 +f 5764//8741 5741//8742 5765//5378 +f 5741//8742 5764//8741 5740//5379 +f 5766//5380 5716//8743 5740//5380 +f 5716//8743 5766//5380 5742//8743 +f 5767//8744 5718//5382 5742//5383 +f 5718//5382 5767//8744 5743//5382 +f 5768//8745 5719//8746 5743//8747 +f 5719//8746 5768//8745 5744//8748 +f 5769//8749 5720//8750 5744//8749 +f 5720//8750 5769//8749 5745//8750 +f 5770//5393 5721//8751 5745//8752 +f 5721//8751 5770//5393 5746//8753 +f 5771//8754 5722//8755 5746//5396 +f 5722//8755 5771//8754 5747//8756 +f 5772//8757 5723//5401 5747//8757 +f 5723//5401 
5772//8757 5748//5401 +f 5773//5402 5724//8758 5748//8759 +f 5724//8758 5773//5402 5749//8760 +f 5750//8761 5749//8762 5774//5408 +f 5749//8762 5750//8761 5725//5409 +f 5751//8763 5750//8764 5775//5412 +f 5750//8764 5751//8763 5726//8765 +f 5752//8766 5751//8767 5776//8768 +f 5751//8767 5752//8766 5727//8769 +f 5729//8770 5776//8771 5753//8772 +f 5776//8771 5729//8770 5752//8773 +f 5730//8774 5777//8775 5754//8776 +f 5777//8775 5730//8774 5753//8777 +f 5731//8778 5778//8779 5755//8780 +f 5778//8779 5731//8778 5754//8781 +f 5755//8782 5756//8783 5732//8784 +f 5756//8783 5755//8782 5779//8785 +f 5733//8786 5780//8787 5757//8788 +f 5780//8787 5733//8786 5756//5433 +f 5734//8789 5781//8790 5758//8791 +f 5781//8790 5734//8789 5757//8792 +f 5735//8793 5782//8794 5759//5443 +f 5782//8794 5735//8793 5758//8795 +f 5736//5445 5783//8796 5760//5447 +f 5783//8796 5736//5445 5759//5448 +f 5737//8797 5784//8798 5761//5451 +f 5784//8798 5737//8797 5760//5452 +f 5738//8799 5785//8800 5762//8800 +f 5785//8800 5738//8799 5761//8799 +f 5739//8801 5786//8802 5763//8801 +f 5786//8802 5739//8801 5762//8803 +f 5741//8804 5787//8805 5765//8806 +f 5787//8805 5741//8804 5763//8807 +f 5788//8808 5764//8809 5765//8810 +f 5764//8809 5788//8808 5789//8811 +f 5790//8812 5740//8813 5764//8812 +f 5740//8813 5790//8812 5766//8814 +f 5791//8815 5742//8816 5766//8817 +f 5742//8816 5791//8815 5767//8818 +f 5792//8819 5743//8820 5767//8819 +f 5743//8820 5792//8819 5768//8820 +f 5793//8821 5744//8822 5768//8823 +f 5744//8822 5793//8821 5769//8824 +f 5794//8825 5745//8826 5769//8827 +f 5745//8826 5794//8825 5770//8826 +f 5795//5488 5746//8828 5770//8829 +f 5746//8828 5795//5488 5771//8830 +f 5796//5492 5747//5492 5771//5492 +f 5747//5492 5796//5492 5772//5492 +f 5797//5494 5748//8831 5772//5494 +f 5748//8831 5797//5494 5773//8832 +f 5749//5496 5798//5497 5774//5496 +f 5798//5497 5749//5496 5773//5497 +f 5775//8833 5774//8834 5799//8835 +f 5774//8834 5775//8833 5750//5501 +f 5776//5502 5775//5503 
5800//5503 +f 5775//5503 5776//5502 5751//5502 +f 5753//8836 5800//8837 5777//8837 +f 5800//8837 5753//8836 5776//8836 +f 5754//8838 5801//5507 5778//8839 +f 5801//5507 5754//8838 5777//8840 +f 5778//5511 5779//8841 5755//8842 +f 5779//8841 5778//5511 5802//5514 +f 5779//8843 5780//5517 5756//8844 +f 5780//5517 5779//8843 5803//8845 +f 5757//8846 5804//8847 5781//8847 +f 5804//8847 5757//8846 5780//8846 +f 5758//8848 5805//8849 5782//8850 +f 5805//8849 5758//8848 5781//8851 +f 5759//5527 5806//5528 5783//5529 +f 5806//5528 5759//5527 5782//5527 +f 5760//5530 5807//8852 5784//5532 +f 5807//8852 5760//5530 5783//5533 +f 5784//8853 5785//8854 5761//8855 +f 5785//8854 5784//8853 5808//8856 +f 5762//8857 5809//8858 5786//8858 +f 5809//8858 5762//8857 5785//8859 +f 5763//8860 5810//8861 5787//8862 +f 5810//8861 5763//8860 5786//8863 +f 5811//8864 5765//8865 5787//8866 +f 5765//8865 5811//8864 5788//8867 +f 5811//8868 5789//8869 5788//8870 +f 5789//8869 5811//8868 5812//8871 +f 5789//8869 5812//8871 5813//5551 +f 5813//5551 5812//8871 5814//8872 +f 5813//5551 5814//8872 5815//8873 +f 5815//8873 5814//8872 5816//5553 +f 5815//8873 5816//5553 5817//8874 +f 5817//8874 5816//5553 5818//8875 +f 5817//8874 5818//8875 5819//8876 +f 5819//8876 5818//8875 5820//8877 +f 5819//8876 5820//8877 5821//8878 +f 5821//8878 5820//8877 5822//5560 +f 5821//8878 5822//5560 5823//5561 +f 5823//5561 5822//5560 5824//8879 +f 5823//5561 5824//8879 5825//5554 +f 5825//5554 5824//8879 5826//5553 +f 5825//5554 5826//5553 5827//8880 +f 5827//8880 5826//5553 5828//8881 +f 5827//8880 5828//8881 5829//8882 +f 5829//8882 5828//8881 5830//8883 +f 5829//8882 5830//8883 5831//8884 +f 5831//8885 5830//8885 5832//8885 +f 5789//5574 5790//5574 5764//5572 +f 5790//5574 5789//5574 5813//5574 +f 5833//8886 5766//8887 5790//8888 +f 5766//8887 5833//8886 5791//8889 +f 5834//8890 5767//8891 5791//8892 +f 5767//8891 5834//8890 5792//8893 +f 5792//8894 5793//8895 5768//8895 +f 5793//8895 5792//8894 5835//8896 +f 
5836//8897 5769//8898 5793//8899 +f 5769//8898 5836//8897 5794//8897 +f 5837//5586 5770//8900 5794//5586 +f 5770//8900 5837//5586 5795//8900 +f 5838//8901 5771//5590 5795//8902 +f 5771//5590 5838//8901 5796//5590 +f 5839//8903 5772//8904 5796//8905 +f 5772//8904 5839//8903 5797//5594 +f 5773//8906 5840//5597 5798//8907 +f 5840//5597 5773//8906 5797//8908 +f 5774//8909 5841//5600 5799//8909 +f 5841//5600 5774//8909 5798//5600 +f 5800//8910 5799//8911 5842//8911 +f 5799//8911 5800//8910 5775//8910 +f 5777//8912 5842//8913 5801//8914 +f 5842//8913 5777//8912 5800//8915 +f 5801//8916 5802//5605 5778//8916 +f 5802//5605 5801//8916 5843//5605 +f 5802//5608 5803//8917 5779//5608 +f 5803//8917 5802//5608 5844//8917 +f 5803//8918 5804//5610 5780//8919 +f 5804//5610 5803//8918 5845//5613 +f 5781//8920 5846//8921 5805//8922 +f 5846//8921 5781//8920 5804//8920 +f 5782//8923 5847//8924 5806//8925 +f 5847//8924 5782//8923 5805//8926 +f 5783//5621 5848//5621 5807//5621 +f 5848//5621 5783//5621 5806//5621 +f 5807//8927 5808//8928 5784//5625 +f 5808//8928 5807//8927 5849//8929 +f 5808//5627 5809//8930 5785//5629 +f 5809//8930 5808//5627 5850//8931 +f 5786//8932 5851//8933 5810//8934 +f 5851//8933 5786//8932 5809//8935 +f 5812//8936 5787//8937 5810//8938 +f 5787//8937 5812//8936 5811//8937 +f 5852//8939 5831//8940 5853//8941 +f 5831//8940 5852//8939 5829//8940 +f 5854//8942 5829//8943 5852//5643 +f 5829//8943 5854//8942 5827//8944 +f 5825//8945 5854//8946 5855//8947 +f 5854//8946 5825//8945 5827//8948 +f 5823//8949 5855//8950 5856//8950 +f 5855//8950 5823//8949 5825//8949 +f 5821//8951 5856//8952 5857//8953 +f 5856//8952 5821//8951 5823//5657 +f 5819//8954 5857//8954 5858//5660 +f 5857//8954 5819//8954 5821//8955 +f 5817//8956 5858//5664 5859//8957 +f 5858//5664 5817//8956 5819//8958 +f 5815//8959 5859//8960 5833//8961 +f 5859//8960 5815//8959 5817//8959 +f 5813//5670 5833//5671 5790//5672 +f 5833//5671 5813//5670 5815//5673 +f 5814//8962 5810//8963 5851//8963 +f 5810//8963 
5814//8962 5812//8962 +f 5860//8964 5814//8965 5851//8964 +f 5814//8965 5860//8964 5816//8966 +f 5861//8967 5816//8968 5860//8967 +f 5816//8968 5861//8967 5818//8968 +f 5862//8969 5818//8970 5861//5686 +f 5818//8970 5862//8969 5820//5687 +f 5863//5688 5820//8971 5862//8972 +f 5820//8971 5863//5688 5822//8973 +f 5864//8974 5822//8975 5863//8974 +f 5822//8975 5864//8974 5824//8975 +f 5865//8976 5824//8977 5864//8978 +f 5824//8977 5865//8976 5826//8977 +f 5866//8979 5826//8980 5865//8981 +f 5826//8980 5866//8979 5828//8982 +f 5867//8983 5828//8984 5866//8985 +f 5828//8984 5867//8983 5830//8986 +f 5868//8987 5830//8988 5867//8989 +f 5830//8988 5868//8987 5832//8990 +f 5853//8991 5832//8992 5868//8993 +f 5832//8992 5853//8991 5831//5711 +f 5833//5714 5834//5715 5791//5716 +f 5834//5715 5833//5714 5859//5717 +f 5834//8994 5835//8995 5792//8996 +f 5835//8995 5834//8994 5869//8997 +f 5835//8998 5836//8999 5793//5721 +f 5836//8999 5835//8998 5870//9000 +f 5871//9001 5794//5724 5836//9001 +f 5794//5724 5871//9001 5837//5725 +f 5872//9002 5795//9003 5837//9004 +f 5795//9003 5872//9002 5838//9005 +f 5873//9006 5796//9007 5838//9008 +f 5796//9007 5873//9006 5839//9009 +f 5797//9010 5874//9011 5840//9012 +f 5874//9011 5797//9010 5839//5737 +f 5798//9013 5875//9014 5841//9015 +f 5875//9014 5798//9013 5840//9016 +f 5799//9017 5876//9018 5842//5742 +f 5876//9018 5799//9017 5841//9018 +f 5842//9019 5843//9020 5801//9019 +f 5843//9020 5842//9019 5876//9021 +f 5843//9022 5844//9023 5802//9024 +f 5844//9023 5843//9022 5877//9025 +f 5844//9026 5845//9027 5803//9028 +f 5845//9027 5844//9026 5878//9027 +f 5804//9029 5879//9030 5846//9031 +f 5879//9030 5804//9029 5845//9032 +f 5805//9033 5880//9034 5847//9035 +f 5880//9034 5805//9033 5846//9036 +f 5806//9037 5881//9038 5848//5766 +f 5881//9038 5806//9037 5847//5765 +f 5848//9039 5849//9040 5807//9041 +f 5849//9040 5848//9039 5882//9042 +f 5849//9043 5850//9044 5808//9045 +f 5850//9044 5849//9043 5883//9046 +f 5850//9047 5851//9048 
5809//9047 +f 5851//9048 5850//9047 5860//9049 +f 5853//9050 5884//9051 5852//9050 +f 5884//9051 5853//9050 5885//9051 +f 5852//9052 5886//9053 5854//9054 +f 5886//9053 5852//9052 5884//9055 +f 5855//9056 5886//9057 5887//9058 +f 5886//9057 5855//9056 5854//9059 +f 5856//9060 5887//9061 5888//9061 +f 5887//9061 5856//9060 5855//9060 +f 5857//5794 5888//9062 5889//5796 +f 5888//9062 5857//5794 5856//9063 +f 5858//5799 5889//5798 5869//5799 +f 5889//5798 5858//5799 5857//5798 +f 5859//9064 5869//9065 5834//9065 +f 5869//9065 5859//9064 5858//9064 +f 5883//5805 5860//9066 5850//9067 +f 5860//9066 5883//5805 5861//9068 +f 5890//9069 5861//9070 5883//5809 +f 5861//9070 5890//9069 5862//5812 +f 5891//9071 5862//5816 5890//5815 +f 5862//5816 5891//9071 5863//5816 +f 5892//9072 5863//9073 5891//9074 +f 5863//9073 5892//9072 5864//9075 +f 5893//9076 5864//9077 5892//9078 +f 5864//9077 5893//9076 5865//9079 +f 5893//5825 5866//5826 5865//5826 +f 5866//5826 5893//5825 5894//5825 +f 5894//9080 5867//9081 5866//9082 +f 5867//9081 5894//9080 5895//5830 +f 5895//5831 5868//9083 5867//5832 +f 5868//9083 5895//5831 5896//5831 +f 5868//9084 5885//9085 5853//9084 +f 5885//9085 5868//9084 5896//9085 +f 5869//9086 5870//9087 5835//9088 +f 5870//9087 5869//9086 5889//9089 +f 5870//9090 5871//9091 5836//5841 +f 5871//9091 5870//9090 5897//9092 +f 5898//9093 5837//9094 5871//9095 +f 5837//9094 5898//9093 5872//9096 +f 5899//9097 5838//9098 5872//5849 +f 5838//9098 5899//9097 5873//9099 +f 5900//9100 5839//9101 5873//9102 +f 5839//9101 5900//9100 5874//9103 +f 5840//9104 5901//9105 5875//9106 +f 5901//9105 5840//9104 5874//9107 +f 5841//9108 5902//9109 5876//9110 +f 5902//9109 5841//9108 5875//9111 +f 5876//9112 5877//9113 5843//9114 +f 5877//9113 5876//9112 5902//9115 +f 5877//9116 5878//9117 5844//9118 +f 5878//9117 5877//9116 5903//9119 +f 5878//9120 5879//9121 5845//9122 +f 5879//9121 5878//9120 5904//9123 +f 5880//9124 5879//9125 5905//9126 +f 5879//9125 5880//9124 5846//9127 +f 
5847//9128 5906//9129 5881//9130 +f 5906//9129 5847//9128 5880//9131 +f 5881//9132 5882//9133 5848//9134 +f 5882//9133 5881//9132 5907//9135 +f 5882//9136 5883//5884 5849//9137 +f 5883//5884 5882//9136 5890//5885 +f 5885//9138 5908//9139 5884//5888 +f 5908//9139 5885//9138 5909//9140 +f 5884//9141 5910//9142 5886//9143 +f 5910//9142 5884//9141 5908//9144 +f 5887//9145 5910//9146 5911//5896 +f 5910//9146 5887//9145 5886//9147 +f 5888//5898 5911//9148 5897//5900 +f 5911//9148 5888//5898 5887//5901 +f 5889//9149 5897//9150 5870//9151 +f 5897//9150 5889//9149 5888//5905 +f 5907//9152 5890//9153 5882//9154 +f 5890//9153 5907//9152 5891//5909 +f 5912//9155 5891//9156 5907//5912 +f 5891//9156 5912//9155 5892//9157 +f 5912//9158 5893//9159 5892//9160 +f 5893//9159 5912//9158 5913//9161 +f 5913//9162 5894//9163 5893//9164 +f 5894//9163 5913//9162 5914//9162 +f 5914//9165 5895//9165 5894//9165 +f 5895//9165 5914//9165 5915//9165 +f 5915//9166 5896//5927 5895//5927 +f 5896//5927 5915//9166 5916//9166 +f 5916//9167 5885//9168 5896//9169 +f 5885//9168 5916//9167 5909//9170 +f 5897//5933 5898//9171 5871//9171 +f 5898//9171 5897//5933 5911//5933 +f 5917//9172 5872//9173 5898//9174 +f 5872//9173 5917//9172 5899//9175 +f 5900//9176 5899//9177 5918//9178 +f 5899//9177 5900//9176 5873//9179 +f 5901//9180 5900//9181 5919//9182 +f 5900//9181 5901//9180 5874//9183 +f 5875//9184 5920//9185 5902//9186 +f 5920//9185 5875//9184 5901//9187 +f 5902//9188 5903//9189 5877//5949 +f 5903//9189 5902//9188 5920//5951 +f 5903//9190 5904//9191 5878//9192 +f 5904//9191 5903//9190 5921//9193 +f 5905//9194 5904//9195 5922//9196 +f 5904//9195 5905//9194 5879//9197 +f 5880//5960 5923//9198 5906//5962 +f 5923//9198 5880//5960 5905//9199 +f 5906//5964 5907//5965 5881//5966 +f 5907//5965 5906//5964 5912//9200 +f 5924//9201 5908//9201 5909//9201 +f 5908//9201 5924//9201 5925//5971 +f 5908//9202 5917//9202 5910//9202 +f 5917//9202 5908//9202 5925//9202 +f 5911//5973 5917//5974 5898//9203 +f 5917//5974 
5911//5973 5910//5975 +f 5906//9204 5913//9205 5912//9206 +f 5913//9205 5906//9204 5923//9207 +f 5923//9208 5914//9209 5913//9210 +f 5914//9209 5923//9208 5926//9208 +f 5926//9211 5915//9212 5914//9213 +f 5915//9212 5926//9211 5927//9214 +f 5927//9215 5916//9216 5915//5990 +f 5916//9216 5927//9215 5928//5991 +f 5928//9217 5909//5994 5916//5994 +f 5909//5994 5928//9217 5924//9217 +f 5918//9218 5917//9219 5925//5999 +f 5917//9219 5918//9218 5899//9220 +f 5919//9221 5918//9222 5929//9223 +f 5918//9222 5919//9221 5900//9224 +f 5901//9225 5930//9226 5920//9227 +f 5930//9226 5901//9225 5919//9228 +f 5920//9229 5921//9230 5903//9231 +f 5921//9230 5920//9229 5930//9232 +f 5922//9233 5921//9234 5931//6013 +f 5921//9234 5922//9233 5904//9235 +f 5905//9236 5926//9237 5923//6017 +f 5926//9237 5905//9236 5922//9238 +f 5929//9239 5925//9240 5924//6021 +f 5925//9240 5929//9239 5918//9241 +f 5922//9242 5927//9243 5926//9243 +f 5927//9243 5922//9242 5931//9242 +f 5931//9244 5928//9245 5927//9246 +f 5928//9245 5931//9244 5932//9247 +f 5932//9248 5924//9249 5928//9250 +f 5924//9249 5932//9248 5929//9251 +f 5930//9252 5929//9253 5932//9254 +f 5929//9253 5930//9252 5919//9252 +f 5921//9255 5932//9256 5931//9257 +f 5932//9256 5921//9255 5930//9258 +f 5933//6043 5934//9259 5935//9260 +f 5934//9259 5933//6043 5936//1900 +f 5935//9261 5937//9262 5938//9263 +f 5937//9262 5935//9261 5934//1903 +f 5939//1909 5935//1908 5940//1908 +f 5935//1908 5939//1909 5933//1909 +f 5933//9264 5941//1911 5936//9264 +f 5941//1911 5933//9264 5942//1911 +f 5936//9265 5943//9266 5934//9267 +f 5943//9266 5936//9265 5944//9268 +f 5938//9269 5945//9270 5946//9270 +f 5945//9270 5938//9269 5937//9269 +f 5940//1920 5938//1923 5947//1922 +f 5938//1923 5940//1920 5935//9271 +f 5934//9272 5948//9273 5937//1926 +f 5948//9273 5934//9272 5943//6070 +f 5949//9274 5940//9275 5950//9276 +f 5940//9275 5949//9274 5939//9277 +f 5939//9278 5942//1933 5933//9279 +f 5942//1933 5939//9278 5951//1935 +f 5942//1936 5952//9280 
5941//1938 +f 5952//9280 5942//1936 5953//1939 +f 5936//9281 5954//9282 5944//9283 +f 5954//9282 5936//9281 5941//9284 +f 5943//1945 5955//9285 5956//6089 +f 5955//9285 5943//1945 5944//9286 +f 5946//9287 5957//1950 5958//1950 +f 5957//1950 5946//9287 5945//9287 +f 5947//1955 5946//6096 5959//6096 +f 5946//6096 5947//1955 5938//1955 +f 5937//6098 5960//9288 5945//6100 +f 5960//9288 5937//6098 5948//6101 +f 5950//9289 5947//9290 5961//1962 +f 5947//9290 5950//9289 5940//6105 +f 5943//6106 5962//6107 5948//6107 +f 5962//6107 5943//6106 5956//6106 +f 5963//9291 5950//9292 5964//9293 +f 5950//9292 5963//9291 5949//9294 +f 5965//9295 5939//9296 5949//9296 +f 5939//9296 5965//9295 5951//9295 +f 5951//9297 5953//1977 5942//9297 +f 5953//1977 5951//9297 5966//1977 +f 5953//9298 5967//9299 5952//1980 +f 5967//9299 5953//9298 5968//1981 +f 5941//9300 5969//9301 5954//9300 +f 5969//9301 5941//9300 5952//9301 +f 5944//9302 5970//9303 5955//9304 +f 5970//9303 5944//9302 5954//1987 +f 5956//1990 5971//6122 5972//1990 +f 5971//6122 5956//1990 5955//6122 +f 5958//9305 5973//9306 5974//6125 +f 5973//9306 5958//9305 5957//9307 +f 5959//6127 5958//6127 5975//6127 +f 5958//6127 5959//6127 5946//6127 +f 5945//9308 5976//9309 5957//9310 +f 5976//9309 5945//9308 5960//6131 +f 5961//9311 5959//9312 5977//9313 +f 5959//9312 5961//9311 5947//9314 +f 5948//9315 5978//2009 5960//2009 +f 5978//2009 5948//9315 5962//2010 +f 5964//9316 5961//2012 5979//2013 +f 5961//2012 5964//9316 5950//2014 +f 5956//6138 5980//2016 5962//6139 +f 5980//2016 5956//6138 5972//6140 +f 5981//9317 5963//9318 5964//9319 +f 5963//9318 5981//9317 5982//6144 +f 5963//9320 5965//9321 5949//6147 +f 5965//9321 5963//9320 5983//9322 +f 5965//9323 5966//9324 5951//9325 +f 5966//9324 5965//9323 5984//9323 +f 5966//9326 5968//9327 5953//2031 +f 5968//9327 5966//9326 5985//2032 +f 5968//2033 5986//2034 5967//2035 +f 5986//2034 5968//2033 5987//2036 +f 5952//9328 5988//9329 5969//9330 +f 5988//9329 5952//9328 5967//9331 +f 
5954//9332 5989//9333 5970//9334 +f 5989//9333 5954//9332 5969//9335 +f 5955//9336 5990//9337 5971//6162 +f 5990//9337 5955//9336 5970//9338 +f 5971//9339 5991//2048 5972//2049 +f 5991//2048 5971//9339 5992//9340 +f 5993//9341 5973//9342 5994//9342 +f 5973//9342 5993//9341 5974//9343 +f 5995//9344 5958//9345 5974//9346 +f 5958//9345 5995//9344 5975//9347 +f 5957//9348 5996//2060 5973//9349 +f 5996//2060 5957//9348 5976//2058 +f 5997//9350 5959//9351 5975//9352 +f 5959//9351 5997//9350 5977//9353 +f 5960//9354 5998//9355 5976//2068 +f 5998//9355 5960//9354 5978//6183 +f 5999//2072 5961//2071 5977//2070 +f 5961//2071 5999//2072 5979//2072 +f 5962//2073 6000//2074 5978//2075 +f 6000//2074 5962//2073 5980//2076 +f 6001//9356 5964//2078 5979//2079 +f 5964//2078 6001//9356 5981//9357 +f 5972//2081 6002//9358 5980//2082 +f 6002//9358 5972//2081 5991//2081 +f 6003//2083 5982//2084 5981//2083 +f 5982//2084 6003//2083 6004//2084 +f 5982//9359 5983//9360 5963//9361 +f 5983//9360 5982//9359 6005//9362 +f 5983//9363 5984//9364 5965//9365 +f 5984//9364 5983//9363 6006//2092 +f 5984//9366 5985//2093 5966//2094 +f 5985//2093 5984//9366 6007//9367 +f 5985//6204 5987//2097 5968//2098 +f 5987//2097 5985//6204 6008//2099 +f 5986//9368 6009//9369 6010//2102 +f 6009//9369 5986//9368 5987//2103 +f 5967//9370 6011//9371 5988//9372 +f 6011//9371 5967//9370 5986//9373 +f 5969//9374 6012//9375 5989//9376 +f 6012//9375 5969//9374 5988//2109 +f 5970//2110 6013//2111 5990//9377 +f 6013//2111 5970//2110 5989//2111 +f 5990//9378 5992//9379 5971//2114 +f 5992//9379 5990//9378 6014//6218 +f 5992//9380 6015//9381 5991//2117 +f 6015//9381 5992//9380 6016//2118 +f 5994//9382 6017//9383 5993//2139 +f 6017//9383 5994//9382 6018//9384 +f 6018//9384 5994//9382 6019//9385 +f 6018//9384 6019//9385 6020//2124 +f 6020//2124 6019//9385 6021//2125 +f 6020//2124 6021//2125 6022//2126 +f 6022//2126 6021//2125 6023//2125 +f 6022//2126 6023//2125 6024//2128 +f 6024//2128 6023//2125 6025//2131 +f 6024//2128 
6025//2131 6026//2130 +f 6026//2130 6025//2131 6027//2130 +f 6026//2130 6027//2130 6028//2131 +f 6028//2131 6027//2130 6029//2132 +f 6028//2131 6029//2132 6030//9386 +f 6030//9386 6029//2132 6031//9387 +f 6030//9386 6031//9387 6032//9386 +f 6032//9386 6031//9387 6033//9385 +f 6032//9386 6033//9385 6034//9388 +f 6034//9388 6033//9385 6035//9389 +f 6034//9388 6035//9389 6036//2138 +f 6036//2138 6035//9389 6037//9390 +f 6036//2138 6037//9390 6038//9391 +f 6017//9392 5974//9393 5993//9394 +f 5974//9393 6017//9392 5995//9395 +f 5994//2144 5996//2146 6019//2146 +f 5996//2146 5994//2144 5973//2147 +f 6039//2148 5975//9396 5995//2148 +f 5975//9396 6039//2148 5997//9396 +f 5976//9397 6040//2151 5996//9398 +f 6040//2151 5976//9397 5998//2153 +f 6041//2156 5977//2157 5997//2156 +f 5977//2157 6041//2156 5999//9399 +f 5978//9400 6042//9401 5998//2160 +f 6042//9401 5978//9400 6000//9402 +f 5999//9403 6001//2164 5979//9403 +f 6001//2164 5999//9403 6043//2163 +f 6002//2166 6000//9404 5980//9405 +f 6000//9404 6002//2166 6044//2169 +f 6045//9406 5981//9407 6001//6270 +f 5981//9407 6045//9406 6003//6271 +f 5991//2174 6046//9408 6002//2176 +f 6046//9408 5991//2174 6015//2177 +f 6047//9409 6004//9410 6003//9411 +f 6004//9410 6047//9409 6048//9412 +f 6004//9413 6005//9414 5982//9415 +f 6005//9414 6004//9413 6049//9416 +f 6005//9417 6006//9418 5983//9419 +f 6006//9418 6005//9417 6050//9420 +f 6006//9421 6007//9422 5984//2190 +f 6007//9422 6006//9421 6051//2191 +f 6007//9423 6008//2193 5985//9424 +f 6008//2193 6007//9423 6052//2195 +f 5987//9425 6053//2198 6009//2198 +f 6053//2198 5987//9425 6008//9426 +f 6010//9427 6054//9428 6055//9429 +f 6054//9428 6010//9427 6009//9430 +f 6011//9431 6010//2204 6056//9432 +f 6010//2204 6011//9431 5986//9433 +f 5988//2208 6057//6297 6012//2210 +f 6057//6297 5988//2208 6011//2211 +f 5989//2214 6058//9434 6013//2214 +f 6058//9434 5989//2214 6012//9434 +f 6013//9435 6014//6301 5990//6302 +f 6014//6301 6013//9435 6059//6303 +f 6014//2220 6016//9436 
5992//2222 +f 6016//9436 6014//2220 6060//6306 +f 6016//9437 6061//2225 6015//9438 +f 6061//2225 6016//9437 6062//9439 +f 6063//9440 6037//2229 6035//2229 +f 6037//2229 6063//9440 6064//9440 +f 6064//6314 6038//6315 6037//2233 +f 6038//6315 6064//6314 6065//6316 +f 6065//9441 6036//9442 6038//9442 +f 6036//9442 6065//9441 6066//9441 +f 6066//2239 6034//9443 6036//2241 +f 6034//9443 6066//2239 6067//2242 +f 6067//6323 6032//2244 6034//2245 +f 6032//2244 6067//6323 6068//6325 +f 6030//9444 6068//9445 6069//2249 +f 6068//9445 6030//9444 6032//9446 +f 6028//2251 6069//2252 6070//2253 +f 6069//2252 6028//2251 6030//2254 +f 6026//9447 6070//9448 6071//9449 +f 6070//9448 6026//9447 6028//2258 +f 6024//9450 6071//2260 6072//9451 +f 6071//2260 6024//9450 6026//9452 +f 6022//9453 6072//9454 6073//2265 +f 6072//9454 6022//9453 6024//2266 +f 6020//2270 6073//9455 6074//9455 +f 6073//9455 6020//2270 6022//2270 +f 6020//9456 6039//9457 6018//9458 +f 6039//9457 6020//9456 6074//9459 +f 6018//9460 5995//9461 6017//9462 +f 5995//9461 6018//9460 6039//9463 +f 6019//2280 6040//9464 6021//2280 +f 6040//9464 6019//2280 5996//9464 +f 6075//9465 6021//9466 6040//6353 +f 6021//9466 6075//9465 6023//2284 +f 6076//2287 6023//9467 6075//2287 +f 6023//9467 6076//2287 6025//2288 +f 6077//9468 6025//6358 6076//2291 +f 6025//6358 6077//9468 6027//2292 +f 6078//9469 6027//9470 6077//9471 +f 6027//9470 6078//9469 6029//2296 +f 6079//9472 6029//9473 6078//2298 +f 6029//9473 6079//9472 6031//2299 +f 6080//9474 6031//2301 6079//2302 +f 6031//2301 6080//9474 6033//2303 +f 6080//9475 6035//2305 6033//2305 +f 6035//2305 6080//9475 6063//2304 +f 6074//9476 5997//9477 6039//9478 +f 5997//9477 6074//9476 6041//9479 +f 6042//9480 6040//9481 5998//9482 +f 6040//9481 6042//9480 6075//6373 +f 6041//9483 6043//9484 5999//2315 +f 6043//9484 6041//9483 6081//2316 +f 6044//9485 6042//2318 6000//9485 +f 6042//2318 6044//9485 6082//2318 +f 6043//9486 6045//9487 6001//9488 +f 6045//9487 6043//9486 6083//9489 +f 
6046//2324 6044//9490 6002//9491 +f 6044//9490 6046//2324 6084//2326 +f 6085//6387 6003//6387 6045//6387 +f 6003//9411 6085//2328 6047//9409 +f 6015//9492 6086//9493 6046//9494 +f 6086//9493 6015//9492 6061//9495 +f 6087//9496 6048//9497 6047//9496 +f 6048//9497 6087//9496 6088//9497 +f 6048//9412 6049//2336 6004//9410 +f 6049//2336 6048//9412 6089//9498 +f 6049//9499 6050//9500 6005//6395 +f 6050//9500 6049//9499 6090//2341 +f 6050//9501 6051//9502 6006//2344 +f 6051//9502 6050//9501 6091//2345 +f 6051//2346 6052//9503 6007//9504 +f 6052//9503 6051//2346 6092//9503 +f 6008//2348 6093//2349 6053//2350 +f 6093//2349 6008//2348 6052//2351 +f 6009//9505 6094//9506 6054//9507 +f 6094//9506 6009//9505 6053//2352 +f 6055//9508 6095//9509 6096//2355 +f 6095//9509 6055//9508 6054//9510 +f 6056//9511 6055//9512 6097//9513 +f 6055//9512 6056//9511 6010//9514 +f 6057//9515 6056//9516 6098//9517 +f 6056//9516 6057//9515 6011//9518 +f 6012//9519 6099//6415 6058//9519 +f 6099//6415 6012//9519 6057//6415 +f 6058//9520 6059//9521 6013//9522 +f 6059//9521 6058//9520 6100//9520 +f 6059//9523 6060//9524 6014//9525 +f 6060//9524 6059//9523 6101//9526 +f 6060//6423 6062//9439 6016//9437 +f 6062//9439 6060//6423 6102//6424 +f 6062//9527 6103//9528 6061//6427 +f 6103//9528 6062//9527 6104//2379 +f 6105//2380 6064//2381 6063//9529 +f 6064//2381 6105//2380 6106//2383 +f 6106//9530 6065//6433 6064//9531 +f 6065//6433 6106//9530 6107//6435 +f 6065//9532 6108//9533 6066//9534 +f 6108//9533 6065//9532 6107//6439 +f 6066//9535 6109//9536 6067//9537 +f 6109//9536 6066//9535 6108//9538 +f 6067//9539 6110//9540 6068//9541 +f 6110//9540 6067//9539 6109//2398 +f 6069//2399 6110//9542 6111//9543 +f 6110//9542 6069//2399 6068//2402 +f 6070//9544 6111//9545 6112//9546 +f 6111//9545 6070//9544 6069//2406 +f 6071//9547 6112//2410 6113//6457 +f 6112//2410 6071//9547 6070//9548 +f 6072//9451 6113//2411 6114//9549 +f 6113//2411 6072//9451 6071//2260 +f 6073//9550 6114//2414 6081//2415 +f 6114//2414 
6073//9550 6072//9551 +f 6074//9552 6081//9553 6041//9554 +f 6081//9553 6074//9552 6073//9555 +f 6082//9556 6075//9557 6042//2423 +f 6075//9557 6082//9556 6076//2424 +f 6115//6468 6076//6469 6082//2427 +f 6076//6469 6115//6468 6077//9558 +f 6116//9559 6077//9471 6115//9560 +f 6077//9471 6116//9559 6078//9469 +f 6117//9561 6078//9562 6116//2433 +f 6078//9562 6117//9561 6079//9563 +f 6118//2435 6079//9564 6117//2437 +f 6079//9564 6118//2435 6080//2438 +f 6118//9565 6063//2440 6080//9566 +f 6063//2440 6118//9565 6105//2442 +f 6081//2443 6083//2443 6043//2443 +f 6083//2443 6081//2443 6114//9567 +f 6084//2444 6082//9568 6044//2444 +f 6082//9568 6084//2444 6115//9568 +f 6083//2446 6085//2446 6045//6487 +f 6085//2446 6083//2446 6119//2448 +f 6086//2450 6084//2450 6046//2450 +f 6084//2450 6086//2450 6120//2450 +f 6121//9569 6047//6489 6085//2453 +f 6047//6489 6121//9569 6087//6490 +f 6061//9570 6122//6491 6086//6493 +f 6122//6491 6061//9570 6103//6494 +f 6123//9571 6088//9572 6087//9573 +f 6088//9572 6123//9571 6124//9574 +f 6088//9575 6089//9576 6048//9577 +f 6089//9576 6088//9575 6125//2466 +f 6089//9498 6090//9578 6049//2336 +f 6090//9578 6089//9498 6126//6501 +f 6090//9579 6091//9580 6050//2471 +f 6091//9580 6090//9579 6127//2472 +f 6091//6503 6092//6504 6051//6505 +f 6092//6504 6091//6503 6128//2476 +f 6052//9581 6129//9582 6093//2479 +f 6129//9582 6052//9581 6092//9583 +f 6053//9584 6130//2482 6094//2483 +f 6130//2482 6053//9584 6093//2484 +f 6054//9585 6131//9586 6095//9587 +f 6131//9586 6054//9585 6094//9588 +f 6132//9589 6095//9590 6133//9589 +f 6095//9590 6132//9589 6096//9591 +f 6097//9592 6096//2494 6134//2494 +f 6096//2494 6097//9592 6055//9592 +f 6098//9593 6097//9594 6135//2497 +f 6097//9594 6098//9593 6056//2498 +f 6099//9595 6098//9596 6136//9597 +f 6098//9596 6099//9595 6057//9598 +f 6099//2503 6100//9599 6058//9600 +f 6100//9599 6099//2503 6137//2506 +f 6100//9601 6101//9602 6059//9603 +f 6101//9602 6100//9601 6138//9604 +f 6101//2511 6102//6424 
6060//6423 +f 6102//6424 6101//2511 6139//2512 +f 6102//2513 6104//2514 6062//9605 +f 6104//2514 6102//2513 6140//2516 +f 6104//9606 6141//9607 6103//2519 +f 6141//9607 6104//9606 6142//9608 +f 6143//6539 6106//6540 6105//6541 +f 6106//6540 6143//6539 6144//6542 +f 6106//2525 6145//9609 6107//2525 +f 6145//9609 6106//2525 6144//9609 +f 6107//2527 6146//2528 6108//6543 +f 6146//2528 6107//2527 6145//2528 +f 6108//9610 6147//9611 6109//6546 +f 6147//9611 6108//9610 6146//6547 +f 6109//2533 6148//9612 6110//2534 +f 6148//9612 6109//2533 6147//6551 +f 6110//9613 6149//9614 6111//2538 +f 6149//9614 6110//9613 6148//2539 +f 6112//9615 6149//6556 6150//2542 +f 6149//6556 6112//9615 6111//2543 +f 6113//2544 6150//6557 6119//6557 +f 6150//6557 6113//2544 6112//2544 +f 6114//9549 6119//9616 6083//2546 +f 6119//9616 6114//9549 6113//2411 +f 6120//9617 6115//9560 6084//2548 +f 6115//9560 6120//9617 6116//9559 +f 6151//9618 6116//9619 6120//2551 +f 6116//9619 6151//9618 6117//2552 +f 6152//9620 6117//2554 6151//9621 +f 6117//2554 6152//9620 6118//2554 +f 6152//9622 6105//9623 6118//2557 +f 6105//9623 6152//9622 6143//9624 +f 6119//9625 6121//9626 6085//2561 +f 6121//9626 6119//9625 6150//2562 +f 6122//9627 6120//9628 6086//6568 +f 6120//9628 6122//9627 6151//2566 +f 6153//9629 6087//9630 6121//9631 +f 6087//9630 6153//9629 6123//9632 +f 6103//2571 6154//9633 6122//2573 +f 6154//9633 6103//2571 6141//6573 +f 6155//2575 6123//2576 6156//2576 +f 6123//2576 6155//2575 6124//2575 +f 6124//9634 6125//9635 6088//9636 +f 6125//9635 6124//9634 6157//2578 +f 6125//2579 6126//2580 6089//2581 +f 6126//2580 6125//2579 6158//6581 +f 6126//6501 6127//2583 6090//9578 +f 6127//2583 6126//6501 6159//6584 +f 6127//9637 6128//9638 6091//9637 +f 6128//9638 6127//9637 6160//6585 +f 6161//2589 6092//2590 6128//2591 +f 6092//2590 6161//2589 6129//2592 +f 6093//9639 6162//2594 6130//2595 +f 6162//2594 6093//9639 6129//9639 +f 6094//9640 6163//9641 6131//9641 +f 6163//9641 6094//9640 6130//2600 +f 
6133//9642 6131//9643 6164//6595 +f 6131//9643 6133//9642 6095//9644 +f 6165//2605 6133//9645 6166//2605 +f 6133//9645 6165//2605 6132//9646 +f 6134//6601 6132//6602 6167//6602 +f 6132//6602 6134//6601 6096//6601 +f 6135//9647 6134//2612 6168//2613 +f 6134//2612 6135//9647 6097//2614 +f 6136//9648 6135//2616 6169//9649 +f 6135//2616 6136//9648 6098//2618 +f 6099//9650 6170//9651 6137//9650 +f 6170//9651 6099//9650 6136//6614 +f 6137//9652 6138//2624 6100//6616 +f 6138//2624 6137//9652 6171//2626 +f 6138//2627 6139//2512 6101//2511 +f 6139//2512 6138//2627 6172//2628 +f 6139//9653 6140//9654 6102//2631 +f 6140//9654 6139//9653 6173//9655 +f 6140//2633 6142//9656 6104//9656 +f 6142//9656 6140//2633 6174//2633 +f 6141//9657 6175//9658 6176//9659 +f 6175//9658 6141//9657 6142//9660 +f 6143//6629 6177//6630 6144//6629 +f 6177//6630 6143//6629 6178//6630 +f 6144//6631 6179//2646 6145//6631 +f 6179//2646 6144//6631 6177//2646 +f 6145//2649 6180//6632 6146//6633 +f 6180//6632 6145//2649 6179//6632 +f 6146//9661 6181//9662 6147//6636 +f 6181//9662 6146//9661 6180//2653 +f 6147//6639 6182//6638 6148//6639 +f 6182//6638 6147//6639 6181//6640 +f 6148//2658 6153//9663 6149//2658 +f 6153//9663 6148//2658 6182//2660 +f 6150//2661 6153//9664 6121//9665 +f 6153//9664 6150//2661 6149//2664 +f 6154//2665 6151//2666 6122//2667 +f 6151//2666 6154//2665 6152//2666 +f 6154//9666 6143//9666 6152//9666 +f 6143//9666 6154//9666 6178//6655 +f 6182//9667 6123//9668 6153//9669 +f 6123//9668 6182//9667 6156//2676 +f 6154//2677 6176//6660 6178//2677 +f 6176//6660 6154//2677 6141//2678 +f 6183//9670 6156//9671 6184//6662 +f 6156//9671 6183//9670 6155//2682 +f 6185//2683 6124//2684 6155//2684 +f 6124//2684 6185//2683 6157//2683 +f 6157//6667 6158//2686 6125//9672 +f 6158//2686 6157//6667 6186//2688 +f 6158//9673 6159//6671 6126//9674 +f 6159//6671 6158//9673 6187//9675 +f 6159//6584 6160//9676 6127//2583 +f 6160//9676 6159//6584 6188//2694 +f 6189//6675 6128//9677 6160//2697 +f 6128//9677 
6189//6675 6161//2698 +f 6190//9678 6129//2700 6161//2701 +f 6129//2700 6190//9678 6162//9679 +f 6191//9680 6130//2704 6162//9681 +f 6130//2704 6191//9680 6163//2706 +f 6164//9682 6163//9683 6192//2709 +f 6163//9683 6164//9682 6131//6685 +f 6166//2711 6164//2712 6193//2711 +f 6164//2712 6166//2711 6133//2712 +f 6194//2714 6166//2715 6195//2716 +f 6166//2715 6194//2714 6165//2717 +f 6196//9684 6132//9685 6165//9686 +f 6132//9685 6196//9684 6167//6697 +f 6168//9687 6167//9688 6197//9689 +f 6167//9688 6168//9687 6134//2725 +f 6169//9690 6168//2727 6198//9691 +f 6168//2727 6169//9690 6135//9692 +f 6136//2730 6199//2731 6170//2730 +f 6199//2731 6136//2730 6169//2731 +f 6137//9693 6200//6707 6171//9694 +f 6200//6707 6137//9693 6170//2735 +f 6171//9695 6172//2628 6138//2627 +f 6172//2628 6171//9695 6201//2737 +f 6172//9696 6173//2739 6139//2740 +f 6173//2739 6172//9696 6202//2741 +f 6173//2742 6174//9697 6140//9698 +f 6174//9697 6173//2742 6203//2745 +f 6174//9699 6175//2747 6142//9700 +f 6175//2747 6174//9699 6204//2749 +f 6176//9701 6205//9702 6206//2752 +f 6205//9702 6176//9701 6175//2753 +f 6178//9703 6206//9704 6177//2756 +f 6206//9704 6178//9703 6176//2757 +f 6177//6723 6207//6724 6179//6725 +f 6207//6724 6177//6723 6206//9705 +f 6179//6727 6208//9706 6180//2764 +f 6208//9706 6179//6727 6207//6728 +f 6180//9707 6184//9708 6181//6731 +f 6184//9708 6180//9707 6208//9709 +f 6181//2770 6156//9710 6182//2772 +f 6156//9710 6181//2770 6184//6733 +f 6208//9711 6183//2775 6184//2776 +f 6183//2775 6208//9711 6209//2777 +f 6210//2778 6155//9712 6183//2780 +f 6155//9712 6210//2778 6185//2781 +f 6211//9713 6157//9714 6185//9715 +f 6157//9714 6211//9713 6186//2785 +f 6186//6744 6187//6745 6158//9716 +f 6187//6745 6186//6744 6212//6747 +f 6187//9717 6188//9718 6159//9717 +f 6188//9718 6187//9717 6213//9718 +f 6214//9719 6160//9676 6188//2694 +f 6160//9676 6214//9719 6189//6811 +f 6215//2795 6161//2796 6189//2796 +f 6161//2796 6215//2795 6190//2795 +f 6216//9720 6162//2798 
6190//2798 +f 6162//2798 6216//9720 6191//2797 +f 6192//9721 6191//6759 6217//9722 +f 6191//6759 6192//9721 6163//2802 +f 6193//2803 6192//9723 6218//9724 +f 6192//9723 6193//2803 6164//2806 +f 6195//9725 6193//9726 6219//2809 +f 6193//9726 6195//9725 6166//9727 +f 6220//2811 6195//9728 6221//2813 +f 6195//9728 6220//2811 6194//9729 +f 6222//2815 6165//9730 6194//2817 +f 6165//9730 6222//2815 6196//9731 +f 6197//2820 6196//6776 6223//2821 +f 6196//6776 6197//2820 6167//2820 +f 6198//9732 6197//9733 6224//9734 +f 6197//9733 6198//9732 6168//2825 +f 6169//2826 6225//2827 6199//2826 +f 6225//2827 6169//2826 6198//2827 +f 6170//2828 6226//9735 6200//2830 +f 6226//9735 6170//2828 6199//9736 +f 6171//9695 6227//9737 6201//2737 +f 6227//9737 6171//9695 6200//2833 +f 6201//9738 6202//9738 6172//2835 +f 6202//9738 6201//9738 6228//2836 +f 6202//9739 6203//2838 6173//9740 +f 6203//2838 6202//9739 6229//9741 +f 6203//9742 6204//2842 6174//2843 +f 6204//2842 6203//9742 6230//2844 +f 6175//9743 6231//9744 6205//9744 +f 6231//9744 6175//9743 6204//9743 +f 6206//6795 6232//9745 6207//6795 +f 6232//9745 6206//6795 6205//9746 +f 6207//9747 6209//9748 6208//9749 +f 6209//9748 6207//9747 6232//9750 +f 6233//9751 6183//2858 6209//2858 +f 6183//2858 6233//9751 6210//2859 +f 6234//9752 6185//2863 6210//2860 +f 6185//2863 6234//9752 6211//2863 +f 6235//2867 6186//2865 6211//2866 +f 6186//2865 6235//2867 6212//2867 +f 6212//6808 6213//9753 6187//2870 +f 6213//9753 6212//6808 6236//6808 +f 6237//6809 6188//9754 6213//2872 +f 6188//9754 6237//6809 6214//9755 +f 6238//6811 6189//6811 6214//9719 +f 6189//6811 6238//6811 6215//2877 +f 6239//9756 6190//9757 6215//2880 +f 6190//9757 6239//9756 6216//2881 +f 6240//9758 6191//9759 6216//9760 +f 6191//9759 6240//9758 6217//2885 +f 6218//2886 6217//9761 6241//2886 +f 6217//9761 6218//2886 6192//2888 +f 6219//2889 6218//2890 6242//2891 +f 6218//2890 6219//2889 6193//9762 +f 6221//9763 6219//9764 6243//9765 +f 6219//9764 6221//9763 6195//2896 +f 
6244//2897 6221//2900 6245//2899 +f 6221//2900 6244//2897 6220//2900 +f 6246//9766 6194//2902 6220//9767 +f 6194//2902 6246//9766 6222//9768 +f 6247//2905 6196//2906 6222//2905 +f 6196//2906 6247//2905 6223//2906 +f 6224//6838 6223//9769 6248//6840 +f 6223//9769 6224//6838 6197//6841 +f 6198//9770 6249//9771 6225//2913 +f 6249//9771 6198//9770 6224//9772 +f 6199//9773 6250//9774 6226//2917 +f 6250//9774 6199//9773 6225//2918 +f 6200//2833 6251//6787 6227//9737 +f 6251//6787 6200//2833 6226//2920 +f 6201//9775 6252//9776 6228//6850 +f 6252//9776 6201//9775 6227//9777 +f 6228//6852 6229//9778 6202//2927 +f 6229//9778 6228//6852 6253//2928 +f 6229//9779 6230//9780 6203//9781 +f 6230//9780 6229//9779 6254//2932 +f 6230//9782 6231//9783 6204//9784 +f 6231//9783 6230//9782 6255//9785 +f 6231//9786 6232//9787 6205//6861 +f 6232//9787 6231//9786 6256//9788 +f 6256//6863 6209//9789 6232//2942 +f 6209//9789 6256//6863 6233//9790 +f 6257//9791 6210//2944 6233//2945 +f 6210//2944 6257//9791 6234//2946 +f 6258//2947 6211//9792 6234//2949 +f 6211//9792 6258//2947 6235//9793 +f 6259//2951 6212//2952 6235//2953 +f 6212//2952 6259//2951 6236//2954 +f 6260//2955 6213//2956 6236//2957 +f 6213//2956 6260//2955 6237//9794 +f 6261//9795 6214//6878 6237//9796 +f 6214//6878 6261//9795 6238//2962 +f 6262//6879 6215//2877 6238//6811 +f 6215//2877 6262//6879 6239//9797 +f 6263//2965 6216//2966 6239//9798 +f 6216//2966 6263//2965 6240//2968 +f 6264//2969 6217//2971 6240//2971 +f 6217//2971 6264//2969 6241//2969 +f 6242//2972 6241//9799 6265//2974 +f 6241//9799 6242//2972 6218//2975 +f 6219//2976 6266//2977 6243//9800 +f 6266//2977 6219//2976 6242//2979 +f 6221//6891 6267//9801 6245//6893 +f 6267//9801 6221//6891 6243//6894 +f 6244//2985 6268//2985 6269//2985 +f 6268//2985 6244//2985 6245//2984 +f 6270//9802 6220//9803 6244//2989 +f 6220//9803 6270//9802 6246//9804 +f 6271//2991 6222//9805 6246//2993 +f 6222//9805 6271//2991 6247//6905 +f 6272//2998 6223//2995 6247//2997 +f 6223//2995 
6272//2998 6248//2998 +f 6224//9806 6273//9807 6249//9808 +f 6273//9807 6224//9806 6248//6907 +f 6225//9809 6274//9810 6250//6911 +f 6274//9810 6225//9809 6249//9811 +f 6226//2920 6275//9812 6251//6787 +f 6275//9812 6226//2920 6250//3006 +f 6227//9813 6276//3008 6252//9813 +f 6276//3008 6227//9813 6251//3008 +f 6228//3009 6277//9814 6253//3009 +f 6277//9814 6228//3009 6252//3010 +f 6253//9815 6254//9816 6229//3013 +f 6254//9816 6253//9815 6278//3014 +f 6254//9817 6255//3016 6230//3017 +f 6255//3016 6254//9817 6279//3018 +f 6255//9818 6256//9819 6231//9820 +f 6256//9819 6255//9818 6280//9821 +f 6280//9822 6233//9823 6256//9823 +f 6233//9823 6280//9822 6257//9824 +f 6281//3027 6234//3028 6257//3029 +f 6234//3028 6281//3027 6258//9825 +f 6282//3031 6235//6933 6258//6934 +f 6235//6933 6282//3031 6259//3034 +f 6260//3035 6259//3036 6283//9826 +f 6259//3036 6260//3035 6236//3036 +f 6284//9827 6237//9828 6260//6937 +f 6237//9828 6284//9827 6261//3037 +f 6285//3041 6238//3040 6261//3040 +f 6238//3040 6285//3041 6262//3042 +f 6286//3043 6239//9797 6262//6879 +f 6239//9797 6286//3043 6263//9829 +f 6287//9830 6240//3046 6263//3047 +f 6240//3046 6287//9830 6264//3048 +f 6288//3049 6241//9831 6264//9832 +f 6241//9831 6288//3049 6265//3052 +f 6242//9833 6289//9834 6266//3055 +f 6289//9834 6242//9833 6265//9835 +f 6243//9836 6290//9837 6267//6953 +f 6290//9837 6243//9836 6266//9838 +f 6245//9839 6291//9840 6268//3062 +f 6291//9840 6245//9839 6267//9841 +f 6269//3064 6292//9842 6293//3066 +f 6292//9842 6269//3064 6268//3064 +f 6294//3067 6244//9843 6269//9844 +f 6244//9843 6294//3067 6270//3070 +f 6295//9845 6246//9846 6270//3073 +f 6246//9846 6295//9845 6271//3074 +f 6296//6969 6247//3076 6271//3077 +f 6247//3076 6296//6969 6272//3078 +f 6248//9847 6297//9848 6273//3081 +f 6297//9848 6248//9847 6272//3082 +f 6249//9849 6298//9850 6274//9851 +f 6298//9850 6249//9849 6273//3084 +f 6250//3006 6299//3085 6275//9812 +f 6299//3085 6250//3006 6274//9852 +f 6251//3087 6300//3088 
6276//3089 +f 6300//3088 6251//3087 6275//9853 +f 6252//9854 6301//9855 6277//9856 +f 6301//9855 6252//9854 6276//9857 +f 6302//3095 6253//3096 6277//3097 +f 6253//3096 6302//3095 6278//3098 +f 6278//3099 6279//6988 6254//3101 +f 6279//6988 6278//3099 6303//3102 +f 6279//9858 6280//6992 6255//9859 +f 6280//6992 6279//9858 6304//9860 +f 6304//9861 6257//9862 6280//9863 +f 6257//9862 6304//9861 6281//3110 +f 6305//3111 6258//9864 6281//3113 +f 6258//9864 6305//3111 6282//3114 +f 6283//9865 6282//7001 6306//7000 +f 6282//7001 6283//9865 6259//7001 +f 6284//7002 6283//3117 6307//7003 +f 6283//3117 6284//7002 6260//3119 +f 6308//9866 6261//3121 6284//3121 +f 6261//3121 6308//9866 6285//9866 +f 6309//3122 6262//9867 6285//3124 +f 6262//9867 6309//3122 6286//3125 +f 6310//7011 6263//9829 6286//3043 +f 6263//9829 6310//7011 6287//3127 +f 6311//3128 6264//9868 6287//3130 +f 6264//9868 6311//3128 6288//3131 +f 6289//9869 6288//9870 6312//3132 +f 6288//9870 6289//9869 6265//3133 +f 6266//3134 6313//9871 6290//3135 +f 6313//9871 6266//3134 6289//3134 +f 6267//7025 6314//9872 6291//9873 +f 6314//9872 6267//7025 6290//7025 +f 6268//9874 6315//3142 6292//3143 +f 6315//3142 6268//9874 6291//3144 +f 6293//9875 6316//9876 6317//3147 +f 6316//9876 6293//9875 6292//3148 +f 6294//9877 6293//3150 6318//3151 +f 6293//3150 6294//9877 6269//9878 +f 6319//3153 6270//9879 6294//9880 +f 6270//9879 6319//3153 6295//3154 +f 6296//9881 6295//3156 6320//3157 +f 6295//3156 6296//9881 6271//3158 +f 6297//9882 6296//9883 6321//3161 +f 6296//9883 6297//9882 6272//9884 +f 6273//3163 6322//9885 6298//3165 +f 6322//9885 6273//3163 6297//3166 +f 6274//9852 6323//9886 6299//3085 +f 6323//9886 6274//9852 6298//3168 +f 6275//9887 6324//9888 6300//9889 +f 6324//9888 6275//9887 6299//9890 +f 6276//9891 6325//9892 6301//3174 +f 6325//9892 6276//9891 6300//9893 +f 6326//9894 6277//9895 6301//9896 +f 6277//9895 6326//9894 6302//3177 +f 6327//9897 6278//9898 6302//9899 +f 6278//9898 6327//9897 6303//9900 +f 
6303//3183 6304//3184 6279//3184 +f 6304//3184 6303//3183 6328//3185 +f 6328//3188 6281//3187 6304//3188 +f 6281//3187 6328//3188 6305//3187 +f 6306//9901 6305//9902 6329//9902 +f 6305//9902 6306//9901 6282//9903 +f 6307//7061 6306//3194 6330//7063 +f 6306//3194 6307//7061 6283//7064 +f 6308//3197 6307//3198 6331//3199 +f 6307//3198 6308//3197 6284//9904 +f 6332//3201 6285//3202 6308//3203 +f 6285//3202 6332//3201 6309//3204 +f 6333//9905 6286//3206 6309//3206 +f 6286//3206 6333//9905 6310//3205 +f 6334//3207 6287//3127 6310//7011 +f 6287//3127 6334//3207 6311//3208 +f 6312//9906 6311//3210 6335//3211 +f 6311//3210 6312//9906 6288//3212 +f 6313//9907 6312//3213 6336//9908 +f 6312//3213 6313//9907 6289//3213 +f 6290//9909 6337//9910 6314//9911 +f 6337//9910 6290//9909 6313//9912 +f 6291//9913 6338//9914 6315//9915 +f 6338//9914 6291//9913 6314//9914 +f 6292//9916 6339//3221 6316//3222 +f 6339//3221 6292//9916 6315//3223 +f 6317//3224 6340//9917 6341//3226 +f 6340//9917 6317//3224 6316//3227 +f 6318//3228 6317//3228 6342//3228 +f 6317//3228 6318//3228 6293//3228 +f 6319//9918 6318//3230 6343//9919 +f 6318//3230 6319//9918 6294//3229 +f 6320//9920 6319//3232 6344//3232 +f 6319//3232 6320//9920 6295//3233 +f 6321//9921 6320//9922 6345//9923 +f 6320//9922 6321//9921 6296//3237 +f 6322//3238 6321//9924 6346//3240 +f 6321//9924 6322//3238 6297//9925 +f 6298//3242 6347//3243 6323//3244 +f 6347//3243 6298//3242 6322//3245 +f 6299//9926 6348//9927 6324//7111 +f 6348//9927 6299//9926 6323//7112 +f 6300//3250 6349//9928 6325//3252 +f 6349//9928 6300//3250 6324//3253 +f 6301//9929 6350//9930 6326//7119 +f 6350//9930 6301//9929 6325//7118 +f 6351//9931 6302//9932 6326//3258 +f 6302//9932 6351//9931 6327//9933 +f 6352//9934 6303//9935 6327//3263 +f 6303//9935 6352//9934 6328//3264 +f 6329//9936 6328//9937 6352//9938 +f 6328//9937 6329//9936 6305//3268 +f 6330//3270 6329//9939 6353//3271 +f 6329//9939 6330//3270 6306//7129 +f 6331//9940 6330//3273 6354//7130 +f 6330//3273 
6331//9940 6307//3275 +f 6332//3278 6331//9941 6355//3278 +f 6331//9941 6332//3278 6308//3277 +f 6356//3280 6309//7134 6332//7134 +f 6309//7134 6356//3280 6333//3280 +f 6357//3282 6310//3283 6333//3284 +f 6310//3283 6357//3282 6334//3285 +f 6335//3286 6334//3207 6358//7138 +f 6334//3207 6335//3286 6311//3208 +f 6336//9942 6335//3289 6359//3290 +f 6335//3289 6336//9942 6312//3289 +f 6337//9943 6336//9943 6360//3293 +f 6336//9943 6337//9943 6313//9944 +f 6314//9945 6361//9946 6338//9947 +f 6361//9946 6314//9945 6337//3297 +f 6315//9948 6362//3299 6339//3299 +f 6362//3299 6315//9948 6338//9948 +f 6316//7153 6363//7153 6340//3301 +f 6363//7153 6316//7153 6339//3302 +f 6364//3303 6340//9949 6365//3305 +f 6340//9949 6364//3303 6341//3306 +f 6342//3307 6341//3308 6366//3308 +f 6341//3308 6342//3307 6317//3309 +f 6343//3310 6342//3311 6367//3312 +f 6342//3311 6343//3310 6318//3313 +f 6344//3314 6343//9950 6368//3315 +f 6343//9950 6344//3314 6319//3314 +f 6345//9951 6344//9952 6369//3318 +f 6344//9952 6345//9951 6320//3317 +f 6346//3319 6345//9953 6370//3321 +f 6345//9953 6346//3319 6321//3319 +f 6347//9954 6346//7173 6371//3324 +f 6346//7173 6347//9954 6322//3323 +f 6323//9955 6372//9956 6348//7176 +f 6372//9956 6323//9955 6347//3326 +f 6324//7178 6373//7179 6349//7178 +f 6373//7179 6324//7178 6348//7179 +f 6350//7180 6349//9957 6374//9958 +f 6349//9957 6350//7180 6325//9959 +f 6375//9960 6326//9961 6350//7186 +f 6326//9961 6375//9960 6351//3337 +f 6376//7189 6327//7188 6351//7189 +f 6327//7188 6376//7189 6352//7190 +f 6353//9962 6352//9963 6376//9964 +f 6352//9963 6353//9962 6329//9965 +f 6354//3344 6353//3345 6377//3344 +f 6353//3345 6354//3344 6330//3345 +f 6355//9966 6354//9966 6378//9966 +f 6354//9966 6355//9966 6331//9966 +f 6356//9967 6355//9968 6379//9969 +f 6355//9968 6356//9967 6332//3352 +f 6380//9970 6333//9971 6356//9972 +f 6333//9971 6380//9970 6357//3356 +f 6358//3357 6357//3358 6381//3359 +f 6357//3358 6358//3357 6334//3360 +f 6359//7212 6358//7138 
6382//7213 +f 6358//7138 6359//7212 6335//3286 +f 6360//3363 6359//7217 6383//3363 +f 6359//7217 6360//3363 6336//7217 +f 6361//9973 6360//9974 6384//9973 +f 6360//9974 6361//9973 6337//9974 +f 6338//9975 6385//9976 6362//3368 +f 6385//9976 6338//9975 6361//9977 +f 6339//3371 6386//3370 6363//7224 +f 6386//3370 6339//3371 6362//3371 +f 6365//9978 6363//9979 6387//9980 +f 6363//9979 6365//9978 6340//3372 +f 6388//3305 6365//3305 6389//3305 +f 6365//3305 6388//3305 6364//3303 +f 6390//9981 6341//9982 6364//3376 +f 6341//9982 6390//9981 6366//3375 +f 6367//3379 6366//7232 6391//3377 +f 6366//7232 6367//3379 6342//3379 +f 6368//9983 6367//9984 6392//9985 +f 6367//9984 6368//9983 6343//9986 +f 6369//7237 6368//3386 6393//3386 +f 6368//3386 6369//7237 6344//7237 +f 6370//9987 6369//9988 6394//3389 +f 6369//9988 6370//9987 6345//3390 +f 6371//3324 6370//3391 6395//3392 +f 6370//3391 6371//3324 6346//7173 +f 6372//3393 6371//9989 6396//9989 +f 6371//9989 6372//3393 6347//3393 +f 6348//9990 6397//9991 6373//7246 +f 6397//9991 6348//9990 6372//7247 +f 6374//9992 6373//9993 6398//9994 +f 6373//9993 6374//9992 6349//7250 +f 6375//7251 6374//9995 6399//3405 +f 6374//9995 6375//7251 6350//7253 +f 6400//3407 6351//9996 6375//3409 +f 6351//9996 6400//3407 6376//3410 +f 6377//9997 6376//3412 6400//3413 +f 6376//3412 6377//9997 6353//3414 +f 6378//3415 6377//9998 6401//9999 +f 6377//9998 6378//3415 6354//3418 +f 6379//10000 6378//10001 6402//10002 +f 6378//10001 6379//10000 6355//10003 +f 6356//10004 6403//10005 6380//3425 +f 6403//10005 6356//10004 6379//3426 +f 6381//10006 6380//10007 6404//3429 +f 6380//10007 6381//10006 6357//3430 +f 6382//10008 6381//10009 6405//10010 +f 6381//10009 6382//10008 6358//10011 +f 6383//7274 6382//7213 6406//10012 +f 6382//7213 6383//7274 6359//7212 +f 6384//10013 6383//10014 6407//3438 +f 6383//10014 6384//10013 6360//10015 +f 6385//10016 6384//10017 6408//7281 +f 6384//10017 6385//10016 6361//10018 +f 6362//10019 6409//10020 6386//3446 +f 
6409//10020 6362//10019 6385//3447 +f 6387//10021 6386//3449 6410//10022 +f 6386//3449 6387//10021 6363//3449 +f 6389//3374 6387//9980 6411//3374 +f 6387//9980 6389//3374 6365//9978 +f 6412//3452 6389//3452 6413//3452 +f 6389//3452 6412//3452 6388//3452 +f 6414//10023 6364//3376 6388//10023 +f 6364//3376 6414//10023 6390//9981 +f 6415//3454 6366//10024 6390//3454 +f 6366//10024 6415//3454 6391//10024 +f 6392//7292 6391//7293 6416//3458 +f 6391//7293 6392//7292 6367//3459 +f 6393//7296 6392//3461 6417//3462 +f 6392//3461 6393//7296 6368//10025 +f 6394//7298 6393//3465 6418//7300 +f 6393//3465 6394//7298 6369//10026 +f 6395//3392 6394//10027 6419//7301 +f 6394//10027 6395//3392 6370//3391 +f 6396//10028 6395//10029 6420//10030 +f 6395//10029 6396//10028 6371//3473 +f 6397//3474 6396//3477 6421//7307 +f 6396//3477 6397//3474 6372//3477 +f 6398//10031 6397//10032 6422//3480 +f 6397//10032 6398//10031 6373//10033 +f 6399//3482 6398//10034 6423//3484 +f 6398//10034 6399//3482 6374//3485 +f 6400//10035 6399//10036 6424//10037 +f 6399//10036 6400//10035 6375//3489 +f 6377//10038 6424//10039 6401//10039 +f 6424//10039 6377//10038 6400//10038 +f 6378//3494 6425//3495 6402//10040 +f 6425//3495 6378//3494 6401//3497 +f 6379//3498 6426//3499 6403//3500 +f 6426//3499 6379//3498 6402//3501 +f 6380//3502 6427//10041 6404//10042 +f 6427//10041 6380//3502 6403//3505 +f 6405//3506 6404//10043 6428//10044 +f 6404//10043 6405//3506 6381//3509 +f 6406//10045 6405//10046 6429//10047 +f 6405//10046 6406//10045 6382//10048 +f 6407//10049 6406//10012 6430//7339 +f 6406//10012 6407//10049 6383//7274 +f 6408//7340 6407//3517 6431//7341 +f 6407//3517 6408//7340 6384//3519 +f 6409//3520 6408//3521 6432//3522 +f 6408//3521 6409//3520 6385//3523 +f 6410//10050 6409//3525 6433//10051 +f 6409//3525 6410//10050 6386//10052 +f 6411//7349 6410//10022 6434//3450 +f 6410//10022 6411//7349 6387//10021 +f 6413//3530 6411//3530 6435//3530 +f 6411//3530 6413//3530 6389//3530 +f 6436//3531 6412//3531 
6413//3531 +f 6412//3531 6436//3531 6437//3531 +f 6438//3533 6388//3534 6412//3535 +f 6388//3534 6438//3533 6414//3534 +f 6439//10053 6390//3537 6414//10054 +f 6390//3537 6439//10053 6415//3537 +f 6440//10055 6391//10056 6415//7358 +f 6391//10056 6440//10055 6416//10057 +f 6417//10058 6416//10059 6441//10060 +f 6416//10059 6417//10058 6392//3543 +f 6418//10061 6417//10062 6442//3546 +f 6417//10062 6418//10061 6393//10063 +f 6419//7301 6418//10064 6443//3549 +f 6418//10064 6419//7301 6394//10027 +f 6420//10065 6419//10066 6444//10067 +f 6419//10066 6420//10065 6395//10068 +f 6421//10069 6420//10070 6445//10071 +f 6420//10070 6421//10069 6396//7369 +f 6446//3558 6397//3559 6421//7371 +f 6397//3559 6446//3558 6422//3561 +f 6423//10072 6422//10073 6447//3564 +f 6422//10073 6423//10072 6398//3565 +f 6424//10074 6423//7375 6448//7375 +f 6423//7375 6424//10074 6399//3569 +f 6401//10075 6448//10076 6425//3572 +f 6448//10076 6401//10075 6424//7378 +f 6402//10077 6449//3575 6426//10078 +f 6449//3575 6402//10077 6425//10079 +f 6403//7383 6450//10080 6427//3580 +f 6450//10080 6403//7383 6426//3581 +f 6428//3582 6427//10081 6451//3584 +f 6427//10081 6428//3582 6404//3585 +f 6429//3586 6428//3587 6452//3586 +f 6428//3587 6429//3586 6405//3587 +f 6430//10082 6429//10083 6453//10084 +f 6429//10083 6430//10082 6406//10085 +f 6431//3594 6430//7339 6454//10086 +f 6430//7339 6431//3594 6407//10049 +f 6432//10087 6431//7396 6455//3598 +f 6431//7396 6432//10087 6408//7397 +f 6433//3600 6432//3601 6456//7400 +f 6432//3601 6433//3600 6409//3603 +f 6434//10088 6433//10051 6457//7402 +f 6433//10051 6434//10088 6410//10050 +f 6435//3606 6434//3607 6458//3606 +f 6434//3607 6435//3606 6411//3607 +f 6459//3608 6413//3608 6435//3608 +f 6413//3608 6459//3608 6436//3608 +f 6460//3452 6436//3452 6461//3452 +f 6436//3452 6460//3452 6437//3452 +f 6437//3534 6438//3533 6412//3535 +f 6438//3533 6437//3534 6462//3610 +f 6463//3611 6414//3611 6438//3611 +f 6414//3611 6463//3611 6439//3611 +f 6464//10089 
6415//10090 6439//10091 +f 6415//10090 6464//10089 6440//7410 +f 6441//10092 6440//10093 6465//7413 +f 6440//10093 6441//10092 6416//3619 +f 6442//3620 6441//3621 6466//3621 +f 6441//3621 6442//3620 6417//3622 +f 6443//3549 6442//3623 6467//7417 +f 6442//3623 6443//3549 6418//10064 +f 6444//7419 6443//7419 6468//3627 +f 6443//7419 6444//7419 6419//10094 +f 6445//10095 6444//10096 6469//10097 +f 6444//10096 6445//10095 6420//10098 +f 6470//10099 6421//10100 6445//10101 +f 6421//10100 6470//10099 6446//7426 +f 6471//3638 6422//3638 6446//3637 +f 6422//3638 6471//3638 6447//3639 +f 6448//3640 6447//3641 6472//10102 +f 6447//3641 6448//3640 6423//3640 +f 6425//3644 6472//3644 6449//3644 +f 6472//3644 6425//3644 6448//3645 +f 6426//3646 6473//7428 6450//10103 +f 6473//7428 6426//3646 6449//3648 +f 6427//3649 6474//10104 6451//10105 +f 6474//10104 6427//3649 6450//3649 +f 6452//10106 6451//3654 6475//3655 +f 6451//3654 6452//10106 6428//10107 +f 6453//10108 6452//3658 6476//3659 +f 6452//3658 6453//10108 6429//3660 +f 6454//3661 6453//3661 6477//3661 +f 6453//3661 6454//3661 6430//3661 +f 6455//3663 6454//10086 6478//10109 +f 6454//10086 6455//3663 6431//3594 +f 6456//10110 6455//3666 6479//7441 +f 6455//3666 6456//10110 6432//3668 +f 6457//7442 6456//7400 6480//10111 +f 6456//7400 6457//7442 6433//3600 +f 6458//3671 6457//3671 6481//3671 +f 6457//3671 6458//3671 6434//3671 +f 6482//7443 6435//7443 6458//7443 +f 6435//7443 6482//7443 6459//7443 +f 6461//3530 6459//3530 6483//3530 +f 6459//3530 6461//3530 6436//3530 +f 6484//3674 6461//3674 6485//3674 +f 6461//3674 6484//3674 6460//3674 +f 6486//10112 6437//3534 6460//10113 +f 6437//3534 6486//10112 6462//3610 +f 6462//10114 6463//10115 6438//3678 +f 6463//10115 6462//10114 6487//3679 +f 6488//7452 6439//7452 6463//7452 +f 6439//7452 6488//7452 6464//7452 +f 6440//7455 6489//7455 6465//7455 +f 6489//7456 6440//7456 6464//7456 +f 6466//3686 6465//3687 6490//3688 +f 6465//3687 6466//3686 6441//3689 +f 6467//7417 6466//7417 
6491//7417 +f 6466//7417 6467//7417 6442//3623 +f 6468//3691 6467//10116 6492//3693 +f 6467//10116 6468//3691 6443//10117 +f 6469//10118 6468//7467 6493//7467 +f 6468//7467 6469//10118 6444//10118 +f 6494//10119 6445//10120 6469//3701 +f 6445//10120 6494//10119 6470//10121 +f 6495//10122 6446//10123 6470//7473 +f 6446//10123 6495//10122 6471//3703 +f 6472//3706 6471//3706 6496//3706 +f 6471//3706 6472//3706 6447//3706 +f 6449//10124 6496//10125 6473//3709 +f 6496//10125 6449//10124 6472//3710 +f 6450//10126 6497//10127 6474//3713 +f 6497//10127 6450//10126 6473//10128 +f 6451//10129 6498//3717 6475//3717 +f 6498//3717 6451//10129 6474//3718 +f 6476//10130 6475//10131 6499//10132 +f 6475//10131 6476//10130 6452//3722 +f 6477//3725 6476//10133 6500//3725 +f 6476//10133 6477//3725 6453//7490 +f 6478//3726 6477//3727 6501//3726 +f 6477//3727 6478//3726 6454//3727 +f 6479//3728 6478//3729 6502//7494 +f 6478//3729 6479//3728 6455//3731 +f 6480//3732 6479//7441 6503//10134 +f 6479//7441 6480//3732 6456//10110 +f 6481//3734 6480//3734 6504//3734 +f 6480//3734 6481//3734 6457//3734 +f 6482//3736 6481//3736 6505//3736 +f 6481//3736 6482//3736 6458//3736 +f 6483//3606 6482//3606 6506//3606 +f 6482//3606 6483//3606 6459//3606 +f 6485//3738 6483//3739 6507//3738 +f 6483//3739 6485//3738 6461//3739 +f 6508//3452 6484//3452 6485//3452 +f 6484//3452 6508//3452 6509//3452 +f 6510//10135 6460//10113 6484//3741 +f 6460//10113 6510//10135 6486//10112 +f 6511//10136 6462//10136 6486//10136 +f 6462//10136 6511//10136 6487//10136 +f 6487//10137 6488//10138 6463//10139 +f 6488//10138 6487//10137 6512//3744 +f 6513//7501 6464//7502 6488//7503 +f 6464//7502 6513//7501 6489//7504 +f 6465//3687 6514//3749 6490//3688 +f 6514//3749 6465//3687 6489//3750 +f 6491//10140 6490//7506 6515//7507 +f 6490//7506 6491//10140 6466//3754 +f 6492//3755 6491//3755 6516//3756 +f 6491//3755 6492//3755 6467//3755 +f 6493//7508 6492//7509 6517//10141 +f 6492//7509 6493//7508 6468//7511 +f 6518//10142 6469//10143 
6493//10144 +f 6469//10143 6518//10142 6494//3762 +f 6519//10145 6470//10146 6494//10147 +f 6470//10146 6519//10145 6495//10148 +f 6520//10149 6471//10150 6495//7519 +f 6471//10150 6520//10149 6496//3769 +f 6473//3770 6520//10151 6497//3772 +f 6520//10151 6473//3770 6496//10152 +f 6474//3774 6521//7529 6498//7529 +f 6521//7529 6474//3774 6497//3774 +f 6499//10153 6498//10154 6522//10155 +f 6498//10154 6499//10153 6475//3779 +f 6500//10156 6499//10157 6523//3782 +f 6499//10157 6500//10156 6476//10158 +f 6501//3784 6500//10159 6524//3784 +f 6500//10159 6501//3784 6477//3785 +f 6502//10160 6501//3789 6525//7540 +f 6501//3789 6502//10160 6478//3789 +f 6503//3790 6502//7494 6526//3791 +f 6502//7494 6503//3790 6479//3728 +f 6504//3792 6503//3792 6527//3792 +f 6503//3792 6504//3792 6480//3792 +f 6505//3793 6504//3793 6528//3793 +f 6504//3793 6505//3793 6481//3793 +f 6506//3671 6505//3671 6529//3671 +f 6505//3671 6506//3671 6482//3671 +f 6507//10161 6506//10161 6530//10161 +f 6506//10161 6507//10161 6483//10161 +f 6531//10162 6485//3797 6507//3797 +f 6485//3797 6531//10162 6508//3530 +f 6532//3798 6509//3798 6508//3798 +f 6509//3798 6532//3798 6533//3798 +f 6509//3534 6510//10135 6484//3741 +f 6510//10135 6509//3534 6534//3534 +f 6535//10163 6486//10164 6510//10165 +f 6486//10164 6535//10163 6511//10166 +f 6536//3680 6487//3680 6511//3680 +f 6487//3680 6536//3680 6512//3680 +f 6512//10167 6513//10168 6488//7551 +f 6513//10168 6512//10167 6537//3805 +f 6538//3806 6489//3807 6513//3808 +f 6489//3807 6538//3806 6514//3809 +f 6490//7506 6539//10169 6515//7507 +f 6539//10169 6490//7506 6514//7554 +f 6516//10170 6515//10171 6540//7557 +f 6515//10171 6516//10170 6491//7558 +f 6517//10172 6516//10173 6541//10174 +f 6516//10173 6517//10172 6492//7560 +f 6542//10175 6493//10176 6517//7563 +f 6493//10176 6542//10175 6518//10177 +f 6543//3824 6494//7565 6518//3826 +f 6494//7565 6543//3824 6519//3825 +f 6544//10178 6495//10179 6519//10180 +f 6495//10179 6544//10178 6520//3830 +f 
6521//10181 6520//3832 6544//3833 +f 6520//3832 6521//10181 6497//3834 +f 6522//3835 6521//3836 6545//3835 +f 6521//3836 6522//3835 6498//3836 +f 6523//3837 6522//3837 6546//3837 +f 6522//3837 6523//3837 6499//10182 +f 6524//3840 6523//3841 6547//3842 +f 6523//3841 6524//3840 6500//3841 +f 6525//10183 6524//10184 6548//10183 +f 6524//10184 6525//10183 6501//3846 +f 6526//7581 6525//7540 6549//7582 +f 6525//7540 6526//7581 6502//10160 +f 6503//3849 6550//3849 6527//3849 +f 6550//3849 6503//3849 6526//3849 +f 6528//3850 6527//3850 6551//3850 +f 6527//3850 6528//3850 6504//3850 +f 6505//3734 6552//3734 6529//3734 +f 6552//3734 6505//3734 6528//3734 +f 6553//3851 6506//10185 6529//3853 +f 6506//10185 6553//3851 6530//3854 +f 6554//3607 6507//3606 6530//3606 +f 6507//3606 6554//3607 6531//3607 +f 6555//3855 6508//3855 6531//3855 +f 6508//3855 6555//3855 6532//3855 +f 6556//3857 6532//3858 6557//3859 +f 6532//3858 6556//3857 6533//3860 +f 6533//3861 6534//10186 6509//10186 +f 6534//10186 6533//3861 6558//3861 +f 6534//3611 6535//3611 6510//3611 +f 6535//3611 6534//3611 6559//3611 +f 6560//10187 6511//10188 6535//10189 +f 6511//10188 6560//10187 6536//3866 +f 6537//3748 6536//3748 6561//3748 +f 6536//3748 6537//3748 6512//3748 +f 6538//10190 6537//10191 6562//10192 +f 6537//10191 6538//10190 6513//3870 +f 6563//7601 6514//7602 6538//7603 +f 6514//7602 6563//7601 6539//7604 +f 6515//10171 6564//7605 6540//7557 +f 6564//7605 6515//10171 6539//7606 +f 6541//7608 6540//10193 6565//3879 +f 6540//10193 6541//7608 6516//10194 +f 6566//10195 6517//3882 6541//7610 +f 6517//3882 6566//10195 6542//7611 +f 6567//10196 6518//10197 6542//3887 +f 6518//10197 6567//10196 6543//3888 +f 6568//3889 6519//10198 6543//10199 +f 6519//10198 6568//3889 6544//3892 +f 6545//3893 6544//10200 6568//3895 +f 6544//10200 6545//3893 6521//3896 +f 6546//3897 6545//10201 6569//10202 +f 6545//10201 6546//3897 6522//10201 +f 6547//10203 6546//10204 6570//3901 +f 6546//10204 6547//10203 6523//3902 +f 
6524//7628 6571//3904 6548//3904 +f 6571//3904 6524//7628 6547//7628 +f 6549//10205 6548//10206 6572//10207 +f 6548//10206 6549//10205 6525//10208 +f 6526//7632 6573//7632 6550//7632 +f 6573//7632 6526//7632 6549//7632 +f 6551//3910 6550//3910 6574//3910 +f 6550//3910 6551//3910 6527//3910 +f 6528//3792 6575//3792 6552//3792 +f 6575//3792 6528//3792 6551//3792 +f 6576//3911 6529//3912 6552//10209 +f 6529//3912 6576//3911 6553//3914 +f 6577//3915 6530//10210 6553//3917 +f 6530//10210 6577//3915 6554//3672 +f 6578//3918 6531//3919 6554//3919 +f 6531//3919 6578//3918 6555//3918 +f 6557//10211 6555//3921 6579//3922 +f 6555//3921 6557//10211 6532//3923 +f 6580//3924 6557//3924 6581//3924 +f 6557//3924 6580//3924 6556//3924 +f 6582//7642 6533//3926 6556//7644 +f 6533//3926 6582//7642 6558//3928 +f 6558//3929 6559//3930 6534//3930 +f 6559//3930 6558//3929 6583//3929 +f 6559//7648 6560//7649 6535//7650 +f 6560//7649 6559//7648 6584//7452 +f 6585//10212 6536//10213 6560//7651 +f 6536//10213 6585//10212 6561//3934 +f 6562//3935 6561//3935 6586//3935 +f 6561//3935 6562//3935 6537//3935 +f 6563//10214 6562//7652 6587//7653 +f 6562//7652 6563//10214 6538//10215 +f 6564//7655 6563//7656 6588//7657 +f 6563//7656 6564//7655 6539//7658 +f 6540//10193 6589//7659 6565//3879 +f 6589//7659 6540//10193 6564//7659 +f 6566//10216 6565//10217 6590//10218 +f 6565//10217 6566//10216 6541//7663 +f 6591//3949 6542//3949 6566//3949 +f 6542//3949 6591//3949 6567//3949 +f 6592//10219 6543//3952 6567//3953 +f 6543//3952 6592//10219 6568//3954 +f 6569//3955 6568//3956 6592//3955 +f 6568//3956 6569//3955 6545//3957 +f 6570//10220 6569//10221 6593//3960 +f 6569//10221 6570//10220 6546//3961 +f 6547//3962 6594//10222 6571//10222 +f 6594//10222 6547//3962 6570//3962 +f 6548//10223 6595//3965 6572//3966 +f 6595//3965 6548//10223 6571//3967 +f 6549//10224 6596//10224 6573//10224 +f 6596//10224 6549//10224 6572//10224 +f 6574//7680 6573//7681 6597//7680 +f 6573//7681 6574//7680 6550//7681 +f 6575//3849 
6574//3849 6598//3849 +f 6574//3849 6575//3849 6551//3849 +f 6599//3970 6552//3970 6575//3970 +f 6552//3970 6599//3970 6576//3970 +f 6577//3971 6576//7686 6600//3734 +f 6576//7686 6577//3971 6553//3973 +f 6601//7688 6554//3975 6577//3975 +f 6554//3975 6601//7688 6578//7688 +f 6579//10225 6578//10226 6602//3979 +f 6578//10226 6579//10225 6555//3980 +f 6581//3981 6579//10227 6603//3983 +f 6579//10227 6581//3981 6557//3981 +f 6604//7695 6581//7696 6605//7697 +f 6581//7696 6604//7695 6580//7695 +f 6606//7698 6556//7699 6580//7698 +f 6556//7699 6606//7698 6582//7699 +f 6607//10228 6558//10229 6582//10230 +f 6558//10229 6607//10228 6583//3993 +f 6583//7705 6584//7704 6559//7704 +f 6584//7704 6583//7705 6608//7705 +f 6609//7504 6560//7504 6584//7504 +f 6560//7504 6609//7504 6585//7504 +f 6561//3997 6610//3997 6586//3997 +f 6610//3997 6561//3997 6585//3997 +f 6587//7604 6586//7604 6611//7604 +f 6586//7604 6587//7604 6562//7604 +f 6588//10231 6587//7708 6612//7708 +f 6587//7708 6588//10231 6563//7706 +f 6589//4002 6588//4002 6613//4002 +f 6588//4002 6589//4002 6564//4002 +f 6590//10218 6589//10232 6614//10232 +f 6589//10232 6590//10218 6565//10217 +f 6591//7713 6590//10233 6615//7712 +f 6590//10233 6591//7713 6566//7713 +f 6616//4007 6567//10234 6591//4009 +f 6567//10234 6616//4007 6592//4010 +f 6593//10235 6592//10236 6616//4013 +f 6592//10236 6593//10235 6569//7715 +f 6570//10237 6617//10238 6594//4017 +f 6617//10238 6570//10237 6593//4016 +f 6571//4018 6618//10239 6595//10239 +f 6618//10239 6571//4018 6594//4018 +f 6572//4020 6619//4021 6596//4021 +f 6619//4021 6572//4020 6595//4020 +f 6596//10240 6597//10240 6573//10240 +f 6597//10240 6596//10240 6620//10240 +f 6598//7724 6597//7724 6621//7724 +f 6597//7724 6598//7724 6574//7724 +f 6622//4024 6575//4025 6598//4025 +f 6575//4025 6622//4024 6599//4024 +f 6600//3792 6599//3792 6623//3792 +f 6599//3792 6600//3792 6576//3792 +f 6601//7726 6600//4027 6624//7727 +f 6600//4027 6601//7726 6577//4029 +f 6602//10241 6601//4031 
6625//4032 +f 6601//4031 6602//10241 6578//4033 +f 6603//4034 6602//10242 6626//4035 +f 6602//10242 6603//4034 6579//4037 +f 6605//4038 6603//4039 6627//4040 +f 6603//4039 6605//4038 6581//10243 +f 6628//10244 6605//10245 6629//4044 +f 6605//10245 6628//10244 6604//10246 +f 6630//10247 6580//10248 6604//4049 +f 6580//10248 6630//10247 6606//4048 +f 6631//7741 6582//10249 6606//7742 +f 6582//10249 6631//7741 6607//10250 +f 6583//10251 6632//10252 6608//10253 +f 6632//10252 6583//10251 6607//10254 +f 6584//10255 6633//7747 6609//7748 +f 6633//7747 6584//10255 6608//7749 +f 6634//4060 6585//4061 6609//4060 +f 6585//4061 6634//4060 6610//4061 +f 6586//7750 6635//7751 6611//7750 +f 6635//7751 6586//7750 6610//7751 +f 6636//7658 6587//7658 6611//7658 +f 6587//7658 6636//7658 6612//7658 +f 6613//7752 6612//10256 6637//7753 +f 6612//10256 6613//7752 6588//4067 +f 6614//7754 6613//7755 6638//7756 +f 6613//7755 6614//7754 6589//7757 +f 6615//7712 6614//4072 6639//4072 +f 6614//4072 6615//7712 6590//10233 +f 6616//10257 6615//10258 6640//10259 +f 6615//10258 6616//10257 6591//4076 +f 6593//10260 6640//4078 6617//4079 +f 6640//4078 6593//10260 6616//4078 +f 6594//4017 6641//10261 6618//10261 +f 6641//10261 6594//4017 6617//10238 +f 6595//4081 6642//4082 6619//4082 +f 6642//4082 6595//4081 6618//4081 +f 6619//4084 6620//4084 6596//4084 +f 6620//4084 6619//4084 6643//4084 +f 6621//10224 6620//10224 6644//10224 +f 6620//10224 6621//10224 6597//10224 +f 6645//7768 6598//7768 6621//7768 +f 6598//7768 6645//7768 6622//7768 +f 6623//4087 6622//4087 6646//4087 +f 6622//4087 6623//4087 6599//4087 +f 6624//7769 6623//10262 6647//10263 +f 6623//10262 6624//7769 6600//4091 +f 6648//4092 6601//7772 6624//4094 +f 6601//7772 6648//4092 6625//4095 +f 6626//10264 6625//4097 6649//4098 +f 6625//4097 6626//10264 6602//4099 +f 6627//10265 6626//10266 6650//7777 +f 6626//10266 6627//10265 6603//4103 +f 6629//10267 6627//4104 6651//7781 +f 6627//4104 6629//10267 6605//4107 +f 6652//10268 
6628//10269 6629//4110 +f 6628//10269 6652//10268 6653//10270 +f 6628//10271 6630//4111 6604//10272 +f 6630//4111 6628//10271 6654//4114 +f 6655//10273 6606//7790 6630//10273 +f 6606//7790 6655//10273 6631//7790 +f 6607//10274 6656//10275 6632//10276 +f 6656//10275 6607//10274 6631//7794 +f 6608//4123 6657//4124 6633//10277 +f 6657//4124 6608//4123 6632//4126 +f 6609//10278 6658//7798 6634//4129 +f 6658//7798 6609//10278 6633//4130 +f 6635//7799 6634//7799 6659//7799 +f 6634//7799 6635//7799 6610//7799 +f 6611//7800 6660//7800 6636//7800 +f 6660//7800 6611//7800 6635//7800 +f 6661//4002 6612//4002 6636//4002 +f 6612//4002 6661//4002 6637//4002 +f 6638//10279 6637//10280 6662//7803 +f 6637//10280 6638//10279 6613//7804 +f 6639//4137 6638//4138 6663//4138 +f 6638//4138 6639//4137 6614//4137 +f 6640//10259 6639//10259 6664//10259 +f 6639//10259 6640//10259 6615//10258 +f 6617//4079 6664//4079 6641//4079 +f 6664//4079 6617//4079 6640//4078 +f 6618//4140 6665//4141 6642//4141 +f 6665//4141 6618//4140 6641//4140 +f 6642//4142 6643//4142 6619//4142 +f 6643//4142 6642//4142 6666//4142 +f 6643//4020 6644//4020 6620//4020 +f 6644//4020 6643//4020 6667//4020 +f 6621//10281 6668//10281 6645//10281 +f 6668//10281 6621//10281 6644//10281 +f 6622//7724 6669//7632 6646//7632 +f 6669//7632 6622//7724 6645//7724 +f 6647//4145 6646//10282 6670//10283 +f 6646//10282 6647//4145 6623//4148 +f 6671//4149 6624//7808 6647//7809 +f 6624//7808 6671//4149 6648//4152 +f 6672//7810 6625//10284 6648//7811 +f 6625//10284 6672//7810 6649//4156 +f 6650//10285 6649//7813 6673//10286 +f 6649//7813 6650//10285 6626//7813 +f 6651//10287 6650//10288 6674//10289 +f 6650//10288 6651//10287 6627//7814 +f 6651//10290 6652//10291 6629//7817 +f 6652//10291 6651//10290 6675//7818 +f 6676//10292 6653//10293 6652//10294 +f 6653//10293 6676//10292 6677//10295 +f 6653//10296 6654//10297 6628//10298 +f 6654//10297 6653//10296 6678//7822 +f 6654//10299 6655//10300 6630//10301 +f 6655//10300 6654//10299 6679//7823 +f 
6631//7826 6680//10302 6656//7828 +f 6680//10302 6631//7826 6655//7829 +f 6632//10303 6681//10304 6657//10305 +f 6681//10304 6632//10303 6656//7833 +f 6633//10306 6682//4186 6658//7835 +f 6682//4186 6633//10306 6657//4188 +f 6634//7837 6683//7838 6659//7837 +f 6683//7838 6634//7837 6658//7838 +f 6660//7840 6659//7840 6684//7840 +f 6659//7840 6660//7840 6635//7840 +f 6636//7841 6685//7842 6661//7843 +f 6685//7842 6636//7841 6660//10307 +f 6637//7757 6686//7757 6662//7757 +f 6686//7757 6637//7757 6661//7757 +f 6638//4198 6687//4198 6663//4198 +f 6687//4198 6638//4198 6662//4198 +f 6664//10308 6663//10309 6688//10309 +f 6663//10309 6664//10308 6639//10308 +f 6641//4202 6688//4202 6665//4202 +f 6688//4202 6641//4202 6664//4202 +f 6665//4141 6666//10310 6642//4141 +f 6666//10310 6665//4141 6689//10311 +f 6666//4081 6667//4081 6643//4081 +f 6667//4081 6666//4081 6690//4081 +f 6644//4205 6691//4205 6668//4205 +f 6691//4205 6644//4205 6667//4205 +f 6645//10312 6692//10224 6669//10224 +f 6692//10224 6645//10312 6668//10312 +f 6670//10313 6669//10314 6693//7849 +f 6669//10314 6670//10313 6646//10315 +f 6694//4211 6647//10316 6670//10316 +f 6647//10316 6694//4211 6671//4211 +f 6695//4213 6648//10317 6671//7853 +f 6648//10317 6695//4213 6672//4216 +f 6696//4217 6649//4218 6672//10318 +f 6649//4218 6696//4217 6673//7857 +f 6674//10319 6673//10320 6697//4223 +f 6673//10320 6674//10319 6650//10321 +f 6674//10322 6675//10323 6651//10322 +f 6675//10323 6674//10322 6698//10324 +f 6699//4229 6652//10325 6675//4231 +f 6652//10325 6699//4229 6676//4232 +f 6700//4233 6677//4235 6676//4235 +f 6677//4235 6700//4233 6701//4233 +f 6677//10326 6678//10327 6653//10328 +f 6678//10327 6677//10326 6702//4236 +f 6678//4239 6679//4240 6654//4241 +f 6679//4240 6678//4239 6703//4242 +f 6655//7875 6704//7876 6680//7875 +f 6704//7876 6655//7875 6679//7876 +f 6656//7879 6705//7879 6681//7879 +f 6705//7879 6656//7879 6680//7879 +f 6657//4247 6706//4248 6682//4249 +f 6706//4248 6657//4247 6681//4250 +f 
6658//4251 6707//4251 6683//4251 +f 6707//4251 6658//4251 6682//4322 +f 6659//10329 6708//10330 6684//10331 +f 6708//10330 6659//10329 6683//7887 +f 6685//4258 6684//4259 6709//7888 +f 6684//4259 6685//4258 6660//4261 +f 6661//7889 6710//10332 6686//7889 +f 6710//10332 6661//7889 6685//10332 +f 6662//4264 6711//4264 6687//4264 +f 6711//4264 6662//4264 6686//4264 +f 6663//10333 6712//10334 6688//10333 +f 6712//10334 6663//10333 6687//10334 +f 6688//4267 6689//4267 6665//4267 +f 6689//4267 6688//4267 6712//4267 +f 6689//10311 6690//4270 6666//10310 +f 6690//4270 6689//10311 6713//4270 +f 6667//4271 6714//4271 6691//4271 +f 6714//4271 6667//4271 6690//4271 +f 6691//4021 6692//4020 6668//4021 +f 6692//4020 6691//4021 6715//4020 +f 6693//10335 6692//10336 6716//10337 +f 6692//10336 6693//10335 6669//10338 +f 6717//10339 6670//7897 6693//4278 +f 6670//7897 6717//10339 6694//10339 +f 6718//4279 6671//10340 6694//4281 +f 6671//10340 6718//4279 6695//4282 +f 6719//4283 6672//4284 6695//4284 +f 6672//4284 6719//4283 6696//4285 +f 6720//10341 6673//10342 6696//4288 +f 6673//10342 6720//10341 6697//10343 +f 6697//10344 6698//10345 6674//4292 +f 6698//10345 6697//10344 6721//4291 +f 6698//10346 6699//10347 6675//4295 +f 6699//10347 6698//10346 6722//4296 +f 6723//4297 6676//4298 6699//10348 +f 6676//4298 6723//4297 6700//4297 +f 6724//4301 6701//4300 6700//7919 +f 6701//4300 6724//4301 6725//7920 +f 6701//10349 6702//10350 6677//10351 +f 6702//10350 6701//10349 6726//7924 +f 6702//10352 6703//4307 6678//4308 +f 6703//4307 6702//10352 6727//10352 +f 6679//7928 6728//10353 6704//10354 +f 6728//10353 6679//7928 6703//4313 +f 6680//10355 6729//10356 6705//10355 +f 6729//10356 6680//10355 6704//10356 +f 6681//10357 6730//10358 6706//4320 +f 6730//10358 6681//10357 6705//10359 +f 6682//4322 6731//4322 6707//4251 +f 6731//4322 6682//4322 6706//4323 +f 6683//10360 6732//4325 6708//4326 +f 6732//4325 6683//10360 6707//4325 +f 6684//7933 6733//7934 6709//4330 +f 6733//7934 6684//7933 
6708//4331 +f 6685//7755 6734//10361 6710//7756 +f 6734//10361 6685//7755 6709//7757 +f 6711//4334 6710//4334 6735//4334 +f 6710//4334 6711//4334 6686//4334 +f 6687//10308 6736//10308 6712//10308 +f 6736//10308 6687//10308 6711//10308 +f 6712//4202 6713//4202 6689//4202 +f 6713//4202 6712//4202 6736//4202 +f 6690//4270 6737//4270 6714//4270 +f 6737//4270 6690//4270 6713//4270 +f 6714//4081 6715//4081 6691//4081 +f 6715//4081 6714//4081 6738//4081 +f 6715//4336 6716//10362 6692//4336 +f 6716//10362 6715//4336 6739//10362 +f 6740//10363 6693//4339 6716//7938 +f 6693//4339 6740//10363 6717//10364 +f 6741//4342 6694//4343 6717//4344 +f 6694//4343 6741//4342 6718//4345 +f 6742//10365 6695//10366 6718//4348 +f 6695//10366 6742//10365 6719//10367 +f 6743//10368 6696//10369 6719//10370 +f 6696//10369 6743//10368 6720//4353 +f 6744//4354 6697//4355 6720//4356 +f 6697//4355 6744//4354 6721//4357 +f 6721//4358 6722//10371 6698//4360 +f 6722//10371 6721//4358 6745//10372 +f 6746//10373 6699//4363 6722//4364 +f 6699//4363 6746//10373 6723//4365 +f 6747//4366 6700//4367 6723//4367 +f 6700//4367 6747//4366 6724//4366 +f 6748//4369 6725//4370 6724//4370 +f 6725//4370 6748//4369 6749//4369 +f 6725//7960 6726//7960 6701//7960 +f 6726//7960 6725//7960 6750//7960 +f 6726//10374 6727//10375 6702//10376 +f 6727//10375 6726//10374 6751//10377 +f 6727//4379 6728//4380 6703//4381 +f 6728//4380 6727//4379 6752//4382 +f 6704//7970 6753//10378 6729//10379 +f 6753//10378 6704//7970 6728//10380 +f 6705//4387 6754//4388 6730//4387 +f 6754//4388 6705//4387 6729//4388 +f 6706//4389 6755//4390 6731//4391 +f 6755//4390 6706//4389 6730//4392 +f 6707//4396 6756//10381 6732//10382 +f 6756//10381 6707//4396 6731//7978 +f 6708//4397 6757//10383 6733//4398 +f 6757//10383 6708//4397 6732//4399 +f 6709//10384 6758//7981 6734//10384 +f 6758//7981 6709//10384 6733//7981 +f 6710//4138 6759//4137 6735//4138 +f 6759//4137 6710//4138 6734//4137 +f 6736//10385 6735//10386 6760//10387 +f 6735//10386 6736//10385 
6711//10388 +f 6713//10389 6760//10390 6737//10391 +f 6760//10390 6713//10389 6736//4408 +f 6737//4270 6738//4140 6714//4270 +f 6738//4140 6737//4270 6761//4140 +f 6738//10392 6739//10392 6715//4410 +f 6739//10392 6738//10392 6762//10392 +f 6763//4412 6716//4413 6739//4413 +f 6716//4413 6763//4412 6740//4412 +f 6764//4415 6717//4416 6740//4417 +f 6717//4416 6764//4415 6741//4418 +f 6765//10393 6718//7992 6741//4421 +f 6718//7992 6765//10393 6742//4420 +f 6766//10394 6719//10395 6742//10396 +f 6719//10395 6766//10394 6743//4426 +f 6767//4427 6720//4428 6743//4429 +f 6720//4428 6767//4427 6744//10397 +f 6768//4431 6721//4431 6744//4431 +f 6721//4431 6768//4431 6745//4431 +f 6769//10398 6722//10399 6745//4434 +f 6722//10399 6769//10398 6746//10400 +f 6770//10401 6723//10402 6746//4438 +f 6723//10402 6770//10401 6747//4439 +f 6771//10403 6724//4441 6747//4442 +f 6724//4441 6771//10403 6748//4443 +f 6772//10404 6748//4445 6773//4446 +f 6748//4445 6772//10404 6749//4447 +f 6725//10405 6774//4449 6750//10406 +f 6774//4449 6725//10405 6749//4451 +f 6726//10407 6775//10408 6751//10409 +f 6775//10408 6726//10407 6750//10410 +f 6751//10411 6752//4457 6727//10412 +f 6752//4457 6751//10411 6776//4459 +f 6728//8018 6777//4463 6753//8018 +f 6777//4463 6728//8018 6752//4463 +f 6729//10413 6778//10414 6754//4464 +f 6778//10414 6729//10413 6753//10415 +f 6730//4468 6779//4466 6755//4468 +f 6779//4466 6730//4468 6754//4466 +f 6731//10416 6780//4471 6756//8023 +f 6780//4471 6731//10416 6755//4472 +f 6732//4473 6781//10417 6757//4473 +f 6781//10417 6732//4473 6756//8025 +f 6758//10418 6757//8027 6782//4477 +f 6757//8027 6758//10418 6733//10419 +f 6734//4479 6783//8029 6759//4481 +f 6783//8029 6734//4479 6758//10420 +f 6735//10308 6784//10308 6760//10308 +f 6784//10308 6735//10308 6759//10308 +f 6760//4202 6761//4202 6737//4202 +f 6761//4202 6760//4202 6784//4202 +f 6761//4482 6762//10421 6738//10422 +f 6762//10421 6761//4482 6785//4485 +f 6739//4486 6786//8036 6763//8036 +f 6786//8036 
6739//4486 6762//10423 +f 6787//4490 6740//10424 6763//8039 +f 6740//10424 6787//4490 6764//4492 +f 6788//4493 6741//8040 6764//4495 +f 6741//8040 6788//4493 6765//4493 +f 6789//10425 6742//10426 6765//8042 +f 6742//10426 6789//10425 6766//10425 +f 6790//4498 6743//4499 6766//4499 +f 6743//4499 6790//4498 6767//4498 +f 6791//8046 6744//10427 6767//4502 +f 6744//10427 6791//8046 6768//4503 +f 6769//4504 6768//10428 6792//4506 +f 6768//10428 6769//4504 6745//10429 +f 6793//10430 6746//10431 6769//4510 +f 6746//10431 6793//10430 6770//4511 +f 6794//4515 6747//4513 6770//4514 +f 6747//4513 6794//4515 6771//4515 +f 6773//4518 6771//10432 6795//4518 +f 6771//10432 6773//4518 6748//4517 +f 6796//4519 6773//4520 6797//4521 +f 6773//4520 6796//4519 6772//4522 +f 6798//10433 6749//4525 6772//4523 +f 6749//4525 6798//10433 6774//4525 +f 6750//4526 6799//10434 6775//4526 +f 6799//10434 6750//4526 6774//4527 +f 6751//4531 6800//4531 6776//4532 +f 6800//4531 6751//4531 6775//4533 +f 6777//4534 6776//4535 6801//4535 +f 6776//4535 6777//4534 6752//4534 +f 6753//8072 6802//10435 6778//10436 +f 6802//10435 6753//8072 6777//8073 +f 6754//4540 6803//10437 6779//4540 +f 6803//10437 6754//4540 6778//10438 +f 6755//8076 6804//8077 6780//8076 +f 6804//8077 6755//8076 6779//8077 +f 6756//8079 6805//4548 6781//8079 +f 6805//4548 6756//8079 6780//4548 +f 6782//4549 6781//4549 6806//10439 +f 6781//4549 6782//4549 6757//4549 +f 6783//4551 6782//4552 6807//4552 +f 6782//4552 6783//4551 6758//4553 +f 6759//10440 6808//10441 6784//10440 +f 6808//10441 6759//10440 6783//10441 +f 6784//4556 6785//4557 6761//4556 +f 6785//4557 6784//4556 6808//4557 +f 6762//10442 6809//10443 6786//10444 +f 6809//10443 6762//10442 6785//4561 +f 6763//8083 6810//10445 6787//8084 +f 6810//10445 6763//8083 6786//8085 +f 6811//10446 6764//10447 6787//10448 +f 6764//10447 6811//10446 6788//8089 +f 6812//8090 6765//8091 6788//8091 +f 6765//8091 6812//8090 6789//8090 +f 6813//4570 6766//4571 6789//4572 +f 6766//4571 
6813//4570 6790//4573 +f 6814//4574 6767//4575 6790//4576 +f 6767//4575 6814//4574 6791//4577 +f 6792//8099 6791//10449 6815//4580 +f 6791//10449 6792//8099 6768//10450 +f 6793//4582 6792//10451 6816//4584 +f 6792//10451 6793//4582 6769//4585 +f 6817//4586 6770//10452 6793//4588 +f 6770//10452 6817//4586 6794//4589 +f 6795//10453 6794//10454 6818//10455 +f 6794//10454 6795//10453 6771//10456 +f 6797//10457 6795//10458 6819//4595 +f 6795//10458 6797//10457 6773//4596 +f 6820//4597 6797//4598 6821//4597 +f 6797//4598 6820//4597 6796//4598 +f 6822//4599 6772//4600 6796//4601 +f 6772//4600 6822//4599 6798//4602 +f 6823//10459 6774//10460 6798//10461 +f 6774//10460 6823//10459 6799//10462 +f 6775//10463 6824//10464 6800//4609 +f 6824//10464 6775//10463 6799//10465 +f 6801//10466 6800//10467 6825//4613 +f 6800//10467 6801//10466 6776//4611 +f 6802//8129 6801//4615 6826//4616 +f 6801//4615 6802//8129 6777//4617 +f 6778//4618 6827//4618 6803//4619 +f 6827//4618 6778//4618 6802//4618 +f 6779//10468 6828//10469 6804//4622 +f 6828//10469 6779//10468 6803//8134 +f 6780//4624 6829//10470 6805//4626 +f 6829//10470 6780//4624 6804//4627 +f 6806//4628 6805//4629 6830//4629 +f 6805//4629 6806//4628 6781//4628 +f 6807//4631 6806//4631 6831//4632 +f 6806//4631 6807//4631 6782//4633 +f 6808//10471 6807//10472 6832//4635 +f 6807//10472 6808//10471 6783//10473 +f 6785//10474 6832//4638 6809//4639 +f 6832//4638 6785//10474 6808//4640 +f 6786//10475 6833//10476 6810//4643 +f 6833//10476 6786//10475 6809//10477 +f 6787//10478 6834//10479 6811//8151 +f 6834//10479 6787//10478 6810//4645 +f 6835//10480 6788//10481 6811//10482 +f 6788//10481 6835//10480 6812//10483 +f 6836//8157 6789//8156 6812//8156 +f 6789//8156 6836//8157 6813//8157 +f 6837//10484 6790//4656 6813//4657 +f 6790//4656 6837//10484 6814//4658 +f 6815//4659 6814//10485 6838//8164 +f 6814//10485 6815//4659 6791//4662 +f 6816//10486 6815//4666 6839//4665 +f 6815//4666 6816//10486 6792//4666 +f 6817//10487 6816//4670 6840//8168 +f 
6816//4670 6817//10487 6793//10488 +f 6818//10489 6817//8171 6841//8172 +f 6817//8171 6818//10489 6794//10490 +f 6819//10491 6818//10491 6842//10491 +f 6818//10491 6819//10491 6795//10492 +f 6821//4679 6819//10493 6843//4681 +f 6819//10493 6821//4679 6797//4682 +f 6844//8179 6820//4683 6821//4684 +f 6820//4683 6844//8179 6845//8179 +f 6846//8180 6796//4686 6820//4687 +f 6796//4686 6846//8180 6822//4688 +f 6847//10494 6798//8186 6822//10495 +f 6798//8186 6847//10494 6823//8186 +f 6848//10496 6799//10497 6823//10498 +f 6799//10497 6848//10496 6824//4695 +f 6825//4696 6824//4697 6849//4697 +f 6824//4697 6825//4696 6800//4696 +f 6826//4700 6825//10499 6850//10500 +f 6825//10499 6826//4700 6801//4701 +f 6827//10501 6826//4703 6851//4704 +f 6826//4703 6827//10501 6802//4705 +f 6803//10502 6852//4707 6828//4708 +f 6852//4707 6803//10502 6827//4709 +f 6804//10503 6853//10504 6829//4712 +f 6853//10504 6804//10503 6828//4713 +f 6830//10505 6829//10506 6854//10507 +f 6829//10506 6830//10505 6805//4717 +f 6831//10508 6830//10509 6855//10509 +f 6830//10509 6831//10508 6806//10508 +f 6832//4722 6831//10510 6856//4724 +f 6831//10510 6832//4722 6807//4725 +f 6809//10511 6856//4727 6833//4728 +f 6856//4727 6809//10511 6832//4729 +f 6810//10512 6857//10513 6834//4731 +f 6857//10513 6810//10512 6833//4732 +f 6834//4734 6835//10514 6811//10515 +f 6835//10514 6834//4734 6858//4736 +f 6859//10516 6812//10517 6835//10518 +f 6812//10517 6859//10516 6836//8227 +f 6860//10519 6813//10520 6836//8229 +f 6813//10520 6860//10519 6837//10521 +f 6838//10522 6837//10523 6861//10524 +f 6837//10523 6838//10522 6814//10525 +f 6839//10526 6838//8233 6862//4749 +f 6838//8233 6839//10526 6815//8234 +f 6840//10527 6839//8237 6863//8237 +f 6839//8237 6840//10527 6816//4752 +f 6841//10528 6840//10529 6864//4754 +f 6840//10529 6841//10528 6817//8240 +f 6842//10530 6841//8242 6865//8243 +f 6841//8242 6842//10530 6818//8244 +f 6843//4760 6842//10531 6866//4762 +f 6842//10531 6843//4760 6819//4763 +f 
6867//8248 6821//4766 6843//4766 +f 6821//4766 6867//8248 6844//8248 +f 6868//4770 6845//4770 6844//8249 +f 6845//4770 6868//4770 6869//4770 +f 6845//4771 6846//4772 6820//4772 +f 6846//4772 6845//4771 6870//4771 +f 6871//10532 6822//10533 6846//10534 +f 6822//10533 6871//10532 6847//4774 +f 6872//10535 6823//10536 6847//10537 +f 6823//10536 6872//10535 6848//10538 +f 6849//4781 6848//4782 6873//4782 +f 6848//4782 6849//4781 6824//4781 +f 6850//10539 6849//10540 6874//10541 +f 6849//10540 6850//10539 6825//10542 +f 6851//4787 6850//10543 6875//10544 +f 6850//10543 6851//4787 6826//4787 +f 6852//4791 6851//4790 6876//10545 +f 6851//4790 6852//4791 6827//4790 +f 6828//10546 6877//4793 6853//10546 +f 6877//4793 6828//10546 6852//4793 +f 6853//10547 6854//4794 6829//10548 +f 6854//4794 6853//10547 6878//10547 +f 6855//10549 6854//10550 6879//10551 +f 6854//10550 6855//10549 6830//10552 +f 6856//10553 6855//10554 6880//10555 +f 6855//10554 6856//10553 6831//10556 +f 6833//10557 6880//4807 6857//10558 +f 6880//4807 6833//10557 6856//10559 +f 6857//10560 6858//4810 6834//10560 +f 6858//4810 6857//10560 6881//10561 +f 6858//4812 6859//4813 6835//10562 +f 6859//4813 6858//4812 6882//10563 +f 6859//8290 6860//10564 6836//8290 +f 6860//10564 6859//8290 6883//8291 +f 6861//10565 6860//10566 6884//8292 +f 6860//10566 6861//10565 6837//10567 +f 6862//10568 6861//4825 6885//4826 +f 6861//4825 6862//10568 6838//8295 +f 6863//8296 6862//10569 6886//4830 +f 6862//10569 6863//8296 6839//8299 +f 6864//10570 6863//8301 6887//8302 +f 6863//8301 6864//10570 6840//8303 +f 6865//10571 6864//10572 6888//8305 +f 6864//10572 6865//10571 6841//4839 +f 6866//8307 6865//10573 6889//10574 +f 6865//10573 6866//8307 6842//8308 +f 6890//4844 6843//10575 6866//10576 +f 6843//10575 6890//4844 6867//4847 +f 6891//4851 6844//4851 6867//4851 +f 6844//4851 6891//4851 6868//4851 +f 6892//4852 6869//4853 6868//4854 +f 6869//4853 6892//4852 6893//4855 +f 6869//4856 6870//10577 6845//4858 +f 6870//10577 
6869//4856 6894//4859 +f 6870//8315 6871//8316 6846//8317 +f 6871//8316 6870//8315 6895//4863 +f 6896//10578 6847//10579 6871//8319 +f 6847//10579 6896//10578 6872//10580 +f 6873//10581 6872//4869 6897//4870 +f 6872//4869 6873//10581 6848//4871 +f 6874//10582 6873//10583 6898//10584 +f 6873//10583 6874//10582 6849//4875 +f 6875//10585 6874//10586 6899//4878 +f 6874//10586 6875//10585 6850//10587 +f 6876//4880 6875//4881 6900//4882 +f 6875//4881 6876//4880 6851//10588 +f 6877//4884 6876//4885 6901//4885 +f 6876//4885 6877//4884 6852//4884 +f 6877//4886 6878//10589 6853//10589 +f 6878//10589 6877//4886 6902//4886 +f 6878//4891 6879//4890 6854//4891 +f 6879//4890 6878//4891 6903//10590 +f 6880//10591 6879//10592 6904//4894 +f 6879//10592 6880//10591 6855//10593 +f 6857//10594 6904//4898 6881//10595 +f 6904//4898 6857//10594 6880//10596 +f 6881//10597 6882//10598 6858//4903 +f 6882//10598 6881//10597 6905//4904 +f 6882//10599 6883//10600 6859//10601 +f 6883//10600 6882//10599 6906//10602 +f 6883//10603 6884//8356 6860//10604 +f 6884//8356 6883//10603 6907//8358 +f 6885//10605 6884//10606 6908//8361 +f 6884//10606 6885//10605 6861//8362 +f 6886//8363 6885//10607 6909//4916 +f 6885//10607 6886//8363 6862//4917 +f 6887//4918 6886//8367 6910//10608 +f 6886//8367 6887//4918 6863//4921 +f 6888//10609 6887//10610 6911//4922 +f 6887//10610 6888//10609 6864//8373 +f 6889//10611 6888//8377 6912//8376 +f 6888//8377 6889//10611 6865//8377 +f 6913//10612 6866//4927 6889//10613 +f 6866//4927 6913//10612 6890//4929 +f 6914//4930 6867//10614 6890//4932 +f 6867//10614 6914//4930 6891//4933 +f 6915//4934 6868//4935 6891//8381 +f 6868//4935 6915//4934 6892//4937 +f 6916//4938 6893//10615 6892//10615 +f 6893//10615 6916//4938 6917//10616 +f 6893//4940 6894//10617 6869//4942 +f 6894//10617 6893//4940 6918//4943 +f 6894//4944 6895//10618 6870//4946 +f 6895//10618 6894//4944 6919//4947 +f 6895//8390 6896//4949 6871//10619 +f 6896//4949 6895//8390 6920//4951 +f 6897//10620 6896//10621 
6921//8395 +f 6896//10621 6897//10620 6872//4955 +f 6898//8397 6897//10622 6922//8398 +f 6897//10622 6898//8397 6873//10623 +f 6899//10624 6898//4959 6923//8403 +f 6898//4959 6899//10624 6874//4961 +f 6900//8406 6899//4963 6924//10625 +f 6899//4963 6900//8406 6875//8406 +f 6901//10626 6900//10627 6925//4967 +f 6900//10627 6901//10626 6876//4968 +f 6877//10628 6926//10629 6902//10630 +f 6926//10629 6877//10628 6901//4971 +f 6902//4975 6903//4973 6878//4974 +f 6903//4973 6902//4975 6927//4975 +f 6903//4976 6904//10631 6879//4978 +f 6904//10631 6903//4976 6928//10632 +f 6904//10633 6905//10634 6881//4982 +f 6905//10634 6904//10633 6928//4983 +f 6905//10635 6906//10636 6882//10637 +f 6906//10636 6905//10635 6929//10638 +f 6906//10639 6907//10640 6883//10641 +f 6907//10640 6906//10639 6930//10642 +f 6931//10643 6884//8432 6907//10644 +f 6884//8432 6931//10643 6908//8434 +f 6909//4994 6908//4995 6932//4996 +f 6908//4995 6909//4994 6885//10645 +f 6910//10646 6909//10647 6933//10648 +f 6909//10647 6910//10646 6886//5000 +f 6911//10649 6910//5002 6934//10649 +f 6910//5002 6911//10649 6887//5002 +f 6912//10650 6911//5005 6935//5006 +f 6911//5005 6912//10650 6888//5007 +f 6889//5008 6936//10651 6913//5010 +f 6936//10651 6889//5008 6912//8447 +f 6937//10652 6890//10653 6913//5014 +f 6890//10653 6937//10652 6914//5015 +f 6938//5016 6891//8451 6914//5018 +f 6891//8451 6938//5016 6915//8452 +f 6939//10654 6892//5022 6915//5022 +f 6892//5022 6939//10654 6916//10655 +f 6940//5024 6916//5025 6941//5024 +f 6916//5025 6940//5024 6917//5025 +f 6917//10656 6918//10657 6893//10658 +f 6918//10657 6917//10656 6942//10659 +f 6918//5030 6919//10660 6894//10661 +f 6919//10660 6918//5030 6943//10662 +f 6919//5034 6920//5035 6895//5035 +f 6920//5035 6919//5034 6944//5034 +f 6945//5039 6896//8465 6920//8466 +f 6896//8465 6945//5039 6921//8467 +f 6922//10663 6921//10664 6946//8469 +f 6921//10664 6922//10663 6897//5043 +f 6923//5044 6922//5045 6947//5046 +f 6922//5045 6923//5044 6898//8471 +f 
6924//10665 6923//10666 6948//8474 +f 6923//10666 6924//10665 6899//10667 +f 6925//10668 6924//10669 6949//10669 +f 6924//10669 6925//10668 6900//10668 +f 6901//5056 6950//10670 6926//5058 +f 6950//10670 6901//5056 6925//10671 +f 6902//10672 6951//5061 6927//5060 +f 6951//5061 6902//10672 6926//5061 +f 6903//8485 6952//8486 6928//8487 +f 6952//8486 6903//8485 6927//5065 +f 6928//5066 6929//5067 6905//5066 +f 6929//5067 6928//5066 6952//5067 +f 6929//10673 6930//8492 6906//10674 +f 6930//8492 6929//10673 6953//5071 +f 6954//8494 6907//8494 6930//8494 +f 6907//8494 6954//8494 6931//8494 +f 6955//10675 6908//10676 6931//5078 +f 6908//10676 6955//10675 6932//5079 +f 6933//5080 6932//5081 6956//5082 +f 6932//5081 6933//5080 6909//5083 +f 6934//5086 6933//8502 6957//5086 +f 6933//8502 6934//5086 6910//8502 +f 6935//5087 6934//5088 6958//5089 +f 6934//5088 6935//5087 6911//5090 +f 6912//5091 6959//8507 6936//5092 +f 6959//8507 6912//5091 6935//5093 +f 6913//5094 6960//10677 6937//5096 +f 6960//10677 6913//5094 6936//5097 +f 6961//10678 6914//8512 6937//5100 +f 6914//8512 6961//10678 6938//5101 +f 6939//10679 6938//10680 6962//5102 +f 6938//10680 6939//10679 6915//8516 +f 6941//10681 6939//10682 6963//8519 +f 6939//10682 6941//10681 6916//10683 +f 6964//10684 6941//5111 6965//5112 +f 6941//5111 6964//10684 6940//10685 +f 6942//10686 6940//10687 6966//10688 +f 6940//10687 6942//10686 6917//10689 +f 6942//10690 6943//10691 6918//10692 +f 6943//10691 6942//10690 6967//10693 +f 6943//10694 6944//10695 6919//5123 +f 6944//10695 6943//10694 6968//5124 +f 6969//5125 6920//8536 6944//5125 +f 6920//8536 6969//5125 6945//8536 +f 6970//10696 6921//8537 6945//5130 +f 6921//8537 6970//10696 6946//5131 +f 6947//8538 6946//5133 6971//8539 +f 6946//5133 6947//8538 6922//8540 +f 6948//8541 6947//8542 6972//8543 +f 6947//8542 6948//8541 6923//10697 +f 6949//10698 6948//8546 6973//8547 +f 6948//8546 6949//10698 6924//8548 +f 6925//10699 6974//10700 6950//10701 +f 6974//10700 6925//10699 
6949//8549 +f 6926//5146 6975//10702 6951//5148 +f 6975//10702 6926//5146 6950//5149 +f 6927//10703 6976//10704 6952//10703 +f 6976//10704 6927//10703 6951//10704 +f 6952//8558 6953//10705 6929//5156 +f 6953//10705 6952//8558 6976//8560 +f 6953//10706 6954//10707 6930//10708 +f 6954//10707 6953//10706 6977//10709 +f 6978//10710 6931//10711 6954//8567 +f 6931//10711 6978//10710 6955//5165 +f 6932//8571 6979//10712 6956//10712 +f 6979//10712 6932//8571 6955//8571 +f 6957//10713 6956//5170 6980//10714 +f 6956//5170 6957//10713 6933//5173 +f 6958//5174 6957//5175 6981//5176 +f 6957//5175 6958//5174 6934//5177 +f 6935//5178 6982//5179 6959//5180 +f 6982//5179 6935//5178 6958//8581 +f 6936//10715 6983//10716 6960//5184 +f 6983//10716 6936//10715 6959//5185 +f 6937//5186 6984//10717 6961//10718 +f 6984//10717 6937//5186 6960//5189 +f 6938//5190 6985//10719 6962//10720 +f 6985//10719 6938//5190 6961//10721 +f 6963//10722 6962//10723 6986//5196 +f 6962//10723 6963//10722 6939//5197 +f 6965//5198 6963//5199 6987//5200 +f 6963//5199 6965//5198 6941//5201 +f 6988//10724 6965//5203 6989//10725 +f 6965//5203 6988//10724 6964//5205 +f 6990//5208 6940//5209 6964//5208 +f 6940//5209 6990//5208 6966//5209 +f 6967//10726 6966//10727 6991//10728 +f 6966//10727 6967//10726 6942//10729 +f 6992//8611 6943//10730 6967//10731 +f 6943//10730 6992//8611 6968//5215 +f 6993//10732 6944//8615 6968//10732 +f 6944//8615 6993//10732 6969//8615 +f 6994//5219 6945//5220 6969//8619 +f 6945//5220 6994//5219 6970//5222 +f 6995//5223 6946//5223 6970//5223 +f 6946//5133 6995//5224 6971//8539 +f 6972//5225 6971//10733 6996//5227 +f 6971//10733 6972//5225 6947//5228 +f 6973//8623 6972//8623 6997//5230 +f 6972//8623 6973//8623 6948//8623 +f 6974//5232 6973//10734 6998//5233 +f 6973//10734 6974//5232 6949//8626 +f 6950//10735 6999//10736 6975//10737 +f 6999//10736 6950//10735 6974//10738 +f 6951//10739 7000//10740 6976//10741 +f 7000//10740 6951//10739 6975//5242 +f 6976//10742 6977//10743 6953//10744 +f 
6977//10743 6976//10742 7000//10745 +f 7001//10746 6954//10747 6977//10747 +f 6954//10747 7001//10746 6978//10748 +f 6955//8641 7002//8642 6979//8642 +f 7002//8642 6955//8641 6978//8641 +f 6956//10749 7003//10750 6980//10751 +f 7003//10750 6956//10749 6979//10752 +f 6981//8645 6980//8646 7004//8647 +f 6980//8646 6981//8645 6957//8648 +f 6958//5261 7005//5262 6982//5263 +f 7005//5262 6958//5261 6981//5264 +f 6959//10753 7006//10754 6983//10755 +f 7006//10754 6959//10753 6982//8649 +f 6960//5269 7007//10756 6984//5271 +f 7007//10756 6960//5269 6983//5272 +f 6961//5273 7008//10757 6985//10757 +f 7008//10757 6961//5273 6984//5273 +f 6986//5275 6985//5276 7009//5275 +f 6985//5276 6986//5275 6962//8660 +f 6987//5277 6986//10758 7010//10759 +f 6986//10758 6987//5277 6963//10760 +f 6989//5281 6987//5282 7011//10761 +f 6987//5282 6989//5281 6965//5284 +f 7012//5285 6989//5286 7013//5287 +f 6989//5286 7012//5285 6988//5288 +f 7014//10762 6964//10763 6988//5291 +f 6964//10763 7014//10762 6990//10764 +f 7015//10765 6966//10766 6990//10767 +f 6966//10766 7015//10765 6991//10768 +f 6992//10769 6991//10770 7016//10771 +f 6991//10770 6992//10769 6967//10772 +f 7017//10773 6968//10774 6992//5301 +f 6968//10774 7017//10773 6993//10775 +f 7018//10776 6969//10777 6993//5307 +f 6969//10777 7018//10776 6994//5308 +f 7019//10778 6970//10779 6994//8691 +f 6970//10779 7019//10778 6995//5312 +f 7020//5313 6971//8692 6995//5315 +f 6971//8692 7020//5313 6996//5314 +f 6997//5316 6996//10780 7021//5318 +f 6996//10780 6997//5316 6972//5319 +f 6998//10781 6997//5321 7022//8697 +f 6997//5321 6998//10781 6973//8698 +f 6999//10782 6998//10783 7023//10784 +f 6998//10783 6999//10782 6974//8702 +f 6975//5328 7024//5330 7000//5328 +f 7024//5330 6975//5328 6999//5330 +f 7000//10785 7001//10786 6977//5333 +f 7001//10786 7000//10785 7024//10787 +f 6978//5335 7025//5336 7002//5336 +f 7025//5336 6978//5335 7001//5335 +f 6979//8712 7026//10788 7003//10788 +f 7026//10788 6979//8712 7002//8712 +f 6980//10789 
7027//10790 7004//8715 +f 7027//10790 6980//10789 7003//10791 +f 6981//5345 7028//8717 7005//5347 +f 7028//8717 6981//5345 7004//5348 +f 6982//10792 7029//10793 7006//10794 +f 7029//10793 6982//10792 7005//10795 +f 6983//10796 7030//8724 7007//5355 +f 7030//8724 6983//10796 7006//8726 +f 6984//5357 7031//5359 7008//5359 +f 7031//5359 6984//5357 7007//5357 +f 6985//10797 7032//5361 7009//5362 +f 7032//5361 6985//10797 7008//5363 +f 7010//10798 7009//10799 7033//8732 +f 7009//10799 7010//10798 6986//5367 +f 7011//10800 7010//5369 7034//10801 +f 7010//5369 7011//10800 6987//10802 +f 7013//10803 7011//10804 7035//8739 +f 7011//10804 7013//10803 6989//10805 +f 7036//8741 7013//10806 7037//5378 +f 7013//10806 7036//8741 7012//5379 +f 7038//5380 6988//8743 7012//5380 +f 6988//8743 7038//5380 7014//8743 +f 7039//8744 6990//5382 7014//5383 +f 6990//5382 7039//8744 7015//5382 +f 7040//10807 6991//10808 7015//8747 +f 6991//10808 7040//10807 7016//10809 +f 7041//10810 6992//10811 7016//5391 +f 6992//10811 7041//10810 7017//5392 +f 7042//8752 6993//10812 7017//8752 +f 6993//10812 7042//8752 7018//5393 +f 7043//10813 6994//10814 7018//5396 +f 6994//10814 7043//10813 7019//5397 +f 7044//5398 6995//5399 7019//5398 +f 6995//5399 7044//5398 7020//5401 +f 7045//10815 6996//5403 7020//10816 +f 6996//5403 7045//10815 7021//8760 +f 7022//10817 7021//10818 7046//5408 +f 7021//10818 7022//10817 6997//5409 +f 7023//8763 7022//8764 7047//5412 +f 7022//8764 7023//8763 6998//8765 +f 7024//5417 7023//10819 7048//10820 +f 7023//10819 7024//5417 6999//5417 +f 7001//5418 7048//5419 7025//5420 +f 7048//5419 7001//5418 7024//5421 +f 7002//8776 7049//8776 7026//8776 +f 7049//8776 7002//8776 7025//8776 +f 7003//5426 7050//5428 7027//5428 +f 7050//5428 7003//5426 7026//5427 +f 7027//5429 7028//8783 7004//10821 +f 7028//8783 7027//5429 7051//5432 +f 7005//5434 7052//10822 7029//5435 +f 7052//10822 7005//5434 7028//5436 +f 7006//5437 7053//8790 7030//8791 +f 7053//8790 7006//5437 7029//8792 +f 
7007//8794 7054//5442 7031//5443 +f 7054//5442 7007//8794 7030//5444 +f 7008//5445 7055//10823 7032//5447 +f 7055//10823 7008//5445 7031//10824 +f 7009//10825 7056//5450 7033//10826 +f 7056//5450 7009//10825 7032//5452 +f 7010//5453 7057//10827 7034//5455 +f 7057//10827 7010//5453 7033//8799 +f 7011//5460 7058//10828 7035//10829 +f 7058//10828 7011//5460 7034//10830 +f 7013//10831 7059//5463 7037//5463 +f 7059//5463 7013//10831 7035//10831 +f 7060//10832 7036//10833 7037//10834 +f 7036//10833 7060//10832 7061//5465 +f 7062//10835 7012//10836 7036//5470 +f 7012//10836 7062//10835 7038//5471 +f 7063//5472 7014//10837 7038//5474 +f 7014//10837 7063//5472 7039//5473 +f 7064//10838 7015//5476 7039//5477 +f 7015//5476 7064//10838 7040//10839 +f 7065//5479 7016//5480 7040//5481 +f 7016//5480 7065//5479 7041//5482 +f 7066//10840 7017//5483 7041//10841 +f 7017//5483 7066//10840 7042//10842 +f 7067//5489 7018//5488 7042//8829 +f 7018//5488 7067//5489 7043//5488 +f 7068//10843 7019//5493 7043//10843 +f 7019//5493 7068//10843 7044//5493 +f 7069//10844 7020//10845 7044//5494 +f 7020//10845 7069//10844 7045//5495 +f 7021//5496 7070//10846 7046//5496 +f 7070//10846 7021//5496 7045//10846 +f 7047//5498 7046//5499 7071//5500 +f 7046//5499 7047//5498 7022//5501 +f 7048//5502 7047//5503 7072//5503 +f 7047//5503 7048//5502 7023//5502 +f 7025//10847 7072//10848 7049//10849 +f 7072//10848 7025//10847 7048//5504 +f 7026//8839 7073//10850 7050//5509 +f 7073//10850 7026//8839 7049//8840 +f 7050//5511 7051//5512 7027//5513 +f 7051//5512 7050//5511 7074//5514 +f 7051//5515 7052//10851 7028//5517 +f 7052//10851 7051//5515 7075//5518 +f 7029//5522 7076//10852 7053//8847 +f 7076//10852 7029//5522 7052//10853 +f 7030//8848 7077//8849 7054//8850 +f 7077//8849 7030//8848 7053//8851 +f 7031//5527 7078//10854 7055//5529 +f 7078//10854 7031//5527 7054//5527 +f 7032//5530 7079//10855 7056//5532 +f 7079//10855 7032//5530 7055//5533 +f 7056//8853 7057//5535 7033//5536 +f 7057//5535 7056//8853 7080//5537 
+f 7034//10856 7081//10857 7058//10857 +f 7081//10857 7034//10856 7057//10856 +f 7035//10858 7082//10859 7059//10860 +f 7082//10859 7035//10858 7058//10861 +f 7083//10862 7037//10863 7059//10864 +f 7037//10863 7083//10862 7060//10865 +f 7083//10866 7061//10867 7060//8885 +f 7061//10867 7083//10866 7084//10868 +f 7061//10867 7084//10868 7085//10869 +f 7085//10869 7084//10868 7086//10870 +f 7085//10869 7086//10870 7087//10871 +f 7087//10871 7086//10870 7088//8877 +f 7087//10871 7088//8877 7089//5562 +f 7089//5562 7088//8877 7090//5556 +f 7089//5562 7090//5556 7091//5557 +f 7091//5557 7090//5556 7092//10872 +f 7091//5557 7092//10872 7093//10873 +f 7093//10873 7092//10872 7094//10874 +f 7093//10873 7094//10874 7095//10875 +f 7095//10875 7094//10874 7096//5562 +f 7095//10875 7096//5562 7097//5563 +f 7097//5563 7096//5562 7098//10876 +f 7097//5563 7098//10876 7099//5565 +f 7099//5565 7098//10876 7100//10877 +f 7099//5565 7100//10877 7101//10878 +f 7101//10878 7100//10877 7102//10879 +f 7101//10878 7102//10879 7103//10880 +f 7103//10880 7102//10879 7104//10881 +f 7061//10882 7062//10883 7036//5573 +f 7062//10883 7061//10882 7085//5574 +f 7105//10884 7038//5576 7062//10884 +f 7038//5576 7105//10884 7063//5577 +f 7106//5578 7039//5578 7063//10885 +f 7039//5578 7106//5578 7064//5578 +f 7064//10886 7065//10887 7040//10888 +f 7065//10887 7064//10886 7107//5583 +f 7108//8899 7041//5585 7065//8898 +f 7041//5585 7108//8899 7066//8899 +f 7109//10889 7042//10890 7066//10889 +f 7042//10890 7109//10889 7067//8900 +f 7110//10891 7043//10892 7067//10891 +f 7043//10892 7110//10891 7068//10893 +f 7111//10894 7044//10895 7068//8905 +f 7044//10895 7111//10894 7069//5594 +f 7045//10896 7112//10897 7070//5597 +f 7112//10897 7045//10896 7069//10898 +f 7046//5599 7113//5600 7071//5599 +f 7113//5600 7046//5599 7070//5600 +f 7072//5601 7071//5602 7114//5602 +f 7071//5602 7072//5601 7047//5601 +f 7049//10899 7114//5603 7073//5603 +f 7114//5603 7049//10899 7072//10899 +f 7073//5604 7074//5605 
7050//5604 +f 7074//5605 7073//5604 7115//5605 +f 7074//5606 7075//8917 7051//5608 +f 7075//8917 7074//5606 7116//5609 +f 7075//5610 7076//10900 7052//5612 +f 7076//10900 7075//5610 7117//5613 +f 7053//8921 7118//10901 7077//8922 +f 7118//10901 7053//8921 7076//8920 +f 7054//10902 7119//10903 7078//5619 +f 7119//10903 7054//10902 7077//10904 +f 7055//5621 7120//5622 7079//5622 +f 7120//5622 7055//5621 7078//5621 +f 7079//8927 7080//10905 7056//5625 +f 7080//10905 7079//8927 7121//8929 +f 7080//10906 7081//10907 7057//10908 +f 7081//10907 7080//10906 7122//8931 +f 7058//10909 7123//10910 7082//10910 +f 7123//10910 7058//10909 7081//10909 +f 7084//10911 7059//10912 7082//10913 +f 7059//10912 7084//10911 7083//10914 +f 7124//10915 7103//10916 7125//5641 +f 7103//10916 7124//10915 7101//10917 +f 7126//8943 7101//5644 7124//5645 +f 7101//5644 7126//8943 7099//10918 +f 7097//10919 7126//10919 7127//5649 +f 7126//10919 7097//10919 7099//5647 +f 7095//5650 7127//10920 7128//10921 +f 7127//10920 7095//5650 7097//8949 +f 7093//10922 7128//10923 7129//5656 +f 7128//10923 7093//10922 7095//5657 +f 7091//8954 7129//5659 7130//5660 +f 7129//5659 7091//8954 7093//5661 +f 7089//10924 7130//5664 7131//10925 +f 7130//5664 7089//10924 7091//8956 +f 7087//10926 7131//10927 7105//10928 +f 7131//10927 7087//10926 7089//10926 +f 7085//10929 7105//5672 7062//5672 +f 7105//5672 7085//10929 7087//10930 +f 7086//10931 7082//8963 7123//10932 +f 7082//8963 7086//10931 7084//8962 +f 7132//10933 7086//10934 7123//8964 +f 7086//10934 7132//10933 7088//10935 +f 7133//5682 7088//5683 7132//5682 +f 7088//5683 7133//5682 7090//5683 +f 7134//8969 7090//8970 7133//5686 +f 7090//8970 7134//8969 7092//5687 +f 7135//10936 7092//5689 7134//10937 +f 7092//5689 7135//10936 7094//10938 +f 7136//8974 7094//10939 7135//8974 +f 7094//10939 7136//8974 7096//10939 +f 7137//10940 7096//10941 7136//10942 +f 7096//10941 7137//10940 7098//10943 +f 7138//10944 7098//10945 7137//5700 +f 7098//10945 7138//10944 
7100//5702 +f 7139//10946 7100//10947 7138//10948 +f 7100//10947 7139//10946 7102//10949 +f 7140//10950 7102//10951 7139//8989 +f 7102//10951 7140//10950 7104//10952 +f 7125//10953 7104//5711 7140//5712 +f 7104//5711 7125//10953 7103//10954 +f 7105//5714 7106//5715 7063//5716 +f 7106//5715 7105//5714 7131//5717 +f 7106//10955 7107//10956 7064//5718 +f 7107//10956 7106//10955 7141//5720 +f 7107//5721 7108//5721 7065//5721 +f 7108//5721 7107//5721 7142//5721 +f 7143//10957 7066//10957 7108//5723 +f 7066//10957 7143//10957 7109//10958 +f 7144//10959 7067//10960 7109//10959 +f 7067//10960 7144//10959 7110//10960 +f 7145//10961 7068//9007 7110//9008 +f 7068//9007 7145//10961 7111//9009 +f 7069//9010 7146//10962 7112//5734 +f 7146//10962 7069//9010 7111//10963 +f 7070//9015 7147//10964 7113//5740 +f 7147//10964 7070//9015 7112//10964 +f 7071//5742 7148//5744 7114//5742 +f 7148//5744 7071//5742 7113//5744 +f 7114//5747 7115//5746 7073//5747 +f 7115//5746 7114//5747 7148//5746 +f 7115//5750 7116//5749 7074//5749 +f 7116//5749 7115//5750 7149//5750 +f 7116//5751 7117//10965 7075//10966 +f 7117//10965 7116//5751 7150//5754 +f 7076//10967 7151//10968 7118//10969 +f 7151//10968 7076//10967 7117//10970 +f 7077//9036 7152//10971 7119//10971 +f 7152//10971 7077//9036 7118//9036 +f 7078//5763 7153//5764 7120//5765 +f 7153//5764 7078//5763 7119//5766 +f 7120//10972 7121//10973 7079//5769 +f 7121//10973 7120//10972 7154//9042 +f 7121//10974 7122//10975 7080//9045 +f 7122//10975 7121//10974 7155//9046 +f 7122//9047 7123//9049 7081//9047 +f 7123//9049 7122//9047 7132//9049 +f 7125//5778 7156//5779 7124//10976 +f 7156//5779 7125//5778 7157//9051 +f 7124//10977 7158//5783 7126//5784 +f 7158//5783 7124//10977 7156//10978 +f 7127//10979 7158//5786 7159//9058 +f 7158//5786 7127//10979 7126//10980 +f 7128//5790 7159//10981 7160//5792 +f 7159//10981 7128//5790 7127//5793 +f 7129//5794 7160//10982 7161//10983 +f 7160//10982 7129//5794 7128//5797 +f 7130//5798 7161//10984 7141//5800 +f 
7161//10984 7130//5798 7129//5798 +f 7131//5802 7141//5803 7106//10985 +f 7141//5803 7131//5802 7130//5802 +f 7155//10986 7132//10987 7122//9067 +f 7132//10987 7155//10986 7133//5808 +f 7162//10988 7133//10989 7155//9070 +f 7133//10989 7162//10988 7134//5812 +f 7163//5813 7134//10990 7162//5815 +f 7134//10990 7163//5813 7135//5816 +f 7164//5819 7135//10991 7163//5819 +f 7135//10991 7164//5819 7136//10991 +f 7165//10992 7136//10993 7164//10992 +f 7136//10993 7165//10992 7137//10993 +f 7165//5825 7138//5826 7137//5826 +f 7138//5826 7165//5825 7166//5825 +f 7166//9080 7139//10994 7138//9082 +f 7139//10994 7166//9080 7167//5830 +f 7167//5831 7140//9083 7139//9083 +f 7140//9083 7167//5831 7168//5831 +f 7140//5834 7157//5834 7125//5834 +f 7157//5834 7140//5834 7168//5834 +f 7141//10995 7142//5837 7107//5837 +f 7142//5837 7141//10995 7161//5838 +f 7142//10996 7143//10997 7108//5841 +f 7143//10997 7142//10996 7169//5842 +f 7170//10998 7109//10999 7143//9095 +f 7109//10999 7170//10998 7144//11000 +f 7171//11001 7110//11002 7144//11001 +f 7110//11002 7171//11001 7145//9099 +f 7172//11003 7111//11003 7145//11003 +f 7111//11003 7172//11003 7146//11003 +f 7112//9107 7173//11004 7147//11005 +f 7173//11004 7112//9107 7146//9107 +f 7113//11006 7174//5858 7148//5859 +f 7174//5858 7113//11006 7147//5860 +f 7148//5861 7149//9115 7115//5861 +f 7149//9115 7148//5861 7174//9115 +f 7149//11007 7150//5864 7116//5865 +f 7150//5864 7149//11007 7175//5866 +f 7150//11008 7151//11009 7117//11010 +f 7151//11009 7150//11008 7176//5870 +f 7152//11011 7151//5873 7177//11012 +f 7151//5873 7152//11011 7118//5874 +f 7119//5875 7178//11013 7153//5876 +f 7178//11013 7119//5875 7152//5877 +f 7153//11014 7154//11015 7120//11016 +f 7154//11015 7153//11014 7179//9135 +f 7154//9136 7155//5884 7121//9137 +f 7155//5884 7154//9136 7162//11017 +f 7157//11018 7180//11019 7156//5888 +f 7180//11019 7157//11018 7181//11020 +f 7156//5890 7182//11021 7158//5892 +f 7182//11021 7156//5890 7180//5893 +f 7159//11022 
7182//5895 7183//5896 +f 7182//5895 7159//11022 7158//9147 +f 7160//11023 7183//11024 7169//5900 +f 7183//11024 7160//11023 7159//5901 +f 7161//11025 7169//11026 7142//5904 +f 7169//11026 7161//11025 7160//5905 +f 7179//5906 7162//9153 7154//9154 +f 7162//9153 7179//5906 7163//5909 +f 7184//5910 7163//5911 7179//5912 +f 7163//5911 7184//5910 7164//5913 +f 7184//11027 7165//11028 7164//11029 +f 7165//11028 7184//11027 7185//9161 +f 7185//11030 7166//11031 7165//9164 +f 7166//11031 7185//11030 7186//11032 +f 7186//5921 7167//11033 7166//5923 +f 7167//11033 7186//5921 7187//5924 +f 7187//9166 7168//5927 7167//11034 +f 7168//5927 7187//9166 7188//11035 +f 7188//11036 7157//11037 7168//5931 +f 7157//11037 7188//11036 7181//11038 +f 7169//5933 7170//9171 7143//9171 +f 7170//9171 7169//5933 7183//5933 +f 7189//11039 7144//11040 7170//11041 +f 7144//11040 7189//11039 7171//11042 +f 7172//11043 7171//11044 7190//11045 +f 7171//11044 7172//11043 7145//11046 +f 7173//5941 7172//11047 7191//5943 +f 7172//11047 7173//5941 7146//11048 +f 7147//11049 7192//11050 7174//5947 +f 7192//11050 7147//11049 7173//5948 +f 7174//9188 7175//5950 7149//5949 +f 7175//5950 7174//9188 7192//5951 +f 7175//9192 7176//9193 7150//5954 +f 7176//9193 7175//9192 7193//5955 +f 7177//5956 7176//5957 7194//5958 +f 7176//5957 7177//5956 7151//5959 +f 7152//11051 7195//11052 7178//5962 +f 7195//11052 7152//11051 7177//9199 +f 7178//11053 7179//5965 7153//5966 +f 7179//5965 7178//11053 7184//9200 +f 7196//11054 7180//11055 7181//5970 +f 7180//11055 7196//11054 7197//5971 +f 7180//5972 7189//11056 7182//5972 +f 7189//11056 7180//5972 7197//11056 +f 7183//11057 7189//5975 7170//9203 +f 7189//5975 7183//11057 7182//5975 +f 7178//9204 7185//11058 7184//9206 +f 7185//11058 7178//9204 7195//9207 +f 7195//11059 7186//11060 7185//11061 +f 7186//11060 7195//11059 7198//5980 +f 7198//5984 7187//5985 7186//5986 +f 7187//5985 7198//5984 7199//9214 +f 7199//9215 7188//9216 7187//5990 +f 7188//9216 7199//9215 7200//5991 
+f 7200//5992 7181//5993 7188//11062 +f 7181//5993 7200//5992 7196//11063 +f 7190//11064 7189//5998 7197//5998 +f 7189//5998 7190//11064 7171//11064 +f 7191//11065 7190//11066 7201//11065 +f 7190//11066 7191//11065 7172//11066 +f 7173//11067 7202//6006 7192//11068 +f 7202//6006 7173//11067 7191//6006 +f 7192//6007 7193//11069 7175//6009 +f 7193//11069 7192//6007 7202//11070 +f 7194//11071 7193//11072 7203//6013 +f 7193//11072 7194//11071 7176//6014 +f 7177//11073 7198//11074 7195//6017 +f 7198//11074 7177//11073 7194//11075 +f 7201//11076 7197//11077 7196//11078 +f 7197//11077 7201//11076 7190//11079 +f 7194//6023 7199//11080 7198//11080 +f 7199//11080 7194//6023 7203//6023 +f 7203//6025 7200//11081 7199//6027 +f 7200//11081 7203//6025 7204//11082 +f 7204//11083 7196//11084 7200//6031 +f 7196//11084 7204//11083 7201//11085 +f 7202//11086 7201//11087 7204//11088 +f 7201//11087 7202//11086 7191//11089 +f 7193//11090 7204//6038 7203//11091 +f 7204//6038 7193//11090 7202//6040 +f 7205//11092 7206//11093 7207//11093 +f 7206//11093 7205//11092 7208//11094 +f 7207//11095 7209//11096 7210//11097 +f 7209//11096 7207//11095 7206//11098 +f 7211//11099 7207//6050 7212//6050 +f 7207//6050 7211//11099 7205//11099 +f 7205//11100 7213//11101 7208//1910 +f 7213//11101 7205//11100 7214//11102 +f 7208//11103 7215//11104 7206//6057 +f 7215//11104 7208//11103 7216//6058 +f 7210//1919 7217//6061 7218//6061 +f 7217//6061 7210//1919 7209//1919 +f 7212//6066 7210//11105 7219//11106 +f 7210//11105 7212//6066 7207//6066 +f 7206//11107 7220//11108 7209//11109 +f 7220//11108 7206//11107 7215//11110 +f 7221//11111 7212//11112 7222//11113 +f 7212//11112 7221//11111 7211//6074 +f 7211//11114 7214//11115 7205//11114 +f 7214//11115 7211//11114 7223//11115 +f 7214//6081 7224//11116 7213//6081 +f 7224//11116 7214//6081 7225//11116 +f 7208//11117 7226//11118 7216//6085 +f 7226//11118 7208//11117 7213//6086 +f 7215//11119 7227//11120 7228//6089 +f 7227//11120 7215//11119 7216//6090 +f 7218//6094 
7229//11121 7230//11121 +f 7229//11121 7218//6094 7217//6094 +f 7219//11122 7218//11123 7231//11123 +f 7218//11123 7219//11122 7210//11122 +f 7209//1956 7232//1957 7217//11124 +f 7232//1957 7209//1956 7220//6101 +f 7222//11125 7219//11126 7233//11127 +f 7219//11126 7222//11125 7212//11128 +f 7215//11129 7234//11130 7220//1966 +f 7234//11130 7215//11129 7228//11131 +f 7235//11132 7222//6109 7236//6110 +f 7222//6109 7235//11132 7221//9294 +f 7237//11133 7211//1973 7221//1974 +f 7211//1973 7237//11133 7223//1975 +f 7223//11134 7225//11135 7214//11136 +f 7225//11135 7223//11134 7238//1977 +f 7225//9298 7239//1979 7224//1980 +f 7239//1979 7225//9298 7240//1981 +f 7213//6117 7241//1985 7226//6117 +f 7241//1985 7213//6117 7224//1985 +f 7216//11137 7242//6119 7227//6120 +f 7242//6119 7216//11137 7226//6119 +f 7228//11138 7243//11139 7244//11140 +f 7243//11139 7228//11138 7227//6122 +f 7230//11141 7245//11142 7246//11143 +f 7245//11142 7230//11141 7229//11141 +f 7231//11144 7230//1996 7247//1999 +f 7230//1996 7231//11144 7218//11145 +f 7217//11146 7248//2001 7229//2002 +f 7248//2001 7217//11146 7232//11147 +f 7233//11148 7231//11149 7249//11150 +f 7231//11149 7233//11148 7219//2007 +f 7220//11151 7250//11152 7232//11153 +f 7250//11152 7220//11151 7234//11154 +f 7236//11155 7233//11156 7251//11157 +f 7233//11156 7236//11155 7222//11158 +f 7228//11159 7252//11160 7234//2016 +f 7252//11160 7228//11159 7244//2015 +f 7253//6141 7235//11161 7236//6143 +f 7235//11161 7253//6141 7254//6144 +f 7235//11162 7237//11163 7221//6147 +f 7237//11163 7235//11162 7255//6148 +f 7237//6149 7238//11164 7223//11165 +f 7238//11164 7237//6149 7256//6151 +f 7238//2031 7240//11166 7225//2031 +f 7240//11166 7238//2031 7257//2032 +f 7240//2033 7258//2034 7239//2035 +f 7258//2034 7240//2033 7259//2036 +f 7224//2037 7260//11167 7241//2039 +f 7260//11167 7224//2037 7239//6155 +f 7226//11168 7261//11169 7242//6158 +f 7261//11169 7226//11168 7241//6159 +f 7227//6160 7262//6163 7243//6162 +f 7262//6163 
7227//6160 7242//6163 +f 7243//9340 7263//2049 7244//11170 +f 7263//2049 7243//9340 7264//11171 +f 7265//6166 7245//6167 7266//6168 +f 7245//6167 7265//6166 7246//6166 +f 7267//11172 7230//11173 7246//2056 +f 7230//11173 7267//11172 7247//11174 +f 7229//11175 7268//11176 7245//11177 +f 7268//11176 7229//11175 7248//2058 +f 7269//11178 7231//11179 7247//6179 +f 7231//11179 7269//11178 7249//11180 +f 7232//11181 7270//11182 7248//11183 +f 7270//11182 7232//11181 7250//11184 +f 7271//11185 7233//11186 7249//11187 +f 7233//11186 7271//11185 7251//11188 +f 7234//11189 7272//11190 7250//11190 +f 7272//11190 7234//11189 7252//11189 +f 7273//2079 7236//11191 7251//2079 +f 7236//11191 7273//2079 7253//11191 +f 7244//11192 7274//11193 7252//11194 +f 7274//11193 7244//11192 7263//11195 +f 7275//11196 7254//11197 7253//2083 +f 7254//11197 7275//11196 7276//11198 +f 7254//11199 7255//6196 7235//11200 +f 7255//6196 7254//11199 7277//11201 +f 7255//11202 7256//11203 7237//11204 +f 7256//11203 7255//11202 7278//11205 +f 7256//11206 7257//9366 7238//2094 +f 7257//9366 7256//11206 7279//2095 +f 7257//6204 7259//6205 7240//2098 +f 7259//6205 7257//6204 7280//6207 +f 7258//11207 7281//2101 7282//6210 +f 7281//2101 7258//11207 7259//2103 +f 7239//2104 7283//11208 7260//2106 +f 7283//11208 7239//2104 7258//2107 +f 7241//2108 7284//2109 7261//11209 +f 7284//2109 7241//2108 7260//2109 +f 7242//6213 7285//6214 7262//6213 +f 7285//6214 7242//6213 7261//6214 +f 7262//6218 7264//6217 7243//11210 +f 7264//6217 7262//6218 7286//11211 +f 7264//2116 7287//11212 7263//11212 +f 7287//11212 7264//2116 7288//2115 +f 7266//11213 7289//6222 7265//11214 +f 7289//6222 7266//11213 7290//11215 +f 7290//11215 7266//11213 7291//6238 +f 7290//11215 7291//6238 7292//6237 +f 7292//6237 7291//6238 7293//11216 +f 7292//6237 7293//11216 7294//11217 +f 7294//11217 7293//11216 7295//2129 +f 7294//11217 7295//2129 7296//2129 +f 7296//2129 7295//2129 7297//6230 +f 7296//2129 7297//6230 7298//6231 +f 7298//6231 
7297//6230 7299//6231 +f 7298//6231 7299//6231 7300//6230 +f 7300//6230 7299//6231 7301//11218 +f 7300//6230 7301//11218 7302//11219 +f 7302//11219 7301//11218 7303//11220 +f 7302//11219 7303//11220 7304//11221 +f 7304//11221 7303//11220 7305//11222 +f 7304//11221 7305//11222 7306//11223 +f 7306//11223 7305//11222 7307//11224 +f 7306//11223 7307//11224 7308//11225 +f 7308//11225 7307//11224 7309//11226 +f 7308//11225 7309//11226 7310//11227 +f 7289//11228 7246//6242 7265//2142 +f 7246//6242 7289//11228 7267//2143 +f 7266//11229 7268//11230 7291//11229 +f 7268//11230 7266//11229 7245//11231 +f 7311//6245 7247//11232 7267//6247 +f 7247//11232 7311//6245 7269//6248 +f 7248//11233 7312//11234 7268//9398 +f 7312//11234 7248//11233 7270//11235 +f 7313//11236 7249//6254 7269//11237 +f 7249//6254 7313//11236 7271//6256 +f 7250//6257 7314//6258 7270//6259 +f 7314//6258 7250//6257 7272//6260 +f 7271//2164 7273//11238 7251//9403 +f 7273//11238 7271//2164 7315//2165 +f 7274//6265 7272//6266 7252//6267 +f 7272//6266 7274//6265 7316//6268 +f 7317//11239 7253//11240 7273//11241 +f 7253//11240 7317//11239 7275//6271 +f 7263//6272 7318//11242 7274//6274 +f 7318//11242 7263//6272 7287//11243 +f 7319//9409 7276//2179 7275//11244 +f 7276//2179 7319//9409 7320//2181 +f 7276//11245 7277//11246 7254//11247 +f 7277//11246 7276//11245 7321//6281 +f 7277//6282 7278//11248 7255//2186 +f 7278//11248 7277//6282 7322//6284 +f 7278//11249 7279//6286 7256//2190 +f 7279//6286 7278//11249 7323//6287 +f 7279//11250 7280//2193 7257//2194 +f 7280//2193 7279//11250 7324//6289 +f 7259//6290 7325//6291 7281//2198 +f 7325//6291 7259//6290 7280//6292 +f 7282//6293 7326//6294 7327//11251 +f 7326//6294 7282//6293 7281//6293 +f 7283//2207 7282//11252 7328//6295 +f 7282//11252 7283//2207 7258//2207 +f 7260//11253 7329//11254 7284//2210 +f 7329//11254 7260//11253 7283//2211 +f 7261//6298 7330//11255 7285//6300 +f 7330//11255 7261//6298 7284//2215 +f 7285//2217 7286//6301 7262//6302 +f 7286//6301 7285//2217 
7331//2219 +f 7286//6304 7288//11256 7264//11257 +f 7288//11256 7286//6304 7332//11258 +f 7288//2224 7333//6308 7287//2226 +f 7333//6308 7288//2224 7334//2227 +f 7335//6313 7309//11259 7307//6311 +f 7309//11259 7335//6313 7336//6313 +f 7336//11260 7310//11261 7309//2233 +f 7310//11261 7336//11260 7337//6316 +f 7337//2238 7308//6318 7310//6318 +f 7308//6318 7337//2238 7338//2238 +f 7338//11262 7306//2240 7308//2241 +f 7306//2240 7338//11262 7339//2242 +f 7339//11263 7304//11264 7306//11265 +f 7304//11264 7339//11263 7340//11266 +f 7302//6326 7340//6327 7341//6327 +f 7340//6327 7302//6326 7304//6328 +f 7300//11267 7341//11268 7342//11269 +f 7341//11268 7300//11267 7302//11270 +f 7298//6331 7342//11271 7343//2257 +f 7342//11271 7298//6331 7300//2258 +f 7296//11272 7343//6335 7344//9451 +f 7343//6335 7296//11272 7298//6336 +f 7294//11273 7344//11274 7345//11275 +f 7344//11274 7294//11273 7296//11276 +f 7292//6343 7345//6343 7346//6343 +f 7345//6343 7292//6343 7294//6343 +f 7292//11277 7311//11278 7290//11279 +f 7311//11278 7292//11277 7346//6348 +f 7290//11280 7267//6349 7289//11281 +f 7267//6349 7290//11280 7311//6349 +f 7291//2279 7312//9464 7293//2279 +f 7312//9464 7291//2279 7268//9464 +f 7347//6353 7293//11282 7312//6353 +f 7293//11282 7347//6353 7295//6354 +f 7348//2286 7295//11283 7347//11284 +f 7295//11283 7348//2286 7297//2288 +f 7349//11285 7297//6358 7348//2291 +f 7297//6358 7349//11285 7299//2292 +f 7350//11286 7299//11287 7349//11288 +f 7299//11287 7350//11286 7301//6360 +f 7351//11289 7301//11290 7350//6363 +f 7301//11290 7351//11289 7303//2299 +f 7352//11291 7303//11292 7351//11293 +f 7303//11292 7352//11291 7305//11294 +f 7352//6367 7307//6368 7305//11295 +f 7307//6368 7352//6367 7335//6367 +f 7346//6369 7269//6370 7311//11296 +f 7269//6370 7346//6369 7313//2308 +f 7314//11297 7312//2312 7270//11297 +f 7312//2312 7314//11297 7347//2312 +f 7313//2314 7315//6374 7271//2315 +f 7315//6374 7313//2314 7353//2314 +f 7316//6377 7314//6375 7272//6377 +f 
7314//6375 7316//6377 7354//6378 +f 7315//11298 7317//11299 7273//2320 +f 7317//11299 7315//11298 7355//6382 +f 7318//6385 7316//11300 7274//6385 +f 7316//11300 7318//6385 7356//6386 +f 7357//11301 7275//11244 7317//11302 +f 7275//11244 7357//11301 7319//9409 +f 7287//11303 7358//11304 7318//2331 +f 7358//11304 7287//11303 7333//2332 +f 7359//11305 7320//6390 7319//11306 +f 7320//6390 7359//11305 7360//6390 +f 7320//2181 7321//2336 7276//2179 +f 7321//2336 7320//2181 7361//11307 +f 7321//11308 7322//11309 7277//11310 +f 7322//11309 7321//11308 7362//6396 +f 7322//11311 7323//11312 7278//6399 +f 7323//11312 7322//11311 7363//11313 +f 7323//6400 7324//6401 7279//6400 +f 7324//6401 7323//6400 7364//2347 +f 7280//6402 7365//6403 7325//2350 +f 7365//6403 7280//6402 7324//6404 +f 7281//6405 7366//6407 7326//6407 +f 7366//6407 7281//6405 7325//6405 +f 7327//11314 7367//11315 7368//11316 +f 7367//11315 7327//11314 7326//6411 +f 7328//6412 7327//11317 7369//11317 +f 7327//11317 7328//6412 7282//6412 +f 7329//2364 7328//11318 7370//11318 +f 7328//11318 7329//2364 7283//2364 +f 7284//2365 7371//2365 7330//2365 +f 7371//2365 7284//2365 7329//2365 +f 7330//2366 7331//11319 7285//6417 +f 7331//11319 7330//2366 7372//6418 +f 7331//9523 7332//11320 7286//9525 +f 7332//11320 7331//9523 7373//6422 +f 7332//6423 7334//2227 7288//2224 +f 7334//2227 7332//6423 7374//2375 +f 7334//11321 7375//11322 7333//6427 +f 7375//11322 7334//11321 7376//11323 +f 7377//6428 7336//6429 7335//6430 +f 7336//6429 7377//6428 7378//6431 +f 7378//11324 7337//6433 7336//9531 +f 7337//6433 7378//11324 7379//11325 +f 7337//6436 7380//6437 7338//6438 +f 7380//6437 7337//6436 7379//6437 +f 7338//11326 7381//11327 7339//11328 +f 7381//11327 7338//11326 7380//11329 +f 7339//11330 7382//11331 7340//11332 +f 7382//11331 7339//11330 7381//11333 +f 7341//6450 7382//6449 7383//6449 +f 7382//6449 7341//6450 7340//6450 +f 7342//6451 7383//6452 7384//6453 +f 7383//6452 7342//6451 7341//6454 +f 7343//6455 7384//11334 
7385//6457 +f 7384//11334 7343//6455 7342//2410 +f 7344//9451 7385//2411 7386//11335 +f 7385//2411 7344//9451 7343//6335 +f 7345//6462 7386//6460 7353//11336 +f 7386//6460 7345//6462 7344//6462 +f 7346//6463 7353//6464 7313//6465 +f 7353//6464 7346//6463 7345//6466 +f 7354//2423 7347//6467 7314//2423 +f 7347//6467 7354//2423 7348//6467 +f 7387//11337 7348//11338 7354//6470 +f 7348//11338 7387//11337 7349//9558 +f 7388//2429 7349//11288 7387//6473 +f 7349//11288 7388//2429 7350//11286 +f 7389//11339 7350//11340 7388//11341 +f 7350//11340 7389//11339 7351//6476 +f 7390//11342 7351//11343 7389//6479 +f 7351//11343 7390//11342 7352//6480 +f 7390//11344 7335//11345 7352//11346 +f 7335//11345 7390//11344 7377//6484 +f 7353//6485 7355//6485 7315//6485 +f 7355//6485 7353//6485 7386//6485 +f 7356//6486 7354//6486 7316//6486 +f 7354//6486 7356//6486 7387//6486 +f 7355//6487 7357//11347 7317//6487 +f 7357//11347 7355//6487 7391//2448 +f 7358//11348 7356//2450 7318//2449 +f 7356//2450 7358//11348 7392//2450 +f 7393//11349 7319//11350 7357//11351 +f 7319//11350 7393//11349 7359//11352 +f 7333//11353 7394//6492 7358//6493 +f 7394//6492 7333//11353 7375//6494 +f 7395//11354 7360//11355 7359//6497 +f 7360//11355 7395//11354 7396//2462 +f 7360//6499 7361//11356 7320//11357 +f 7361//11356 7360//6499 7397//2466 +f 7361//11307 7362//9578 7321//2336 +f 7362//9578 7361//11307 7398//2468 +f 7362//2472 7363//11358 7322//11359 +f 7363//11358 7362//2472 7399//11360 +f 7363//6503 7364//6504 7323//11361 +f 7364//6504 7363//6503 7400//2476 +f 7324//6506 7401//11362 7365//6508 +f 7401//11362 7324//6506 7364//6506 +f 7325//11363 7402//11364 7366//6511 +f 7402//11364 7325//11363 7365//6512 +f 7326//11365 7403//11366 7367//2487 +f 7403//11366 7326//11365 7366//2488 +f 7404//11367 7367//2492 7405//11367 +f 7367//2492 7404//11367 7368//2492 +f 7369//11368 7368//11369 7406//11370 +f 7368//11369 7369//11368 7327//11371 +f 7370//2497 7369//2497 7407//2497 +f 7369//2497 7370//2497 7328//2497 +f 
7371//11372 7370//11373 7408//6525 +f 7370//11373 7371//11372 7329//2502 +f 7371//2503 7372//2504 7330//2505 +f 7372//2504 7371//2503 7409//2506 +f 7372//11374 7373//11375 7331//11376 +f 7373//11375 7372//11374 7410//2510 +f 7373//11377 7374//2375 7332//6423 +f 7374//2375 7373//11377 7411//11378 +f 7374//2516 7376//11379 7334//11379 +f 7376//11379 7374//2516 7412//2516 +f 7376//11380 7413//11381 7375//11382 +f 7413//11381 7376//11380 7414//9608 +f 7415//11383 7378//11384 7377//6541 +f 7378//11384 7415//11383 7416//11385 +f 7378//11386 7417//11387 7379//2525 +f 7417//11387 7378//11386 7416//11388 +f 7379//11389 7418//11390 7380//11391 +f 7418//11390 7379//11389 7417//11392 +f 7380//11393 7419//11394 7381//6546 +f 7419//11394 7380//11393 7418//11395 +f 7381//6548 7420//6549 7382//6550 +f 7420//6549 7381//6548 7419//11396 +f 7382//6552 7421//6554 7383//6554 +f 7421//6554 7382//6552 7420//6555 +f 7384//2542 7421//11397 7422//2542 +f 7421//11397 7384//2542 7383//2543 +f 7385//2544 7422//11398 7391//11398 +f 7422//11398 7385//2544 7384//2544 +f 7386//11335 7391//2546 7355//6558 +f 7391//2546 7386//11335 7385//2411 +f 7392//9617 7387//6473 7356//2548 +f 7387//6473 7392//9617 7388//2429 +f 7423//11399 7388//9618 7392//2552 +f 7388//9618 7423//11399 7389//11400 +f 7424//11401 7389//11401 7423//11402 +f 7389//11401 7424//11401 7390//11403 +f 7424//6563 7377//6564 7390//6565 +f 7377//6564 7424//6563 7415//6566 +f 7391//6567 7393//2560 7357//11404 +f 7393//2560 7391//6567 7422//2562 +f 7394//6568 7392//6568 7358//6568 +f 7392//6568 7394//6568 7423//6568 +f 7425//11405 7359//11406 7393//6571 +f 7359//11406 7425//11405 7395//2570 +f 7375//2573 7426//11407 7394//2573 +f 7426//11407 7375//2573 7413//11408 +f 7427//11409 7395//11410 7428//11411 +f 7395//11410 7427//11409 7396//2575 +f 7396//11412 7397//11413 7360//2577 +f 7397//11413 7396//11412 7429//2578 +f 7397//6581 7398//2580 7361//2581 +f 7398//2580 7397//6581 7430//11414 +f 7398//2468 7399//6583 7362//9578 +f 7399//6583 
7398//2468 7431//6584 +f 7399//11415 7400//11416 7363//2587 +f 7400//11416 7399//11415 7432//11417 +f 7433//11418 7364//2590 7400//11419 +f 7364//2590 7433//11418 7401//2592 +f 7365//6590 7434//6590 7402//6589 +f 7434//6590 7365//6590 7401//6590 +f 7366//6591 7435//6592 7403//6592 +f 7435//6592 7366//6591 7402//6591 +f 7405//6595 7403//6594 7436//6595 +f 7403//6594 7405//6595 7367//6594 +f 7437//11420 7405//6600 7438//11420 +f 7405//6600 7437//11420 7404//6600 +f 7406//11421 7404//11422 7439//6602 +f 7404//11422 7406//11421 7368//11423 +f 7407//6603 7406//11424 7440//6605 +f 7406//11424 7407//6603 7369//11425 +f 7408//11426 7407//11427 7441//6609 +f 7407//11427 7408//11426 7370//6610 +f 7371//2619 7442//9650 7409//11428 +f 7442//9650 7371//2619 7408//6614 +f 7409//11429 7410//11430 7372//2625 +f 7410//11430 7409//11429 7443//2626 +f 7410//2627 7411//11378 7373//11377 +f 7411//11378 7410//2627 7444//2628 +f 7411//6619 7412//11431 7374//2631 +f 7412//11431 7411//6619 7445//6621 +f 7412//6623 7414//11432 7376//9656 +f 7414//11432 7412//6623 7446//11433 +f 7413//11434 7447//11435 7448//11436 +f 7447//11435 7413//11434 7414//9657 +f 7415//11437 7449//11438 7416//11439 +f 7449//11438 7415//11437 7450//11440 +f 7416//11441 7451//11442 7417//11443 +f 7451//11442 7416//11441 7449//11444 +f 7417//11445 7452//11446 7418//11447 +f 7452//11446 7417//11445 7451//6632 +f 7418//11448 7453//11449 7419//11450 +f 7453//11449 7418//11448 7452//11451 +f 7419//6637 7454//6638 7420//6639 +f 7454//6638 7419//6637 7453//6640 +f 7420//6641 7425//6642 7421//6643 +f 7425//6642 7420//6641 7454//6644 +f 7422//11452 7425//6646 7393//6647 +f 7425//6646 7422//11452 7421//6648 +f 7426//11453 7423//11454 7394//6650 +f 7423//11454 7426//11453 7424//6651 +f 7426//6652 7415//6653 7424//11455 +f 7415//6653 7426//6652 7450//6655 +f 7454//6658 7395//11456 7425//6658 +f 7395//11456 7454//6658 7428//6659 +f 7426//11457 7448//11458 7450//2677 +f 7448//11458 7426//11457 7413//11459 +f 7455//11460 7428//9670 
7456//11461 +f 7428//9670 7455//11460 7427//6661 +f 7457//11462 7396//11463 7427//11464 +f 7396//11463 7457//11462 7429//11465 +f 7429//6667 7430//11466 7397//11467 +f 7430//11466 7429//6667 7458//6670 +f 7430//6671 7431//2689 7398//6671 +f 7431//2689 7430//6671 7459//9675 +f 7431//6584 7432//2693 7399//6583 +f 7432//2693 7431//6584 7460//2694 +f 7461//6675 7400//11468 7432//11468 +f 7400//11468 7461//6675 7433//6675 +f 7462//9678 7401//11469 7433//11470 +f 7401//11469 7462//9678 7434//11471 +f 7463//11472 7402//11473 7434//11474 +f 7402//11473 7463//11472 7435//6682 +f 7436//11475 7435//11476 7464//11475 +f 7435//11476 7436//11475 7403//11477 +f 7438//11478 7436//11479 7465//11478 +f 7436//11479 7438//11478 7405//11480 +f 7466//11481 7438//11482 7467//11483 +f 7438//11482 7466//11481 7437//11484 +f 7468//11485 7404//6695 7437//11486 +f 7404//6695 7468//11485 7439//11487 +f 7440//11488 7439//11489 7469//11490 +f 7439//11489 7440//11488 7406//6701 +f 7441//6702 7440//6703 7470//6704 +f 7440//6703 7441//6702 7407//6705 +f 7408//2730 7471//2731 7442//2730 +f 7471//2731 7408//2730 7441//2731 +f 7409//11491 7472//11491 7443//2734 +f 7472//11491 7409//11491 7442//11492 +f 7443//2736 7444//2628 7410//2627 +f 7444//2628 7443//2736 7473//2737 +f 7444//11493 7445//6710 7411//6710 +f 7445//6710 7444//11493 7474//11494 +f 7445//6713 7446//6714 7412//6714 +f 7446//6714 7445//6713 7475//6713 +f 7446//6715 7447//11495 7414//11495 +f 7447//11495 7446//6715 7476//11496 +f 7448//11497 7477//11498 7478//6719 +f 7477//11498 7448//11497 7447//11499 +f 7450//11500 7478//11501 7449//2756 +f 7478//11501 7450//11500 7448//2757 +f 7449//11502 7479//6724 7451//11503 +f 7479//6724 7449//11502 7478//11504 +f 7451//2764 7480//6728 7452//2764 +f 7480//6728 7451//2764 7479//6728 +f 7452//11505 7456//11506 7453//11507 +f 7456//11506 7452//11505 7480//11508 +f 7453//11509 7428//11510 7454//2772 +f 7428//11510 7453//11509 7456//11511 +f 7480//11512 7455//2777 7456//11513 +f 7455//2777 7480//11512 
7481//11514 +f 7482//11515 7427//11516 7455//11517 +f 7427//11516 7482//11515 7457//11518 +f 7483//2783 7429//11519 7457//6742 +f 7429//11519 7483//2783 7458//6743 +f 7458//11520 7459//6745 7430//6746 +f 7459//6745 7458//11520 7484//6747 +f 7459//9717 7460//9718 7431//9717 +f 7460//9718 7459//9717 7485//11521 +f 7486//11522 7432//2693 7460//2694 +f 7432//2693 7486//11522 7461//6811 +f 7487//2795 7433//6750 7461//6750 +f 7433//6750 7487//2795 7462//2795 +f 7488//11523 7434//11524 7462//11524 +f 7434//11524 7488//11523 7463//6755 +f 7464//11525 7463//6759 7489//11526 +f 7463//6759 7464//11525 7435//11527 +f 7465//6760 7464//2803 7490//6762 +f 7464//2803 7465//6760 7436//11528 +f 7467//11529 7465//2810 7491//11529 +f 7465//2810 7467//11529 7438//2810 +f 7492//11530 7467//11531 7493//11532 +f 7467//11531 7492//11530 7466//11533 +f 7494//11534 7437//11535 7466//11536 +f 7437//11535 7494//11534 7468//6774 +f 7469//11537 7468//11538 7495//11539 +f 7468//11538 7469//11537 7439//11540 +f 7470//11541 7469//11542 7496//2824 +f 7469//11542 7470//11541 7440//11543 +f 7441//11544 7497//11545 7471//2826 +f 7497//11545 7441//11544 7470//11546 +f 7442//11547 7498//11548 7472//11549 +f 7498//11548 7442//11547 7471//6786 +f 7443//2736 7499//11550 7473//2737 +f 7499//11550 7443//2736 7472//2833 +f 7473//2834 7474//2836 7444//6789 +f 7474//2836 7473//2834 7500//2836 +f 7474//2840 7475//2838 7445//2837 +f 7475//2838 7474//2840 7501//2840 +f 7475//2844 7476//6790 7446//6790 +f 7476//6790 7475//2844 7502//2844 +f 7447//6791 7503//11551 7477//6793 +f 7503//11551 7447//6791 7476//11551 +f 7478//6795 7504//6796 7479//6795 +f 7504//6796 7478//6795 7477//6796 +f 7479//6797 7481//11552 7480//6797 +f 7481//11552 7479//6797 7504//6799 +f 7505//6801 7455//11553 7481//6802 +f 7455//11553 7505//6801 7482//11554 +f 7506//11555 7457//11556 7482//11556 +f 7457//11556 7506//11555 7483//11555 +f 7507//11557 7458//2865 7483//6806 +f 7458//2865 7507//11557 7484//2867 +f 7484//2868 7485//11558 7459//11559 
+f 7485//11558 7484//2868 7508//2871 +f 7509//6809 7460//11560 7485//2874 +f 7460//11560 7509//6809 7486//6810 +f 7510//6811 7461//6811 7486//11522 +f 7461//6811 7510//6811 7487//6812 +f 7511//11561 7462//2880 7487//2880 +f 7462//2880 7511//11561 7488//11561 +f 7512//6816 7463//6816 7488//6816 +f 7463//6816 7512//6816 7489//6816 +f 7490//11562 7489//11563 7513//11564 +f 7489//11563 7490//11562 7464//6820 +f 7491//11565 7490//11566 7514//6823 +f 7490//11566 7491//11565 7465//6824 +f 7493//11567 7491//6825 7515//11568 +f 7491//6825 7493//11567 7467//6827 +f 7516//11569 7493//11570 7517//6830 +f 7493//11570 7516//11569 7492//11571 +f 7518//11572 7466//11573 7492//11574 +f 7466//11573 7518//11572 7494//11575 +f 7519//11576 7468//6837 7494//6837 +f 7468//6837 7519//11576 7495//6837 +f 7496//11577 7495//11578 7520//6840 +f 7495//11578 7496//11577 7469//11579 +f 7470//2914 7521//11580 7497//6844 +f 7521//11580 7470//2914 7496//2914 +f 7471//11581 7522//2918 7498//6845 +f 7522//2918 7471//11581 7497//11582 +f 7472//2833 7523//6787 7499//11550 +f 7523//6787 7472//2833 7498//11583 +f 7473//6849 7524//6849 7500//11584 +f 7524//6849 7473//6849 7499//6851 +f 7500//11585 7501//2926 7474//2927 +f 7501//2926 7500//11585 7525//2928 +f 7501//11586 7502//9781 7475//2931 +f 7502//9781 7501//11586 7526//11587 +f 7502//6858 7503//2933 7476//2933 +f 7503//2933 7502//6858 7527//2935 +f 7503//11588 7504//6860 7477//6861 +f 7504//6860 7503//11588 7528//11589 +f 7528//9789 7481//6863 7504//11590 +f 7481//6863 7528//9789 7505//6865 +f 7529//11591 7482//2944 7505//11592 +f 7482//2944 7529//11591 7506//11593 +f 7530//2947 7483//6871 7506//2949 +f 7483//6871 7530//2947 7507//6873 +f 7531//11594 7484//11595 7507//2953 +f 7484//11595 7531//11594 7508//11596 +f 7532//11597 7485//11598 7508//11599 +f 7485//11598 7532//11597 7509//11600 +f 7533//11601 7486//11602 7509//9796 +f 7486//11602 7533//11601 7510//11603 +f 7534//2963 7487//6812 7510//6811 +f 7487//6812 7534//2963 7511//2964 +f 7535//6881 
7488//6883 7511//6883 +f 7488//6883 7535//6881 7512//6881 +f 7536//2969 7489//2971 7512//2971 +f 7489//2971 7536//2969 7513//6884 +f 7514//2974 7513//11604 7537//11605 +f 7513//11604 7514//2974 7490//6887 +f 7491//6888 7538//9800 7515//11606 +f 7538//9800 7491//6888 7514//6890 +f 7493//11607 7539//6892 7517//6893 +f 7539//6892 7493//11607 7515//11608 +f 7516//6897 7540//6896 7541//6896 +f 7540//6896 7516//6897 7517//6897 +f 7542//11609 7492//11610 7516//11611 +f 7492//11610 7542//11609 7518//11612 +f 7543//11613 7494//11614 7518//11615 +f 7494//11614 7543//11613 7519//11616 +f 7544//2996 7495//2996 7519//2997 +f 7495//2996 7544//2996 7520//2998 +f 7496//6908 7545//6907 7521//11617 +f 7545//6907 7496//6908 7520//6907 +f 7497//11618 7546//6911 7522//11619 +f 7546//6911 7497//11618 7521//11620 +f 7498//11583 7547//11621 7523//6787 +f 7547//11621 7498//11583 7522//11622 +f 7499//11623 7548//6916 7524//6914 +f 7548//6916 7499//11623 7523//11624 +f 7500//3009 7549//3010 7525//3009 +f 7549//3010 7500//3009 7524//11625 +f 7525//9815 7526//3012 7501//11626 +f 7526//3012 7525//9815 7550//6920 +f 7526//11627 7527//11628 7502//3017 +f 7527//11628 7526//11627 7551//11629 +f 7527//3019 7528//3020 7503//11630 +f 7528//3020 7527//3019 7552//11631 +f 7552//11632 7505//11633 7528//11634 +f 7505//11633 7552//11632 7529//11635 +f 7553//11636 7506//11637 7529//3029 +f 7506//11637 7553//11636 7530//9825 +f 7554//3031 7507//6933 7530//3033 +f 7507//6933 7554//3031 7531//3034 +f 7532//11638 7531//11639 7555//11640 +f 7531//11639 7532//11638 7508//11641 +f 7556//3037 7509//3037 7532//11642 +f 7509//3037 7556//3037 7533//3037 +f 7557//11643 7510//11644 7533//11645 +f 7510//11644 7557//11643 7534//3042 +f 7558//3043 7511//2964 7534//2963 +f 7511//2964 7558//3043 7535//11646 +f 7559//11647 7512//6940 7535//6940 +f 7512//6940 7559//11647 7536//11647 +f 7560//11648 7513//11649 7536//11650 +f 7513//11649 7560//11648 7537//6946 +f 7514//11651 7561//6948 7538//11652 +f 7561//6948 7514//11651 
7537//9835 +f 7515//11653 7562//11654 7539//11655 +f 7562//11654 7515//11653 7538//11656 +f 7517//6955 7563//11657 7540//6957 +f 7563//11657 7517//6955 7539//6958 +f 7541//11658 7564//6960 7565//11659 +f 7564//6960 7541//11658 7540//11658 +f 7566//6961 7516//11660 7541//6963 +f 7516//11660 7566//6961 7542//6964 +f 7567//3074 7518//6967 7542//11661 +f 7518//6967 7567//3074 7543//3074 +f 7568//11662 7519//11663 7543//11664 +f 7519//11663 7568//11662 7544//6971 +f 7520//11665 7569//11666 7545//6974 +f 7569//11666 7520//11665 7544//11667 +f 7521//6976 7570//3084 7546//6976 +f 7570//3084 7521//6976 7545//3084 +f 7522//11622 7571//11668 7547//11621 +f 7571//11668 7522//11622 7546//9852 +f 7523//3088 7572//11669 7548//6981 +f 7572//11669 7523//3088 7547//11670 +f 7524//9856 7573//6983 7549//9856 +f 7573//6983 7524//9856 7548//11671 +f 7574//6984 7525//11672 7549//6984 +f 7525//11672 7574//6984 7550//6987 +f 7550//3099 7551//6988 7526//3101 +f 7551//6988 7550//3099 7575//6990 +f 7551//6991 7552//6992 7527//6993 +f 7552//6992 7551//6991 7576//9860 +f 7576//11673 7529//6995 7552//6997 +f 7529//6995 7576//11673 7553//6995 +f 7577//3111 7530//9864 7553//11674 +f 7530//9864 7577//3111 7554//3114 +f 7555//11675 7554//11676 7578//11675 +f 7554//11676 7555//11675 7531//11676 +f 7556//7003 7555//11677 7579//3118 +f 7555//11677 7556//7003 7532//3117 +f 7580//9866 7533//11678 7556//7006 +f 7533//11678 7580//9866 7557//11679 +f 7581//7008 7534//11680 7557//7010 +f 7534//11680 7581//7008 7558//11681 +f 7582//11682 7535//11646 7558//3043 +f 7535//11646 7582//11682 7559//11683 +f 7583//11684 7536//11685 7559//3130 +f 7536//11685 7583//11684 7560//7013 +f 7561//11686 7560//11687 7584//11688 +f 7560//11687 7561//11686 7537//11689 +f 7538//11690 7585//11691 7562//11692 +f 7585//11691 7538//11690 7561//11693 +f 7539//11694 7586//11695 7563//3139 +f 7586//11695 7539//11694 7562//11696 +f 7540//7026 7587//7027 7564//7028 +f 7587//7027 7540//7026 7563//7026 +f 7565//7029 7588//3145 7589//3147 
+f 7588//3145 7565//7029 7564//7030 +f 7566//11697 7565//7032 7590//7032 +f 7565//7032 7566//11697 7541//11698 +f 7591//7037 7542//7035 7566//7037 +f 7542//7035 7591//7037 7567//7035 +f 7568//11699 7567//11700 7592//11701 +f 7567//11700 7568//11699 7543//11702 +f 7569//11703 7568//11703 7593//11703 +f 7568//11703 7569//11703 7544//11703 +f 7545//11704 7594//7045 7570//7046 +f 7594//7045 7545//11704 7569//7047 +f 7546//9852 7595//3167 7571//11668 +f 7595//3167 7546//9852 7570//3168 +f 7547//11705 7596//11706 7572//11707 +f 7596//11706 7547//11705 7571//11708 +f 7548//11709 7597//9891 7573//11710 +f 7597//9891 7548//11709 7572//9893 +f 7598//11711 7549//11712 7573//11713 +f 7549//11712 7598//11711 7574//11714 +f 7599//11715 7550//3179 7574//11716 +f 7550//3179 7599//11715 7575//11717 +f 7575//3184 7576//3184 7551//3184 +f 7576//3184 7575//3184 7600//3183 +f 7600//3186 7553//3186 7576//3188 +f 7553//3186 7600//3186 7577//3187 +f 7578//3189 7577//11718 7601//7060 +f 7577//11718 7578//3189 7554//3192 +f 7579//11719 7578//11720 7602//7063 +f 7578//11720 7579//11719 7555//11721 +f 7580//3197 7579//11722 7603//3199 +f 7579//11722 7580//3197 7556//3200 +f 7604//11723 7557//11724 7580//7070 +f 7557//11724 7604//11723 7581//11725 +f 7605//11726 7558//11727 7581//11726 +f 7558//11727 7605//11726 7582//11727 +f 7606//11728 7559//11683 7582//11682 +f 7559//11683 7606//11728 7583//3208 +f 7584//11729 7583//11730 7607//11729 +f 7583//11730 7584//11729 7560//11731 +f 7585//11732 7584//11733 7608//11734 +f 7584//11733 7585//11732 7561//3213 +f 7562//3215 7609//11735 7586//3216 +f 7609//11735 7562//3215 7585//3215 +f 7563//11736 7610//11737 7587//3217 +f 7610//11737 7563//11736 7586//11737 +f 7564//11738 7611//11739 7588//7089 +f 7611//11739 7564//11738 7587//7088 +f 7589//7090 7612//3225 7613//7091 +f 7612//3225 7589//7090 7588//7092 +f 7590//3228 7589//3228 7614//3228 +f 7589//3228 7590//3228 7565//11740 +f 7591//11741 7590//9919 7615//9919 +f 7590//9919 7591//11741 7566//11741 +f 
7592//11742 7591//11743 7616//11744 +f 7591//11743 7592//11742 7567//7100 +f 7593//7103 7592//7103 7617//7103 +f 7592//7103 7593//7103 7568//7103 +f 7594//7104 7593//11745 7618//7106 +f 7593//11745 7594//7104 7569//7107 +f 7570//3242 7619//11746 7595//3244 +f 7619//11746 7570//3242 7594//11747 +f 7571//11748 7620//11749 7596//11750 +f 7620//11749 7571//11748 7595//7112 +f 7572//3250 7621//9928 7597//3252 +f 7621//9928 7572//3250 7596//3253 +f 7573//11751 7622//11752 7598//11753 +f 7622//11752 7573//11751 7597//11754 +f 7623//11755 7574//11756 7598//11757 +f 7574//11756 7623//11755 7599//7122 +f 7624//11758 7575//11759 7599//3263 +f 7575//11759 7624//11758 7600//7125 +f 7601//7126 7600//7127 7624//7128 +f 7600//7127 7601//7126 7577//3268 +f 7602//7129 7601//7129 7625//7129 +f 7601//7129 7602//7129 7578//7129 +f 7603//3274 7602//3273 7626//3274 +f 7602//3273 7603//3274 7579//3273 +f 7604//11760 7603//7132 7627//11761 +f 7603//7132 7604//11760 7580//3279 +f 7628//11762 7581//11763 7604//11763 +f 7581//11763 7628//11762 7605//11762 +f 7629//11764 7582//11765 7605//11766 +f 7582//11765 7629//11764 7606//7135 +f 7607//3286 7606//11728 7630//11767 +f 7606//11728 7607//3286 7583//3208 +f 7608//11768 7607//7139 7631//11768 +f 7607//7139 7608//11768 7584//11769 +f 7609//11770 7608//11771 7632//7143 +f 7608//11771 7609//11770 7585//7144 +f 7586//3294 7633//11772 7610//11773 +f 7633//11772 7586//3294 7609//9946 +f 7587//7149 7634//11774 7611//7151 +f 7634//11774 7587//7149 7610//3298 +f 7588//3301 7635//7153 7612//3301 +f 7635//7153 7588//3301 7611//7154 +f 7636//11775 7612//11776 7637//11777 +f 7612//11776 7636//11775 7613//11778 +f 7614//3307 7613//3308 7638//3308 +f 7613//3308 7614//3307 7589//3307 +f 7615//3310 7614//11779 7639//3312 +f 7614//11779 7615//3310 7590//11780 +f 7616//11781 7615//3316 7640//11782 +f 7615//3316 7616//11781 7591//11783 +f 7617//11784 7616//11785 7641//3318 +f 7616//11785 7617//11784 7592//11786 +f 7618//11787 7617//3321 7642//9953 +f 7617//3321 
7618//11787 7593//7171 +f 7619//9954 7618//3323 7643//11788 +f 7618//3323 7619//9954 7594//11789 +f 7595//7174 7644//9955 7620//7176 +f 7644//9955 7595//7174 7619//11790 +f 7596//3327 7645//3328 7621//3327 +f 7645//3328 7596//3327 7620//11791 +f 7622//11792 7621//11793 7646//3332 +f 7621//11793 7622//11792 7597//11794 +f 7647//11795 7598//11796 7622//11797 +f 7598//11796 7647//11795 7623//11798 +f 7648//7189 7599//7190 7623//11799 +f 7599//7190 7648//7189 7624//7187 +f 7625//7191 7624//7192 7648//7191 +f 7624//7192 7625//7191 7601//7192 +f 7626//7194 7625//7195 7649//7194 +f 7625//7195 7626//7194 7602//7195 +f 7627//11800 7626//11801 7650//11802 +f 7626//11801 7627//11800 7603//11803 +f 7628//11804 7627//11805 7651//11806 +f 7627//11805 7628//11804 7604//11807 +f 7652//11808 7605//11809 7628//3355 +f 7605//11809 7652//11808 7629//11810 +f 7630//7211 7629//11811 7653//7211 +f 7629//11811 7630//7211 7606//11811 +f 7631//11812 7630//11767 7654//11813 +f 7630//11767 7631//11812 7607//3286 +f 7632//11814 7631//7217 7655//11814 +f 7631//7217 7632//11814 7608//7217 +f 7633//11815 7632//7219 7656//7220 +f 7632//7219 7633//11815 7609//9974 +f 7610//3368 7657//11816 7634//7222 +f 7657//11816 7610//3368 7633//7223 +f 7611//3370 7658//7224 7635//7224 +f 7658//7224 7611//3370 7634//3370 +f 7637//11817 7635//11818 7659//11819 +f 7635//11818 7637//11817 7612//9978 +f 7660//11777 7637//11777 7661//11777 +f 7637//11777 7660//11777 7636//11775 +f 7662//7228 7613//11820 7636//11821 +f 7613//11820 7662//7228 7638//9981 +f 7639//7232 7638//3378 7663//3377 +f 7638//3378 7639//7232 7614//3379 +f 7640//7233 7639//11822 7664//7235 +f 7639//11822 7640//7233 7615//11823 +f 7641//11824 7640//11825 7665//11826 +f 7640//11825 7641//11824 7616//3385 +f 7642//11827 7641//11828 7666//11829 +f 7641//11828 7642//11827 7617//3390 +f 7643//11788 7642//11830 7667//3392 +f 7642//11830 7643//11788 7618//3323 +f 7644//7242 7643//11831 7668//7243 +f 7643//11831 7644//7242 7619//7242 +f 7620//9990 
7669//9991 7645//7246 +f 7669//9991 7620//9990 7644//7247 +f 7646//11832 7645//11833 7670//3401 +f 7645//11833 7646//11832 7621//3402 +f 7647//11834 7646//11835 7671//11836 +f 7646//11835 7647//11834 7622//11837 +f 7672//7256 7623//7255 7647//7256 +f 7623//7255 7672//7256 7648//7255 +f 7649//11838 7648//11839 7672//11840 +f 7648//11839 7649//11838 7625//7260 +f 7650//7263 7649//7263 7673//7263 +f 7649//7263 7650//7263 7626//11841 +f 7651//7265 7650//7266 7674//7267 +f 7650//7266 7651//7265 7627//11842 +f 7628//11843 7675//11844 7652//11845 +f 7675//11844 7628//11843 7651//11846 +f 7653//3429 7652//7272 7676//3429 +f 7652//7272 7653//3429 7629//7272 +f 7654//3433 7653//7273 7677//3433 +f 7653//7273 7654//3433 7630//11847 +f 7655//11848 7654//11813 7678//11849 +f 7654//11813 7655//11848 7631//11812 +f 7656//10013 7655//10014 7679//11850 +f 7655//10014 7656//10013 7632//11851 +f 7657//7279 7656//11852 7680//7281 +f 7656//11852 7657//7279 7633//7282 +f 7634//11853 7681//11854 7658//3446 +f 7681//11854 7634//11853 7657//11855 +f 7659//7286 7658//11856 7682//3450 +f 7658//11856 7659//7286 7635//11857 +f 7661//3451 7659//11819 7683//3451 +f 7659//11819 7661//3451 7637//11817 +f 7684//7289 7661//7289 7685//7289 +f 7661//7289 7684//7289 7660//7289 +f 7686//7230 7636//11821 7660//7230 +f 7636//11821 7686//7230 7662//7228 +f 7687//11858 7638//11859 7662//11860 +f 7638//11859 7687//11858 7663//3454 +f 7664//3456 7663//7293 7688//3458 +f 7663//7293 7664//3456 7639//3459 +f 7665//11861 7664//3461 7689//7297 +f 7664//3461 7665//11861 7640//3463 +f 7666//11862 7665//11863 7690//7300 +f 7665//11863 7666//11862 7641//11864 +f 7667//3392 7666//11865 7691//11866 +f 7666//11865 7667//3392 7642//11830 +f 7668//7302 7667//11867 7692//11868 +f 7667//11867 7668//7302 7643//7305 +f 7669//3474 7668//11869 7693//7307 +f 7668//11869 7669//3474 7644//11870 +f 7670//11871 7669//11872 7694//7310 +f 7669//11872 7670//11871 7645//11873 +f 7671//11874 7670//11875 7695//11876 +f 7670//11875 
7671//11874 7646//11877 +f 7672//11878 7671//11879 7696//11880 +f 7671//11879 7672//11878 7647//7319 +f 7649//7322 7696//11881 7673//7321 +f 7696//11881 7649//7322 7672//7322 +f 7650//3497 7697//7323 7674//7323 +f 7697//7323 7650//3497 7673//3497 +f 7651//11882 7698//11883 7675//3500 +f 7698//11883 7651//11882 7674//7327 +f 7652//7328 7699//7329 7676//11884 +f 7699//7329 7652//7328 7675//7331 +f 7677//11885 7676//11886 7700//11885 +f 7676//11886 7677//11885 7653//3509 +f 7678//3510 7677//3511 7701//3512 +f 7677//3511 7678//3510 7654//3513 +f 7679//10049 7678//11849 7702//11887 +f 7678//11849 7679//10049 7655//11848 +f 7680//11888 7679//11889 7703//3517 +f 7679//11889 7680//11888 7656//3519 +f 7681//7342 7680//11890 7704//11891 +f 7680//11890 7681//7342 7657//3523 +f 7682//11892 7681//11893 7705//11894 +f 7681//11893 7682//11892 7658//3527 +f 7683//11895 7682//3450 7706//11896 +f 7682//3450 7683//11895 7659//7286 +f 7685//3530 7683//3530 7707//3530 +f 7683//3530 7685//3530 7661//3530 +f 7708//7351 7684//7352 7685//7352 +f 7684//7352 7708//7351 7709//7351 +f 7710//11897 7660//7354 7684//7355 +f 7660//7354 7710//11897 7686//7354 +f 7711//11898 7662//11899 7686//7356 +f 7662//11899 7711//11898 7687//11899 +f 7712//11900 7663//11901 7687//11902 +f 7663//11901 7712//11900 7688//3539 +f 7689//10058 7688//10059 7713//3542 +f 7688//10059 7689//10058 7664//3543 +f 7690//7360 7689//11903 7714//11904 +f 7689//11903 7690//7360 7665//11905 +f 7691//11866 7690//3548 7715//7363 +f 7690//3548 7691//11866 7666//11865 +f 7692//7364 7691//11906 7716//3552 +f 7691//11906 7692//7364 7667//7365 +f 7693//7366 7692//7367 7717//7368 +f 7692//7367 7693//7366 7668//7369 +f 7718//11907 7669//11908 7693//11907 +f 7669//11908 7718//11907 7694//11908 +f 7695//7372 7694//11909 7719//3564 +f 7694//11909 7695//7372 7670//11909 +f 7696//3566 7695//3567 7720//3568 +f 7695//3567 7696//3566 7671//3569 +f 7673//3570 7720//11910 7697//7377 +f 7720//11910 7673//3570 7696//3573 +f 7674//7379 7721//11911 
7698//11912 +f 7721//11911 7674//7379 7697//7382 +f 7675//3578 7722//3579 7699//3580 +f 7722//3579 7675//3578 7698//11913 +f 7700//11914 7699//11915 7723//11916 +f 7699//11915 7700//11914 7676//7388 +f 7701//7389 7700//11917 7724//11918 +f 7700//11917 7701//7389 7677//7392 +f 7702//7394 7701//11919 7725//7394 +f 7701//11919 7702//7394 7678//3593 +f 7703//3594 7702//11887 7726//7395 +f 7702//11887 7703//3594 7679//10049 +f 7704//3596 7703//7396 7727//3598 +f 7703//7396 7704//3596 7680//11920 +f 7705//11921 7704//11922 7728//7400 +f 7704//11922 7705//11921 7681//3601 +f 7706//11923 7705//11894 7729//11924 +f 7705//11894 7706//11923 7682//11892 +f 7707//3606 7706//3607 7730//3606 +f 7706//3607 7707//3606 7683//3607 +f 7731//3608 7685//11925 7707//11925 +f 7685//11925 7731//3608 7708//3608 +f 7732//7289 7708//11926 7733//7289 +f 7708//11926 7732//7289 7709//7289 +f 7709//11927 7710//11897 7684//7355 +f 7710//11897 7709//11927 7734//7405 +f 7735//7406 7686//7406 7710//7406 +f 7686//7406 7735//7406 7711//7406 +f 7736//10089 7687//10090 7711//10091 +f 7687//10090 7736//10089 7712//7410 +f 7713//11928 7712//11929 7737//3618 +f 7712//11929 7713//11928 7688//7414 +f 7714//3622 7713//7416 7738//7416 +f 7713//7416 7714//3622 7689//11930 +f 7715//7363 7714//3623 7739//7417 +f 7714//3623 7715//7363 7690//3548 +f 7716//3627 7715//11931 7740//11932 +f 7715//11931 7716//3627 7691//11933 +f 7717//3632 7716//11934 7741//11935 +f 7716//11934 7717//3632 7692//3632 +f 7742//7425 7693//11936 7717//7425 +f 7693//11936 7742//7425 7718//11936 +f 7743//11937 7694//11938 7718//11939 +f 7694//11938 7743//11937 7719//11938 +f 7720//3640 7719//3641 7744//3641 +f 7719//3641 7720//3640 7695//3640 +f 7697//7427 7744//11940 7721//11941 +f 7744//11940 7697//7427 7720//3645 +f 7698//3646 7745//7428 7722//7428 +f 7745//7428 7698//3646 7721//3648 +f 7699//7429 7746//11942 7723//7431 +f 7746//11942 7699//7429 7722//3652 +f 7724//11943 7723//11944 7747//3655 +f 7723//11944 7724//11943 7700//11945 +f 
7725//10108 7724//11946 7748//7437 +f 7724//11946 7725//10108 7701//7438 +f 7726//11947 7725//3661 7749//11947 +f 7725//3661 7726//11947 7702//7439 +f 7727//3663 7726//7395 7750//10109 +f 7726//7395 7727//3663 7703//3594 +f 7728//11948 7727//11949 7751//11950 +f 7727//11949 7728//11948 7704//11951 +f 7729//3669 7728//7400 7752//10111 +f 7728//7400 7729//3669 7705//11921 +f 7730//11952 7729//11953 7753//11952 +f 7729//11953 7730//11952 7706//11953 +f 7754//7443 7707//3673 7730//3673 +f 7707//3673 7754//7443 7731//7443 +f 7733//3530 7731//3530 7755//3530 +f 7731//3530 7733//3530 7708//3530 +f 7756//7444 7733//7444 7757//7444 +f 7733//7444 7756//7444 7732//7444 +f 7758//11954 7709//11927 7732//11955 +f 7709//11927 7758//11954 7734//7405 +f 7734//11956 7735//7449 7710//7450 +f 7735//7449 7734//11956 7759//7451 +f 7760//11957 7711//7452 7735//11957 +f 7711//7452 7760//11957 7736//7452 +f 7712//11958 7761//11959 7737//3684 +f 7761//11959 7712//11958 7736//3685 +f 7738//11960 7737//11961 7762//11962 +f 7737//11961 7738//11960 7713//7460 +f 7739//7417 7738//3690 7763//3690 +f 7738//3690 7739//7417 7714//3623 +f 7740//11963 7739//11964 7764//11965 +f 7739//11964 7740//11963 7715//11966 +f 7741//11967 7740//11968 7765//11969 +f 7740//11968 7741//11967 7716//11967 +f 7766//3702 7717//11970 7741//11970 +f 7717//11970 7766//3702 7742//11971 +f 7767//11972 7718//11973 7742//11974 +f 7718//11973 7767//11972 7743//11975 +f 7744//11976 7743//11976 7768//11976 +f 7743//11976 7744//11976 7719//11976 +f 7721//7477 7768//11977 7745//7479 +f 7768//11977 7721//7477 7744//11978 +f 7722//11979 7769//11980 7746//11980 +f 7769//11980 7722//11979 7745//11979 +f 7723//11981 7770//11982 7747//11983 +f 7770//11982 7723//11981 7746//7485 +f 7748//11984 7747//11985 7771//11986 +f 7747//11985 7748//11984 7724//11987 +f 7749//3723 7748//11988 7772//11989 +f 7748//11988 7749//3723 7725//11990 +f 7750//3726 7749//3727 7773//3726 +f 7749//3727 7750//3726 7726//3727 +f 7751//11991 7750//11992 
7774//11993 +f 7750//11992 7751//11991 7727//11994 +f 7752//11995 7751//11950 7775//11996 +f 7751//11950 7752//11995 7728//11948 +f 7753//3734 7752//3734 7776//3734 +f 7752//3734 7753//3734 7729//3734 +f 7754//11997 7753//11997 7777//11997 +f 7753//11997 7754//11997 7730//11997 +f 7755//3737 7754//3737 7778//3737 +f 7754//3737 7755//3737 7731//3737 +f 7757//11998 7755//3739 7779//11998 +f 7755//3739 7757//11998 7733//3739 +f 7780//7289 7756//7289 7757//7289 +f 7756//7289 7780//7289 7781//7289 +f 7782//11999 7732//11955 7756//7498 +f 7732//11955 7782//11999 7758//11954 +f 7783//7499 7734//7499 7758//7499 +f 7734//7499 7783//7499 7759//7499 +f 7759//10139 7760//7500 7735//7500 +f 7760//7500 7759//10139 7784//10139 +f 7785//3745 7736//3746 7760//3747 +f 7736//3746 7785//3745 7761//3748 +f 7737//11961 7786//12000 7762//11962 +f 7786//12000 7737//11961 7761//12001 +f 7763//10140 7762//12002 7787//12003 +f 7762//12002 7763//10140 7738//3754 +f 7764//3755 7763//3756 7788//3756 +f 7763//3756 7764//3755 7739//3755 +f 7765//12004 7764//12005 7789//7510 +f 7764//12005 7765//12004 7740//3760 +f 7790//12006 7741//12007 7765//7514 +f 7741//12007 7790//12006 7766//7515 +f 7791//12008 7742//7517 7766//12009 +f 7742//7517 7791//12008 7767//12010 +f 7792//12011 7743//12012 7767//12013 +f 7743//12012 7792//12011 7768//12014 +f 7745//12015 7792//7524 7769//12016 +f 7792//7524 7745//12015 7768//7526 +f 7746//12017 7793//12018 7770//12018 +f 7793//12018 7746//12017 7769//12017 +f 7771//12019 7770//12020 7794//12021 +f 7770//12020 7771//12019 7747//12022 +f 7772//12023 7771//12024 7795//12025 +f 7771//12024 7772//12023 7748//12026 +f 7773//12027 7772//10159 7796//3784 +f 7772//10159 7773//12027 7749//3785 +f 7774//12028 7773//12029 7797//7540 +f 7773//12029 7774//12028 7750//7541 +f 7775//12030 7774//11993 7798//12031 +f 7774//11993 7775//12030 7751//11991 +f 7776//12032 7775//7634 7799//12032 +f 7775//7634 7776//12032 7752//7634 +f 7777//12033 7776//12033 7800//12033 +f 7776//12033 
7777//12033 7753//12033 +f 7778//11953 7777//11953 7801//11953 +f 7777//11953 7778//11953 7754//11953 +f 7779//7542 7778//10161 7802//7542 +f 7778//10161 7779//7542 7755//10161 +f 7803//3797 7757//3797 7779//3797 +f 7757//3797 7803//3797 7780//3530 +f 7804//12034 7781//12035 7780//12035 +f 7781//12035 7804//12034 7805//12034 +f 7781//7354 7782//11999 7756//7498 +f 7782//11999 7781//7354 7806//7354 +f 7807//7546 7758//7546 7782//7547 +f 7758//7546 7807//7546 7783//7548 +f 7808//7452 7759//7452 7783//7452 +f 7759//7452 7808//7452 7784//7452 +f 7784//12036 7785//12037 7760//3804 +f 7785//12037 7784//12036 7809//12038 +f 7810//12039 7761//12040 7785//3807 +f 7761//12040 7810//12039 7786//12041 +f 7762//12002 7811//7553 7787//12003 +f 7811//7553 7762//12002 7786//7554 +f 7788//12042 7787//7556 7812//7557 +f 7787//7556 7788//12042 7763//7558 +f 7789//10172 7788//7559 7813//3818 +f 7788//7559 7789//10172 7764//7560 +f 7814//12043 7765//12044 7789//3822 +f 7765//12044 7814//12043 7790//12045 +f 7815//12046 7766//3825 7790//3826 +f 7766//3825 7815//12046 7791//3825 +f 7816//7566 7767//12047 7791//12048 +f 7767//12047 7816//7566 7792//7569 +f 7793//12049 7792//12050 7816//12051 +f 7792//12050 7793//12049 7769//12052 +f 7794//12053 7793//12054 7817//12053 +f 7793//12054 7794//12053 7770//12055 +f 7795//3837 7794//12056 7818//3837 +f 7794//12056 7795//3837 7771//12056 +f 7796//3842 7795//3841 7819//3842 +f 7795//3841 7796//3842 7772//3841 +f 7797//12057 7796//3843 7820//3845 +f 7796//3843 7797//12057 7773//7580 +f 7798//7581 7797//7540 7821//7582 +f 7797//7540 7798//7581 7774//12028 +f 7775//7682 7822//7682 7799//7682 +f 7822//7682 7775//7682 7798//7682 +f 7800//7584 7799//7584 7823//7584 +f 7799//7584 7800//7584 7776//7584 +f 7777//3734 7824//3734 7801//3734 +f 7824//3734 7777//3734 7800//3734 +f 7825//12058 7778//12059 7801//12060 +f 7778//12059 7825//12058 7802//3853 +f 7826//3607 7779//3606 7802//3606 +f 7779//3606 7826//3607 7803//3607 +f 7827//12061 7780//3855 7803//3855 
+f 7780//3855 7827//12061 7804//12061 +f 7828//7590 7804//3858 7829//3859 +f 7804//3858 7828//7590 7805//12062 +f 7805//7593 7806//12063 7781//12063 +f 7806//12063 7805//7593 7830//7593 +f 7806//7406 7807//7595 7782//7595 +f 7807//7595 7806//7406 7831//7406 +f 7832//12064 7783//12064 7807//7598 +f 7783//12064 7832//12064 7808//12065 +f 7809//3748 7808//3748 7833//3748 +f 7808//3748 7809//3748 7784//3748 +f 7810//12066 7809//12067 7834//12068 +f 7809//12067 7810//12066 7785//12069 +f 7835//7601 7786//7602 7810//7603 +f 7786//7602 7835//7601 7811//7604 +f 7787//7556 7836//7605 7812//7557 +f 7836//7605 7787//7556 7811//7606 +f 7813//3944 7812//12070 7837//3879 +f 7812//12070 7813//3944 7788//7608 +f 7838//12071 7789//3882 7813//7610 +f 7789//3882 7838//12071 7814//3884 +f 7839//12072 7790//12073 7814//12074 +f 7790//12073 7839//12072 7815//3888 +f 7840//3889 7791//12075 7815//10199 +f 7791//12075 7840//3889 7816//12076 +f 7817//12077 7816//3894 7840//3895 +f 7816//3894 7817//12077 7793//12078 +f 7818//12079 7817//12080 7841//3897 +f 7817//12080 7818//12079 7794//12081 +f 7819//10203 7818//10204 7842//7624 +f 7818//10204 7819//10203 7795//12082 +f 7796//12083 7843//12084 7820//12085 +f 7843//12084 7796//12083 7819//3903 +f 7821//7629 7820//7630 7844//3907 +f 7820//7630 7821//7629 7797//12086 +f 7798//7632 7845//7632 7822//7632 +f 7845//7632 7798//7632 7821//7632 +f 7823//7633 7822//7633 7846//7633 +f 7822//7633 7823//7633 7799//7633 +f 7800//7634 7847//7634 7824//7634 +f 7847//7634 7800//7634 7823//7634 +f 7848//3911 7801//3912 7824//10209 +f 7801//3912 7848//3911 7825//3914 +f 7849//12087 7802//12088 7825//12089 +f 7802//12088 7849//12087 7826//11953 +f 7850//12090 7803//3919 7826//3919 +f 7803//3919 7850//12090 7827//12090 +f 7829//10211 7827//12091 7851//7640 +f 7827//12091 7829//10211 7804//3923 +f 7852//3924 7829//3924 7853//3924 +f 7829//3924 7852//3924 7828//3924 +f 7854//12092 7805//7643 7828//7644 +f 7805//7643 7854//12092 7830//7645 +f 7830//7646 7831//12093 
7806//12093 +f 7831//12093 7830//7646 7855//7646 +f 7831//12094 7832//12095 7807//7650 +f 7832//12095 7831//12094 7856//7452 +f 7857//3934 7808//12096 7832//3934 +f 7808//12096 7857//3934 7833//12096 +f 7834//12041 7833//12041 7858//12041 +f 7833//12041 7834//12041 7809//12041 +f 7835//12097 7834//7652 7859//7653 +f 7834//7652 7835//12097 7810//7654 +f 7836//7655 7835//7656 7860//7657 +f 7835//7656 7836//7655 7811//7658 +f 7812//12070 7861//3944 7837//3879 +f 7861//3944 7812//12070 7836//3944 +f 7838//7660 7837//12098 7862//7709 +f 7837//12098 7838//7660 7813//7663 +f 7863//3950 7814//12099 7838//3950 +f 7814//12099 7863//3950 7839//12099 +f 7864//12100 7815//7665 7839//7666 +f 7815//7665 7864//12100 7840//12101 +f 7841//12102 7840//3957 7864//7668 +f 7840//3957 7841//12102 7817//3957 +f 7842//12103 7841//10221 7865//7673 +f 7841//10221 7842//12103 7818//3961 +f 7819//7674 7866//7675 7843//7675 +f 7866//7675 7819//7674 7842//7674 +f 7820//12104 7867//12105 7844//7678 +f 7867//12105 7820//12104 7843//12106 +f 7821//3968 7868//3968 7845//3968 +f 7868//3968 7821//3968 7844//3968 +f 7846//7681 7845//7680 7869//7681 +f 7845//7680 7846//7681 7822//7680 +f 7847//7682 7846//7682 7870//7682 +f 7846//7682 7847//7682 7823//7682 +f 7871//12107 7824//7685 7847//7685 +f 7824//7685 7871//12107 7848//12107 +f 7849//12108 7848//3972 7872//12109 +f 7848//3972 7849//12108 7825//3973 +f 7873//12110 7826//12111 7849//12111 +f 7826//12111 7873//12110 7850//12110 +f 7851//10225 7850//12112 7874//3979 +f 7850//12112 7851//10225 7827//3980 +f 7853//12113 7851//7692 7875//7693 +f 7851//7692 7853//12113 7829//7694 +f 7876//12114 7853//12115 7877//7697 +f 7853//12115 7876//12114 7852//12114 +f 7878//7698 7828//7699 7852//7698 +f 7828//7699 7878//7698 7854//7699 +f 7879//7700 7830//3991 7854//3992 +f 7830//3991 7879//7700 7855//7702 +f 7855//7705 7856//7704 7831//7704 +f 7856//7704 7855//7705 7880//7705 +f 7881//3748 7832//3748 7856//3748 +f 7832//3748 7881//3748 7857//3748 +f 7833//12116 
7882//12116 7858//12116 +f 7882//12116 7833//12116 7857//12116 +f 7859//7604 7858//7604 7883//7604 +f 7858//7604 7859//7604 7834//7604 +f 7860//12117 7859//12117 7884//7708 +f 7859//12117 7860//12117 7835//7706 +f 7861//4002 7860//4002 7885//4002 +f 7860//4002 7861//4002 7836//4002 +f 7862//7709 7861//10232 7886//10232 +f 7861//10232 7862//7709 7837//12098 +f 7863//12118 7862//12119 7887//7712 +f 7862//12119 7863//12118 7838//12120 +f 7888//12121 7839//12122 7863//12123 +f 7839//12122 7888//12121 7864//4010 +f 7865//4011 7864//12124 7888//4013 +f 7864//12124 7865//4011 7841//7715 +f 7842//12125 7889//12125 7866//7765 +f 7889//12125 7842//12125 7865//12125 +f 7843//7720 7890//7722 7867//7722 +f 7890//7722 7843//7720 7866//7720 +f 7844//7723 7891//7723 7868//7723 +f 7891//7723 7844//7723 7867//7723 +f 7868//12126 7869//12126 7845//12126 +f 7869//12126 7868//12126 7892//12126 +f 7870//7724 7869//7724 7893//7724 +f 7869//7724 7870//7724 7846//7724 +f 7894//7725 7847//12127 7870//12127 +f 7847//12127 7894//7725 7871//7725 +f 7872//7634 7871//12032 7895//7634 +f 7871//12032 7872//7634 7848//12032 +f 7873//12128 7872//4027 7896//7727 +f 7872//4027 7873//12128 7849//4029 +f 7874//4030 7873//4031 7897//7730 +f 7873//4031 7874//4030 7850//12129 +f 7875//7732 7874//4037 7898//7732 +f 7874//4037 7875//7732 7851//4037 +f 7877//7733 7875//12130 7899//7733 +f 7875//12130 7877//7733 7853//12130 +f 7900//12131 7877//12132 7901//12133 +f 7877//12132 7900//12131 7876//7738 +f 7902//7739 7852//7740 7876//7739 +f 7852//7740 7902//7739 7878//7740 +f 7903//12134 7854//12135 7878//7742 +f 7854//12135 7903//12134 7879//4051 +f 7855//7743 7904//12136 7880//10253 +f 7904//12136 7855//7743 7879//7745 +f 7856//12137 7905//12138 7881//4058 +f 7905//12138 7856//12137 7880//4059 +f 7906//12139 7857//12139 7881//12139 +f 7857//12139 7906//12139 7882//12139 +f 7858//7751 7907//7751 7883//7751 +f 7907//7751 7858//7751 7882//7751 +f 7908//7658 7859//7658 7883//7658 +f 7859//7658 7908//7658 7884//7658 
+f 7885//7752 7884//4065 7909//7753 +f 7884//4065 7885//7752 7860//4067 +f 7886//7754 7885//7935 7910//7756 +f 7885//7935 7886//7754 7861//7757 +f 7887//7712 7886//4072 7911//4072 +f 7886//4072 7887//7712 7862//12119 +f 7888//12140 7887//12141 7912//4139 +f 7887//12141 7888//12140 7863//12142 +f 7865//7760 7912//12143 7889//12144 +f 7912//12143 7865//7760 7888//4077 +f 7866//7765 7913//12145 7890//12145 +f 7913//12145 7866//7765 7889//12125 +f 7867//7766 7914//7766 7891//7766 +f 7914//7766 7867//7766 7890//7766 +f 7891//7767 7892//7767 7868//7767 +f 7892//7767 7891//7767 7915//7767 +f 7893//3968 7892//3968 7916//3968 +f 7892//3968 7893//3968 7869//3968 +f 7917//7768 7870//7768 7893//7768 +f 7870//7768 7917//7768 7894//7768 +f 7895//12146 7894//12146 7918//12146 +f 7894//12146 7895//12146 7871//12146 +f 7896//12147 7895//12148 7919//12149 +f 7895//12148 7896//12147 7872//12150 +f 7920//12151 7873//12152 7896//4094 +f 7873//12152 7920//12151 7897//4095 +f 7898//12153 7897//12154 7921//12155 +f 7897//12154 7898//12153 7874//4099 +f 7899//7775 7898//12156 7922//7777 +f 7898//12156 7899//7775 7875//12157 +f 7901//12158 7899//12159 7923//12160 +f 7899//12159 7901//12158 7877//12161 +f 7924//12162 7900//12163 7901//12164 +f 7900//12163 7924//12162 7925//7782 +f 7900//12165 7902//12166 7876//12167 +f 7902//12166 7900//12165 7926//7785 +f 7927//12168 7878//7788 7902//4117 +f 7878//7788 7927//12168 7903//7790 +f 7879//12169 7928//7792 7904//7793 +f 7928//7792 7879//12169 7903//7794 +f 7880//12170 7929//4124 7905//4125 +f 7929//4124 7880//12170 7904//7796 +f 7881//7798 7930//12171 7906//12172 +f 7930//12171 7881//7798 7905//12173 +f 7907//7799 7906//7799 7931//7799 +f 7906//7799 7907//7799 7882//7799 +f 7883//7800 7932//7800 7908//7800 +f 7932//7800 7883//7800 7907//7800 +f 7933//4002 7884//4002 7908//4002 +f 7884//4002 7933//4002 7909//4002 +f 7910//7801 7909//7802 7934//12174 +f 7909//7802 7910//7801 7885//7804 +f 7911//4137 7910//4138 7935//4138 +f 7910//4138 7911//4137 
7886//4137 +f 7912//4139 7911//12175 7936//12175 +f 7911//12175 7912//4139 7887//12141 +f 7889//12144 7936//7762 7913//7762 +f 7936//7762 7889//12144 7912//12143 +f 7890//7805 7937//12176 7914//12176 +f 7937//12176 7890//7805 7913//7805 +f 7914//12177 7915//7807 7891//12177 +f 7915//7807 7914//12177 7938//7807 +f 7915//7723 7916//7723 7892//7723 +f 7916//7723 7915//7723 7939//7723 +f 7893//4144 7940//12178 7917//12178 +f 7940//12178 7893//4144 7916//4144 +f 7894//7724 7941//7632 7918//7632 +f 7941//7632 7894//7724 7917//7724 +f 7919//12179 7918//12180 7942//12181 +f 7918//12180 7919//12179 7895//12182 +f 7943//12183 7896//4150 7919//12184 +f 7896//4150 7943//12183 7920//4152 +f 7944//4155 7897//4155 7920//4155 +f 7897//4155 7944//4155 7921//4155 +f 7922//4159 7921//7813 7945//12185 +f 7921//7813 7922//4159 7898//7813 +f 7923//4162 7922//12186 7946//4162 +f 7922//12186 7923//4162 7899//12187 +f 7923//12188 7924//12189 7901//12190 +f 7924//12189 7923//12188 7947//12191 +f 7948//4171 7925//12192 7924//7821 +f 7925//12192 7948//4171 7949//12193 +f 7925//7822 7926//12194 7900//12194 +f 7926//12194 7925//7822 7950//12195 +f 7926//12196 7927//12197 7902//7825 +f 7927//12197 7926//12196 7951//12198 +f 7903//7826 7952//12199 7928//7828 +f 7952//12199 7903//7826 7927//7829 +f 7904//10303 7953//12200 7929//7832 +f 7953//12200 7904//10303 7928//7833 +f 7905//10306 7954//4186 7930//7835 +f 7954//4186 7905//10306 7929//7836 +f 7906//7837 7955//7838 7931//7837 +f 7955//7838 7906//7837 7930//7838 +f 7932//7840 7931//7840 7956//7840 +f 7931//7840 7932//7840 7907//7840 +f 7908//7841 7957//12201 7933//7843 +f 7957//12201 7908//7841 7932//10307 +f 7909//7757 7958//7757 7934//12202 +f 7958//7757 7909//7757 7933//7757 +f 7910//4197 7959//4198 7935//4197 +f 7959//4198 7910//4197 7934//4198 +f 7936//4201 7935//4199 7960//4199 +f 7935//4199 7936//4201 7911//4201 +f 7913//7845 7960//7845 7937//7845 +f 7960//7845 7913//7845 7936//7845 +f 7937//12176 7938//12203 7914//12176 +f 7938//12203 
7937//12176 7961//7806 +f 7938//7766 7939//7766 7915//7766 +f 7939//7766 7938//7766 7962//7766 +f 7916//7846 7963//7846 7940//7846 +f 7963//7846 7916//7846 7939//7846 +f 7917//3968 7964//3968 7941//3968 +f 7964//3968 7917//3968 7940//3968 +f 7942//7847 7941//10314 7965//7849 +f 7941//10314 7942//7847 7918//10315 +f 7966//12204 7919//10316 7942//10316 +f 7919//10316 7966//12204 7943//12204 +f 7967//4215 7920//10317 7943//7853 +f 7920//10317 7967//4215 7944//4216 +f 7968//12205 7921//12206 7944//12207 +f 7921//12206 7968//12205 7945//7857 +f 7946//7860 7945//7858 7969//7860 +f 7945//7858 7946//7860 7922//7858 +f 7946//7861 7947//7862 7923//7861 +f 7947//7862 7946//7861 7970//12208 +f 7971//12209 7924//12210 7947//12211 +f 7924//12210 7971//12209 7948//12212 +f 7972//12213 7949//12214 7948//4235 +f 7949//12214 7972//12213 7973//12215 +f 7949//7871 7950//7872 7925//12216 +f 7950//7872 7949//7871 7974//7873 +f 7950//12217 7951//12218 7926//12219 +f 7951//12218 7950//12217 7975//12220 +f 7927//12221 7976//12222 7952//7875 +f 7976//12222 7927//12221 7951//12223 +f 7928//12224 7977//12224 7953//7879 +f 7977//12224 7928//12224 7952//4246 +f 7929//7880 7978//7881 7954//12225 +f 7978//7881 7929//7880 7953//4250 +f 7930//4251 7979//4251 7955//4251 +f 7979//4251 7930//4251 7954//12226 +f 7931//10329 7980//12227 7956//12228 +f 7980//12227 7931//10329 7955//7887 +f 7957//4258 7956//4259 7981//4260 +f 7956//4259 7957//4258 7932//4261 +f 7933//7889 7982//10332 7958//7889 +f 7982//10332 7933//7889 7957//10332 +f 7934//4264 7983//4264 7959//4264 +f 7983//4264 7934//4264 7958//4264 +f 7935//4265 7984//4266 7960//4265 +f 7984//4266 7935//4265 7959//4266 +f 7960//7891 7961//7891 7937//7891 +f 7961//7891 7960//7891 7984//7891 +f 7961//7806 7962//7892 7938//12203 +f 7962//7892 7961//7806 7985//7892 +f 7939//7893 7986//7893 7963//7893 +f 7986//7893 7939//7893 7962//7893 +f 7963//7723 7964//7723 7940//7723 +f 7964//7723 7963//7723 7987//7723 +f 7965//12229 7964//12230 7988//4275 +f 
7964//12230 7965//12229 7941//4276 +f 7989//4277 7942//4278 7965//4278 +f 7942//4278 7989//4277 7966//4277 +f 7990//7898 7943//7899 7966//12231 +f 7943//7899 7990//7898 7967//4282 +f 7991//4283 7944//4285 7967//12232 +f 7944//4285 7991//4283 7968//4285 +f 7992//12233 7945//12234 7968//4288 +f 7945//12234 7992//12233 7969//7907 +f 7969//7908 7970//12235 7946//7908 +f 7970//12235 7969//7908 7993//12235 +f 7970//12236 7971//12237 7947//12238 +f 7971//12237 7970//12236 7994//12239 +f 7995//12240 7948//12241 7971//12242 +f 7948//12241 7995//12240 7972//12243 +f 7996//12244 7973//12245 7972//12246 +f 7973//12245 7996//12244 7997//4301 +f 7973//7921 7974//12247 7949//7923 +f 7974//12247 7973//7921 7998//12248 +f 7974//7925 7975//12249 7950//4308 +f 7975//12249 7974//7925 7999//7927 +f 7951//12250 8000//12251 7976//12252 +f 8000//12251 7951//12250 7975//12253 +f 7952//4316 8001//4316 7977//4316 +f 8001//4316 7952//4316 7976//4316 +f 7953//12254 8002//7930 7978//12255 +f 8002//7930 7953//12254 7977//12256 +f 7954//12226 8003//4323 7979//4251 +f 8003//4323 7954//12226 7978//4323 +f 7955//4325 8004//12257 7980//4326 +f 8004//12257 7955//4325 7979//4325 +f 7956//12258 8005//12259 7981//4330 +f 8005//12259 7956//12258 7980//4331 +f 7957//7755 8006//10361 7982//7756 +f 8006//10361 7957//7755 7981//7757 +f 7983//4334 7982//4335 8007//4335 +f 7982//4335 7983//4334 7958//4334 +f 7959//4201 8008//4201 7984//4201 +f 8008//4201 7959//4201 7983//4201 +f 7984//7845 7985//7845 7961//7845 +f 7985//7845 7984//7845 8008//7845 +f 7962//7892 8009//12260 7986//12260 +f 8009//12260 7962//7892 7985//7892 +f 7986//7766 7987//7766 7963//7766 +f 7987//7766 7986//7766 8010//7766 +f 7987//7937 7988//7937 7964//12261 +f 7988//7937 7987//7937 8011//7937 +f 8012//12262 7965//4339 7988//7938 +f 7965//4339 8012//12262 7989//12263 +f 8013//7940 7966//4343 7989//7942 +f 7966//4343 8013//7940 7990//12264 +f 8014//7944 7967//7945 7990//7946 +f 7967//7945 8014//7944 7991//4349 +f 8015//7947 7968//7948 
7991//7948 +f 7968//7948 8015//7947 7992//7947 +f 8016//4355 7969//12265 7992//12266 +f 7969//12265 8016//4355 7993//4357 +f 7993//7952 7994//12267 7970//7954 +f 7994//12267 7993//7952 8017//4361 +f 8018//12268 7971//12269 7994//12270 +f 7971//12269 8018//12268 7995//12271 +f 8019//12272 7972//12273 7995//12274 +f 7972//12273 8019//12272 7996//12275 +f 8020//12276 7997//12277 7996//12277 +f 7997//12277 8020//12276 8021//12276 +f 7997//7960 7998//7960 7973//7960 +f 7998//7960 7997//7960 8022//7960 +f 7998//12278 7999//12279 7974//7963 +f 7999//12279 7998//12278 8023//7964 +f 7999//12280 8000//12281 7975//12282 +f 8000//12281 7999//12280 8024//12283 +f 7976//12284 8025//12285 8001//7970 +f 8025//12285 7976//12284 8000//12286 +f 7977//7972 8026//7973 8002//7972 +f 8026//7973 7977//7972 8001//7973 +f 7978//4389 8027//4390 8003//12287 +f 8027//4390 7978//4389 8002//4392 +f 7979//12288 8028//10382 8004//12289 +f 8028//10382 7979//12288 8003//4396 +f 7980//12290 8029//12291 8005//12292 +f 8029//12291 7980//12290 8004//4399 +f 7981//12293 8030//12294 8006//10384 +f 8030//12294 7981//12293 8005//12294 +f 7982//4138 8031//4137 8007//4138 +f 8031//4137 7982//4138 8006//4137 +f 8008//12295 8007//4404 8032//12296 +f 8007//4404 8008//12295 7983//4406 +f 7985//12297 8032//12298 8009//7985 +f 8032//12298 7985//12297 8008//7984 +f 8009//12260 8010//7805 7986//12260 +f 8010//7805 8009//12260 8033//7805 +f 8010//12299 8011//12300 7987//12299 +f 8011//12300 8010//12299 8034//12300 +f 8035//7990 7988//7990 8011//7990 +f 7988//7990 8035//7990 8012//12301 +f 8036//12302 7989//12303 8012//12304 +f 7989//12303 8036//12302 8013//12305 +f 8037//10393 7990//7992 8013//4421 +f 7990//7992 8037//10393 8014//4420 +f 8038//12306 7991//7993 8014//7994 +f 7991//7993 8038//12306 8015//7995 +f 8039//12307 7992//12308 8015//7998 +f 7992//12308 8039//12307 8016//12309 +f 8040//4431 7993//12310 8016//12310 +f 7993//12310 8040//4431 8017//4431 +f 8041//12311 7994//12312 8017//4434 +f 7994//12312 
8041//12311 8018//12313 +f 8042//12314 7995//12315 8018//12316 +f 7995//12315 8042//12314 8019//8006 +f 8043//12317 7996//12318 8019//12319 +f 7996//12318 8043//12317 8020//12320 +f 8044//12321 8020//12322 8045//12323 +f 8020//12322 8044//12321 8021//12324 +f 7997//8010 8046//8011 8022//8010 +f 8046//8011 7997//8010 8021//8011 +f 7998//12325 8047//12326 8023//8015 +f 8047//12326 7998//12325 8022//12327 +f 8023//12328 8024//4456 7999//8017 +f 8024//4456 8023//12328 8048//10412 +f 8000//12329 8049//12330 8025//12331 +f 8049//12330 8000//12329 8024//12332 +f 8001//8019 8050//4465 8026//8019 +f 8050//4465 8001//8019 8025//4465 +f 8002//12333 8051//4467 8027//4468 +f 8051//4467 8002//12333 8026//4466 +f 8003//12334 8052//12335 8028//4471 +f 8052//12335 8003//12334 8027//12336 +f 8004//12337 8053//12338 8029//12339 +f 8053//12338 8004//12337 8028//12340 +f 8030//4475 8029//12341 8054//4477 +f 8029//12341 8030//4475 8005//10419 +f 8006//4481 8055//4480 8031//4481 +f 8055//4480 8006//4481 8030//4480 +f 8007//4201 8056//4201 8032//4201 +f 8056//4201 8007//4201 8031//4201 +f 8032//7845 8033//7845 8009//7845 +f 8033//7845 8032//7845 8056//7845 +f 8033//8030 8034//8031 8010//12342 +f 8034//8031 8033//8030 8057//8033 +f 8011//10423 8058//8035 8035//8036 +f 8058//8035 8011//10423 8034//8037 +f 8059//12343 8012//10424 8035//10424 +f 8012//10424 8059//12343 8036//12343 +f 8060//12344 8013//4495 8036//12345 +f 8013//4495 8060//12344 8037//4493 +f 8061//8041 8014//8042 8037//8042 +f 8014//8042 8061//8041 8038//8041 +f 8062//12346 8015//8044 8038//8045 +f 8015//8044 8062//12346 8039//8043 +f 8063//12347 8016//12348 8039//4502 +f 8016//12348 8063//12347 8040//4503 +f 8041//4504 8040//8048 8064//4506 +f 8040//8048 8041//4504 8017//10429 +f 8065//4511 8018//8050 8041//12349 +f 8018//8050 8065//4511 8042//12350 +f 8066//12351 8019//12352 8042//12353 +f 8019//12352 8066//12351 8043//12354 +f 8045//12355 8043//12356 8067//12357 +f 8043//12356 8045//12355 8020//12358 +f 8068//8056 
8045//8057 8069//8058 +f 8045//8057 8068//8056 8044//8059 +f 8070//12359 8021//12360 8044//12359 +f 8021//12360 8070//12359 8046//12361 +f 8022//12362 8071//12363 8047//12364 +f 8071//12363 8022//12362 8046//8067 +f 8023//12365 8072//12366 8048//12367 +f 8072//12366 8023//12365 8047//12368 +f 8049//12369 8048//12370 8073//12371 +f 8048//12370 8049//12369 8024//12372 +f 8025//12373 8074//8073 8050//4538 +f 8074//8073 8025//12373 8049//8073 +f 8026//4540 8075//12374 8051//4540 +f 8075//12374 8026//4540 8050//12374 +f 8027//4543 8076//8077 8052//4543 +f 8076//8077 8027//4543 8051//8077 +f 8028//8079 8077//12375 8053//8079 +f 8077//12375 8028//8079 8052//4548 +f 8054//12376 8053//4549 8078//10439 +f 8053//4549 8054//12376 8029//12377 +f 8055//4551 8054//4552 8079//4552 +f 8054//4552 8055//4551 8030//4553 +f 8031//8080 8080//8080 8056//8080 +f 8080//8080 8031//8080 8055//8080 +f 8056//8081 8057//8081 8033//8081 +f 8057//8081 8056//8081 8080//8081 +f 8034//4558 8081//4559 8058//4560 +f 8081//4559 8034//4558 8057//4561 +f 8035//12378 8082//12379 8059//12379 +f 8082//12379 8035//12378 8058//12378 +f 8083//12380 8036//12381 8059//12382 +f 8036//12381 8083//12380 8060//12383 +f 8084//8091 8037//8090 8060//8090 +f 8037//8090 8084//8091 8061//8091 +f 8085//12384 8038//12385 8061//12386 +f 8038//12385 8085//12384 8062//4573 +f 8086//8095 8039//8096 8062//12387 +f 8039//8096 8086//8095 8063//8098 +f 8064//12388 8063//10449 8087//12389 +f 8063//10449 8064//12388 8040//10450 +f 8065//12390 8064//8102 8088//12390 +f 8064//8102 8065//12390 8041//8102 +f 8089//8104 8042//8105 8065//12391 +f 8042//8105 8089//8104 8066//12392 +f 8067//12393 8066//12394 8090//8108 +f 8066//12394 8067//12393 8043//12395 +f 8069//12396 8067//8111 8091//8112 +f 8067//8111 8069//12396 8045//4593 +f 8092//4597 8069//8113 8093//4597 +f 8069//8113 8092//4597 8068//8113 +f 8094//12397 8044//12398 8068//8117 +f 8044//12398 8094//12397 8070//12399 +f 8095//12400 8046//12401 8070//4605 +f 8046//12401 8095//12400 
8071//12402 +f 8047//12403 8096//12404 8072//12405 +f 8096//12404 8047//12403 8071//10465 +f 8073//12406 8072//12407 8097//12408 +f 8072//12407 8073//12406 8048//12409 +f 8074//8129 8073//12410 8098//4616 +f 8073//12410 8074//8129 8049//4617 +f 8050//8130 8099//8131 8075//12411 +f 8099//8131 8050//8130 8074//8130 +f 8051//12412 8100//12413 8076//12414 +f 8100//12413 8051//12412 8075//12415 +f 8052//12416 8101//8136 8077//8137 +f 8101//8136 8052//12416 8076//8138 +f 8078//4628 8077//4629 8102//4629 +f 8077//4629 8078//4628 8053//4628 +f 8079//12417 8078//12418 8103//8141 +f 8078//12418 8079//12417 8054//4633 +f 8080//8143 8079//12419 8104//12420 +f 8079//12419 8080//8143 8055//8143 +f 8057//4637 8104//8145 8081//4639 +f 8104//8145 8057//4637 8080//8146 +f 8058//4643 8105//12421 8082//12422 +f 8105//12421 8058//4643 8081//12423 +f 8059//12424 8106//12425 8083//12426 +f 8106//12425 8059//12424 8082//12427 +f 8107//12428 8060//12429 8083//8154 +f 8060//12429 8107//12428 8084//12430 +f 8108//12431 8061//12432 8084//8156 +f 8061//12432 8108//12431 8085//12433 +f 8109//10484 8062//4656 8085//4657 +f 8062//4656 8109//10484 8086//4658 +f 8087//8162 8086//10485 8110//8164 +f 8086//10485 8087//8162 8063//8165 +f 8088//12434 8087//12435 8111//4665 +f 8087//12435 8088//12434 8064//4666 +f 8089//12436 8088//12437 8112//8168 +f 8088//12437 8089//12436 8065//8169 +f 8090//10489 8089//8171 8113//8172 +f 8089//8171 8090//10489 8066//12438 +f 8091//8174 8090//12439 8114//8174 +f 8090//12439 8091//8174 8067//4678 +f 8093//12440 8091//8176 8115//4681 +f 8091//8176 8093//12440 8069//8177 +f 8116//8179 8092//4684 8093//4684 +f 8092//4684 8116//8179 8117//8179 +f 8118//4685 8068//4686 8092//4687 +f 8068//4686 8118//4685 8094//4688 +f 8119//4689 8070//4691 8094//4689 +f 8070//4691 8119//4689 8095//4691 +f 8120//12441 8071//12442 8095//8189 +f 8071//12442 8120//12441 8096//12443 +f 8097//12444 8096//12445 8121//12446 +f 8096//12445 8097//12444 8072//4696 +f 8098//8195 8097//12447 8122//8197 
+f 8097//12447 8098//8195 8073//8198 +f 8099//8199 8098//12448 8123//4704 +f 8098//12448 8099//8199 8074//8201 +f 8075//12449 8124//8205 8100//12449 +f 8124//8205 8075//12449 8099//8205 +f 8076//10504 8125//12450 8101//12451 +f 8125//12450 8076//10504 8100//12452 +f 8102//4714 8101//12453 8126//12454 +f 8101//12453 8102//4714 8077//12455 +f 8103//12456 8102//12456 8127//8212 +f 8102//12456 8103//12456 8078//12457 +f 8104//4722 8103//12458 8128//4723 +f 8103//12458 8104//4722 8079//4722 +f 8081//12459 8128//12460 8105//8217 +f 8128//12460 8081//12459 8104//4729 +f 8082//12461 8129//12462 8106//12463 +f 8129//12462 8082//12461 8105//12464 +f 8106//12465 8107//4734 8083//4733 +f 8107//4734 8106//12465 8130//4736 +f 8131//12466 8084//12467 8107//8226 +f 8084//12467 8131//12466 8108//8227 +f 8132//12468 8085//12469 8108//12470 +f 8085//12469 8132//12468 8109//4741 +f 8110//12471 8109//8230 8133//8231 +f 8109//8230 8110//12471 8086//4746 +f 8111//10526 8110//8233 8134//4749 +f 8110//8233 8111//10526 8087//8234 +f 8112//12472 8111//8236 8135//8237 +f 8111//8236 8112//12472 8088//4752 +f 8113//12473 8112//8239 8136//4754 +f 8112//8239 8113//12473 8089//8240 +f 8114//8241 8113//12474 8137//8241 +f 8113//12474 8114//8241 8090//12474 +f 8115//4761 8114//12475 8138//12476 +f 8114//12475 8115//4761 8091//12475 +f 8139//4764 8093//4765 8115//4766 +f 8093//4765 8139//4764 8116//4764 +f 8140//4770 8117//8249 8116//8249 +f 8117//8249 8140//4770 8141//4770 +f 8117//4771 8118//4772 8092//4772 +f 8118//4772 8117//4771 8142//4771 +f 8143//12477 8094//4773 8118//4775 +f 8094//4773 8143//12477 8119//12478 +f 8144//4779 8095//4779 8119//4779 +f 8095//4779 8144//4779 8120//4779 +f 8121//12479 8120//12480 8145//8257 +f 8120//12480 8121//12479 8096//12481 +f 8122//8258 8121//12482 8146//4785 +f 8121//12482 8122//8258 8097//8259 +f 8123//8260 8122//12483 8147//8262 +f 8122//12483 8123//8260 8098//4787 +f 8124//8263 8123//12484 8148//8265 +f 8123//12484 8124//8263 8099//8264 +f 8100//12485 
8149//12486 8125//12487 +f 8149//12486 8100//12485 8124//4793 +f 8125//12488 8126//8269 8101//8270 +f 8126//8269 8125//12488 8150//12489 +f 8127//8272 8126//8273 8151//8273 +f 8126//8273 8127//8272 8102//12490 +f 8128//8275 8127//12491 8152//8277 +f 8127//12491 8128//8275 8103//8278 +f 8105//12492 8152//12493 8129//12494 +f 8152//12493 8105//12492 8128//8282 +f 8129//12495 8130//12496 8106//12497 +f 8130//12496 8129//12495 8153//12498 +f 8130//8289 8131//12499 8107//12500 +f 8131//12499 8130//8289 8154//12501 +f 8131//8290 8132//12502 8108//8290 +f 8132//12502 8131//8290 8155//12502 +f 8133//12503 8132//12503 8156//8292 +f 8132//12503 8133//12503 8109//8293 +f 8134//10568 8133//4825 8157//4826 +f 8133//4825 8134//10568 8110//8295 +f 8135//8296 8134//8297 8158//8298 +f 8134//8297 8135//8296 8111//8299 +f 8136//8300 8135//8301 8159//8302 +f 8135//8301 8136//8300 8112//8303 +f 8137//10571 8136//12504 8160//4838 +f 8136//12504 8137//10571 8113//4839 +f 8138//12505 8137//12506 8161//4842 +f 8137//12506 8138//12505 8114//12507 +f 8162//4844 8115//10575 8138//4846 +f 8115//10575 8162//4844 8139//12508 +f 8163//4850 8116//4850 8139//4850 +f 8116//4850 8163//4850 8140//4850 +f 8164//4852 8141//8313 8140//4854 +f 8141//8313 8164//4852 8165//4855 +f 8141//4856 8142//10577 8117//4858 +f 8142//10577 8141//4856 8166//4859 +f 8142//4860 8143//8316 8118//4862 +f 8143//8316 8142//4860 8167//8318 +f 8168//4867 8119//4867 8143//8319 +f 8119//4867 8168//4867 8144//4867 +f 8145//8320 8144//12509 8169//12509 +f 8144//12509 8145//8320 8120//8320 +f 8146//8323 8145//4873 8170//10582 +f 8145//4873 8146//8323 8121//8325 +f 8147//10585 8146//8327 8171//8328 +f 8146//8327 8147//10585 8122//4879 +f 8148//8329 8147//8330 8172//8331 +f 8147//8330 8148//8329 8123//12510 +f 8149//8333 8148//8334 8173//8334 +f 8148//8334 8149//8333 8124//8333 +f 8149//8335 8150//8336 8125//12511 +f 8150//8336 8149//8335 8174//4886 +f 8150//12512 8151//12513 8126//8339 +f 8151//12513 8150//12512 8175//12513 +f 
8152//12514 8151//12515 8176//8344 +f 8151//12515 8152//12514 8127//8345 +f 8129//12516 8176//12517 8153//12517 +f 8176//12517 8129//12516 8152//12516 +f 8153//8350 8154//8349 8130//8350 +f 8154//8349 8153//8350 8177//12518 +f 8154//12519 8155//12520 8131//10601 +f 8155//12520 8154//12519 8178//12521 +f 8155//10603 8156//8356 8132//8357 +f 8156//8356 8155//10603 8179//8358 +f 8157//12522 8156//8362 8180//12522 +f 8156//8362 8157//12522 8133//8362 +f 8158//8363 8157//12523 8181//4916 +f 8157//12523 8158//8363 8134//8365 +f 8159//8366 8158//8367 8182//8368 +f 8158//8367 8159//8366 8135//8369 +f 8160//12524 8159//12525 8183//12524 +f 8159//12525 8160//12524 8136//12526 +f 8161//12527 8160//12528 8184//12527 +f 8160//12528 8161//12527 8137//8377 +f 8185//4926 8138//4927 8161//4928 +f 8138//4927 8185//4926 8162//4929 +f 8186//12529 8139//12530 8162//12531 +f 8139//12530 8186//12529 8163//12532 +f 8187//12533 8140//12534 8163//12535 +f 8140//12534 8187//12533 8164//4935 +f 8188//12536 8165//12537 8164//12538 +f 8165//12537 8188//12536 8189//12536 +f 8165//8385 8166//8386 8141//8386 +f 8166//8386 8165//8385 8190//8385 +f 8166//12539 8167//12540 8142//12541 +f 8167//12540 8166//12539 8191//4947 +f 8167//8390 8168//4949 8143//4950 +f 8168//4949 8167//8390 8192//12542 +f 8169//12543 8168//12544 8193//12545 +f 8168//12544 8169//12543 8144//12546 +f 8170//8397 8169//8398 8194//8398 +f 8169//8398 8170//8397 8145//8397 +f 8171//4958 8170//4959 8195//4960 +f 8170//4959 8171//4958 8146//8404 +f 8172//4964 8171//12547 8196//4962 +f 8171//12547 8172//4964 8147//4964 +f 8173//8407 8172//12548 8197//12549 +f 8172//12548 8173//8407 8148//8410 +f 8149//8411 8198//8412 8174//12550 +f 8198//8412 8149//8411 8173//8413 +f 8174//12551 8175//12552 8150//12553 +f 8175//12552 8174//12551 8199//12554 +f 8175//12555 8176//12556 8151//12557 +f 8176//12556 8175//12555 8200//8419 +f 8176//12558 8177//12559 8153//4982 +f 8177//12559 8176//12558 8200//8422 +f 8177//8423 8178//4987 8154//8425 +f 
8178//4987 8177//8423 8201//8426 +f 8178//12560 8179//12561 8155//10641 +f 8179//12561 8178//12560 8202//12562 +f 8203//12563 8156//12564 8179//12564 +f 8156//12564 8203//12563 8180//12563 +f 8181//12565 8180//12566 8204//12567 +f 8180//12566 8181//12565 8157//12568 +f 8182//12569 8181//10647 8205//8441 +f 8181//10647 8182//12569 8158//12570 +f 8183//5003 8182//5001 8206//5003 +f 8182//5001 8183//5003 8159//5001 +f 8184//12571 8183//12572 8207//12571 +f 8183//12572 8184//12571 8160//12573 +f 8161//5008 8208//5009 8185//8446 +f 8208//5009 8161//5008 8184//8447 +f 8209//8448 8162//12574 8185//8450 +f 8162//12574 8209//8448 8186//12575 +f 8210//12576 8163//12577 8186//12578 +f 8163//12577 8210//12576 8187//12579 +f 8211//8453 8164//8454 8187//5022 +f 8164//8454 8211//8453 8188//8455 +f 8212//12580 8188//8456 8213//12580 +f 8188//8456 8212//12580 8189//8456 +f 8189//12581 8190//12582 8165//12582 +f 8190//12582 8189//12581 8214//12581 +f 8190//12583 8191//12584 8166//8461 +f 8191//12584 8190//12583 8215//8462 +f 8191//8463 8192//8463 8167//8463 +f 8192//8463 8191//8463 8216//8463 +f 8217//5039 8168//8465 8192//8466 +f 8168//8465 8217//5039 8193//8467 +f 8194//5040 8193//12585 8218//5042 +f 8193//12585 8194//5040 8169//5043 +f 8195//12586 8194//5045 8219//5046 +f 8194//5045 8195//12586 8170//8471 +f 8196//5048 8195//12587 8220//5050 +f 8195//12587 8196//5048 8171//12588 +f 8197//12589 8196//12590 8221//12591 +f 8196//12590 8197//12589 8172//8479 +f 8173//12592 8222//12593 8198//12594 +f 8222//12593 8173//12592 8197//5059 +f 8174//12595 8223//12596 8199//12597 +f 8223//12596 8174//12595 8198//8484 +f 8175//12598 8224//12599 8200//5064 +f 8224//12599 8175//12598 8199//12600 +f 8200//8488 8201//12601 8177//8490 +f 8201//12601 8200//8488 8224//12602 +f 8201//12603 8202//12604 8178//12605 +f 8202//12604 8201//12603 8225//12606 +f 8226//12607 8179//12608 8202//12609 +f 8179//12608 8226//12607 8203//8494 +f 8227//12610 8180//12611 8203//12612 +f 8180//12611 8227//12610 
8204//12613 +f 8205//12614 8204//12615 8228//8499 +f 8204//12615 8205//12614 8181//8500 +f 8206//5086 8205//8502 8229//12616 +f 8205//8502 8206//5086 8182//8504 +f 8207//5087 8206//5088 8230//8506 +f 8206//5088 8207//5087 8183//5090 +f 8184//12617 8231//12618 8208//5092 +f 8231//12618 8184//12617 8207//8508 +f 8185//12619 8232//12620 8209//12621 +f 8232//12620 8185//12619 8208//5095 +f 8233//12622 8186//12623 8209//12624 +f 8186//12623 8233//12622 8210//12625 +f 8211//8513 8210//8514 8234//8515 +f 8210//8514 8211//8513 8187//8516 +f 8213//8518 8211//12626 8235//5108 +f 8211//12626 8213//8518 8188//8520 +f 8236//12627 8213//12628 8237//12627 +f 8213//12628 8236//12627 8212//12629 +f 8214//12630 8212//8526 8238//12631 +f 8212//8526 8214//12630 8189//8528 +f 8214//8529 8215//8530 8190//8531 +f 8215//8530 8214//8529 8239//8532 +f 8215//12632 8216//5122 8191//5123 +f 8216//5122 8215//12632 8240//5124 +f 8241//8535 8192//5127 8216//8535 +f 8192//5127 8241//8535 8217//5127 +f 8242//12633 8193//8537 8217//12634 +f 8193//8537 8242//12633 8218//12635 +f 8219//12636 8218//12637 8243//5134 +f 8218//12637 8219//12636 8194//8540 +f 8220//5136 8219//12638 8244//12639 +f 8219//12638 8220//5136 8195//12640 +f 8221//5140 8220//5141 8245//5141 +f 8220//5141 8221//5140 8196//5140 +f 8197//12641 8246//12642 8222//12643 +f 8246//12642 8197//12641 8221//12644 +f 8198//12645 8247//5149 8223//12646 +f 8247//5149 8198//12645 8222//8555 +f 8199//12647 8248//12648 8224//12649 +f 8248//12648 8199//12647 8223//12650 +f 8224//12651 8225//12652 8201//12653 +f 8225//12652 8224//12651 8248//8560 +f 8225//12654 8226//12655 8202//12656 +f 8226//12655 8225//12654 8249//8564 +f 8250//8565 8203//8566 8226//8567 +f 8203//8566 8250//8565 8227//12657 +f 8204//8568 8251//8569 8228//8570 +f 8251//8569 8204//8568 8227//8571 +f 8229//12658 8228//12659 8252//5172 +f 8228//12659 8229//12658 8205//8575 +f 8230//5174 8229//12660 8253//12661 +f 8229//12660 8230//5174 8206//5177 +f 8207//12662 8254//5179 8231//5180 
+f 8254//5179 8207//12662 8230//8581 +f 8208//12663 8255//12664 8232//8584 +f 8255//12664 8208//12663 8231//5185 +f 8209//8588 8256//8588 8233//8588 +f 8256//8588 8209//8588 8232//8588 +f 8210//12665 8257//12666 8234//8591 +f 8257//12666 8210//12665 8233//12667 +f 8235//8595 8234//8593 8258//12668 +f 8234//8593 8235//8595 8211//8596 +f 8237//12669 8235//12669 8259//12669 +f 8235//12669 8237//12669 8213//12670 +f 8260//12671 8237//12672 8261//12671 +f 8237//12672 8260//12671 8236//12673 +f 8262//12674 8212//12675 8236//8606 +f 8212//12675 8262//12674 8238//5206 +f 8239//12676 8238//12677 8263//8609 +f 8238//12677 8239//12676 8214//8610 +f 8264//12678 8215//8612 8239//8613 +f 8215//8612 8264//12678 8240//12679 +f 8265//8614 8216//8615 8240//8616 +f 8216//8615 8265//8614 8241//8615 +f 8266//12680 8217//12681 8241//5221 +f 8217//12681 8266//12680 8242//12681 +f 8267//5223 8218//5223 8242//5223 +f 8218//12637 8267//5224 8243//5134 +f 8244//8621 8243//12682 8268//8622 +f 8243//12682 8244//8621 8219//5228 +f 8245//8623 8244//8623 8269//8623 +f 8244//8623 8245//8623 8220//8623 +f 8246//12683 8245//5232 8270//5233 +f 8245//5232 8246//12683 8221//5234 +f 8222//8627 8271//12684 8247//8629 +f 8271//12684 8222//8627 8246//8630 +f 8223//12685 8272//12686 8248//8633 +f 8272//12686 8223//12685 8247//12687 +f 8248//8635 8249//8636 8225//8637 +f 8249//8636 8248//8635 8272//8638 +f 8273//8639 8226//8640 8249//8640 +f 8226//8640 8273//8639 8250//8639 +f 8227//12688 8274//12689 8251//12690 +f 8274//12689 8227//12688 8250//12691 +f 8228//5253 8275//12692 8252//5255 +f 8275//12692 8228//5253 8251//12693 +f 8253//5257 8252//12694 8276//8647 +f 8252//12694 8253//5257 8229//5260 +f 8230//5261 8277//5262 8254//5263 +f 8277//5262 8230//5261 8253//5264 +f 8231//10753 8278//5266 8255//10755 +f 8278//5266 8231//10753 8254//12695 +f 8232//5269 8279//8651 8256//5271 +f 8279//8651 8232//5269 8255//12696 +f 8233//5273 8280//10757 8257//10757 +f 8280//10757 8233//5273 8256//12697 +f 8258//12698 
8257//8658 8281//8659 +f 8257//8658 8258//12698 8234//5276 +f 8259//8661 8258//8662 8282//8663 +f 8258//8662 8259//8661 8235//12699 +f 8261//12700 8259//12700 8283//8667 +f 8259//12700 8261//12700 8237//5284 +f 8284//12701 8261//8671 8285//8669 +f 8261//8671 8284//12701 8260//12702 +f 8286//8672 8236//8673 8260//8674 +f 8236//8673 8286//8672 8262//8675 +f 8287//12703 8238//12704 8262//12705 +f 8238//12704 8287//12703 8263//10768 +f 8264//12706 8263//12707 8288//8682 +f 8263//12707 8264//12706 8239//8680 +f 8289//12708 8240//12709 8264//12710 +f 8240//12709 8289//12708 8265//12711 +f 8290//8687 8241//8687 8265//8688 +f 8241//8687 8290//8687 8266//5308 +f 8291//12712 8242//8690 8266//8691 +f 8242//8690 8291//12712 8267//5312 +f 8292//8693 8243//12713 8267//5315 +f 8243//12713 8292//8693 8268//12714 +f 8269//12715 8268//12716 8293//8695 +f 8268//12716 8269//12715 8244//5319 +f 8270//5320 8269//12717 8294//12718 +f 8269//12717 8270//5320 8245//8698 +f 8271//8699 8270//12719 8295//10784 +f 8270//12719 8271//8699 8246//8702 +f 8247//8703 8296//12720 8272//5328 +f 8296//12720 8247//8703 8271//12721 +f 8272//12722 8273//10786 8249//5333 +f 8273//10786 8272//12722 8296//5334 +f 8250//12723 8297//12724 8274//12725 +f 8297//12724 8250//12723 8273//8707 +f 8251//12726 8298//5337 8275//8711 +f 8298//5337 8251//12726 8274//12727 +f 8252//12728 8299//5342 8276//8715 +f 8299//5342 8252//12728 8275//5344 +f 8253//5345 8300//8717 8277//12729 +f 8300//8717 8253//5345 8276//5348 +f 8254//12730 8301//12731 8278//8721 +f 8301//12731 8254//12730 8277//8722 +f 8255//12732 8302//8724 8279//8725 +f 8302//8724 8255//12732 8278//8726 +f 8256//12733 8303//12734 8280//12735 +f 8303//12734 8256//12733 8279//12736 +f 8257//12737 8304//12738 8281//8729 +f 8304//12738 8257//12737 8280//12739 +f 8282//5366 8281//5366 8305//5366 +f 8281//5366 8282//5366 8258//5366 +f 8283//12740 8282//12741 8306//8735 +f 8282//12741 8283//12740 8259//8736 +f 8285//12742 8283//12743 8307//8739 +f 8283//12743 
8285//12742 8261//8740 +f 8308//8741 8285//12744 8309//5378 +f 8285//12744 8308//8741 8284//12745 +f 8310//5380 8260//8743 8284//5380 +f 8260//8743 8310//5380 8286//8743 +f 8311//8744 8262//5382 8286//8744 +f 8262//5382 8311//8744 8287//5382 +f 8312//8745 8263//12746 8287//8747 +f 8263//12746 8312//8745 8288//8748 +f 8313//10811 8264//10810 8288//12747 +f 8264//10810 8313//10811 8289//12748 +f 8314//5393 8265//12749 8289//12750 +f 8265//12749 8314//5393 8290//8753 +f 8315//8754 8266//8755 8290//5396 +f 8266//8755 8315//8754 8291//8756 +f 8316//8757 8267//5401 8291//8757 +f 8267//5401 8316//8757 8292//5401 +f 8317//5402 8268//8758 8292//10816 +f 8268//8758 8317//5402 8293//8760 +f 8294//5408 8293//10818 8318//12751 +f 8293//10818 8294//5408 8269//12752 +f 8295//12753 8294//8764 8319//5412 +f 8294//8764 8295//12753 8270//12754 +f 8296//12755 8295//12756 8320//8768 +f 8295//12756 8296//12755 8271//8769 +f 8273//12757 8320//12758 8297//8772 +f 8320//12758 8273//12757 8296//8773 +f 8274//12759 8321//12760 8298//12760 +f 8321//12760 8274//12759 8297//12759 +f 8275//12761 8322//8779 8299//8780 +f 8322//8779 8275//12761 8298//8781 +f 8299//8782 8300//8783 8276//8784 +f 8300//8783 8299//8782 8323//8785 +f 8277//12762 8324//5435 8301//8788 +f 8324//5435 8277//12762 8300//5434 +f 8278//8789 8325//8790 8302//8791 +f 8325//8790 8278//8789 8301//12763 +f 8279//5441 8326//12764 8303//12765 +f 8326//12764 8279//5441 8302//8795 +f 8280//12766 8327//12767 8304//12767 +f 8327//12767 8280//12766 8303//12768 +f 8281//10825 8328//12769 8305//5451 +f 8328//12769 8281//10825 8304//5452 +f 8282//8799 8329//8800 8306//8800 +f 8329//8800 8282//8799 8305//8799 +f 8283//12770 8330//8802 8307//12771 +f 8330//8802 8283//12770 8306//12772 +f 8285//8804 8331//12773 8309//12774 +f 8331//12773 8285//8804 8307//12775 +f 8332//12776 8308//8810 8309//12777 +f 8308//8810 8332//12776 8333//12776 +f 8334//12778 8284//12779 8308//12780 +f 8284//12779 8334//12778 8310//8814 +f 8335//8815 8286//12781 
8310//8817 +f 8286//12781 8335//8815 8311//8818 +f 8336//12782 8287//12783 8311//8819 +f 8287//12783 8336//12782 8312//12784 +f 8337//8821 8288//12785 8312//8823 +f 8288//12785 8337//8821 8313//8824 +f 8338//12786 8289//5486 8313//12786 +f 8289//5486 8338//12786 8314//5486 +f 8339//8830 8290//12787 8314//12788 +f 8290//12787 8339//8830 8315//8830 +f 8340//5492 8291//12789 8315//5492 +f 8291//12789 8340//5492 8316//12789 +f 8341//5494 8292//8831 8316//5494 +f 8292//8831 8341//5494 8317//8832 +f 8293//5497 8342//5497 8318//12790 +f 8342//5497 8293//5497 8317//5497 +f 8319//12791 8318//12792 8343//5500 +f 8318//12792 8319//12791 8294//5501 +f 8320//5502 8319//5503 8344//5503 +f 8319//5503 8320//5502 8295//5502 +f 8297//12793 8344//12794 8321//8837 +f 8344//12794 8297//12793 8320//8836 +f 8298//12795 8345//12796 8322//8839 +f 8345//12796 8298//12795 8321//8840 +f 8322//12797 8323//12798 8299//8842 +f 8323//12798 8322//12797 8346//5514 +f 8323//12799 8324//12800 8300//8844 +f 8324//12800 8323//12799 8347//5515 +f 8301//8846 8348//10852 8325//12801 +f 8348//10852 8301//8846 8324//8846 +f 8302//12802 8349//12803 8326//12804 +f 8349//12803 8302//12802 8325//8851 +f 8303//12805 8350//12806 8327//12806 +f 8350//12806 8303//12805 8326//12805 +f 8304//12807 8351//5531 8328//5532 +f 8351//5531 8304//12807 8327//5533 +f 8328//12808 8329//5535 8305//5536 +f 8329//5535 8328//12808 8352//8856 +f 8306//8859 8353//8858 8330//8858 +f 8353//8858 8306//8859 8329//8859 +f 8307//8860 8354//12809 8331//8862 +f 8354//12809 8307//8860 8330//8863 +f 8355//12810 8309//12811 8331//8866 +f 8309//12811 8355//12810 8332//8867 +f 8355//12812 8333//12813 8332//12814 +f 8333//12813 8355//12812 8356//12815 +f 8333//12813 8356//12815 8357//5551 +f 8357//5551 8356//12815 8358//8872 +f 8357//5551 8358//8872 8359//12816 +f 8359//12816 8358//8872 8360//5553 +f 8359//12816 8360//5553 8361//12817 +f 8361//12817 8360//5553 8362//8875 +f 8361//12817 8362//8875 8363//8876 +f 8363//8876 8362//8875 8364//8875 +f 
8363//8876 8364//8875 8365//12818 +f 8365//12818 8364//8875 8366//10875 +f 8365//12818 8366//10875 8367//10873 +f 8367//10873 8366//10875 8368//8878 +f 8367//10873 8368//8878 8369//5553 +f 8369//5553 8368//8878 8370//12819 +f 8369//5553 8370//12819 8371//12820 +f 8371//12820 8370//12819 8372//8881 +f 8371//12820 8372//8881 8373//8882 +f 8373//8882 8372//8881 8374//12821 +f 8373//8882 8374//12821 8375//8884 +f 8375//8885 8374//8885 8376//8885 +f 8333//5574 8334//12822 8308//5572 +f 8334//12822 8333//5574 8357//5574 +f 8377//12823 8310//12824 8334//12825 +f 8310//12824 8377//12823 8335//12826 +f 8378//12827 8311//8891 8335//12828 +f 8311//8891 8378//12827 8336//12829 +f 8336//8896 8337//12830 8312//12831 +f 8337//12830 8336//8896 8379//8896 +f 8380//8897 8313//8898 8337//12832 +f 8313//8898 8380//8897 8338//12833 +f 8381//5586 8314//8900 8338//5586 +f 8314//8900 8381//5586 8339//8900 +f 8382//8902 8315//5590 8339//8902 +f 8315//5590 8382//8902 8340//5590 +f 8383//12834 8316//12835 8340//12836 +f 8316//12835 8383//12834 8341//5594 +f 8317//12837 8384//5597 8342//8907 +f 8384//5597 8317//12837 8341//8908 +f 8318//12838 8385//5600 8343//12838 +f 8385//5600 8318//12838 8342//5600 +f 8344//8910 8343//8911 8386//8911 +f 8343//8911 8344//8910 8319//8910 +f 8321//8912 8386//12839 8345//12840 +f 8386//12839 8321//8912 8344//8915 +f 8345//8916 8346//12841 8322//8916 +f 8346//12841 8345//8916 8387//12841 +f 8346//12842 8347//8917 8323//5608 +f 8347//8917 8346//12842 8388//8917 +f 8347//12843 8348//12844 8324//12845 +f 8348//12844 8347//12843 8389//5613 +f 8325//12846 8390//10901 8349//8922 +f 8390//10901 8325//12846 8348//8920 +f 8326//12847 8391//12848 8350//5619 +f 8391//12848 8326//12847 8349//5620 +f 8327//12849 8392//5621 8351//5621 +f 8392//5621 8327//12849 8350//12849 +f 8351//8927 8352//10905 8328//12850 +f 8352//10905 8351//8927 8393//8929 +f 8352//10906 8353//12851 8329//12852 +f 8353//12851 8352//10906 8394//5630 +f 8330//12853 8395//12854 8354//8934 +f 8395//12854 
8330//12853 8353//8935 +f 8356//12855 8331//12855 8354//8938 +f 8331//12855 8356//12855 8355//8937 +f 8396//8939 8375//12856 8397//8941 +f 8375//12856 8396//8939 8373//8940 +f 8398//8942 8373//8943 8396//5643 +f 8373//8943 8398//8942 8371//8944 +f 8369//8946 8398//8946 8399//12857 +f 8398//8946 8369//8946 8371//12858 +f 8367//12859 8399//5653 8400//12860 +f 8399//5653 8367//12859 8369//5653 +f 8365//12861 8400//8952 8401//8953 +f 8400//8952 8365//12861 8367//12862 +f 8363//12863 8401//12864 8402//5660 +f 8401//12864 8363//12863 8365//5661 +f 8361//12865 8402//12866 8403//8957 +f 8402//12866 8361//12865 8363//8958 +f 8359//12867 8403//8961 8377//5668 +f 8403//8961 8359//12867 8361//12867 +f 8357//12868 8377//12869 8334//5672 +f 8377//12869 8357//12868 8359//12870 +f 8358//8962 8354//8963 8395//8963 +f 8354//8963 8358//8962 8356//8962 +f 8404//12871 8358//8966 8395//8964 +f 8358//8966 8404//12871 8360//8966 +f 8405//12872 8360//8968 8404//8967 +f 8360//8968 8405//12872 8362//8968 +f 8406//8969 8362//12873 8405//12874 +f 8362//12873 8406//8969 8364//5687 +f 8407//12875 8364//8971 8406//12876 +f 8364//8971 8407//12875 8366//8973 +f 8408//12877 8366//12878 8407//12879 +f 8366//12878 8408//12877 8368//12880 +f 8409//12881 8368//12882 8408//12883 +f 8368//12882 8409//12881 8370//12884 +f 8410//12885 8370//12886 8409//8979 +f 8370//12886 8410//12885 8372//8980 +f 8411//12887 8372//12888 8410//12889 +f 8372//12888 8411//12887 8374//12888 +f 8412//12890 8374//12891 8411//12892 +f 8374//12891 8412//12890 8376//12893 +f 8397//12894 8376//5711 8412//12895 +f 8376//5711 8397//12894 8375//5711 +f 8377//5714 8378//12896 8335//5716 +f 8378//12896 8377//5714 8403//5717 +f 8378//12897 8379//8996 8336//8996 +f 8379//8996 8378//12897 8413//12897 +f 8379//8998 8380//8999 8337//5721 +f 8380//8999 8379//8998 8414//9000 +f 8415//5724 8338//5725 8380//5724 +f 8338//5725 8415//5724 8381//5725 +f 8416//12898 8339//12899 8381//12900 +f 8339//12899 8416//12898 8382//10959 +f 8417//12901 
8340//12902 8382//5732 +f 8340//12902 8417//12901 8383//12903 +f 8341//9010 8418//9011 8384//5734 +f 8418//9011 8341//9010 8383//5737 +f 8342//9013 8419//12904 8385//9015 +f 8419//12904 8342//9013 8384//12905 +f 8343//9017 8420//9018 8386//5742 +f 8420//9018 8343//9017 8385//9018 +f 8386//9021 8387//12906 8345//9019 +f 8387//12906 8386//9021 8420//12907 +f 8387//12908 8388//12909 8346//12910 +f 8388//12909 8387//12908 8421//12911 +f 8388//9026 8389//12912 8347//9028 +f 8389//12912 8388//9026 8422//12913 +f 8348//9029 8423//12914 8390//9031 +f 8423//12914 8348//9029 8389//12915 +f 8349//9033 8424//12916 8391//9035 +f 8424//12916 8349//9033 8390//9036 +f 8350//5763 8425//12917 8392//5765 +f 8425//12917 8350//5763 8391//5766 +f 8392//10972 8393//12918 8351//9041 +f 8393//12918 8392//10972 8426//9042 +f 8393//12919 8394//12920 8352//12921 +f 8394//12920 8393//12919 8427//9046 +f 8394//9048 8395//9048 8353//9047 +f 8395//9048 8394//9048 8404//9049 +f 8397//9050 8428//5781 8396//9050 +f 8428//5781 8397//9050 8429//5781 +f 8396//12922 8430//12923 8398//12924 +f 8430//12923 8396//12922 8428//9055 +f 8399//12925 8430//12926 8431//12927 +f 8430//12926 8399//12925 8398//9059 +f 8400//12928 8431//5792 8432//5792 +f 8431//5792 8400//12928 8399//12928 +f 8401//5794 8432//12929 8433//12930 +f 8432//12929 8401//5794 8400//12931 +f 8402//12932 8433//5799 8413//12933 +f 8433//5799 8402//12932 8401//5801 +f 8403//12934 8413//12935 8378//5803 +f 8413//12935 8403//12934 8402//12936 +f 8427//10986 8404//12937 8394//12938 +f 8404//12937 8427//10986 8405//5808 +f 8434//9070 8405//5810 8427//12939 +f 8405//5810 8434//9070 8406//12940 +f 8435//12941 8406//12942 8434//9071 +f 8406//12942 8435//12941 8407//12943 +f 8436//12944 8407//12945 8435//12946 +f 8407//12945 8436//12944 8408//5820 +f 8437//9076 8408//12947 8436//9078 +f 8408//12947 8437//9076 8409//10993 +f 8437//5825 8410//5826 8409//5826 +f 8410//5826 8437//5825 8438//5825 +f 8438//12948 8411//12949 8410//12950 +f 8411//12949 
8438//12948 8439//12951 +f 8439//5831 8412//9083 8411//9083 +f 8412//9083 8439//5831 8440//5831 +f 8412//12952 8429//5834 8397//12952 +f 8429//5834 8412//12952 8440//5834 +f 8413//12953 8414//12954 8379//12955 +f 8414//12954 8413//12953 8433//12956 +f 8414//12957 8415//9091 8380//12958 +f 8415//9091 8414//12957 8441//12959 +f 8442//5843 8381//5844 8415//5845 +f 8381//5844 8442//5843 8416//5846 +f 8443//12960 8382//9097 8416//5849 +f 8382//9097 8443//12960 8417//5850 +f 8444//9103 8383//9103 8417//9103 +f 8383//9103 8444//9103 8418//9103 +f 8384//12961 8445//12962 8419//12963 +f 8445//12962 8384//12961 8418//9107 +f 8385//9108 8446//9109 8420//9110 +f 8446//9109 8385//9108 8419//12964 +f 8420//12965 8421//9115 8387//12965 +f 8421//9115 8420//12965 8446//12966 +f 8421//9116 8422//9117 8388//9118 +f 8422//9117 8421//9116 8447//9119 +f 8422//12967 8423//12968 8389//9122 +f 8423//12968 8422//12967 8448//5870 +f 8424//12969 8423//12970 8449//9126 +f 8423//12970 8424//12969 8390//12971 +f 8391//12972 8450//12973 8425//12974 +f 8450//12973 8391//12972 8424//9131 +f 8425//12975 8426//12976 8392//12977 +f 8426//12976 8425//12975 8451//9135 +f 8426//9136 8427//5884 8393//9137 +f 8427//5884 8426//9136 8434//11017 +f 8429//12978 8452//9139 8428//5888 +f 8452//9139 8429//12978 8453//9140 +f 8428//9141 8454//9142 8430//9143 +f 8454//9142 8428//9141 8452//12979 +f 8431//12980 8454//12981 8455//12982 +f 8454//12981 8431//12980 8430//12983 +f 8432//12984 8455//12985 8441//12986 +f 8455//12985 8432//12984 8431//12987 +f 8433//9149 8441//12988 8414//9151 +f 8441//12988 8433//9149 8432//12989 +f 8451//9152 8434//9153 8426//9154 +f 8434//9153 8451//9152 8435//12990 +f 8456//9155 8435//9156 8451//5912 +f 8435//9156 8456//9155 8436//9157 +f 8456//12991 8437//11027 8436//9160 +f 8437//11027 8456//12991 8457//9161 +f 8457//12992 8438//12993 8437//9163 +f 8438//12993 8457//12992 8458//9162 +f 8458//11033 8439//5921 8438//5924 +f 8439//5921 8458//11033 8459//5923 +f 8459//12994 8440//12995 
8439//12995 +f 8440//12995 8459//12994 8460//12994 +f 8460//12996 8429//12997 8440//12998 +f 8429//12997 8460//12996 8453//9170 +f 8441//12999 8442//13000 8415//13000 +f 8442//13000 8441//12999 8455//12999 +f 8461//13001 8416//13002 8442//13003 +f 8416//13002 8461//13001 8443//13004 +f 8444//13005 8443//13006 8462//9178 +f 8443//13006 8444//13005 8417//5939 +f 8445//13007 8444//9181 8463//13008 +f 8444//9181 8445//13007 8418//9183 +f 8419//13009 8464//13010 8446//9186 +f 8464//13010 8419//13009 8445//13011 +f 8446//5949 8447//13012 8421//5949 +f 8447//13012 8446//5949 8464//13012 +f 8447//13013 8448//13013 8422//13014 +f 8448//13013 8447//13013 8465//13015 +f 8449//13016 8448//13017 8466//13018 +f 8448//13017 8449//13016 8423//13019 +f 8424//5960 8467//5962 8450//5962 +f 8467//5962 8424//5960 8449//9199 +f 8450//9200 8451//9200 8425//9200 +f 8451//9200 8450//9200 8456//9200 +f 8468//13020 8452//13021 8453//13022 +f 8452//13021 8468//13020 8469//13023 +f 8452//9202 8461//13024 8454//9202 +f 8461//13024 8452//9202 8469//13024 +f 8455//13025 8461//13026 8442//13027 +f 8461//13026 8455//13025 8454//5975 +f 8450//13028 8457//13029 8456//13030 +f 8457//13029 8450//13028 8467//9207 +f 8467//9208 8458//9209 8457//9210 +f 8458//9209 8467//9208 8470//13031 +f 8470//13032 8459//9212 8458//9213 +f 8459//9212 8470//13032 8471//9214 +f 8471//13033 8460//13034 8459//5990 +f 8460//13034 8471//13033 8472//5989 +f 8472//13035 8453//13036 8460//11062 +f 8453//13036 8472//13035 8468//13037 +f 8462//13038 8461//9219 8469//5999 +f 8461//9219 8462//13038 8443//11064 +f 8463//13039 8462//13040 8473//11065 +f 8462//13040 8463//13039 8444//9224 +f 8445//13041 8474//9228 8464//13041 +f 8474//9228 8445//13041 8463//13042 +f 8464//13043 8465//6010 8447//13043 +f 8465//6010 8464//13043 8474//6010 +f 8466//11071 8465//6012 8475//6013 +f 8465//6012 8466//11071 8448//6014 +f 8449//13044 8470//9237 8467//6017 +f 8470//9237 8449//13044 8466//13045 +f 8473//13046 8469//9240 8468//6021 +f 8469//9240 
8473//13046 8462//13047 +f 8466//13048 8471//13049 8470//9243 +f 8471//13049 8466//13048 8475//13048 +f 8475//13050 8472//13051 8471//13052 +f 8472//13051 8475//13050 8476//9247 +f 8476//13053 8468//13054 8472//13055 +f 8468//13054 8476//13053 8473//9251 +f 8474//9252 8473//9254 8476//9254 +f 8473//9254 8474//9252 8463//9252 +f 8465//13056 8476//9256 8475//9257 +f 8476//9256 8465//13056 8474//13057 diff --git a/maniskill3_environment_assets/racks/sockerbit_box.mtl b/maniskill3_environment_assets/racks/sockerbit_box.mtl new file mode 100644 index 0000000000000000000000000000000000000000..fcde12eb5e46a9c477973031b05aab7fe3504e58 --- /dev/null +++ b/maniskill3_environment_assets/racks/sockerbit_box.mtl @@ -0,0 +1,12 @@ +# Blender 4.3.2 MTL File: 'None' +# www.blender.org + +newmtl SimplygonCastMaterial.004 +Ns 250.000000 +Ka 1.000000 1.000000 1.000000 +Kd 0.800000 0.800000 0.800000 +Ks 0.500000 0.500000 0.500000 +Ke 0.000000 0.000000 0.000000 +Ni 1.500000 +d 1.000000 +illum 2 diff --git a/maniskill3_environment_assets/racks/sockerbit_box.obj b/maniskill3_environment_assets/racks/sockerbit_box.obj new file mode 100644 index 0000000000000000000000000000000000000000..6c8b5c4a553e3d5bedeec77e79530c1ad6d17163 --- /dev/null +++ b/maniskill3_environment_assets/racks/sockerbit_box.obj @@ -0,0 +1,21710 @@ +# Blender 4.3.2 +# www.blender.org +mtllib sockerbit_box.mtl +o model +v 0.081225 -0.000598 0.126826 +v 0.081007 -0.000925 0.128349 +v 0.081007 0.000598 0.128349 +v 0.081225 0.002556 0.126826 +v 0.081007 0.002121 0.128349 +v 0.081007 0.005275 0.128349 +v -0.080952 0.000598 0.128349 +v -0.080952 -0.000925 0.128349 +v -0.081278 0.000925 0.126826 +v -0.080952 -0.007016 0.128349 +v -0.080952 -0.008430 0.128349 +v -0.081278 0.000925 0.126826 +v -0.080952 -0.002447 0.128349 +v -0.080952 -0.005493 0.128349 +v -0.081278 0.000925 0.126826 +v -0.069857 0.099471 0.105290 +v -0.071706 0.101102 0.104963 +v -0.071598 0.101211 0.105290 +v -0.072468 0.101972 0.105398 +v -0.072359 0.101972 
0.105616 +v -0.079647 0.008430 0.131721 +v -0.079973 0.008430 0.131612 +v -0.079647 0.008539 0.132047 +v -0.079647 0.008539 0.132047 +v -0.079647 0.008539 0.132047 +v -0.080408 0.050959 0.131612 +v -0.080190 0.053352 0.131830 +v -0.080190 0.053352 0.131830 +v -0.080190 0.053352 0.131830 +v -0.080190 0.008647 0.131830 +v -0.080190 0.008647 0.131830 +v -0.080190 0.008647 0.131830 +v -0.079647 0.053352 0.132047 +v -0.079647 0.053352 0.132047 +v -0.079647 0.053352 0.132047 +v -0.079647 0.053352 0.132047 +v -0.080190 0.008430 0.131286 +v -0.080190 0.008430 0.131286 +v -0.080190 0.008430 0.131286 +v -0.080190 0.008430 0.131286 +v -0.074970 0.008430 0.131721 +v -0.074970 0.008430 0.131721 +v -0.074970 0.008430 0.131721 +v -0.074970 0.008647 0.132047 +v -0.074970 0.008647 0.132047 +v -0.074970 0.008647 0.132047 +v -0.074970 0.008647 0.132047 +v -0.074970 0.030401 0.132047 +v -0.074970 0.030401 0.132047 +v -0.074970 0.053352 0.132047 +v -0.074970 0.053352 0.132047 +v -0.074970 0.053352 0.132047 +v -0.074752 0.053569 0.131721 +v -0.074752 0.053569 0.131721 +v -0.079647 0.053569 0.131830 +v -0.079647 0.053569 0.131830 +v -0.080190 0.053569 0.131395 +v -0.080190 0.053569 0.131395 +v -0.080517 0.053352 0.131286 +v -0.080517 0.053352 0.131286 +v -0.080517 0.053352 0.131286 +v -0.080517 0.030401 0.131286 +v -0.080517 0.008647 0.131286 +v -0.080517 0.008647 0.131286 +v -0.080952 0.008647 0.128349 +v -0.080734 0.008430 0.128240 +v -0.080734 0.008430 0.128240 +v -0.080734 0.008430 0.128240 +v -0.074099 -0.055854 0.114209 +v -0.074099 -0.056071 0.114209 +v -0.074099 -0.056071 0.114209 +v -0.074099 -0.056071 0.114209 +v -0.079647 -0.056071 0.114209 +v -0.079647 -0.056071 0.114209 +v -0.079647 -0.056071 0.114209 +v -0.079647 -0.056071 0.114209 +v 0.038043 0.005384 0.004460 +v 0.038043 0.005384 0.004460 +v 0.038043 0.005384 0.004460 +v 0.038043 0.005384 0.004460 +v 0.038043 0.005384 0.004460 +v 0.037717 0.005167 0.004460 +v 0.037717 0.005167 0.004460 +v 0.037717 0.005167 0.004460 +v 
0.037717 0.005493 0.004460 +v 0.037717 0.005493 0.004460 +v 0.037173 0.005710 0.004460 +v 0.037173 0.005710 0.004460 +v 0.037173 0.005058 0.004460 +v 0.039131 0.004405 0.004460 +v 0.039131 0.004405 0.004460 +v 0.039131 0.004405 0.004460 +v 0.039131 0.004405 0.004460 +v 0.039131 0.004405 0.004460 +v 0.039131 0.004405 0.004460 +v 0.039131 0.004405 0.004460 +v 0.039131 0.004405 0.004460 +v 0.039131 0.006689 0.004460 +v 0.039131 0.006689 0.004460 +v 0.039131 0.006689 0.004460 +v 0.039131 0.006689 0.004460 +v 0.039131 0.006689 0.004460 +v 0.039131 0.005819 0.004460 +v 0.039131 0.005819 0.004460 +v 0.039131 0.005819 0.004460 +v 0.037608 0.002447 0.004460 +v 0.037608 0.002447 0.004460 +v 0.038260 0.002447 0.004460 +v 0.038478 0.001251 0.004460 +v 0.038478 0.001251 0.004460 +v 0.038478 0.001251 0.004460 +v 0.038478 0.002774 0.004460 +v 0.038478 0.002774 0.004460 +v 0.038478 0.002774 0.004460 +v 0.038478 0.002774 0.004460 +v 0.039131 -0.000163 0.004460 +v 0.039131 -0.000163 0.004460 +v 0.039131 -0.000163 0.004460 +v 0.039131 -0.000163 0.004460 +v 0.039131 -0.000163 0.004460 +v 0.038369 -0.003317 0.004460 +v 0.038369 -0.003317 0.004460 +v 0.039131 -0.002665 0.004460 +v 0.039131 -0.002665 0.004460 +v 0.039131 -0.002665 0.004460 +v 0.039131 -0.003317 0.004460 +v 0.039131 -0.003317 0.004460 +v 0.039131 -0.003317 0.004460 +v 0.039131 -0.003317 0.004460 +v 0.040871 -0.002447 0.004460 +v 0.040871 -0.002447 0.004460 +v 0.040871 -0.002447 0.004460 +v 0.040871 -0.002447 0.004460 +v 0.040980 -0.000816 0.004460 +v 0.040980 -0.000816 0.004460 +v 0.040980 -0.000816 0.004460 +v 0.040980 -0.000816 0.004460 +v 0.039131 -0.000925 0.004460 +v 0.040980 0.000816 0.004460 +v 0.040980 0.000816 0.004460 +v 0.040871 0.002447 0.004460 +v 0.040871 0.002447 0.004460 +v 0.040871 0.002447 0.004460 +v 0.040871 0.002447 0.004460 +v 0.039131 0.002774 0.004460 +v 0.039131 0.002774 0.004460 +v 0.039131 0.002774 0.004460 +v 0.039131 0.002774 0.004460 +v 0.039131 0.002774 0.004460 +v 0.037390 0.002774 0.004460 
+v 0.037608 0.001251 0.004460 +v 0.037608 0.001251 0.004460 +v 0.037608 -0.000163 0.004460 +v 0.037608 -0.000163 0.004460 +v 0.038152 -0.001577 0.004460 +v 0.038152 -0.001577 0.004460 +v 0.038152 -0.001577 0.004460 +v 0.038152 -0.001577 0.004460 +v 0.037717 -0.003317 0.004460 +v 0.037717 -0.003317 0.004460 +v 0.039131 -0.004949 0.004460 +v 0.039131 -0.004949 0.004460 +v 0.039131 -0.004949 0.004460 +v 0.039131 -0.004949 0.004460 +v 0.039131 -0.004949 0.004460 +v 0.039131 -0.004949 0.004460 +v 0.039131 -0.004949 0.004460 +v 0.039131 -0.006689 0.004460 +v 0.039131 -0.006689 0.004460 +v 0.039131 -0.006689 0.004460 +v 0.039131 -0.006689 0.004460 +v 0.039131 -0.006689 0.004460 +v 0.039131 -0.007668 0.004460 +v 0.039131 -0.007668 0.004460 +v 0.039131 -0.007668 0.004460 +v 0.039131 -0.007668 0.004460 +v 0.038913 -0.007668 0.004460 +v 0.038913 -0.007668 0.004460 +v 0.038913 -0.007668 0.004460 +v 0.038913 -0.007668 0.004460 +v 0.038913 -0.007668 0.004460 +v 0.038913 -0.007668 0.004460 +v 0.038913 -0.007668 0.004460 +v 0.038913 -0.007668 0.004460 +v 0.039783 -0.007886 0.004460 +v 0.039783 -0.007886 0.004460 +v 0.039783 -0.007886 0.004460 +v 0.039783 -0.007886 0.004460 +v 0.040110 -0.006798 0.004460 +v 0.040110 -0.006798 0.004460 +v 0.040110 -0.006798 0.004460 +v 0.040110 -0.006798 0.004460 +v 0.040436 -0.005493 0.004460 +v 0.040436 -0.005493 0.004460 +v 0.040436 -0.005493 0.004460 +v 0.040436 -0.005493 0.004460 +v 0.040436 -0.005493 0.004460 +v 0.039131 -0.005819 0.004460 +v 0.039131 -0.005819 0.004460 +v 0.039131 -0.005819 0.004460 +v 0.039131 -0.005819 0.004460 +v 0.040762 -0.004079 0.004460 +v 0.040762 -0.004079 0.004460 +v 0.040762 -0.004079 0.004460 +v 0.040762 -0.004079 0.004460 +v 0.041524 0.002665 0.004460 +v 0.041524 0.002665 0.004460 +v 0.041524 0.002665 0.004460 +v 0.040762 0.003970 0.004460 +v 0.040762 0.003970 0.004460 +v 0.040762 0.003970 0.004460 +v 0.040762 0.003970 0.004460 +v 0.040436 0.005493 0.004460 +v 0.040436 0.005493 0.004460 +v 0.040436 0.005493 
0.004460 +v 0.040436 0.005493 0.004460 +v 0.040110 0.006798 0.004460 +v 0.040110 0.006798 0.004460 +v 0.040110 0.006798 0.004460 +v 0.040110 0.006798 0.004460 +v 0.039783 0.007886 0.004460 +v 0.039783 0.007886 0.004460 +v 0.039783 0.007886 0.004460 +v 0.038913 0.006798 0.004460 +v 0.038913 0.006798 0.004460 +v 0.038913 0.006798 0.004460 +v 0.038913 0.006798 0.004460 +v 0.038913 0.006798 0.004460 +v 0.038043 0.007124 0.004460 +v 0.038043 0.007124 0.004460 +v 0.038043 0.007124 0.004460 +v 0.038043 0.007124 0.004460 +v 0.038043 0.007124 0.004460 +v 0.038043 0.007124 0.004460 +v 0.038043 0.007124 0.004460 +v 0.038043 0.007124 0.004460 +v 0.038043 0.007124 0.004460 +v 0.036738 0.007668 0.004460 +v 0.036738 0.007668 0.004460 +v 0.036738 0.007124 0.004460 +v 0.036738 0.007124 0.004460 +v 0.036738 0.007124 0.004460 +v 0.036738 0.007124 0.004460 +v 0.036738 0.007124 0.004460 +v 0.036738 0.006472 0.004460 +v 0.036738 0.006472 0.004460 +v 0.036738 0.006472 0.004460 +v 0.036738 0.006472 0.004460 +v 0.036738 0.006472 0.004460 +v 0.036738 0.005928 0.004460 +v 0.036738 0.005928 0.004460 +v 0.036738 0.005928 0.004460 +v 0.036738 0.005928 0.004460 +v 0.036738 0.005928 0.004460 +v 0.036738 0.004840 0.004460 +v 0.036738 0.004840 0.004460 +v 0.036738 0.004840 0.004460 +v 0.036738 0.004840 0.004460 +v 0.036738 0.003426 0.004460 +v 0.036738 0.003426 0.004460 +v 0.036738 0.003426 0.004460 +v 0.036738 0.002774 0.004460 +v 0.036738 0.002774 0.004460 +v 0.036738 0.002774 0.004460 +v 0.036738 0.002774 0.004460 +v 0.036738 0.002012 0.004460 +v 0.036738 0.002012 0.004460 +v 0.036738 0.002012 0.004460 +v 0.036738 0.002012 0.004460 +v 0.036738 -0.000163 0.004460 +v 0.036738 -0.000163 0.004460 +v 0.036738 -0.000163 0.004460 +v 0.036738 -0.000163 0.004460 +v 0.036738 -0.000489 0.004460 +v 0.036738 -0.000489 0.004460 +v 0.036738 -0.000489 0.004460 +v 0.036738 -0.000489 0.004460 +v 0.036738 -0.002012 0.004460 +v 0.036738 -0.002012 0.004460 +v 0.036738 -0.002012 0.004460 +v 0.036738 -0.002012 
0.004460 +v 0.036738 -0.002556 0.004460 +v 0.036738 -0.002556 0.004460 +v 0.036738 -0.002556 0.004460 +v 0.036738 -0.002556 0.004460 +v 0.036738 -0.002556 0.004460 +v 0.036738 -0.003317 0.004460 +v 0.036738 -0.003317 0.004460 +v 0.036738 -0.003317 0.004460 +v 0.036738 -0.003317 0.004460 +v 0.036738 -0.004949 0.004460 +v 0.036738 -0.004949 0.004460 +v 0.036738 -0.004949 0.004460 +v 0.036738 -0.004949 0.004460 +v 0.036738 -0.004949 0.004460 +v 0.036738 -0.005819 0.004460 +v 0.036738 -0.005819 0.004460 +v 0.036738 -0.005819 0.004460 +v 0.036738 -0.005819 0.004460 +v 0.036738 -0.005819 0.004460 +v 0.036738 -0.005819 0.004460 +v 0.036738 -0.005819 0.004460 +v 0.036738 -0.007668 0.004460 +v 0.036738 -0.007668 0.004460 +v 0.036738 -0.007668 0.004460 +v 0.036738 -0.007668 0.004460 +v 0.036738 -0.007668 0.004460 +v 0.037064 -0.007668 0.004460 +v 0.037064 -0.007668 0.004460 +v 0.037064 -0.007668 0.004460 +v 0.037064 -0.007668 0.004460 +v 0.037064 -0.007668 0.004460 +v 0.037934 -0.007668 0.004460 +v 0.037934 -0.007668 0.004460 +v 0.037934 -0.007668 0.004460 +v 0.037934 -0.007668 0.004460 +v 0.037934 -0.007668 0.004460 +v 0.037934 -0.010061 0.004460 +v 0.037934 -0.010061 0.004460 +v 0.037934 -0.010061 0.004460 +v 0.037934 -0.010061 0.004460 +v 0.038369 -0.009952 0.004460 +v 0.038369 -0.009952 0.004460 +v 0.038369 -0.009952 0.004460 +v 0.038369 -0.009952 0.004460 +v 0.038913 -0.009517 0.004460 +v 0.038913 -0.009517 0.004460 +v 0.039348 -0.008865 0.004460 +v 0.039348 -0.008865 0.004460 +v 0.039348 -0.008865 0.004460 +v 0.041524 -0.010714 0.004460 +v 0.041524 -0.010714 0.004460 +v 0.041524 -0.010714 0.004460 +v 0.041524 -0.009300 0.004460 +v 0.041524 -0.009300 0.004460 +v 0.041524 -0.009300 0.004460 +v 0.041524 -0.009300 0.004460 +v 0.041524 -0.009300 0.004460 +v 0.041524 0.009191 0.004460 +v 0.041524 0.009191 0.004460 +v 0.041524 0.009191 0.004460 +v 0.041524 0.009191 0.004460 +v 0.041524 0.009191 0.004460 +v 0.041524 0.010714 0.004460 +v 0.041524 0.010714 0.004460 +v 0.041524 
0.010714 0.004460 +v 0.039348 0.008756 0.004460 +v 0.039348 0.008756 0.004460 +v 0.039348 0.008756 0.004460 +v 0.039348 0.008756 0.004460 +v 0.039348 0.008756 0.004460 +v 0.038913 0.009409 0.004460 +v 0.038913 0.009409 0.004460 +v 0.038913 0.009409 0.004460 +v 0.038369 0.009844 0.004460 +v 0.038369 0.009844 0.004460 +v 0.038369 0.009844 0.004460 +v 0.038369 0.009844 0.004460 +v 0.037934 0.009952 0.004460 +v 0.037934 0.009952 0.004460 +v 0.037934 0.009952 0.004460 +v 0.037390 0.009844 0.004460 +v 0.037390 0.009844 0.004460 +v 0.036955 0.009409 0.004460 +v 0.036955 0.009409 0.004460 +v 0.036520 0.008756 0.004460 +v 0.036520 0.008756 0.004460 +v 0.036085 0.007886 0.004460 +v 0.036085 0.007886 0.004460 +v 0.036085 0.007886 0.004460 +v 0.035650 0.006798 0.004460 +v 0.035650 0.006798 0.004460 +v 0.035650 0.006798 0.004460 +v 0.035650 0.006798 0.004460 +v 0.035324 0.005493 0.004460 +v 0.035324 0.005493 0.004460 +v 0.035324 0.005493 0.004460 +v 0.035324 0.005493 0.004460 +v 0.035324 0.005493 0.004460 +v 0.035324 0.005493 0.004460 +v 0.035106 0.003970 0.004460 +v 0.035106 0.003970 0.004460 +v 0.035106 0.003970 0.004460 +v 0.035106 0.003970 0.004460 +v 0.035106 0.003970 0.004460 +v 0.035106 0.003970 0.004460 +v 0.034997 0.002447 0.004460 +v 0.034997 0.002447 0.004460 +v 0.034997 0.002447 0.004460 +v 0.034997 0.002447 0.004460 +v 0.034997 0.002447 0.004460 +v 0.034889 0.000816 0.004460 +v 0.034889 0.000816 0.004460 +v 0.034889 0.000816 0.004460 +v 0.034889 0.000816 0.004460 +v 0.034889 0.000816 0.004460 +v 0.034889 -0.000816 0.004460 +v 0.034889 -0.000816 0.004460 +v 0.034889 -0.000816 0.004460 +v 0.034889 -0.000816 0.004460 +v 0.034889 -0.000816 0.004460 +v 0.034997 -0.002447 0.004460 +v 0.034997 -0.002447 0.004460 +v 0.034997 -0.002447 0.004460 +v 0.034997 -0.002447 0.004460 +v 0.034997 -0.002447 0.004460 +v 0.034997 -0.002447 0.004460 +v 0.034997 -0.002447 0.004460 +v 0.035106 -0.004079 0.004460 +v 0.035106 -0.004079 0.004460 +v 0.035106 -0.004079 0.004460 +v 0.035106 
-0.004079 0.004460 +v 0.035324 -0.005493 0.004460 +v 0.035324 -0.005493 0.004460 +v 0.035324 -0.005493 0.004460 +v 0.035324 -0.005493 0.004460 +v 0.035650 -0.006798 0.004460 +v 0.035650 -0.006798 0.004460 +v 0.035650 -0.006798 0.004460 +v 0.035650 -0.006798 0.004460 +v 0.036085 -0.007886 0.004460 +v 0.036085 -0.007886 0.004460 +v 0.036085 -0.007886 0.004460 +v 0.036520 -0.008865 0.004460 +v 0.036520 -0.008865 0.004460 +v 0.036520 -0.008865 0.004460 +v 0.036955 -0.009517 0.004460 +v 0.036955 -0.009517 0.004460 +v 0.036955 -0.009517 0.004460 +v 0.036955 -0.009517 0.004460 +v 0.037390 -0.009952 0.004460 +v 0.037390 -0.009952 0.004460 +v 0.040218 -0.010714 0.004460 +v 0.040218 -0.010714 0.004460 +v 0.040218 -0.010714 0.004460 +v 0.040218 -0.010714 0.004460 +v 0.040218 -0.010714 0.004460 +v 0.043155 -0.022679 0.004460 +v 0.043155 -0.022679 0.004460 +v 0.043155 -0.022679 0.004460 +v 0.043155 -0.022679 0.004460 +v 0.043155 -0.022679 0.004460 +v 0.043155 -0.022679 0.004460 +v 0.040436 -0.073801 0.004460 +v 0.040436 -0.073801 0.004460 +v 0.040436 -0.073801 0.004460 +v 0.043155 -0.073801 0.004460 +v 0.043155 -0.073801 0.004460 +v 0.043155 -0.073801 0.004460 +v 0.043155 -0.074562 0.004351 +v 0.043155 -0.074562 0.004351 +v 0.048267 -0.076846 0.000979 +v 0.048267 -0.076846 0.000979 +v 0.047506 -0.078587 0.000870 +v 0.047506 -0.078587 0.000870 +v 0.047506 -0.078587 0.000870 +v 0.045330 -0.075215 0.003589 +v 0.045330 -0.075215 0.003589 +v 0.044569 -0.075976 0.003589 +v 0.044569 -0.075976 0.003589 +v 0.044569 -0.075976 0.003589 +v 0.045548 -0.073801 0.003698 +v 0.043916 -0.073801 0.004351 +v 0.043155 -0.049545 0.004460 +v 0.043155 -0.049545 0.004460 +v 0.043155 -0.049545 0.004460 +v 0.043155 -0.049545 0.004460 +v -0.006009 -0.073801 0.004460 +v -0.006009 -0.073801 0.004460 +v -0.006009 -0.073801 0.004460 +v -0.006009 -0.073801 0.004460 +v -0.006009 -0.074562 0.004351 +v -0.006009 -0.074562 0.004351 +v -0.028198 -0.075432 0.004133 +v -0.006009 -0.075432 0.004133 +v 0.043155 
-0.076194 0.003698 +v 0.043155 -0.076194 0.003698 +v 0.043155 -0.076194 0.003698 +v 0.040436 -0.076955 0.003154 +v 0.040436 -0.076955 0.003154 +v 0.040436 -0.076955 0.003154 +v 0.043155 -0.076955 0.003154 +v 0.043155 -0.076955 0.003154 +v 0.043155 -0.076955 0.003154 +v 0.046309 -0.078913 0.000979 +v 0.046309 -0.078913 0.000979 +v 0.047506 -0.080001 0.000218 +v 0.048920 -0.078913 0.000218 +v 0.048920 -0.078913 0.000218 +v 0.050443 -0.075650 0.000109 +v 0.050878 -0.078260 0.000000 +v 0.050878 -0.078260 0.000000 +v 0.050878 -0.078260 0.000000 +v 0.048811 -0.075650 0.000761 +v 0.048159 -0.075650 0.001305 +v 0.048159 -0.075650 0.001305 +v 0.046309 -0.073801 0.003154 +v 0.046309 -0.073801 0.003154 +v 0.045548 -0.045412 0.003698 +v 0.044787 -0.023766 0.004133 +v 0.045548 0.071843 0.003698 +v 0.045548 0.071843 0.003698 +v 0.045548 0.071843 0.003698 +v 0.044787 0.032033 0.004133 +v 0.043916 0.071843 0.004351 +v 0.043916 0.071843 0.004351 +v 0.043155 0.071843 0.004460 +v 0.043155 0.071843 0.004460 +v 0.043155 0.071843 0.004460 +v 0.043155 0.073474 0.004133 +v 0.040436 0.071843 0.004460 +v 0.040436 0.071843 0.004460 +v 0.040436 0.071843 0.004460 +v 0.043155 0.070755 0.004460 +v 0.043155 0.070755 0.004460 +v 0.043155 0.070755 0.004460 +v 0.043155 0.070755 0.004460 +v 0.043155 0.011802 0.004460 +v 0.043155 0.011802 0.004460 +v 0.043155 0.011802 0.004460 +v 0.043155 0.011802 0.004460 +v 0.043155 0.011802 0.004460 +v 0.043155 0.011802 0.004460 +v 0.040218 0.010714 0.004460 +v 0.034345 0.010714 0.004460 +v 0.034345 0.010714 0.004460 +v 0.034345 0.010714 0.004460 +v 0.034345 0.010714 0.004460 +v 0.034345 0.010714 0.004460 +v 0.034345 0.010714 0.004460 +v 0.034345 -0.010714 0.004460 +v 0.034345 -0.010714 0.004460 +v 0.034345 -0.010714 0.004460 +v 0.034345 -0.010714 0.004460 +v 0.034345 -0.010714 0.004460 +v 0.034345 -0.010714 0.004460 +v 0.034345 -0.010714 0.004460 +v 0.034345 -0.010714 0.004460 +v -0.009599 -0.047805 0.004460 +v -0.009599 -0.047805 0.004460 +v -0.009599 -0.047805 
0.004460 +v -0.009599 -0.047805 0.004460 +v -0.035703 -0.073801 0.004460 +v -0.035703 -0.073801 0.004460 +v -0.035703 -0.073801 0.004460 +v -0.035703 -0.073801 0.004460 +v -0.035703 -0.073801 0.004460 +v -0.043100 -0.073801 0.004460 +v -0.043100 -0.073801 0.004460 +v -0.043100 -0.073801 0.004460 +v -0.043100 -0.074562 0.004351 +v -0.043100 -0.074562 0.004351 +v -0.043100 -0.076194 0.003698 +v -0.043100 -0.076194 0.003698 +v -0.043100 -0.076955 0.003154 +v -0.043100 -0.076955 0.003154 +v -0.043100 -0.076955 0.003154 +v -0.035703 -0.076955 0.003154 +v -0.035703 -0.076955 0.003154 +v 0.042611 -0.078695 0.001305 +v 0.042611 -0.078695 0.001305 +v 0.042611 -0.078695 0.001305 +v 0.045004 -0.080218 0.000326 +v 0.046962 -0.081632 0.000000 +v 0.046962 -0.081632 0.000000 +v 0.046962 -0.081632 0.000000 +v 0.049137 -0.080653 0.000000 +v 0.049137 -0.080653 0.000000 +v 0.049137 -0.080653 0.000000 +v 0.049137 -0.080653 0.000000 +v 0.049137 -0.080653 0.000000 +v 0.049137 -0.080653 0.000000 +v -0.076819 0.105127 0.126609 +v -0.076819 0.105127 0.126609 +v -0.077036 0.104800 0.126717 +v -0.077036 0.104800 0.126717 +v -0.077036 0.104800 0.126717 +v -0.077254 0.104800 0.126609 +v -0.077254 0.104800 0.126609 +v -0.079864 -0.057811 0.113230 +v -0.079864 -0.092835 0.095065 +v -0.079864 -0.092835 0.095065 +v -0.080190 -0.092727 0.094957 +v -0.080190 -0.092727 0.094957 +v -0.080190 -0.092727 0.094957 +v -0.080190 -0.092727 0.094957 +v 0.024555 -0.096969 0.018056 +v 0.024555 -0.096969 0.018056 +v 0.024555 -0.096969 0.018056 +v 0.021510 -0.096969 0.018056 +v 0.021510 -0.096969 0.018056 +v 0.021510 -0.096969 0.018056 +v 0.021510 -0.096969 0.018056 +v 0.024555 -0.096969 0.016751 +v 0.024555 -0.096969 0.016751 +v 0.024555 -0.096969 0.016751 +v 0.024555 -0.096969 0.016751 +v 0.024555 -0.096969 0.016751 +v 0.024555 -0.096969 0.016751 +v 0.024555 -0.096969 0.016751 +v 0.018355 -0.096969 0.018056 +v 0.018355 -0.096969 0.018056 +v 0.018355 -0.096969 0.018056 +v 0.018355 -0.096969 0.018056 +v 0.018355 
-0.096969 0.018056 +v 0.018355 -0.096860 0.016424 +v 0.018355 -0.096860 0.016424 +v 0.018355 -0.096860 0.016424 +v 0.018355 -0.096860 0.016424 +v 0.018355 -0.096860 0.016424 +v 0.018355 -0.096860 0.016424 +v 0.018355 -0.096860 0.016424 +v -0.001550 -0.096860 0.016424 +v -0.001550 -0.096860 0.016424 +v -0.001550 -0.096860 0.016424 +v -0.001550 -0.096860 0.016424 +v -0.001550 -0.096860 0.016424 +v -0.001550 -0.096860 0.016424 +v 0.006064 -0.096969 0.018056 +v 0.006064 -0.096969 0.018056 +v 0.006064 -0.096969 0.018056 +v 0.006064 -0.096969 0.018056 +v 0.006064 -0.096969 0.018056 +v 0.006064 -0.096969 0.018056 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097186 0.022407 +v 0.027710 -0.097948 0.037199 +v 0.027710 -0.097948 0.037199 +v 0.027710 -0.097948 0.037199 +v 0.027710 -0.097948 0.037199 +v 0.021510 -0.098056 0.037852 +v 0.021510 -0.098056 0.037852 +v 0.021510 -0.098056 0.037852 +v 0.021510 -0.098056 0.037852 +v 0.021510 -0.097948 0.035568 +v 0.021510 -0.097948 0.035568 +v 0.021510 -0.097948 0.035568 +v 0.021510 -0.097948 0.035568 +v 0.021510 -0.097948 0.035568 +v 0.021510 -0.097948 0.035568 +v 0.021510 -0.097948 0.035568 +v 0.015310 -0.097948 0.037199 +v 0.015310 -0.097948 0.037199 +v 0.015310 -0.097948 0.037199 +v 0.015310 -0.097948 0.037199 +v 0.015310 -0.097948 0.037199 +v 0.015310 -0.097948 0.037199 +v 0.015310 -0.098056 0.037852 +v 0.015310 -0.098056 0.037852 +v 0.015310 -0.098056 0.037852 +v 0.015310 -0.098056 0.037852 +v 0.018355 -0.098056 0.038070 +v 0.018355 -0.098056 0.038070 +v 0.018355 -0.098056 0.038070 +v 0.018355 -0.098056 0.038070 +v 0.018355 -0.098056 0.038070 +v 0.018355 -0.098056 0.038070 +v 0.018355 -0.098056 0.038070 +v 0.027710 -0.098056 0.038070 +v 0.027710 -0.098056 
0.038070 +v 0.027710 -0.098056 0.038070 +v 0.027710 -0.098056 0.038070 +v 0.027710 -0.098056 0.038070 +v 0.027710 -0.098056 0.038070 +v 0.027710 -0.098056 0.038070 +v 0.033910 -0.098056 0.037852 +v 0.033910 -0.098056 0.037852 +v 0.033910 -0.098056 0.037852 +v 0.046853 -0.098056 0.038070 +v 0.046853 -0.098056 0.038070 +v 0.046853 -0.098056 0.038070 +v 0.046853 -0.098056 0.038070 +v 0.046853 -0.098056 0.038070 +v 0.046853 -0.098056 0.038070 +v 0.043155 -0.098056 0.037526 +v 0.043155 -0.098056 0.037526 +v 0.043155 -0.098056 0.037526 +v 0.043155 -0.098056 0.037526 +v 0.047180 -0.098056 0.037526 +v 0.047180 -0.098056 0.037526 +v 0.047180 -0.098056 0.037526 +v 0.047180 -0.098056 0.037526 +v 0.047723 -0.097948 0.037199 +v 0.047723 -0.097948 0.037199 +v 0.047723 -0.097948 0.037199 +v 0.047723 -0.097948 0.037199 +v 0.047723 -0.097948 0.037199 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.047180 -0.097404 0.026758 +v 0.006064 -0.097948 0.035568 +v 0.006064 -0.097948 0.035568 +v 0.006064 -0.097948 0.035568 +v 0.006064 -0.097948 0.035568 +v 0.006064 -0.097948 0.035568 +v -0.001550 -0.097948 0.037199 +v -0.001550 -0.097948 0.037199 +v -0.001550 -0.097948 0.037199 +v -0.001550 -0.097948 0.037199 +v -0.001550 -0.097948 0.037199 +v 0.006064 -0.098056 0.037526 +v 0.006064 -0.098056 0.037526 +v 0.006064 -0.098056 0.037526 +v 0.006064 -0.098056 0.037526 +v 0.006064 -0.098056 0.037526 +v 0.006064 -0.098056 0.037526 +v -0.001550 -0.098056 0.038070 +v -0.001550 -0.098056 0.038070 +v -0.001550 -0.098056 0.038070 +v -0.001550 -0.098056 0.038070 +v 0.006064 -0.098056 0.038070 +v 0.006064 -0.098056 0.038070 +v 0.006064 -0.098056 0.038070 +v 0.006064 -0.098056 0.038070 +v 0.006064 -0.098056 0.038070 +v 0.006064 -0.098056 
0.038070 +v 0.012264 -0.098383 0.044487 +v 0.012264 -0.098383 0.044487 +v 0.012264 -0.098383 0.044487 +v 0.012264 -0.098383 0.044487 +v 0.019878 -0.098383 0.044487 +v 0.019878 -0.098383 0.044487 +v 0.019878 -0.098383 0.044487 +v 0.019878 -0.098383 0.044487 +v 0.019878 -0.098383 0.044487 +v 0.080137 0.053352 0.131830 +v 0.080137 0.053352 0.131830 +v 0.080137 0.030401 0.131830 +v 0.079593 0.053352 0.132047 +v 0.079593 0.053352 0.132047 +v 0.079593 0.053352 0.132047 +v 0.079593 0.008539 0.132047 +v 0.079593 0.008539 0.132047 +v 0.079593 0.008539 0.132047 +v 0.080137 0.008647 0.131830 +v 0.080137 0.008647 0.131830 +v 0.080355 0.016696 0.131612 +v 0.080355 0.053352 0.131612 +v 0.080355 0.053352 0.131612 +v 0.080355 0.053352 0.131612 +v 0.080246 0.053569 0.131286 +v 0.080246 0.053569 0.131286 +v 0.080246 0.053569 0.131286 +v 0.080246 0.053569 0.131286 +v 0.079593 0.053569 0.131721 +v 0.079593 0.053569 0.131721 +v 0.079593 0.053569 0.131721 +v 0.074699 0.053569 0.131721 +v 0.074699 0.053569 0.131721 +v 0.075025 0.053352 0.132047 +v 0.075025 0.053352 0.132047 +v 0.075025 0.053352 0.132047 +v 0.075025 0.008647 0.132047 +v 0.075025 0.008647 0.132047 +v 0.075025 0.008647 0.132047 +v 0.075025 0.008430 0.131721 +v 0.075025 0.008430 0.131721 +v 0.079811 0.008430 0.131721 +v 0.079811 0.008430 0.131721 +v 0.080137 0.008430 0.131395 +v 0.080137 0.008430 0.131395 +v 0.080463 0.008647 0.131286 +v 0.080463 0.008647 0.131286 +v 0.080463 0.013216 0.131286 +v 0.080463 0.013216 0.131286 +v 0.080463 0.013216 0.131286 +v 0.080463 0.053352 0.131286 +v 0.080463 0.053352 0.131286 +v 0.080463 0.053352 0.131286 +v 0.080790 0.053569 0.128240 +v 0.080790 0.053569 0.128240 +v 0.081007 0.053352 0.128349 +v 0.081007 0.053352 0.128349 +v 0.081007 0.053352 0.128349 +v 0.081007 0.053352 0.128349 +v 0.080137 -0.054657 0.114644 +v 0.080137 -0.054657 0.114644 +v 0.080137 -0.054657 0.114644 +v 0.079919 -0.053026 0.115623 +v 0.080246 -0.023984 0.130416 +v 0.080246 -0.023984 0.130416 +v 0.080246 -0.021917 
0.131286 +v 0.079919 -0.024093 0.130633 +v 0.079919 -0.024093 0.130633 +v 0.079593 -0.054766 0.114862 +v 0.079593 -0.054766 0.114862 +v 0.079593 -0.054766 0.114862 +v 0.079593 -0.054766 0.114862 +v 0.079593 -0.054766 0.114862 +v 0.079593 -0.054766 0.114862 +v 0.080028 -0.054875 0.114426 +v 0.080028 -0.054875 0.114426 +v 0.080463 -0.054657 0.114100 +v 0.080463 -0.054657 0.114100 +v 0.080463 -0.054657 0.114100 +v 0.081333 0.065643 0.132591 +v 0.081007 0.065643 0.132809 +v 0.081007 0.029966 0.132809 +v 0.081007 0.029966 0.132809 +v 0.043699 0.074997 0.004786 +v 0.043155 0.075106 0.004786 +v 0.043155 0.075106 0.004786 +v 0.043155 0.072931 0.005656 +v 0.043155 0.072931 0.005656 +v 0.049573 -0.023766 0.001849 +v 0.049573 0.006254 0.001849 +v 0.049029 -0.049219 0.002284 +v 0.049029 -0.049219 0.002284 +v 0.043155 -0.073801 0.005765 +v 0.043155 -0.073801 0.005765 +v 0.044243 -0.073801 0.005656 +v 0.044243 -0.073801 0.005656 +v 0.044243 -0.048240 0.005656 +v 0.046309 -0.073801 0.004786 +v 0.046309 -0.073801 0.004786 +v 0.046309 -0.073801 0.004786 +v 0.045983 -0.075650 0.004677 +v 0.045983 -0.075650 0.004677 +v 0.043155 -0.076955 0.004786 +v 0.044460 -0.077064 0.004568 +v 0.044460 -0.077064 0.004568 +v 0.043155 -0.074780 0.005656 +v 0.043155 -0.074780 0.005656 +v 0.035650 -0.073801 0.005765 +v 0.035650 -0.073801 0.005765 +v 0.035650 -0.073801 0.005765 +v 0.035650 -0.073801 0.005765 +v 0.043155 -0.048240 0.005765 +v 0.043155 -0.048240 0.005765 +v 0.043155 -0.048240 0.005765 +v 0.043155 -0.048240 0.005765 +v 0.044243 -0.005928 0.005656 +v 0.044243 -0.005928 0.005656 +v 0.046309 0.070429 0.004786 +v 0.046309 -0.048240 0.004786 +v 0.046309 -0.048240 0.004786 +v 0.047288 -0.073801 0.004133 +v 0.047288 -0.073801 0.004133 +v 0.047288 -0.073801 0.004133 +v 0.047288 -0.073801 0.004133 +v 0.049137 -0.077064 0.002067 +v 0.049137 -0.077064 0.002067 +v 0.048594 -0.078695 0.001958 +v 0.048594 -0.078695 0.001958 +v 0.048594 -0.078695 0.001958 +v 0.048594 -0.079892 0.001523 +v 0.047288 
-0.079565 0.001958 +v 0.047288 -0.079565 0.001958 +v 0.047288 -0.079565 0.001958 +v 0.043155 -0.077825 0.004133 +v 0.028254 -0.075867 0.005330 +v -0.043100 -0.076955 0.004786 +v -0.000027 -0.076955 0.004786 +v -0.000027 -0.076955 0.004786 +v -0.000027 -0.074780 0.005656 +v -0.000027 -0.074780 0.005656 +v -0.000027 -0.074780 0.005656 +v -0.000027 -0.073801 0.005765 +v -0.000027 -0.073801 0.005765 +v -0.000027 -0.073801 0.005765 +v -0.000027 -0.073801 0.005765 +v -0.035703 -0.073801 0.005765 +v -0.035703 -0.073801 0.005765 +v -0.035703 -0.073801 0.005765 +v -0.043100 -0.073801 0.005765 +v -0.043100 -0.073801 0.005765 +v -0.043100 -0.048240 0.005765 +v -0.043100 -0.048240 0.005765 +v -0.043100 -0.048240 0.005765 +v -0.043100 -0.048240 0.005765 +v -0.043100 -0.048240 0.005765 +v -0.043100 -0.048240 0.005765 +v -0.043100 -0.048240 0.005765 +v -0.043100 0.004079 0.005765 +v -0.043100 0.004079 0.005765 +v -0.043100 0.004079 0.005765 +v -0.043100 0.004079 0.005765 +v -0.043100 0.004079 0.005765 +v 0.043155 0.070429 0.005765 +v 0.043155 0.070429 0.005765 +v 0.043155 0.070429 0.005765 +v 0.043155 0.070429 0.005765 +v 0.043155 0.070429 0.005765 +v 0.043155 0.071843 0.005765 +v 0.043155 0.071843 0.005765 +v 0.044243 0.071843 0.005656 +v 0.047288 0.071843 0.004133 +v 0.047288 0.071843 0.004133 +v 0.047288 0.070429 0.004133 +v 0.047288 0.070429 0.004133 +v 0.047288 -0.045520 0.004133 +v 0.047288 -0.045520 0.004133 +v 0.049029 -0.075650 0.002284 +v 0.049029 -0.075650 0.002284 +v 0.049029 -0.075650 0.002284 +v 0.049029 -0.075650 0.002284 +v 0.051313 -0.075650 0.001305 +v 0.051313 -0.075650 0.001305 +v 0.051313 -0.075650 0.001305 +v 0.051313 -0.075650 0.001305 +v 0.050660 -0.078804 0.001305 +v 0.050660 -0.078804 0.001305 +v 0.050660 -0.078804 0.001305 +v 0.049137 -0.080653 0.001305 +v 0.049137 -0.080653 0.001305 +v 0.049137 -0.080653 0.001305 +v 0.049137 -0.080653 0.001305 +v 0.055772 -0.086636 0.001305 +v 0.055772 -0.086636 0.001305 +v 0.055772 -0.086636 0.001305 +v 0.055772 
-0.086636 0.001305 +v 0.046962 -0.081632 0.001305 +v 0.046962 -0.081632 0.001305 +v 0.046962 -0.081632 0.001305 +v 0.046962 -0.081632 0.001305 +v 0.047288 -0.080544 0.001523 +v 0.045004 -0.079674 0.002284 +v 0.045004 -0.079674 0.002284 +v 0.037499 -0.079674 0.002284 +v 0.037499 -0.079674 0.002284 +v 0.037499 -0.079674 0.002284 +v -0.004486 -0.079674 0.002284 +v -0.004486 -0.079674 0.002284 +v -0.004486 -0.079674 0.002284 +v -0.004486 -0.079674 0.002284 +v -0.000027 -0.077825 0.004133 +v -0.000027 -0.077825 0.004133 +v -0.000027 -0.077825 0.004133 +v -0.000027 -0.077825 0.004133 +v -0.000027 -0.077825 0.004133 +v -0.035703 -0.077825 0.004133 +v -0.035703 -0.077825 0.004133 +v -0.035703 -0.077825 0.004133 +v -0.043100 -0.077825 0.004133 +v -0.043100 -0.077825 0.004133 +v -0.044405 -0.077064 0.004568 +v -0.044405 -0.077064 0.004568 +v -0.044405 -0.077064 0.004568 +v -0.043752 -0.076846 0.004786 +v -0.043100 -0.074780 0.005656 +v -0.044188 -0.073801 0.005656 +v -0.044188 -0.048240 0.005656 +v -0.044188 -0.021700 0.005656 +v -0.044188 0.014412 0.005656 +v -0.044188 0.071843 0.005656 +v -0.043100 0.070429 0.005765 +v -0.043100 0.070429 0.005765 +v -0.043100 0.070429 0.005765 +v -0.043100 0.071843 0.005765 +v -0.043100 0.071843 0.005765 +v -0.035703 0.071843 0.005765 +v -0.035703 0.071843 0.005765 +v -0.035703 0.071843 0.005765 +v -0.035703 0.071843 0.005765 +v -0.004595 0.071843 0.005765 +v 0.045983 0.073692 0.004677 +v 0.045983 0.073692 0.004677 +v 0.048920 0.076085 0.001958 +v 0.049573 0.074888 0.001740 +v 0.049029 0.072169 0.002284 +v 0.051313 0.034752 0.001305 +v 0.051313 0.034752 0.001305 +v 0.051313 0.034752 0.001305 +v 0.051313 0.002556 0.001305 +v 0.051313 0.002556 0.001305 +v 0.051313 0.002556 0.001305 +v 0.051313 -0.049219 0.001305 +v 0.051313 -0.049219 0.001305 +v 0.051313 -0.049219 0.001305 +v 0.051313 -0.049219 0.001305 +v 0.056425 -0.085548 0.001305 +v 0.056425 -0.085548 0.001305 +v 0.056425 -0.085548 0.001305 +v 0.056425 -0.085548 0.001305 +v 0.056425 
-0.085548 0.001305 +v 0.056425 -0.085548 0.001305 +v 0.057078 -0.084895 0.001414 +v 0.056316 -0.086853 0.001414 +v 0.057730 -0.087723 0.002175 +v 0.057730 -0.087723 0.002175 +v 0.056316 -0.088811 0.002175 +v 0.056316 -0.088811 0.002175 +v 0.057839 -0.093597 0.007179 +v 0.057839 -0.093597 0.007179 +v 0.054250 -0.089246 0.002284 +v 0.054250 -0.089246 0.002284 +v 0.054250 -0.089246 0.002284 +v 0.054250 -0.089246 0.002284 +v 0.054358 -0.087071 0.001305 +v 0.054358 -0.087071 0.001305 +v 0.054358 -0.087071 0.001305 +v 0.054358 -0.087071 0.001305 +v 0.054358 -0.087071 0.001305 +v 0.045004 -0.081850 0.001305 +v 0.045004 -0.081850 0.001305 +v 0.045004 -0.081850 0.001305 +v 0.045004 -0.081850 0.001305 +v 0.045004 -0.081306 0.001414 +v 0.015963 -0.080218 0.001849 +v -0.004486 -0.080218 0.001849 +v -0.004486 -0.080218 0.001849 +v -0.037444 -0.079674 0.002284 +v -0.037444 -0.079674 0.002284 +v -0.037444 -0.079674 0.002284 +v -0.037444 -0.079674 0.002284 +v -0.044949 -0.079674 0.002284 +v -0.044949 -0.079674 0.002284 +v -0.046472 -0.079783 0.002067 +v -0.046472 -0.079783 0.002067 +v -0.046472 -0.079783 0.002067 +v -0.047994 -0.079239 0.001958 +v -0.047994 -0.079239 0.001958 +v -0.047994 -0.079239 0.001958 +v -0.045928 -0.075650 0.004677 +v -0.045928 -0.075650 0.004677 +v -0.046363 -0.073801 0.004786 +v -0.046363 -0.044433 0.004786 +v -0.046363 0.024093 0.004786 +v -0.046363 0.071843 0.004786 +v -0.045928 0.073692 0.004677 +v -0.045928 0.073692 0.004677 +v -0.044623 0.073366 0.005330 +v -0.043100 0.074018 0.005330 +v -0.004595 0.074018 0.005330 +v -0.004595 0.075106 0.004786 +v 0.035650 0.075106 0.004786 +v 0.043155 0.075976 0.004133 +v 0.043155 0.075976 0.004133 +v 0.043155 0.075976 0.004133 +v 0.043155 0.075976 0.004133 +v 0.044460 0.075215 0.004568 +v 0.044460 0.075215 0.004568 +v 0.044460 0.075215 0.004568 +v 0.048050 0.077281 0.001958 +v 0.048050 0.077281 0.001958 +v 0.049246 0.077281 0.001523 +v 0.055990 0.084460 0.001305 +v 0.055990 0.084460 0.001305 +v 0.055990 0.084460 
0.001305 +v 0.055990 0.084460 0.001305 +v 0.049137 0.078695 0.001305 +v 0.049137 0.078695 0.001305 +v 0.049137 0.078695 0.001305 +v 0.049137 0.078695 0.001305 +v 0.050660 0.076846 0.001305 +v 0.050660 0.076846 0.001305 +v 0.050660 0.076846 0.001305 +v 0.050660 0.076846 0.001305 +v 0.051313 0.073692 0.001305 +v 0.051313 0.073692 0.001305 +v 0.051313 0.073692 0.001305 +v 0.051313 0.073692 0.001305 +v 0.056534 0.015173 0.001305 +v 0.056534 0.015173 0.001305 +v 0.056534 0.015173 0.001305 +v 0.056534 0.015173 0.001305 +v 0.056534 0.015173 0.001305 +v 0.056534 0.015173 0.001305 +v 0.057078 -0.020721 0.001414 +v 0.057078 -0.054004 0.001414 +v 0.058709 -0.084895 0.002284 +v 0.058709 -0.084895 0.002284 +v 0.058165 -0.086962 0.002175 +v 0.058165 -0.086962 0.002175 +v 0.062081 -0.090116 0.007179 +v 0.062081 -0.090116 0.007179 +v 0.060885 -0.091530 0.007070 +v 0.060885 -0.091530 0.007070 +v 0.060015 -0.093488 0.009137 +v 0.060015 -0.093488 0.009137 +v 0.061537 -0.092183 0.009137 +v 0.061537 -0.092183 0.009137 +v 0.063495 -0.094032 0.044922 +v 0.063495 -0.094032 0.044922 +v 0.061102 -0.095881 0.044922 +v 0.061102 -0.095881 0.044922 +v 0.060232 -0.096534 0.045575 +v 0.058274 -0.096860 0.044922 +v 0.058274 -0.096860 0.044922 +v 0.057295 -0.094793 0.009137 +v 0.057295 -0.094793 0.009137 +v 0.054250 -0.094250 0.007288 +v 0.054250 -0.094250 0.007288 +v 0.054250 -0.094250 0.007288 +v 0.054250 -0.094250 0.007288 +v 0.046418 -0.089246 0.002284 +v 0.046418 -0.089246 0.002284 +v 0.008457 -0.089246 0.002284 +v 0.008457 -0.089246 0.002284 +v 0.008457 -0.089246 0.002284 +v 0.008457 -0.089246 0.002284 +v 0.008349 -0.087071 0.001305 +v 0.008349 -0.087071 0.001305 +v 0.008349 -0.087071 0.001305 +v 0.008349 -0.087071 0.001305 +v -0.004486 -0.081850 0.001305 +v -0.004486 -0.081850 0.001305 +v -0.004486 -0.081850 0.001305 +v -0.004486 -0.081850 0.001305 +v -0.004486 -0.081850 0.001305 +v -0.004486 -0.081306 0.001414 +v -0.004486 -0.081306 0.001414 +v -0.044949 -0.081306 0.001414 +v -0.047886 
-0.080762 0.001414 +v -0.049300 -0.079130 0.001523 +v -0.048973 -0.077934 0.001958 +v -0.048973 -0.077934 0.001958 +v -0.049082 -0.075650 0.002284 +v -0.049082 -0.075650 0.002284 +v -0.047233 -0.073801 0.004133 +v -0.047233 -0.073801 0.004133 +v -0.047233 -0.073801 0.004133 +v -0.047233 -0.048240 0.004133 +v -0.047233 -0.048240 0.004133 +v -0.047233 -0.048240 0.004133 +v -0.047233 -0.048240 0.004133 +v -0.047233 0.071843 0.004133 +v -0.047233 0.071843 0.004133 +v -0.047233 0.071843 0.004133 +v -0.048973 0.076085 0.001958 +v -0.048973 0.076085 0.001958 +v -0.047994 0.077281 0.001958 +v -0.047994 0.077281 0.001958 +v -0.044405 0.075215 0.004568 +v -0.044405 0.075215 0.004568 +v -0.043100 0.075976 0.004133 +v -0.043100 0.075976 0.004133 +v -0.035703 0.075976 0.004133 +v -0.035703 0.075976 0.004133 +v -0.035703 0.075976 0.004133 +v -0.035703 0.075976 0.004133 +v -0.035703 0.075976 0.004133 +v -0.000027 0.078260 0.001849 +v -0.000027 0.077825 0.002284 +v -0.000027 0.077825 0.002284 +v -0.000027 0.077825 0.002284 +v -0.000027 0.077825 0.002284 +v 0.037499 0.077825 0.002284 +v 0.037499 0.077825 0.002284 +v 0.037499 0.077825 0.002284 +v 0.045004 0.077825 0.002284 +v 0.045004 0.077825 0.002284 +v 0.045004 0.077825 0.002284 +v 0.045004 0.077825 0.002284 +v 0.046527 0.077825 0.002067 +v 0.046527 0.077825 0.002067 +v 0.046527 0.077825 0.002067 +v 0.047941 0.078913 0.001414 +v 0.047941 0.078913 0.001414 +v 0.046962 0.079783 0.001305 +v 0.046962 0.079783 0.001305 +v 0.046962 0.079783 0.001305 +v 0.055011 0.085113 0.001305 +v 0.055011 0.085113 0.001305 +v 0.055011 0.085113 0.001305 +v 0.055011 0.085113 0.001305 +v 0.057404 0.085548 0.001849 +v 0.056425 0.083699 0.001305 +v 0.056425 0.083699 0.001305 +v 0.056425 0.083699 0.001305 +v 0.056425 0.083699 0.001305 +v 0.056425 0.083699 0.001305 +v 0.058709 0.058029 0.002284 +v 0.058709 0.058029 0.002284 +v 0.058709 0.058029 0.002284 +v 0.058709 0.058029 0.002284 +v 0.058709 -0.054004 0.002284 +v 0.058709 -0.054004 0.002284 +v 0.058709 
-0.054004 0.002284 +v 0.063713 -0.084895 0.007288 +v 0.063713 -0.084895 0.007288 +v 0.063495 -0.086744 0.007179 +v 0.063495 -0.086744 0.007179 +v 0.063386 -0.089790 0.009137 +v 0.063386 -0.089790 0.009137 +v 0.064692 -0.092509 0.044922 +v 0.064692 -0.092509 0.044922 +v 0.064692 -0.092509 0.044922 +v 0.063713 -0.094250 0.045575 +v 0.063713 -0.094250 0.045575 +v 0.063713 -0.094250 0.045575 +v 0.063713 -0.094250 0.045575 +v 0.066432 -0.097078 0.089409 +v 0.066432 -0.097078 0.089409 +v 0.064148 -0.094793 0.045792 +v 0.064148 -0.094793 0.045792 +v 0.063278 -0.095446 0.045792 +v 0.063278 -0.095446 0.045792 +v 0.063278 -0.095446 0.045792 +v 0.063278 -0.095446 0.045792 +v 0.060558 -0.097186 0.045792 +v 0.060558 -0.097186 0.045792 +v 0.060558 -0.097186 0.045792 +v 0.058274 -0.097186 0.045575 +v 0.056208 -0.097295 0.045575 +v 0.056208 -0.097078 0.044922 +v 0.056208 -0.097078 0.044922 +v 0.054358 -0.095228 0.009246 +v 0.054358 -0.095228 0.009246 +v 0.054358 -0.095228 0.009246 +v 0.054358 -0.095228 0.009246 +v 0.046527 -0.095228 0.009246 +v 0.046527 -0.095228 0.009246 +v 0.046527 -0.095228 0.009246 +v 0.008784 -0.094685 0.007614 +v 0.008784 -0.094685 0.007614 +v 0.008784 -0.095228 0.009246 +v 0.008784 -0.095228 0.009246 +v 0.008784 -0.095228 0.009246 +v 0.008784 -0.095228 0.009246 +v 0.008784 -0.094902 0.008158 +v 0.008784 -0.094902 0.008158 +v -0.000027 -0.095228 0.009246 +v -0.000027 -0.095228 0.009246 +v -0.000027 -0.095228 0.009246 +v -0.000027 -0.094902 0.008158 +v 0.008675 -0.094250 0.007288 +v 0.008675 -0.094250 0.007288 +v 0.008675 -0.094250 0.007288 +v 0.008675 -0.094250 0.007288 +v 0.008675 -0.094250 0.007288 +v -0.046363 -0.089246 0.002284 +v -0.046363 -0.089246 0.002284 +v -0.046363 -0.089246 0.002284 +v -0.046363 -0.089246 0.002284 +v -0.004486 -0.087071 0.001305 +v -0.004486 -0.087071 0.001305 +v -0.004486 -0.087071 0.001305 +v -0.004486 -0.087071 0.001305 +v -0.004486 -0.087071 0.001305 +v -0.044949 -0.081850 0.001305 +v -0.044949 -0.081850 0.001305 +v -0.044949 
-0.081850 0.001305 +v -0.044949 -0.081850 0.001305 +v -0.047016 -0.081632 0.001305 +v -0.047016 -0.081632 0.001305 +v -0.047016 -0.081632 0.001305 +v -0.047016 -0.081632 0.001305 +v -0.049191 -0.080653 0.001305 +v -0.049191 -0.080653 0.001305 +v -0.049191 -0.080653 0.001305 +v -0.050605 -0.078804 0.001305 +v -0.050605 -0.078804 0.001305 +v -0.050605 -0.078804 0.001305 +v -0.051258 -0.075650 0.001305 +v -0.051258 -0.075650 0.001305 +v -0.051258 -0.075650 0.001305 +v -0.051258 -0.075650 0.001305 +v -0.049082 -0.049219 0.002284 +v -0.049082 -0.049219 0.002284 +v -0.049082 -0.049219 0.002284 +v -0.050714 0.034752 0.001414 +v -0.049082 0.072169 0.002284 +v -0.049082 0.072169 0.002284 +v -0.049082 0.072169 0.002284 +v -0.049082 0.072169 0.002284 +v -0.049082 0.073692 0.002284 +v -0.049082 0.073692 0.002284 +v -0.049517 0.076955 0.001523 +v -0.047886 0.078913 0.001414 +v -0.046472 0.077825 0.002067 +v -0.046472 0.077825 0.002067 +v -0.044949 0.077825 0.002284 +v -0.044949 0.077825 0.002284 +v -0.037444 0.077825 0.002284 +v -0.037444 0.077825 0.002284 +v -0.037444 0.077825 0.002284 +v -0.004486 0.078804 0.001523 +v -0.000027 0.079457 0.001414 +v 0.045004 0.080001 0.001305 +v 0.045004 0.080001 0.001305 +v 0.045004 0.080001 0.001305 +v 0.045004 0.080001 0.001305 +v 0.045004 0.080001 0.001305 +v 0.040653 0.085222 0.001305 +v 0.040653 0.085222 0.001305 +v 0.040653 0.085222 0.001305 +v 0.040653 0.085222 0.001305 +v 0.040653 0.085222 0.001305 +v -0.000027 0.087397 0.002284 +v -0.000027 0.087397 0.002284 +v -0.000027 0.087397 0.002284 +v 0.015963 0.086418 0.001523 +v 0.042829 0.087397 0.002284 +v 0.042829 0.087397 0.002284 +v 0.042829 0.087397 0.002284 +v 0.042829 0.087397 0.002284 +v 0.054250 0.087397 0.002284 +v 0.054250 0.087397 0.002284 +v 0.055120 0.087397 0.002284 +v 0.055120 0.087397 0.002284 +v 0.057078 0.086418 0.002175 +v 0.057078 0.086418 0.002175 +v 0.058165 0.085113 0.002175 +v 0.058165 0.085113 0.002175 +v 0.058709 0.083046 0.002284 +v 0.058709 0.083046 0.002284 +v 
0.058709 0.083046 0.002284 +v 0.058709 0.083046 0.002284 +v 0.063713 0.080980 0.007288 +v 0.063713 0.080980 0.007288 +v 0.063713 0.080980 0.007288 +v 0.063713 0.080980 0.007288 +v 0.063713 0.080980 0.007288 +v 0.063604 0.083916 0.007179 +v 0.063604 0.083916 0.007179 +v 0.064257 0.080980 0.008158 +v 0.064257 0.080980 0.008158 +v 0.064257 0.034752 0.008158 +v 0.063713 -0.054004 0.007288 +v 0.063713 -0.054004 0.007288 +v 0.063713 -0.054004 0.007288 +v 0.063713 -0.054004 0.007288 +v 0.064583 -0.085004 0.009246 +v 0.064583 -0.085004 0.009246 +v 0.064148 -0.087941 0.009137 +v 0.064148 -0.087941 0.009137 +v 0.065671 -0.090769 0.044922 +v 0.065671 -0.090769 0.044922 +v 0.065671 -0.090769 0.044922 +v 0.065888 -0.090878 0.045575 +v 0.065888 -0.090878 0.045575 +v 0.065562 -0.093053 0.045792 +v 0.065562 -0.093053 0.045792 +v 0.068390 -0.094467 0.089409 +v 0.068390 -0.094467 0.089409 +v 0.067411 -0.096425 0.089953 +v 0.067085 -0.097730 0.090279 +v 0.067085 -0.097730 0.090279 +v 0.066649 -0.097295 0.089953 +v 0.063821 -0.099035 0.089409 +v 0.063821 -0.099035 0.089409 +v 0.063821 -0.099035 0.089409 +v 0.063821 -0.099035 0.089409 +v 0.061755 -0.099797 0.089409 +v 0.061755 -0.099797 0.089409 +v 0.058383 -0.097839 0.045792 +v 0.058383 -0.097839 0.045792 +v 0.056316 -0.098056 0.045792 +v 0.056316 -0.098056 0.045792 +v 0.048376 -0.098056 0.045792 +v 0.048376 -0.098056 0.045792 +v 0.048376 -0.098056 0.045792 +v 0.048376 -0.098056 0.045792 +v 0.048376 -0.097295 0.045575 +v 0.048376 -0.097295 0.045575 +v 0.010742 -0.097295 0.045575 +v -0.000027 -0.097186 0.045248 +v 0.048376 -0.097078 0.044922 +v 0.048376 -0.097078 0.044922 +v 0.048376 -0.097078 0.044922 +v 0.048376 -0.097078 0.044922 +v 0.048376 -0.097078 0.044922 +v 0.048376 -0.097078 0.044922 +v 0.048376 -0.097078 0.044922 +v 0.048376 -0.097078 0.044922 +v -0.046472 -0.095228 0.009246 +v -0.046472 -0.095228 0.009246 +v -0.046472 -0.095228 0.009246 +v -0.046472 -0.095228 0.009246 +v -0.046363 -0.094250 0.007288 +v -0.046363 -0.094250 
0.007288 +v -0.046363 -0.094250 0.007288 +v -0.054303 -0.094250 0.007288 +v -0.054303 -0.094250 0.007288 +v -0.054303 -0.094250 0.007288 +v -0.054303 -0.094250 0.007288 +v -0.057022 -0.093923 0.007179 +v -0.057022 -0.093923 0.007179 +v -0.054303 -0.089246 0.002284 +v -0.054303 -0.089246 0.002284 +v -0.054303 -0.089246 0.002284 +v -0.054303 -0.089246 0.002284 +v -0.054956 -0.086962 0.001305 +v -0.054956 -0.086962 0.001305 +v -0.054956 -0.086962 0.001305 +v -0.054956 -0.086962 0.001305 +v -0.054956 -0.086962 0.001305 +v -0.056370 -0.085548 0.001305 +v -0.056370 -0.085548 0.001305 +v -0.056370 -0.085548 0.001305 +v -0.056370 -0.085548 0.001305 +v -0.056370 -0.085548 0.001305 +v -0.056370 -0.085548 0.001305 +v -0.057131 -0.084895 0.001414 +v -0.056479 -0.054004 0.001305 +v -0.056479 -0.054004 0.001305 +v -0.056479 -0.054004 0.001305 +v -0.056479 -0.054004 0.001305 +v -0.051258 -0.045303 0.001305 +v -0.051258 -0.045303 0.001305 +v -0.051258 -0.045303 0.001305 +v -0.051258 -0.045303 0.001305 +v -0.051258 0.073692 0.001305 +v -0.051258 0.073692 0.001305 +v -0.051258 0.073692 0.001305 +v -0.051258 0.073692 0.001305 +v -0.051040 0.075759 0.001305 +v -0.051040 0.075759 0.001305 +v -0.051040 0.075759 0.001305 +v -0.049952 0.077934 0.001305 +v -0.049952 0.077934 0.001305 +v -0.049952 0.077934 0.001305 +v -0.049191 0.078695 0.001305 +v -0.049191 0.078695 0.001305 +v -0.047016 0.079783 0.001305 +v -0.047016 0.079783 0.001305 +v -0.047016 0.079783 0.001305 +v -0.044949 0.080001 0.001305 +v -0.044949 0.080001 0.001305 +v -0.044949 0.080001 0.001305 +v -0.000027 0.080001 0.001305 +v -0.000027 0.080001 0.001305 +v -0.000027 0.080001 0.001305 +v -0.000027 0.080001 0.001305 +v -0.000027 0.080001 0.001305 +v -0.000027 0.080001 0.001305 +v -0.000027 0.085222 0.001305 +v -0.000027 0.085222 0.001305 +v -0.000027 0.085222 0.001305 +v -0.004486 0.086418 0.001523 +v -0.042882 0.087397 0.002284 +v -0.042882 0.087397 0.002284 +v -0.042882 0.087397 0.002284 +v -0.042882 0.087397 0.002284 +v 
-0.046472 0.093271 0.008593 +v -0.046363 0.092400 0.007288 +v -0.046363 0.092400 0.007288 +v -0.046363 0.092400 0.007288 +v -0.046363 0.092400 0.007288 +v -0.046363 0.092400 0.007288 +v -0.000027 0.093053 0.008158 +v -0.000027 0.093053 0.008158 +v -0.000027 0.093053 0.008158 +v -0.000027 0.092400 0.007288 +v -0.000027 0.092400 0.007288 +v -0.000027 0.092400 0.007288 +v -0.000027 0.092400 0.007288 +v -0.000027 0.092400 0.007288 +v -0.000027 0.092400 0.007288 +v 0.054250 0.092400 0.007288 +v 0.054250 0.092400 0.007288 +v 0.056969 0.092074 0.007179 +v 0.056969 0.092074 0.007179 +v 0.059471 0.090878 0.007179 +v 0.059471 0.090878 0.007179 +v 0.060885 0.089681 0.007070 +v 0.060885 0.089681 0.007070 +v 0.062951 0.086636 0.007179 +v 0.062951 0.086636 0.007179 +v 0.064365 0.085113 0.009245 +v 0.064365 0.085113 0.009245 +v 0.064583 0.083155 0.009245 +v 0.064583 0.083155 0.009245 +v 0.064583 0.083155 0.009245 +v 0.064583 0.080980 0.009245 +v 0.064583 0.080980 0.009245 +v 0.064583 0.080980 0.009245 +v 0.064583 0.080980 0.009245 +v 0.064583 -0.051612 0.009245 +v 0.064583 -0.051612 0.009245 +v 0.066432 -0.055092 0.044922 +v 0.066432 -0.055092 0.044922 +v 0.066432 -0.086853 0.044922 +v 0.066432 -0.086853 0.044922 +v 0.066432 -0.086853 0.044922 +v 0.066758 -0.086853 0.045575 +v 0.066541 -0.088920 0.045575 +v 0.066867 -0.090116 0.045792 +v 0.066867 -0.090116 0.045792 +v 0.069151 -0.092400 0.089409 +v 0.069151 -0.092400 0.089409 +v 0.069151 -0.092400 0.089409 +v 0.069042 -0.093597 0.089953 +v 0.068607 -0.095881 0.090279 +v 0.068607 -0.095881 0.090279 +v 0.069260 -0.094902 0.092672 +v 0.069260 -0.094902 0.092672 +v 0.067955 -0.096860 0.091476 +v 0.067955 -0.096860 0.091476 +v 0.067085 -0.097730 0.090932 +v 0.067085 -0.097730 0.090932 +v 0.066976 -0.097948 0.091367 +v 0.068172 -0.098056 0.092999 +v 0.067955 -0.097621 0.092672 +v 0.066106 -0.099253 0.091693 +v 0.065344 -0.099906 0.091476 +v 0.066106 -0.098927 0.091367 +v 0.066106 -0.098709 0.090823 +v 0.065235 -0.099144 0.090279 +v 
0.065235 -0.099144 0.090279 +v 0.065235 -0.099144 0.090279 +v 0.063930 -0.099253 0.089953 +v 0.063930 -0.099253 0.089953 +v 0.061864 -0.100123 0.089953 +v 0.058601 -0.100341 0.089409 +v 0.058601 -0.100341 0.089409 +v 0.050660 -0.100341 0.089409 +v 0.050660 -0.100341 0.089409 +v 0.050660 -0.100341 0.089409 +v 0.050660 -0.100341 0.089409 +v 0.013134 -0.100341 0.089409 +v 0.013134 -0.100341 0.089409 +v 0.013134 -0.100341 0.089409 +v 0.013134 -0.100341 0.089409 +v 0.010850 -0.098056 0.045792 +v 0.010850 -0.098056 0.045792 +v 0.010850 -0.098056 0.045792 +v 0.010850 -0.098056 0.045792 +v 0.010850 -0.098056 0.045792 +v 0.010850 -0.098056 0.045792 +v -0.000027 -0.097295 0.045575 +v -0.044949 -0.097295 0.045575 +v -0.048321 -0.097078 0.044922 +v -0.048321 -0.097078 0.044922 +v -0.048321 -0.097078 0.044922 +v -0.054412 -0.095228 0.009246 +v -0.054412 -0.095228 0.009246 +v -0.054412 -0.095228 0.009246 +v -0.058328 -0.094467 0.009137 +v -0.058328 -0.094467 0.009137 +v -0.060068 -0.093488 0.009137 +v -0.060068 -0.093488 0.009137 +v -0.059524 -0.092727 0.007179 +v -0.059524 -0.092727 0.007179 +v -0.056370 -0.088811 0.002175 +v -0.056370 -0.088811 0.002175 +v -0.056696 -0.087288 0.001523 +v -0.058219 -0.084895 0.001849 +v -0.058219 -0.049762 0.001849 +v -0.057131 -0.005710 0.001414 +v -0.056479 0.034752 0.001305 +v -0.056479 0.034752 0.001305 +v -0.056479 0.034752 0.001305 +v -0.056479 0.034752 0.001305 +v -0.056370 0.083699 0.001305 +v -0.056370 0.083699 0.001305 +v -0.056370 0.083699 0.001305 +v -0.056370 0.083699 0.001305 +v -0.056370 0.083699 0.001305 +v -0.057131 0.084895 0.001523 +v -0.055717 0.084678 0.001305 +v -0.055717 0.084678 0.001305 +v -0.054956 0.085113 0.001305 +v -0.054956 0.085113 0.001305 +v -0.054956 0.085113 0.001305 +v -0.054956 0.085113 0.001305 +v -0.054956 0.085113 0.001305 +v -0.054303 0.087397 0.002284 +v -0.054303 0.087397 0.002284 +v -0.054303 0.092400 0.007288 +v -0.054303 0.092400 0.007288 +v -0.054412 0.093379 0.009245 +v -0.054412 0.093379 
0.009245 +v -0.054412 0.093379 0.009245 +v -0.046472 0.093379 0.009245 +v -0.046472 0.093379 0.009245 +v -0.046472 0.093379 0.009245 +v -0.046472 0.093379 0.009245 +v -0.000027 0.093379 0.009245 +v -0.000027 0.093379 0.009245 +v -0.000027 0.093379 0.009245 +v -0.000027 0.093379 0.009245 +v -0.000027 0.093379 0.009245 +v 0.046527 0.093379 0.009245 +v 0.046527 0.093379 0.009245 +v 0.046527 0.093379 0.009245 +v 0.054358 0.093379 0.009245 +v 0.054358 0.093379 0.009245 +v 0.054358 0.093379 0.009245 +v 0.058274 0.092509 0.009137 +v 0.058274 0.092509 0.009137 +v 0.060015 0.091639 0.009137 +v 0.060015 0.091639 0.009137 +v 0.061537 0.090334 0.009137 +v 0.061537 0.090334 0.009137 +v 0.062843 0.088811 0.009137 +v 0.062843 0.088811 0.009137 +v 0.063821 0.087071 0.009137 +v 0.063821 0.087071 0.009137 +v 0.065671 0.088920 0.044922 +v 0.065671 0.088920 0.044922 +v 0.066432 0.085983 0.044922 +v 0.066432 0.085983 0.044922 +v 0.066432 0.082829 0.044922 +v 0.066432 0.082829 0.044922 +v 0.066432 0.082829 0.044922 +v 0.066432 0.007016 0.044922 +v 0.066432 0.007016 0.044922 +v 0.066432 0.007016 0.044922 +v 0.066432 -0.022679 0.044922 +v 0.066432 -0.022679 0.044922 +v 0.066758 -0.055092 0.045575 +v 0.066758 -0.055092 0.045575 +v 0.066758 -0.055092 0.045575 +v 0.067411 -0.055092 0.045792 +v 0.067411 -0.055092 0.045792 +v 0.067411 -0.055092 0.045792 +v 0.067411 -0.055092 0.045792 +v 0.067411 -0.055092 0.045792 +v 0.067411 -0.086962 0.045792 +v 0.067411 -0.086962 0.045792 +v 0.069695 -0.089246 0.089409 +v 0.069695 -0.089246 0.089409 +v 0.069695 -0.089246 0.089627 +v 0.069695 -0.091421 0.089953 +v 0.069695 -0.091421 0.089953 +v 0.070130 -0.092727 0.090279 +v 0.070130 -0.092727 0.090279 +v 0.070239 -0.092727 0.093978 +v 0.070239 -0.092727 0.093978 +v 0.072523 -0.056941 0.114535 +v 0.071000 -0.089355 0.096588 +v 0.071653 -0.089899 0.097567 +v 0.070783 -0.093706 0.095392 +v 0.070783 -0.090443 0.095392 +v 0.070783 -0.090443 0.095392 +v 0.069695 -0.095990 0.094195 +v 0.069586 -0.099362 0.093325 
+v 0.069586 -0.099362 0.093325 +v 0.069586 -0.099362 0.093325 +v 0.069586 -0.099362 0.093325 +v 0.069586 -0.099362 0.093325 +v 0.065997 -0.101211 0.092129 +v 0.064692 -0.101428 0.091693 +v 0.065235 -0.099579 0.091041 +v 0.063169 -0.100341 0.090279 +v 0.063169 -0.100341 0.090279 +v 0.063169 -0.100341 0.090279 +v 0.063169 -0.100341 0.090279 +v 0.062081 -0.100885 0.090606 +v 0.059797 -0.101211 0.090279 +v 0.059797 -0.101211 0.090279 +v 0.058601 -0.101211 0.090279 +v 0.058601 -0.101211 0.090279 +v 0.058601 -0.100558 0.089953 +v 0.050660 -0.100558 0.089953 +v 0.013134 -0.100341 0.089736 +v -0.000027 -0.100341 0.089409 +v -0.000027 -0.100341 0.089409 +v -0.000027 -0.100341 0.089409 +v -0.000027 -0.100341 0.089409 +v -0.011883 -0.098056 0.045792 +v -0.011883 -0.098056 0.045792 +v -0.011883 -0.098056 0.045792 +v -0.011883 -0.098056 0.045792 +v -0.048430 -0.098056 0.045792 +v -0.048430 -0.098056 0.045792 +v -0.048430 -0.098056 0.045792 +v -0.056261 -0.098056 0.045792 +v -0.056261 -0.098056 0.045792 +v -0.056261 -0.097295 0.045575 +v -0.056261 -0.097078 0.044922 +v -0.056261 -0.097078 0.044922 +v -0.058219 -0.096860 0.044922 +v -0.058219 -0.096860 0.044922 +v -0.060177 -0.096316 0.044922 +v -0.060177 -0.096316 0.044922 +v -0.063440 -0.094032 0.044922 +v -0.063440 -0.094032 0.044922 +v -0.061591 -0.092183 0.009137 +v -0.061591 -0.092183 0.009137 +v -0.060938 -0.091530 0.007070 +v -0.060938 -0.091530 0.007070 +v -0.058219 -0.086962 0.002175 +v -0.058219 -0.086962 0.002175 +v -0.058654 -0.084895 0.002284 +v -0.058654 -0.084895 0.002284 +v -0.058654 -0.054004 0.002284 +v -0.058654 -0.054004 0.002284 +v -0.058654 -0.054004 0.002284 +v -0.058654 -0.005710 0.002284 +v -0.058654 -0.005710 0.002284 +v -0.058654 -0.005710 0.002284 +v -0.058654 0.083481 0.002284 +v -0.058654 0.083481 0.002284 +v -0.058654 0.083481 0.002284 +v -0.058219 0.085113 0.002175 +v -0.058219 0.085113 0.002175 +v -0.057675 0.085874 0.002175 +v -0.057675 0.085874 0.002175 +v -0.056370 0.086962 0.002175 +v 
-0.056370 0.086962 0.002175 +v -0.057893 0.091748 0.007179 +v -0.057893 0.091748 0.007179 +v -0.059198 0.092074 0.009137 +v -0.059198 0.092074 0.009137 +v -0.057349 0.092836 0.009137 +v -0.057349 0.092836 0.009137 +v -0.057349 0.092836 0.009137 +v -0.058219 0.095011 0.044922 +v -0.058219 0.095011 0.044922 +v -0.056261 0.095228 0.044922 +v -0.056261 0.095228 0.044922 +v -0.048321 0.095228 0.044922 +v -0.048321 0.095228 0.044922 +v -0.048321 0.095228 0.044922 +v -0.005683 0.095228 0.044922 +v -0.005683 0.095228 0.044922 +v -0.005683 0.095228 0.044922 +v -0.005683 0.095228 0.044922 +v -0.005683 0.095228 0.044922 +v 0.048376 0.095228 0.044922 +v 0.048376 0.095228 0.044922 +v 0.048376 0.095228 0.044922 +v 0.048376 0.095228 0.044922 +v 0.056208 0.095228 0.044922 +v 0.056208 0.095228 0.044922 +v 0.056208 0.095228 0.044922 +v 0.056208 0.095228 0.044922 +v 0.059253 0.094793 0.044922 +v 0.059253 0.094793 0.044922 +v 0.061972 0.093488 0.044922 +v 0.061972 0.093488 0.044922 +v 0.063495 0.092183 0.044922 +v 0.063495 0.092183 0.044922 +v 0.064692 0.090660 0.044922 +v 0.064692 0.090660 0.044922 +v 0.065888 0.089029 0.045575 +v 0.066649 0.085983 0.045575 +v 0.066758 0.082829 0.045575 +v 0.066758 0.068253 0.045575 +v 0.066758 0.016044 0.045575 +v 0.067411 -0.023766 0.045792 +v 0.067411 -0.023766 0.045792 +v 0.067411 -0.023766 0.045792 +v 0.067411 -0.023766 0.045792 +v 0.067411 -0.023766 0.045792 +v 0.068934 -0.021700 0.074834 +v 0.068934 -0.021700 0.074834 +v 0.068934 -0.021700 0.074834 +v 0.068934 -0.021700 0.074834 +v 0.069695 0.002230 0.089409 +v 0.069695 0.002230 0.089409 +v 0.069695 0.002230 0.089409 +v 0.069695 0.002230 0.089409 +v 0.069695 0.002230 0.089409 +v 0.069695 -0.056289 0.089409 +v 0.069695 -0.056289 0.089409 +v 0.069695 -0.056289 0.089409 +v 0.069695 -0.056289 0.089409 +v 0.069695 -0.056289 0.089409 +v 0.069695 -0.056289 0.089627 +v 0.069695 -0.056289 0.089627 +v 0.069913 -0.089246 0.089953 +v 0.070565 -0.089137 0.090279 +v 0.070565 -0.089137 0.090279 +v 0.070565 
-0.089137 0.090279 +v 0.070892 -0.089137 0.096153 +v 0.070892 -0.089137 0.096153 +v 0.071870 -0.056397 0.113556 +v 0.073393 -0.024419 0.131286 +v 0.073393 -0.024419 0.131286 +v 0.073720 -0.052808 0.117363 +v 0.072741 -0.090116 0.098002 +v 0.072741 -0.093162 0.096479 +v 0.072741 -0.093162 0.096479 +v 0.072741 -0.093162 0.096479 +v 0.071218 -0.096969 0.094521 +v 0.071218 -0.096969 0.094521 +v 0.071218 -0.096969 0.094521 +v 0.071218 -0.096969 0.094521 +v 0.077853 -0.101211 0.092346 +v 0.077853 -0.101211 0.092346 +v 0.077853 -0.101211 0.092346 +v 0.077853 -0.101211 0.092346 +v 0.075569 -0.104474 0.090606 +v 0.075569 -0.104474 0.090606 +v 0.075569 -0.104474 0.090606 +v 0.080028 -0.098927 0.093107 +v 0.078941 -0.101537 0.091802 +v 0.078397 -0.102734 0.090932 +v 0.077418 -0.103930 0.090606 +v 0.077091 -0.105018 0.089192 +v 0.077091 -0.105018 0.089192 +v 0.075569 -0.105888 0.089627 +v 0.072741 -0.106976 0.089409 +v 0.072741 -0.106976 0.089409 +v 0.072741 -0.106976 0.089409 +v 0.072741 -0.106976 0.089409 +v 0.067520 -0.101320 0.092346 +v 0.067520 -0.101320 0.092346 +v 0.067520 -0.101320 0.092346 +v 0.067520 -0.101320 0.092346 +v 0.064039 -0.103169 0.091367 +v 0.064039 -0.103169 0.091367 +v 0.064039 -0.103169 0.091367 +v 0.062407 -0.102407 0.091367 +v 0.062407 -0.102407 0.091367 +v 0.062190 -0.101211 0.090823 +v 0.062190 -0.101211 0.090823 +v 0.058601 -0.101537 0.090497 +v 0.050660 -0.101537 0.090497 +v 0.050660 -0.101211 0.090279 +v 0.050660 -0.101211 0.090279 +v 0.013243 -0.100885 0.090171 +v 0.013243 -0.101211 0.090279 +v 0.013243 -0.101211 0.090279 +v 0.013243 -0.101211 0.090279 +v -0.000027 -0.101211 0.090279 +v -0.000027 -0.101211 0.090279 +v -0.000027 -0.101211 0.090279 +v -0.000027 -0.101211 0.090279 +v -0.000027 -0.100558 0.089953 +v -0.018626 -0.100558 0.089953 +v -0.050714 -0.100558 0.089953 +v -0.050714 -0.100341 0.089409 +v -0.050714 -0.100341 0.089409 +v -0.050714 -0.100341 0.089409 +v -0.050714 -0.100341 0.089409 +v -0.057022 -0.098818 0.060368 +v -0.058545 
-0.100341 0.089409 +v -0.058545 -0.100341 0.089409 +v -0.058545 -0.100341 0.089409 +v -0.058545 -0.100341 0.089409 +v -0.058436 -0.097839 0.045792 +v -0.058436 -0.097839 0.045792 +v -0.058436 -0.097839 0.045792 +v -0.058328 -0.097186 0.045575 +v -0.061156 -0.096099 0.045575 +v -0.063657 -0.094250 0.045575 +v -0.065289 -0.091639 0.044922 +v -0.065289 -0.091639 0.044922 +v -0.063331 -0.089790 0.009137 +v -0.063331 -0.089790 0.009137 +v -0.063331 -0.089790 0.009137 +v -0.063005 -0.088485 0.007179 +v -0.063005 -0.088485 0.007179 +v -0.063657 -0.084895 0.007288 +v -0.063657 -0.084895 0.007288 +v -0.063657 -0.054004 0.007288 +v -0.063657 -0.054004 0.007288 +v -0.063657 -0.054004 0.007288 +v -0.063657 -0.054004 0.007288 +v -0.064528 0.015173 0.008593 +v -0.063657 0.044541 0.007288 +v -0.063657 0.044541 0.007288 +v -0.063657 0.083046 0.007288 +v -0.063657 0.083046 0.007288 +v -0.063657 0.083046 0.007288 +v -0.063549 0.084895 0.007179 +v -0.063549 0.084895 0.007179 +v -0.062135 0.088267 0.007179 +v -0.062135 0.088267 0.007179 +v -0.060938 0.089681 0.007070 +v -0.060938 0.089681 0.007070 +v -0.061591 0.090334 0.009137 +v -0.061591 0.090334 0.009137 +v -0.063440 0.092183 0.044922 +v -0.063440 0.092183 0.044922 +v -0.061047 0.094032 0.044922 +v -0.061047 0.094032 0.044922 +v -0.061047 0.094032 0.044922 +v -0.061047 0.094032 0.044922 +v -0.059307 0.095011 0.045575 +v -0.056261 0.095446 0.045575 +v -0.048321 0.095446 0.045575 +v 0.018355 0.095446 0.045575 +v 0.018355 0.095446 0.045575 +v 0.048376 0.095446 0.045575 +v 0.056208 0.095446 0.045575 +v 0.056208 0.095446 0.045575 +v 0.058274 0.095228 0.045575 +v 0.059362 0.095337 0.045684 +v 0.061211 0.094250 0.045575 +v 0.063713 0.092400 0.045575 +v 0.066106 0.090225 0.045792 +v 0.066106 0.090225 0.045792 +v 0.066106 0.090225 0.045792 +v 0.067193 0.087179 0.045792 +v 0.067193 0.087179 0.045792 +v 0.067411 0.085004 0.045792 +v 0.067411 0.085004 0.045792 +v 0.067411 0.082937 0.045792 +v 0.067411 0.082937 0.045792 +v 0.067411 0.082937 
0.045792 +v 0.067411 0.082937 0.045792 +v 0.067411 0.050850 0.045792 +v 0.067411 0.050850 0.045792 +v 0.067411 0.050850 0.045792 +v 0.067411 0.050850 0.045792 +v 0.067411 0.050850 0.045792 +v 0.069695 0.037254 0.089409 +v 0.069695 0.037254 0.089409 +v 0.069695 0.037254 0.089409 +v 0.069913 0.017023 0.089953 +v 0.069913 -0.056289 0.089953 +v 0.069913 -0.056289 0.089953 +v 0.070565 -0.051829 0.090279 +v 0.070565 -0.051829 0.090279 +v 0.070565 -0.051829 0.090279 +v 0.070565 -0.051829 0.090279 +v 0.070565 -0.051829 0.090279 +v 0.071762 -0.056289 0.113230 +v 0.071762 -0.056289 0.113230 +v 0.071762 -0.056289 0.113230 +v 0.071762 -0.056289 0.113230 +v 0.072741 -0.023875 0.130307 +v 0.072741 -0.023875 0.130307 +v 0.072741 -0.020721 0.131068 +v 0.073393 -0.020938 0.132482 +v 0.074481 -0.024745 0.131830 +v 0.074481 -0.024745 0.131830 +v 0.074155 -0.052808 0.117363 +v 0.074155 -0.052808 0.117363 +v 0.074155 -0.052808 0.117363 +v 0.073176 -0.090225 0.098002 +v 0.073176 -0.090225 0.098002 +v 0.073176 -0.090225 0.098002 +v 0.073176 -0.090225 0.098002 +v 0.079376 -0.096099 0.094957 +v 0.079376 -0.096099 0.094957 +v 0.079376 -0.096099 0.094957 +v 0.079376 -0.096099 0.094957 +v 0.079376 -0.096099 0.094957 +v 0.079919 -0.093488 0.096262 +v 0.081007 -0.093271 0.095935 +v 0.080681 -0.096207 0.094413 +v 0.080246 -0.098927 0.092781 +v 0.079158 -0.101537 0.091585 +v 0.078614 -0.102734 0.090714 +v 0.077744 -0.103930 0.090171 +v 0.078070 -0.103930 0.089736 +v 0.078070 -0.103930 0.089736 +v 0.077962 -0.106105 0.082883 +v 0.077962 -0.106105 0.082883 +v 0.076983 -0.106867 0.081795 +v 0.076983 -0.106867 0.081795 +v 0.076004 -0.108063 0.081687 +v 0.075786 -0.108172 0.081904 +v 0.075786 -0.108172 0.081904 +v 0.073720 -0.107846 0.087886 +v 0.073720 -0.107846 0.087886 +v 0.072088 -0.108063 0.088648 +v 0.069369 -0.108498 0.088648 +v 0.069369 -0.108498 0.088648 +v 0.069369 -0.108498 0.088648 +v 0.069369 -0.108498 0.088648 +v 0.061320 -0.103821 0.091041 +v 0.061320 -0.103821 0.091041 +v 0.061320 
-0.103821 0.091041 +v 0.061320 -0.103821 0.091041 +v 0.061320 -0.103821 0.091041 +v 0.058601 -0.102190 0.090932 +v 0.042720 -0.102190 0.090932 +v 0.020748 -0.101537 0.090497 +v -0.000027 -0.101428 0.090497 +v -0.000027 -0.101428 0.090497 +v -0.000027 -0.101428 0.090497 +v -0.050714 -0.101211 0.090279 +v -0.050714 -0.101211 0.090279 +v -0.050714 -0.101211 0.090279 +v -0.058545 -0.101211 0.090279 +v -0.058545 -0.101211 0.090279 +v -0.058545 -0.101211 0.090279 +v -0.058545 -0.101211 0.090279 +v -0.058545 -0.100558 0.089953 +v -0.058545 -0.100558 0.089953 +v -0.060721 -0.100123 0.089409 +v -0.060721 -0.100123 0.089409 +v -0.062787 -0.099470 0.089409 +v -0.062787 -0.099470 0.089409 +v -0.060503 -0.097186 0.045792 +v -0.060503 -0.097186 0.045792 +v -0.063331 -0.095446 0.045792 +v -0.063331 -0.095446 0.045792 +v -0.064092 -0.094793 0.045792 +v -0.064092 -0.094793 0.045792 +v -0.065506 -0.093053 0.045792 +v -0.065506 -0.093053 0.045792 +v -0.065506 -0.091857 0.045575 +v -0.066050 -0.089790 0.044922 +v -0.066050 -0.089790 0.044922 +v -0.066050 -0.089790 0.044922 +v -0.066050 -0.089790 0.044922 +v -0.064419 -0.086962 0.009246 +v -0.064419 -0.086962 0.009246 +v -0.064636 -0.085004 0.009246 +v -0.064636 -0.085004 0.009246 +v -0.064636 -0.054113 0.009245 +v -0.064636 -0.054113 0.009245 +v -0.064636 -0.054113 0.009245 +v -0.064636 -0.054113 0.009245 +v -0.064636 0.080980 0.009245 +v -0.064636 0.080980 0.009245 +v -0.064636 0.080980 0.009245 +v -0.064636 0.080980 0.009245 +v -0.064636 0.080980 0.009245 +v -0.064528 0.084134 0.009245 +v -0.064528 0.084134 0.009245 +v -0.064092 0.086092 0.009137 +v -0.064092 0.086092 0.009137 +v -0.062896 0.088811 0.009137 +v -0.062896 0.088811 0.009137 +v -0.064745 0.090660 0.044922 +v -0.064745 0.090660 0.044922 +v -0.063657 0.092400 0.045575 +v -0.063657 0.092400 0.045575 +v -0.061156 0.094250 0.045575 +v -0.061156 0.094250 0.045575 +v -0.061156 0.094250 0.045575 +v -0.061482 0.094793 0.045792 +v -0.061482 0.094793 0.045792 +v -0.061482 0.094793 
0.045792 +v -0.058436 0.095881 0.045792 +v -0.058436 0.095881 0.045792 +v -0.056261 0.096099 0.045792 +v -0.056261 0.096099 0.045792 +v -0.056261 0.096099 0.045792 +v -0.048430 0.096099 0.045792 +v -0.048430 0.096099 0.045792 +v -0.048430 0.096099 0.045792 +v -0.048430 0.096099 0.045792 +v -0.050714 0.098383 0.089409 +v -0.050714 0.098383 0.089409 +v -0.050714 0.098383 0.089409 +v -0.006988 0.098383 0.089409 +v -0.006988 0.098383 0.089409 +v -0.006988 0.098383 0.089409 +v -0.006988 0.098383 0.089409 +v 0.048376 0.096099 0.045792 +v 0.048376 0.096099 0.045792 +v 0.048376 0.096099 0.045792 +v 0.048376 0.096099 0.045792 +v 0.048376 0.096099 0.045792 +v 0.056316 0.096099 0.045792 +v 0.056316 0.096099 0.045792 +v 0.056316 0.096099 0.045792 +v 0.058383 0.095881 0.045792 +v 0.058383 0.095881 0.045792 +v 0.058383 0.095881 0.045792 +v 0.060558 0.095337 0.045792 +v 0.060558 0.095337 0.045792 +v 0.062516 0.094250 0.045792 +v 0.062516 0.094250 0.045792 +v 0.064148 0.092836 0.045792 +v 0.064148 0.092836 0.045792 +v 0.066432 0.095120 0.089409 +v 0.066432 0.095120 0.089409 +v 0.067846 0.093488 0.089409 +v 0.067846 0.093488 0.089409 +v 0.067846 0.093488 0.089409 +v 0.067846 0.093488 0.089409 +v 0.068825 0.091530 0.089409 +v 0.068825 0.091530 0.089409 +v 0.069586 0.088376 0.089409 +v 0.069586 0.088376 0.089409 +v 0.069695 0.085113 0.089409 +v 0.069695 0.085113 0.089409 +v 0.069695 0.085113 0.089409 +v 0.069695 0.052373 0.089409 +v 0.069695 0.052373 0.089409 +v 0.069695 0.052373 0.089409 +v 0.069695 0.052373 0.089409 +v 0.069913 0.037254 0.089953 +v 0.070565 -0.002012 0.090279 +v 0.070565 -0.002012 0.090279 +v 0.070565 -0.002012 0.090279 +v 0.070565 -0.002012 0.090279 +v 0.070565 -0.002012 0.090279 +v 0.072741 -0.023766 0.129981 +v 0.072741 -0.023766 0.129981 +v 0.072741 -0.023766 0.129981 +v 0.072741 -0.023766 0.129981 +v 0.072741 -0.023766 0.129981 +v 0.072741 -0.023766 0.129981 +v 0.072741 -0.018654 0.131286 +v 0.072741 -0.018654 0.131286 +v 0.073502 -0.018654 0.132700 +v 
0.075025 -0.018654 0.133352 +v 0.075025 -0.018654 0.133352 +v 0.074916 -0.022244 0.132809 +v 0.074916 -0.022244 0.132809 +v 0.074916 -0.024745 0.131830 +v 0.074916 -0.024745 0.131830 +v 0.074916 -0.024745 0.131830 +v 0.074916 -0.024745 0.131830 +v 0.079593 -0.024745 0.131830 +v 0.079593 -0.024745 0.131830 +v 0.079593 -0.054331 0.116602 +v 0.079593 -0.054331 0.116602 +v 0.079593 -0.054331 0.116602 +v 0.079593 -0.054331 0.116602 +v 0.080028 -0.054331 0.116493 +v 0.081007 -0.058682 0.113665 +v 0.081660 -0.092727 0.095065 +v 0.081769 -0.092618 0.094739 +v 0.081769 -0.092618 0.094739 +v 0.081769 -0.094141 0.094086 +v 0.081769 -0.094141 0.094086 +v 0.080898 -0.098492 0.092129 +v 0.080898 -0.098492 0.092129 +v 0.078941 -0.102625 0.090279 +v 0.078941 -0.102625 0.090279 +v 0.079811 -0.103713 0.084188 +v 0.079811 -0.103713 0.084188 +v 0.078941 -0.104691 0.082883 +v 0.078941 -0.104691 0.082883 +v 0.079376 -0.103169 0.083427 +v 0.077744 -0.105453 0.082122 +v 0.075786 -0.107411 0.081143 +v 0.075786 -0.107411 0.081143 +v 0.074807 -0.108716 0.081143 +v 0.073611 -0.109477 0.080816 +v 0.073393 -0.109695 0.081143 +v 0.073393 -0.109695 0.081143 +v 0.071218 -0.109151 0.087343 +v 0.071218 -0.109151 0.087343 +v 0.068607 -0.109912 0.086908 +v 0.068607 -0.109912 0.086908 +v 0.069695 -0.109042 0.088213 +v 0.065453 -0.109586 0.087886 +v 0.065453 -0.109586 0.087886 +v 0.066867 -0.108934 0.088322 +v 0.066867 -0.108934 0.088322 +v 0.066867 -0.108934 0.088322 +v 0.066867 -0.108934 0.088322 +v 0.066867 -0.108934 0.088322 +v 0.066867 -0.108934 0.088322 +v 0.058601 -0.104039 0.090932 +v 0.058601 -0.104039 0.090932 +v 0.058601 -0.104039 0.090932 +v 0.050660 -0.103386 0.091150 +v 0.050660 -0.103386 0.091150 +v 0.013243 -0.103713 0.091041 +v -0.000027 -0.102190 0.090932 +v -0.050714 -0.101537 0.090497 +v -0.058545 -0.101537 0.090497 +v -0.058545 -0.101537 0.090497 +v -0.059742 -0.101211 0.090279 +v -0.059742 -0.101211 0.090279 +v -0.061917 -0.100123 0.089953 +v -0.063984 -0.099253 0.089953 +v 
-0.064745 -0.098383 0.089409 +v -0.064745 -0.098383 0.089409 +v -0.066377 -0.097078 0.089409 +v -0.066377 -0.097078 0.089409 +v -0.068335 -0.094467 0.089409 +v -0.068335 -0.094467 0.089409 +v -0.066050 -0.092074 0.045792 +v -0.066050 -0.092074 0.045792 +v -0.066268 -0.089899 0.045575 +v -0.066268 -0.089899 0.045575 +v -0.066703 -0.086853 0.045575 +v -0.066485 -0.086853 0.044922 +v -0.066485 -0.086853 0.044922 +v -0.066485 -0.050633 0.044922 +v -0.066485 -0.050633 0.044922 +v -0.066485 -0.050633 0.044922 +v -0.066485 -0.050633 0.044922 +v -0.066485 -0.019633 0.044922 +v -0.066485 -0.019633 0.044922 +v -0.066485 -0.019633 0.044922 +v -0.066485 -0.019633 0.044922 +v -0.066485 0.059552 0.044922 +v -0.066485 0.059552 0.044922 +v -0.066485 0.059552 0.044922 +v -0.066485 0.082829 0.044922 +v -0.066485 0.082829 0.044922 +v -0.066485 0.082829 0.044922 +v -0.066377 0.085983 0.044922 +v -0.066377 0.085983 0.044922 +v -0.065724 0.088920 0.044922 +v -0.065724 0.088920 0.044922 +v -0.065506 0.089899 0.045575 +v -0.065506 0.089899 0.045575 +v -0.064092 0.092836 0.045792 +v -0.064092 0.092836 0.045792 +v -0.066377 0.095120 0.089409 +v -0.066377 0.095120 0.089409 +v -0.064745 0.096534 0.089409 +v -0.064745 0.096534 0.089409 +v -0.062787 0.097621 0.089409 +v -0.062787 0.097621 0.089409 +v -0.060721 0.098165 0.089409 +v -0.060721 0.098165 0.089409 +v -0.058545 0.098383 0.089409 +v -0.058545 0.098383 0.089409 +v -0.058545 0.098383 0.089409 +v -0.058545 0.098383 0.089409 +v -0.050714 0.098709 0.089953 +v -0.006988 0.098709 0.089953 +v 0.048376 0.098709 0.089953 +v 0.050660 0.098383 0.089409 +v 0.050660 0.098383 0.089409 +v 0.050660 0.098383 0.089409 +v 0.058601 0.098383 0.089409 +v 0.058601 0.098383 0.089409 +v 0.061755 0.097948 0.089409 +v 0.061755 0.097948 0.089409 +v 0.061755 0.097948 0.089409 +v 0.061755 0.097948 0.089409 +v 0.064800 0.096534 0.089409 +v 0.064800 0.096534 0.089409 +v 0.066649 0.095337 0.089953 +v 0.066649 0.095337 0.089953 +v 0.068064 0.093597 0.089953 +v 0.069042 
0.091639 0.089953 +v 0.069913 0.088485 0.089953 +v 0.069913 0.085113 0.089953 +v 0.070565 0.087397 0.090279 +v 0.070565 0.087397 0.090279 +v 0.070565 0.085222 0.090279 +v 0.070565 0.085222 0.090279 +v 0.070565 0.085222 0.090279 +v 0.072741 0.087288 0.131286 +v 0.072741 0.087288 0.131286 +v 0.072741 0.087288 0.131286 +v 0.072741 0.087288 0.131286 +v 0.072741 0.038559 0.131286 +v 0.072741 0.038559 0.131286 +v 0.072741 0.005710 0.131286 +v 0.072741 0.005710 0.131286 +v 0.072741 0.005710 0.131286 +v 0.072741 0.005710 0.131286 +v 0.073502 0.008539 0.132700 +v 0.075025 -0.013216 0.133352 +v 0.075025 -0.013216 0.133352 +v 0.075025 -0.013216 0.133352 +v 0.079593 -0.018654 0.133352 +v 0.079593 -0.018654 0.133352 +v 0.079593 -0.022352 0.132809 +v 0.079593 -0.022352 0.132809 +v 0.079593 -0.022352 0.132809 +v 0.079593 -0.022352 0.132809 +v 0.080028 -0.024745 0.131830 +v 0.081116 -0.024528 0.131395 +v 0.081660 -0.024093 0.130524 +v 0.081769 -0.023875 0.130198 +v 0.081769 -0.023875 0.130198 +v 0.081769 -0.058138 0.112577 +v 0.081769 -0.058138 0.112577 +v 0.082530 -0.057594 0.107574 +v 0.082530 -0.057594 0.107574 +v 0.082530 -0.093488 0.088974 +v 0.082530 -0.093488 0.088974 +v 0.082530 -0.093597 0.089192 +v 0.082530 -0.093597 0.089192 +v 0.082095 -0.098056 0.086908 +v 0.082095 -0.098056 0.086908 +v 0.081116 -0.100993 0.085494 +v 0.081116 -0.100993 0.085494 +v 0.081116 -0.100667 0.084841 +v 0.081116 -0.100667 0.084841 +v 0.079702 -0.101755 0.084188 +v 0.078832 -0.102951 0.083644 +v 0.078288 -0.104148 0.082883 +v 0.076439 -0.106214 0.081795 +v 0.074372 -0.107846 0.080925 +v 0.074372 -0.107846 0.080925 +v 0.073611 -0.109260 0.080599 +v 0.072306 -0.109912 0.080273 +v 0.069586 -0.111000 0.080490 +v 0.069586 -0.111000 0.080490 +v 0.065453 -0.110239 0.086690 +v 0.065453 -0.110239 0.086690 +v 0.065453 -0.110239 0.086690 +v 0.065453 -0.110239 0.086690 +v 0.065453 -0.110239 0.086690 +v 0.056751 -0.109586 0.087886 +v 0.013134 -0.109042 0.088322 +v 0.013134 -0.109042 0.088322 +v 0.013134 
-0.109042 0.088322 +v -0.000027 -0.109042 0.088322 +v -0.000027 -0.109042 0.088322 +v -0.000027 -0.109042 0.088322 +v -0.000027 -0.109042 0.088322 +v -0.000027 -0.109042 0.088322 +v 0.013243 -0.104039 0.090932 +v 0.013243 -0.104039 0.090932 +v 0.013243 -0.104039 0.090932 +v 0.013243 -0.104039 0.090932 +v 0.013243 -0.104039 0.090932 +v 0.013243 -0.104039 0.090932 +v 0.013243 -0.104039 0.090932 +v -0.042774 -0.102951 0.091150 +v -0.058545 -0.102951 0.091150 +v -0.058545 -0.102951 0.091150 +v -0.059850 -0.102081 0.090932 +v -0.059850 -0.102081 0.090932 +v -0.059850 -0.102081 0.090932 +v -0.062135 -0.100885 0.090606 +v -0.062135 -0.100885 0.090606 +v -0.063222 -0.100341 0.090279 +v -0.063222 -0.100341 0.090279 +v -0.063222 -0.100341 0.090279 +v -0.065289 -0.099144 0.090279 +v -0.065289 -0.099144 0.090279 +v -0.065289 -0.099144 0.090279 +v -0.067029 -0.097730 0.090279 +v -0.067029 -0.097730 0.090279 +v -0.066594 -0.097295 0.089953 +v -0.069096 -0.093597 0.089953 +v -0.069205 -0.092400 0.089409 +v -0.069205 -0.092400 0.089409 +v -0.067138 -0.089029 0.045792 +v -0.067138 -0.089029 0.045792 +v -0.067138 -0.089029 0.045792 +v -0.067029 -0.086853 0.045684 +v -0.067356 -0.086962 0.045792 +v -0.067356 -0.086962 0.045792 +v -0.067356 -0.086962 0.045792 +v -0.067029 -0.050633 0.045684 +v -0.066703 -0.055092 0.045575 +v -0.066703 -0.055092 0.045575 +v -0.066703 0.012672 0.045575 +v -0.067029 0.050741 0.045684 +v -0.066703 0.085004 0.045575 +v -0.066703 0.085004 0.045575 +v -0.066485 0.087071 0.045575 +v -0.066485 0.087071 0.045575 +v -0.066050 0.090225 0.045792 +v -0.066050 0.090225 0.045792 +v -0.067791 0.093488 0.089409 +v -0.067791 0.093488 0.089409 +v -0.066594 0.095337 0.089953 +v -0.063984 0.097404 0.089953 +v -0.061917 0.098165 0.089953 +v -0.058545 0.098709 0.089953 +v -0.058545 0.098709 0.089953 +v -0.058545 0.098927 0.090171 +v -0.058545 0.098927 0.090171 +v -0.042774 0.098927 0.090171 +v -0.000027 0.098927 0.090171 +v 0.050660 0.098927 0.090171 +v 0.058601 0.098709 
0.089953 +v 0.060776 0.098492 0.089953 +v 0.060776 0.098492 0.089953 +v 0.062951 0.097839 0.089953 +v 0.065344 0.097295 0.090279 +v 0.065344 0.097295 0.090279 +v 0.067085 0.095881 0.090279 +v 0.067085 0.095881 0.090279 +v 0.067085 0.095881 0.090279 +v 0.067085 0.095881 0.090279 +v 0.068607 0.094032 0.090279 +v 0.068607 0.094032 0.090279 +v 0.071327 0.095120 0.131286 +v 0.071327 0.095120 0.131286 +v 0.070130 0.090769 0.090279 +v 0.070130 0.090769 0.090279 +v 0.070130 0.090769 0.090279 +v 0.072741 0.089464 0.131286 +v 0.072741 0.089464 0.131286 +v 0.073502 0.087288 0.132700 +v 0.073502 0.063032 0.132700 +v 0.075025 0.028226 0.133352 +v 0.075025 0.028226 0.133352 +v 0.075025 0.028226 0.133352 +v 0.075025 0.028226 0.133352 +v 0.079593 -0.012889 0.133352 +v 0.079593 -0.012889 0.133352 +v 0.079593 -0.012889 0.133352 +v 0.079593 -0.012889 0.133352 +v 0.081116 -0.018654 0.132809 +v 0.081116 -0.003753 0.132809 +v 0.081660 0.000816 0.131938 +v 0.081660 -0.018654 0.131938 +v 0.081116 -0.020938 0.132591 +v 0.081116 -0.020938 0.132591 +v 0.081769 -0.021808 0.131068 +v 0.081769 -0.021808 0.131068 +v 0.082530 -0.020612 0.126717 +v 0.082530 -0.020612 0.126717 +v 0.082530 -0.021700 0.126065 +v 0.082530 -0.021700 0.126065 +v 0.082312 -0.054657 0.108553 +v 0.082312 -0.054657 0.108553 +v 0.082312 -0.093379 0.088539 +v 0.082312 -0.093379 0.088539 +v 0.081877 -0.097839 0.086364 +v 0.080790 -0.099035 0.085602 +v 0.080790 -0.099035 0.085602 +v 0.080137 -0.100341 0.084950 +v 0.079484 -0.101755 0.084515 +v 0.079484 -0.101755 0.084515 +v 0.077962 -0.104148 0.083209 +v 0.077962 -0.104148 0.083209 +v 0.077091 -0.105235 0.082665 +v 0.077091 -0.105235 0.082665 +v 0.074046 -0.107846 0.081251 +v 0.074046 -0.107846 0.081251 +v 0.071762 -0.109042 0.080599 +v 0.071762 -0.109042 0.080599 +v 0.070674 -0.109586 0.080055 +v 0.070674 -0.109586 0.080055 +v 0.069586 -0.110783 0.079837 +v 0.069586 -0.110783 0.079837 +v 0.066432 -0.111326 0.080273 +v 0.066432 -0.111326 0.080273 +v 0.066432 -0.111326 0.080273 
+v 0.066432 -0.111326 0.080273 +v 0.056860 -0.111326 0.080273 +v 0.056860 -0.111326 0.080273 +v 0.056860 -0.111326 0.080273 +v 0.056860 -0.111326 0.080273 +v 0.056860 -0.111326 0.080273 +v 0.056751 -0.110239 0.086690 +v 0.056751 -0.110239 0.086690 +v 0.056751 -0.110239 0.086690 +v 0.056751 -0.110239 0.086690 +v 0.013026 -0.110239 0.086690 +v 0.013026 -0.110239 0.086690 +v 0.013026 -0.110239 0.086690 +v -0.000027 -0.109804 0.087669 +v -0.000027 -0.109804 0.087669 +v -0.049082 -0.109042 0.088322 +v -0.049082 -0.109042 0.088322 +v -0.049082 -0.109042 0.088322 +v -0.058545 -0.104039 0.090932 +v -0.058545 -0.104039 0.090932 +v -0.058545 -0.104039 0.090932 +v -0.058545 -0.104039 0.090932 +v -0.058545 -0.104039 0.090932 +v -0.058545 -0.104039 0.090932 +v -0.061264 -0.103821 0.091041 +v -0.061264 -0.103821 0.091041 +v -0.062352 -0.102407 0.091367 +v -0.062135 -0.101211 0.090823 +v -0.065180 -0.099579 0.091041 +v -0.065289 -0.099906 0.091476 +v -0.066050 -0.098927 0.091367 +v -0.066050 -0.098709 0.090823 +v -0.067029 -0.097730 0.090932 +v -0.067029 -0.097730 0.090932 +v -0.068552 -0.095881 0.090279 +v -0.068552 -0.095881 0.090279 +v -0.068552 -0.095881 0.090279 +v -0.070075 -0.092727 0.090279 +v -0.070075 -0.092727 0.090279 +v -0.069749 -0.091421 0.089953 +v -0.069640 -0.089246 0.089409 +v -0.069640 -0.089246 0.089409 +v -0.069640 -0.089246 0.089409 +v -0.069640 -0.089246 0.089409 +v -0.067356 -0.055092 0.045792 +v -0.067356 -0.055092 0.045792 +v -0.067356 -0.055092 0.045792 +v -0.067356 -0.055092 0.045792 +v -0.067356 -0.023766 0.045792 +v -0.067356 -0.023766 0.045792 +v -0.067356 -0.023766 0.045792 +v -0.067356 0.006472 0.045792 +v -0.067356 0.006472 0.045792 +v -0.067356 0.082937 0.045792 +v -0.067356 0.082937 0.045792 +v -0.067356 0.082937 0.045792 +v -0.067356 0.082937 0.045792 +v -0.067356 0.082937 0.045792 +v -0.067356 0.082937 0.045792 +v -0.067356 0.082937 0.045792 +v -0.067356 0.085004 0.045792 +v -0.067356 0.085004 0.045792 +v -0.067138 0.087179 0.045792 +v 
-0.067138 0.087179 0.045792 +v -0.067138 0.087179 0.045792 +v -0.069205 0.090551 0.089409 +v -0.069205 0.090551 0.089409 +v -0.068661 0.092727 0.089953 +v -0.068552 0.094032 0.090279 +v -0.068552 0.094032 0.090279 +v -0.067138 0.095881 0.090279 +v -0.067138 0.095881 0.090279 +v -0.064310 0.097948 0.090279 +v -0.064310 0.097948 0.090279 +v -0.060938 0.099144 0.090279 +v -0.060938 0.099144 0.090279 +v -0.060938 0.099144 0.090279 +v -0.058654 0.099362 0.090279 +v -0.058654 0.099362 0.090279 +v -0.058654 0.099362 0.090279 +v -0.050714 0.099362 0.090279 +v -0.050714 0.099362 0.090279 +v -0.050714 0.099362 0.090279 +v -0.050714 0.099362 0.090279 +v -0.050714 0.099362 0.090279 +v -0.000027 0.100776 0.117581 +v -0.000027 0.100776 0.117581 +v -0.000027 0.100776 0.117581 +v -0.000027 0.100776 0.117581 +v -0.007967 0.101537 0.131286 +v -0.007967 0.101537 0.131286 +v -0.007967 0.101537 0.131286 +v -0.007967 0.101537 0.131286 +v -0.000027 0.101863 0.132374 +v -0.007967 0.101646 0.131938 +v -0.000027 0.101537 0.131612 +v -0.000027 0.101537 0.131286 +v -0.000027 0.101537 0.131286 +v -0.000027 0.101537 0.131286 +v -0.000027 0.101537 0.131286 +v 0.050769 0.099362 0.090279 +v 0.050769 0.099362 0.090279 +v 0.050769 0.099362 0.090279 +v 0.058601 0.099362 0.090279 +v 0.058601 0.099362 0.090279 +v 0.058601 0.099362 0.090279 +v 0.060885 0.099144 0.090279 +v 0.060885 0.099144 0.090279 +v 0.060885 0.099144 0.090279 +v 0.063169 0.098492 0.090279 +v 0.063169 0.098492 0.090279 +v 0.066432 0.100123 0.131286 +v 0.066432 0.100123 0.131286 +v 0.069260 0.097948 0.131286 +v 0.069260 0.097948 0.131286 +v 0.069260 0.097948 0.131286 +v 0.071762 0.095337 0.132482 +v 0.072197 0.093053 0.131286 +v 0.072197 0.093053 0.131286 +v 0.072197 0.093053 0.131286 +v 0.073393 0.090769 0.132700 +v 0.073393 0.090769 0.132700 +v 0.074916 0.090878 0.133352 +v 0.074916 0.090878 0.133352 +v 0.075025 0.072169 0.133352 +v 0.075025 0.072169 0.133352 +v 0.075025 0.072169 0.133352 +v 0.079593 0.040517 0.133352 +v 0.079593 
0.040517 0.133352 +v 0.079593 0.040517 0.133352 +v 0.079593 0.040517 0.133352 +v 0.079593 0.040517 0.133352 +v 0.080028 0.075106 0.133352 +v 0.080028 0.093053 0.133352 +v 0.081333 0.093053 0.132591 +v 0.081769 0.090769 0.131503 +v 0.081769 0.090769 0.131503 +v 0.081769 0.090769 0.131503 +v 0.081769 0.090769 0.131503 +v 0.081769 0.029966 0.131503 +v 0.081769 0.029966 0.131503 +v 0.081769 0.029966 0.131503 +v 0.081769 0.029966 0.131503 +v 0.081769 -0.019633 0.131503 +v 0.081769 -0.019633 0.131503 +v 0.081769 -0.019633 0.131503 +v 0.081769 -0.019633 0.131503 +v 0.082530 -0.018654 0.127044 +v 0.082530 -0.018654 0.127044 +v 0.082530 -0.018654 0.127044 +v 0.082530 -0.018654 0.127044 +v 0.082530 -0.018654 0.126826 +v 0.082530 -0.018654 0.126826 +v 0.082312 -0.019851 0.126282 +v 0.082312 -0.021591 0.125630 +v 0.081877 -0.052590 0.109532 +v 0.081877 -0.052590 0.109532 +v 0.081877 -0.052590 0.109532 +v 0.081877 -0.052590 0.109532 +v 0.081551 -0.052590 0.109641 +v 0.081551 -0.052590 0.109641 +v 0.081551 -0.093379 0.088539 +v 0.081551 -0.093379 0.088539 +v 0.081551 -0.093379 0.088539 +v 0.081333 -0.093379 0.088757 +v 0.081333 -0.093379 0.088757 +v 0.081442 -0.094793 0.087886 +v 0.080790 -0.097730 0.086690 +v 0.080790 -0.097730 0.086690 +v 0.080790 -0.097730 0.086690 +v 0.080790 -0.097730 0.086690 +v 0.080028 -0.096642 0.092455 +v 0.080028 -0.096642 0.092455 +v 0.080028 -0.096642 0.092455 +v 0.080028 -0.096642 0.092455 +v 0.078614 -0.100667 0.090606 +v 0.078614 -0.100667 0.090606 +v 0.077091 -0.103060 0.089409 +v 0.077091 -0.103060 0.089409 +v 0.076221 -0.104148 0.088865 +v 0.076221 -0.104148 0.088865 +v 0.074263 -0.105888 0.087995 +v 0.074263 -0.105888 0.087995 +v 0.071979 -0.107302 0.087234 +v 0.071979 -0.107302 0.087234 +v 0.069586 -0.108281 0.086799 +v 0.069586 -0.108281 0.086799 +v 0.067846 -0.109912 0.080164 +v 0.067846 -0.109912 0.080164 +v 0.067955 -0.110130 0.079729 +v 0.066432 -0.111109 0.079729 +v 0.066432 -0.111109 0.079729 +v 0.066432 -0.111109 0.079729 +v 0.066432 
-0.111109 0.079729 +v 0.066432 -0.110456 0.079620 +v 0.056860 -0.111109 0.079729 +v 0.056860 -0.111109 0.079729 +v 0.056860 -0.111109 0.079729 +v 0.012699 -0.111326 0.080273 +v 0.012699 -0.111326 0.080273 +v 0.012699 -0.111326 0.080273 +v 0.012699 -0.111326 0.080273 +v 0.012699 -0.111326 0.080273 +v 0.012699 -0.111326 0.080273 +v 0.012699 -0.111326 0.080273 +v -0.000027 -0.110239 0.086690 +v -0.000027 -0.110239 0.086690 +v -0.000027 -0.110239 0.086690 +v -0.000027 -0.110239 0.086690 +v -0.000027 -0.110239 0.086690 +v -0.056805 -0.109586 0.087886 +v -0.066920 -0.108934 0.088322 +v -0.066920 -0.108934 0.088322 +v -0.066920 -0.108934 0.088322 +v -0.066920 -0.108934 0.088322 +v -0.064092 -0.103169 0.091367 +v -0.064092 -0.103169 0.091367 +v -0.064636 -0.101428 0.091693 +v -0.067029 -0.098492 0.092237 +v -0.068008 -0.097621 0.092672 +v -0.066920 -0.097948 0.091367 +v -0.068661 -0.095881 0.092020 +v -0.068661 -0.095881 0.092020 +v -0.068661 -0.095881 0.092020 +v -0.068661 -0.095881 0.092020 +v -0.070292 -0.092727 0.093978 +v -0.070292 -0.092727 0.093978 +v -0.070945 -0.089137 0.096153 +v -0.070945 -0.089137 0.096153 +v -0.070619 -0.089137 0.090279 +v -0.070619 -0.089137 0.090279 +v -0.070619 -0.089137 0.090279 +v -0.070292 -0.056289 0.090171 +v -0.069966 -0.089246 0.089953 +v -0.069966 -0.089246 0.089953 +v -0.069640 -0.056289 0.089409 +v -0.069640 -0.056289 0.089409 +v -0.069640 -0.056289 0.089409 +v -0.069640 -0.023766 0.089409 +v -0.069640 -0.023766 0.089409 +v -0.069640 -0.023766 0.089409 +v -0.069640 -0.023766 0.089409 +v -0.069640 -0.023766 0.089409 +v -0.069640 0.047369 0.089409 +v -0.069640 0.047369 0.089409 +v -0.069640 0.047369 0.089409 +v -0.069640 0.070320 0.089409 +v -0.069640 0.070320 0.089409 +v -0.069640 0.070320 0.089409 +v -0.069640 0.085113 0.089409 +v -0.069640 0.085113 0.089409 +v -0.069640 0.085113 0.089409 +v -0.069640 0.088376 0.089409 +v -0.069640 0.088376 0.089409 +v -0.069640 0.088376 0.089409 +v -0.069640 0.088376 0.089409 +v -0.069749 
0.089572 0.089953 +v -0.069749 0.089572 0.089953 +v -0.069205 0.093053 0.090279 +v -0.069205 0.093053 0.090279 +v -0.067791 0.096534 0.103876 +v -0.067464 0.099471 0.131286 +v -0.067464 0.099471 0.131286 +v -0.064310 0.100993 0.131286 +v -0.064310 0.100993 0.131286 +v -0.060721 0.101537 0.131286 +v -0.060721 0.101537 0.131286 +v -0.060721 0.101537 0.131286 +v -0.060721 0.101537 0.131286 +v -0.052889 0.101537 0.131286 +v -0.052889 0.101537 0.131286 +v -0.052889 0.101537 0.131286 +v -0.052889 0.102190 0.132700 +v -0.000027 0.102190 0.132700 +v -0.000027 0.102190 0.132700 +v 0.022489 0.101537 0.131612 +v 0.052836 0.101537 0.131286 +v 0.052836 0.101537 0.131286 +v 0.060776 0.101537 0.131286 +v 0.060776 0.101537 0.131286 +v 0.060776 0.101537 0.131286 +v 0.060776 0.101537 0.131286 +v 0.064257 0.100993 0.131286 +v 0.064257 0.100993 0.131286 +v 0.066649 0.100449 0.132482 +v 0.069586 0.098274 0.132482 +v 0.071653 0.096751 0.133026 +v 0.071979 0.095555 0.132700 +v 0.072958 0.093162 0.132700 +v 0.074372 0.093597 0.133352 +v 0.074372 0.093597 0.133352 +v 0.074372 0.093597 0.133352 +v 0.074372 0.093597 0.133352 +v 0.079484 0.094576 0.133352 +v 0.079484 0.094576 0.133352 +v 0.079484 0.094576 0.133352 +v 0.079484 0.094576 0.133352 +v 0.080572 0.094685 0.133026 +v 0.080572 0.094685 0.133026 +v 0.081007 0.096316 0.132591 +v 0.081007 0.096316 0.132591 +v 0.081660 0.094793 0.131503 +v 0.081660 0.094793 0.131503 +v 0.082530 0.095555 0.127044 +v 0.082530 0.095555 0.127044 +v 0.082204 0.097295 0.127044 +v 0.082204 0.097295 0.127044 +v 0.081660 0.097078 0.126282 +v 0.081660 0.098818 0.126391 +v 0.082312 0.095555 0.126391 +v 0.082530 0.091530 0.127044 +v 0.082530 0.091530 0.127044 +v 0.082530 0.091530 0.127044 +v 0.082530 0.075759 0.127044 +v 0.082530 0.075759 0.127044 +v 0.082530 0.075759 0.127044 +v 0.082530 0.040952 0.127044 +v 0.082530 0.040952 0.127044 +v 0.082530 0.040952 0.127044 +v 0.082530 0.040952 0.127044 +v 0.082530 0.010061 0.127044 +v 0.082530 0.010061 0.127044 +v 0.082530 
-0.005167 0.126826 +v 0.082312 -0.012889 0.126391 +v 0.082312 -0.018654 0.126391 +v 0.082312 -0.018654 0.126391 +v 0.081551 -0.018654 0.126391 +v 0.081551 -0.018654 0.126391 +v 0.081551 -0.021591 0.125630 +v 0.081551 -0.021591 0.125630 +v 0.081333 -0.053352 0.109423 +v 0.081333 -0.053352 0.109423 +v 0.081225 -0.057594 0.107574 +v 0.081225 -0.057594 0.107574 +v 0.081225 -0.057594 0.107574 +v 0.081225 -0.057594 0.107574 +v 0.081225 -0.057594 0.107574 +v 0.081225 -0.057594 0.107574 +v 0.081225 -0.093488 0.088974 +v 0.081225 -0.093488 0.088974 +v 0.081225 -0.093488 0.088974 +v 0.081225 -0.093488 0.088974 +v 0.080463 -0.092509 0.094521 +v 0.080463 -0.092509 0.094521 +v 0.080463 -0.092509 0.094521 +v 0.080463 -0.092509 0.094521 +v 0.080463 -0.092509 0.094521 +v 0.080137 -0.092727 0.094957 +v 0.080137 -0.092727 0.094957 +v 0.079919 -0.057811 0.113230 +v 0.080137 -0.055962 0.113991 +v 0.080137 -0.055962 0.113991 +v 0.080137 -0.055962 0.113991 +v 0.079593 -0.056071 0.114209 +v 0.079593 -0.056071 0.114209 +v 0.079593 -0.056071 0.114209 +v 0.079593 -0.056071 0.114209 +v 0.079919 -0.092835 0.095065 +v 0.079919 -0.092835 0.095065 +v 0.079919 -0.095555 0.093543 +v 0.079919 -0.095555 0.093543 +v 0.079919 -0.095555 0.093543 +v 0.078832 -0.099579 0.091585 +v 0.078288 -0.100776 0.090932 +v 0.077527 -0.101972 0.090279 +v 0.075569 -0.103821 0.089518 +v 0.075569 -0.103821 0.089518 +v 0.075025 -0.105018 0.088757 +v 0.072958 -0.106541 0.087995 +v 0.071762 -0.107084 0.087778 +v 0.069478 -0.107955 0.087343 +v 0.066867 -0.108498 0.087016 +v 0.065453 -0.108934 0.086472 +v 0.065453 -0.108934 0.086472 +v 0.066432 -0.110021 0.080055 +v 0.066432 -0.110021 0.080055 +v 0.054250 -0.108390 0.087125 +v 0.054250 -0.108390 0.087125 +v 0.054250 -0.108390 0.087125 +v 0.054250 -0.108390 0.087125 +v 0.054250 -0.108390 0.087125 +v 0.054358 -0.108607 0.087016 +v 0.054358 -0.108607 0.087016 +v 0.054141 -0.108498 0.086908 +v 0.054141 -0.108498 0.086908 +v 0.054358 -0.108934 0.086472 +v 0.054358 -0.108934 
0.086472 +v 0.054358 -0.108934 0.086472 +v 0.054358 -0.108934 0.086472 +v 0.054358 -0.108934 0.086472 +v 0.054141 -0.108825 0.086472 +v 0.054032 -0.109804 0.080490 +v 0.054032 -0.109804 0.080490 +v 0.053923 -0.109695 0.080273 +v 0.053923 -0.109695 0.080273 +v 0.054358 -0.109912 0.080490 +v 0.054358 -0.109912 0.080490 +v 0.054358 -0.109912 0.080490 +v 0.056860 -0.110021 0.080055 +v 0.056860 -0.110021 0.080055 +v 0.056860 -0.110021 0.080055 +v 0.056860 -0.110021 0.080055 +v 0.066432 -0.110239 0.079729 +v 0.066432 -0.110239 0.079729 +v 0.066432 -0.110239 0.079729 +v 0.053923 -0.110239 0.079729 +v 0.053923 -0.110239 0.079729 +v 0.053923 -0.110239 0.079729 +v 0.012591 -0.110239 0.079729 +v 0.012591 -0.111109 0.079729 +v 0.012591 -0.111109 0.079729 +v 0.012699 -0.111326 0.080055 +v 0.012699 -0.111326 0.080055 +v 0.012699 -0.111326 0.080055 +v -0.010795 -0.111326 0.080055 +v -0.010795 -0.111326 0.080055 +v -0.000027 -0.111326 0.080273 +v -0.000027 -0.111326 0.080273 +v -0.000027 -0.111326 0.080273 +v -0.000027 -0.111326 0.080273 +v -0.000027 -0.111326 0.080273 +v -0.056805 -0.110239 0.086690 +v -0.056805 -0.110239 0.086690 +v -0.056805 -0.110239 0.086690 +v -0.056805 -0.110239 0.086690 +v -0.065506 -0.110239 0.086690 +v -0.065506 -0.110239 0.086690 +v -0.065506 -0.110239 0.086690 +v -0.065506 -0.110239 0.086690 +v -0.065506 -0.109586 0.087886 +v -0.065506 -0.109586 0.087886 +v -0.069749 -0.109042 0.088213 +v -0.073229 -0.107411 0.088974 +v -0.071598 -0.107519 0.089083 +v -0.071598 -0.107519 0.089083 +v -0.071598 -0.107519 0.089083 +v -0.066485 -0.101972 0.091911 +v -0.066485 -0.101972 0.091911 +v -0.065942 -0.101211 0.092129 +v -0.067899 -0.099688 0.092890 +v -0.068226 -0.098056 0.092999 +v -0.069313 -0.095120 0.093107 +v -0.069313 -0.095120 0.093107 +v -0.070619 -0.091857 0.095065 +v -0.070945 -0.089355 0.096588 +v -0.071815 -0.056397 0.113556 +v -0.071815 -0.056289 0.113230 +v -0.071815 -0.056289 0.113230 +v -0.071815 -0.056289 0.113230 +v -0.071815 -0.056289 0.113230 
+v -0.070619 -0.051829 0.090279 +v -0.070619 -0.051829 0.090279 +v -0.070619 -0.051829 0.090279 +v -0.069966 -0.056289 0.089953 +v -0.069966 0.017023 0.089953 +v -0.069966 0.052373 0.089953 +v -0.069966 0.087288 0.089953 +v -0.070510 0.088485 0.090279 +v -0.070510 0.088485 0.090279 +v -0.070510 0.088485 0.090279 +v -0.070075 0.090769 0.090279 +v -0.070075 0.090769 0.090279 +v -0.070075 0.090769 0.090279 +v -0.071380 0.095120 0.131286 +v -0.071380 0.095120 0.131286 +v -0.069205 0.097948 0.131286 +v -0.069205 0.097948 0.131286 +v -0.066594 0.100449 0.132482 +v -0.066812 0.100667 0.132700 +v -0.064419 0.101646 0.132700 +v -0.060721 0.102190 0.132700 +v -0.060721 0.102190 0.132700 +v -0.060721 0.102190 0.132700 +v -0.052889 0.103277 0.133352 +v -0.052889 0.103277 0.133352 +v -0.007967 0.103277 0.133352 +v -0.007967 0.103277 0.133352 +v -0.007967 0.103277 0.133352 +v -0.000027 0.102516 0.133026 +v -0.000027 0.102951 0.133244 +v -0.000027 0.102951 0.133244 +v -0.000027 0.102951 0.133244 +v 0.052836 0.102190 0.132700 +v 0.060776 0.102190 0.132700 +v 0.060776 0.102190 0.132700 +v 0.060776 0.102190 0.132700 +v 0.062081 0.102842 0.133244 +v 0.062081 0.102842 0.133244 +v 0.064474 0.101646 0.132700 +v 0.066758 0.100667 0.132700 +v 0.069260 0.099906 0.133244 +v 0.073285 0.096207 0.133352 +v 0.073285 0.096207 0.133352 +v 0.073285 0.096207 0.133352 +v 0.078397 0.098927 0.133352 +v 0.078397 0.098927 0.133352 +v 0.079158 0.099144 0.133244 +v 0.080028 0.099579 0.132591 +v 0.081007 0.098165 0.131612 +v 0.081007 0.098165 0.131612 +v 0.081007 0.098165 0.131612 +v 0.081225 0.100558 0.127044 +v 0.081225 0.100558 0.127044 +v 0.081007 0.100449 0.126391 +v 0.079593 0.101646 0.126391 +v 0.080790 0.098600 0.126391 +v 0.081442 0.095446 0.126391 +v 0.082312 0.091530 0.126391 +v 0.082312 0.066296 0.126391 +v 0.082312 0.056071 0.126391 +v 0.082312 0.040952 0.126391 +v 0.082312 0.040952 0.126391 +v 0.081877 0.019742 0.126282 +v 0.082312 0.002556 0.126391 +v 0.081551 -0.012889 0.126391 +v 0.081225 
-0.018654 0.126826 +v 0.081225 -0.018654 0.126826 +v 0.081225 -0.018654 0.126826 +v 0.081225 -0.018654 0.126826 +v 0.081225 -0.020503 0.126500 +v 0.081225 -0.020503 0.126500 +v 0.081333 -0.021700 0.125847 +v 0.081333 -0.021700 0.125847 +v 0.080898 -0.054657 0.111381 +v 0.080898 -0.054657 0.111381 +v 0.080898 -0.054875 0.111055 +v 0.080898 -0.054875 0.111055 +v 0.080898 -0.054875 0.111055 +v 0.080898 -0.055527 0.110946 +v 0.080898 -0.055527 0.110946 +v 0.080898 -0.055527 0.110946 +v 0.080790 -0.055962 0.111381 +v 0.080790 -0.055962 0.111381 +v 0.080790 -0.055962 0.111381 +v 0.080790 -0.055962 0.111381 +v 0.080463 -0.055962 0.113448 +v 0.080463 -0.055962 0.113448 +v 0.080463 -0.055962 0.113448 +v 0.080028 -0.055636 0.113774 +v 0.080028 -0.055636 0.113774 +v 0.079593 -0.055636 0.114100 +v 0.079593 -0.055636 0.114100 +v 0.079593 -0.055636 0.114100 +v 0.074046 -0.055854 0.114209 +v 0.074046 -0.055854 0.114209 +v 0.074046 -0.055636 0.113991 +v 0.074046 -0.055636 0.113991 +v 0.073720 -0.055745 0.114100 +v 0.074046 -0.056071 0.114209 +v 0.074046 -0.056071 0.114209 +v 0.074046 -0.056071 0.114209 +v 0.073176 -0.089572 0.096914 +v 0.073176 -0.089572 0.096914 +v 0.073176 -0.089572 0.096914 +v 0.073176 -0.089572 0.096914 +v 0.073176 -0.089572 0.096914 +v 0.079593 -0.092835 0.095174 +v 0.079593 -0.092835 0.095174 +v 0.079593 -0.092835 0.095174 +v 0.079593 -0.092835 0.095174 +v 0.079593 -0.092835 0.095174 +v 0.079158 -0.096751 0.093107 +v 0.079158 -0.096751 0.093107 +v 0.079158 -0.096751 0.093107 +v 0.079158 -0.096751 0.093107 +v 0.079158 -0.096751 0.093107 +v 0.078397 -0.099362 0.091802 +v 0.078397 -0.099362 0.091802 +v 0.067085 -0.100014 0.091041 +v 0.069042 -0.098165 0.092020 +v 0.068607 -0.099688 0.091585 +v 0.068607 -0.099688 0.091585 +v 0.070456 -0.097621 0.092672 +v 0.070456 -0.097621 0.092672 +v 0.072741 -0.106323 0.088213 +v 0.072741 -0.106323 0.088213 +v 0.070565 -0.107411 0.087669 +v 0.070565 -0.107411 0.087669 +v 0.070565 -0.107411 0.087669 +v 0.068281 -0.108172 
0.087234 +v 0.068281 -0.108172 0.087234 +v 0.068281 -0.108172 0.087234 +v 0.068281 -0.108172 0.087234 +v 0.065453 -0.108607 0.087016 +v 0.065453 -0.108607 0.087016 +v 0.054358 -0.103604 0.089627 +v 0.054358 -0.103604 0.089627 +v 0.054358 -0.103604 0.089627 +v 0.054141 -0.103495 0.089518 +v 0.054141 -0.103495 0.089518 +v 0.054141 -0.102734 0.089518 +v 0.054141 -0.102734 0.089518 +v 0.054032 -0.102842 0.089300 +v 0.054032 -0.103169 0.089409 +v 0.054032 -0.103169 0.089409 +v 0.054032 -0.108390 0.086690 +v 0.054032 -0.108390 0.086690 +v 0.054032 -0.108390 0.086690 +v 0.054032 -0.108498 0.086472 +v 0.054032 -0.108498 0.086472 +v 0.054032 -0.108498 0.086472 +v 0.053923 -0.109586 0.080490 +v 0.053923 -0.109586 0.080490 +v 0.053923 -0.109586 0.080490 +v 0.053923 -0.109586 0.080490 +v 0.053923 -0.109586 0.080490 +v 0.053815 -0.107737 0.080164 +v 0.053923 -0.107193 0.080381 +v 0.053923 -0.107193 0.080381 +v 0.053923 -0.107193 0.080381 +v 0.053923 -0.105888 0.079729 +v 0.053923 -0.105888 0.079729 +v 0.053923 -0.105888 0.079729 +v 0.053923 -0.105888 0.079729 +v 0.053815 -0.106867 0.080055 +v 0.053597 -0.107737 0.080055 +v 0.053597 -0.109804 0.080055 +v 0.053597 -0.109804 0.080055 +v 0.053923 -0.110021 0.079946 +v 0.053053 -0.110130 0.079946 +v 0.053053 -0.110130 0.079946 +v 0.012699 -0.109912 0.080490 +v 0.012699 -0.109912 0.080490 +v 0.049899 -0.110021 0.080055 +v 0.049899 -0.110021 0.080055 +v -0.000027 -0.110130 0.079946 +v -0.000027 -0.110130 0.079946 +v -0.000027 -0.110130 0.079946 +v -0.000027 -0.110130 0.079946 +v -0.000027 -0.110674 0.079511 +v -0.000027 -0.110674 0.079511 +v -0.013732 -0.111109 0.079729 +v -0.013732 -0.111109 0.079729 +v -0.013732 -0.111109 0.079729 +v -0.013732 -0.111109 0.079729 +v -0.056914 -0.111109 0.079729 +v -0.056914 -0.111109 0.079729 +v -0.056914 -0.111109 0.079729 +v -0.056914 -0.111326 0.080273 +v -0.056914 -0.111326 0.080273 +v -0.056914 -0.111326 0.080273 +v -0.066377 -0.111326 0.080273 +v -0.066377 -0.111326 0.080273 +v -0.066377 
-0.111326 0.080273 +v -0.066377 -0.111326 0.080273 +v -0.068661 -0.109912 0.086908 +v -0.068661 -0.109912 0.086908 +v -0.071271 -0.109151 0.087343 +v -0.071271 -0.109151 0.087343 +v -0.074970 -0.106976 0.088322 +v -0.074970 -0.106976 0.088322 +v -0.075513 -0.105888 0.089627 +v -0.073773 -0.106214 0.089736 +v -0.073773 -0.106214 0.089736 +v -0.073773 -0.106214 0.089736 +v -0.069640 -0.099362 0.093325 +v -0.069640 -0.099362 0.093325 +v -0.069640 -0.099362 0.093325 +v -0.069640 -0.099362 0.093325 +v -0.069640 -0.099362 0.093325 +v -0.069749 -0.095990 0.094195 +v -0.069749 -0.095990 0.094195 +v -0.070836 -0.093706 0.095392 +v -0.071598 -0.089899 0.097567 +v -0.072468 -0.056941 0.114535 +v -0.072794 -0.023875 0.130307 +v -0.072685 -0.022679 0.130524 +v -0.072685 -0.022679 0.130524 +v -0.072685 -0.022679 0.130524 +v -0.072685 -0.022679 0.130524 +v -0.070619 -0.002012 0.090279 +v -0.070619 -0.002012 0.090279 +v -0.070619 -0.002012 0.090279 +v -0.070619 -0.002012 0.090279 +v -0.070619 -0.002012 0.090279 +v -0.070619 -0.002012 0.090279 +v -0.070619 0.070320 0.090279 +v -0.070619 0.070320 0.090279 +v -0.070619 0.070320 0.090279 +v -0.070619 0.070320 0.090279 +v -0.071271 0.085874 0.103876 +v -0.071271 0.085874 0.103876 +v -0.070619 0.087397 0.090279 +v -0.070619 0.087397 0.090279 +v -0.070619 0.087397 0.090279 +v -0.072685 0.090660 0.131286 +v -0.072685 0.090660 0.131286 +v -0.072250 0.093053 0.131286 +v -0.072250 0.093053 0.131286 +v -0.072250 0.093053 0.131286 +v -0.072250 0.093053 0.131286 +v -0.071706 0.095337 0.132482 +v -0.071706 0.095337 0.132482 +v -0.069531 0.098274 0.132482 +v -0.069096 0.099579 0.133026 +v -0.067138 0.101320 0.133244 +v -0.067138 0.101320 0.133244 +v -0.062135 0.102842 0.133244 +v -0.062135 0.102842 0.133244 +v -0.062135 0.103604 0.133352 +v -0.062135 0.103604 0.133352 +v -0.062135 0.103604 0.133352 +v -0.062135 0.103604 0.133352 +v -0.007967 0.103713 0.133352 +v -0.007967 0.103713 0.133352 +v -0.007967 0.103713 0.133352 +v -0.007967 0.103713 
0.133352 +v -0.000027 0.103713 0.133352 +v -0.000027 0.103713 0.133352 +v -0.000027 0.103713 0.133352 +v -0.000027 0.103713 0.133352 +v 0.052836 0.103713 0.133352 +v 0.052836 0.103713 0.133352 +v 0.052836 0.103713 0.133352 +v 0.052836 0.103713 0.133352 +v 0.052836 0.103713 0.133352 +v 0.060776 0.103713 0.133352 +v 0.060776 0.103713 0.133352 +v 0.060776 0.103713 0.133352 +v 0.063495 0.103495 0.133352 +v 0.063495 0.103495 0.133352 +v 0.063495 0.103495 0.133352 +v 0.063495 0.103495 0.133352 +v 0.063495 0.103495 0.133352 +v 0.066214 0.102625 0.133352 +v 0.066214 0.102625 0.133352 +v 0.066214 0.102625 0.133352 +v 0.066214 0.102625 0.133352 +v 0.066214 0.102625 0.133352 +v 0.068716 0.101320 0.133352 +v 0.068716 0.101320 0.133352 +v 0.068716 0.101320 0.133352 +v 0.070783 0.099579 0.133352 +v 0.070783 0.099579 0.133352 +v 0.070783 0.099579 0.133352 +v 0.070783 0.099579 0.133352 +v 0.070783 0.099579 0.133352 +v 0.075460 0.103604 0.133352 +v 0.075460 0.103604 0.133352 +v 0.075460 0.103604 0.133352 +v 0.077091 0.101537 0.133352 +v 0.077091 0.101537 0.133352 +v 0.077091 0.101537 0.133352 +v 0.077091 0.101537 0.133352 +v 0.078397 0.102516 0.132591 +v 0.076874 0.104365 0.132591 +v 0.075677 0.105670 0.132591 +v 0.076765 0.104909 0.132374 +v 0.077091 0.104583 0.132374 +v 0.078832 0.102734 0.131612 +v 0.078832 0.102734 0.131612 +v 0.079593 0.103604 0.127044 +v 0.079593 0.103604 0.127044 +v 0.079484 0.103495 0.126391 +v 0.079158 0.103169 0.126282 +v 0.078614 0.102951 0.126500 +v 0.078832 0.102516 0.126717 +v 0.078832 0.102516 0.126717 +v 0.080137 0.099797 0.126717 +v 0.080137 0.099797 0.126717 +v 0.081007 0.096860 0.126717 +v 0.081007 0.096860 0.126717 +v 0.081225 0.093814 0.126826 +v 0.081225 0.093814 0.126826 +v 0.081551 0.091530 0.126391 +v 0.081551 0.066296 0.126391 +v 0.081225 0.040952 0.126826 +v 0.081225 0.040952 0.126826 +v 0.081225 0.040952 0.126826 +v 0.081225 0.040952 0.126826 +v 0.081225 0.040952 0.126826 +v 0.081225 0.040952 0.126826 +v 0.081551 0.040952 0.126391 +v 
0.081551 0.040952 0.126391 +v 0.081551 0.000925 0.126391 +v 0.081225 0.002556 0.126826 +v 0.081225 0.002556 0.126826 +v 0.081225 -0.000598 0.126826 +v 0.081225 -0.002121 0.126826 +v 0.081225 -0.002121 0.126826 +v 0.081225 -0.002121 0.126826 +v 0.081007 -0.002447 0.128349 +v 0.081007 -0.018654 0.128349 +v 0.081007 -0.018654 0.128349 +v 0.080463 -0.023766 0.129981 +v 0.080463 -0.023766 0.129981 +v 0.080463 -0.023766 0.129981 +v 0.080463 -0.020721 0.131068 +v 0.080463 -0.020721 0.131068 +v 0.080246 -0.018654 0.131721 +v 0.080463 -0.012998 0.131286 +v 0.080137 0.007342 0.131830 +v 0.080137 0.007342 0.131830 +v 0.080137 0.007342 0.131830 +v 0.080463 0.007342 0.131286 +v 0.080463 0.007342 0.131286 +v 0.080246 0.007668 0.131286 +v 0.080246 0.007668 0.131286 +v 0.079811 0.007668 0.131721 +v 0.079811 0.007668 0.131721 +v 0.079811 0.007668 0.131721 +v 0.079811 0.007668 0.131721 +v 0.079593 0.007451 0.132047 +v 0.079593 0.007451 0.132047 +v 0.079593 0.007451 0.132047 +v 0.079593 -0.012998 0.132047 +v 0.079593 -0.012998 0.132047 +v 0.079593 -0.012998 0.132047 +v 0.079593 -0.012998 0.132047 +v 0.079593 -0.018654 0.132047 +v 0.079593 -0.018654 0.132047 +v 0.079593 -0.020830 0.131830 +v 0.079593 -0.020830 0.131830 +v 0.079593 -0.024201 0.130633 +v 0.079593 -0.024201 0.130633 +v 0.074046 -0.054766 0.114862 +v 0.074046 -0.054766 0.114862 +v 0.074046 -0.054766 0.114862 +v 0.074046 -0.054766 0.114862 +v 0.074916 -0.024201 0.130633 +v 0.074916 -0.024201 0.130633 +v 0.074916 -0.024201 0.130633 +v 0.073720 -0.054766 0.114753 +v 0.073720 -0.054766 0.114753 +v 0.073393 -0.054875 0.114318 +v 0.073393 -0.054875 0.114318 +v 0.073502 -0.054657 0.114644 +v 0.073502 -0.054657 0.114644 +v 0.073611 -0.054875 0.114426 +v 0.073611 -0.054875 0.114426 +v 0.073611 -0.054875 0.114426 +v 0.073611 -0.054875 0.114426 +v 0.080681 -0.054875 0.111381 +v 0.080681 -0.054875 0.111381 +v 0.080681 -0.054983 0.111163 +v 0.080681 -0.054983 0.111163 +v 0.073285 -0.055092 0.111163 +v 0.073285 -0.055092 0.111163 +v 
0.080572 -0.055527 0.111163 +v 0.080572 -0.055527 0.111163 +v 0.080572 -0.055527 0.111163 +v 0.080572 -0.055527 0.111163 +v 0.080572 -0.055636 0.111381 +v 0.080572 -0.055636 0.111381 +v 0.073502 -0.055636 0.113556 +v 0.073502 -0.055636 0.113556 +v 0.073393 -0.055962 0.113991 +v 0.073393 -0.055962 0.113991 +v 0.073720 -0.057703 0.113230 +v 0.072849 -0.089572 0.096806 +v 0.072849 -0.089572 0.096806 +v 0.072958 -0.091095 0.096044 +v 0.072958 -0.091095 0.096044 +v 0.072958 -0.091095 0.096044 +v 0.072958 -0.091095 0.096044 +v 0.071870 -0.095120 0.093978 +v 0.071870 -0.095120 0.093978 +v 0.071870 -0.095120 0.093978 +v 0.071870 -0.095120 0.093978 +v 0.071218 -0.094793 0.093869 +v 0.071218 -0.094793 0.093869 +v 0.071109 -0.094358 0.093216 +v 0.071109 -0.094358 0.093216 +v 0.071109 -0.094358 0.093216 +v 0.069804 -0.096642 0.092020 +v 0.069804 -0.096642 0.092020 +v 0.069804 -0.096642 0.092020 +v 0.068064 -0.098709 0.090932 +v 0.068064 -0.098709 0.090932 +v 0.069695 -0.096642 0.089736 +v 0.069695 -0.096642 0.089736 +v 0.069695 -0.096642 0.089736 +v 0.069695 -0.096642 0.089736 +v 0.067955 -0.098600 0.089736 +v 0.067955 -0.098600 0.089736 +v 0.066976 -0.099579 0.090388 +v 0.066976 -0.099579 0.090388 +v 0.066976 -0.099797 0.090714 +v 0.065344 -0.102081 0.090388 +v 0.065344 -0.102081 0.090388 +v 0.065344 -0.102081 0.090388 +v 0.065344 -0.102081 0.090388 +v 0.062734 -0.103060 0.089844 +v 0.062734 -0.103060 0.089844 +v 0.062734 -0.103060 0.089844 +v 0.059906 -0.103495 0.089627 +v 0.059906 -0.103495 0.089627 +v 0.054358 -0.102625 0.089518 +v 0.054358 -0.102625 0.089518 +v 0.054358 -0.102625 0.089518 +v 0.054032 -0.102516 0.088974 +v 0.054032 -0.102516 0.088974 +v 0.054032 -0.101972 0.088539 +v 0.054032 -0.101972 0.088539 +v 0.054032 -0.101863 0.086799 +v 0.054032 -0.101863 0.086799 +v 0.054032 -0.101863 0.086799 +v 0.054032 -0.101863 0.086799 +v 0.054032 -0.101863 0.086799 +v 0.054032 -0.101863 0.086799 +v 0.054032 -0.101863 0.086799 +v 0.054032 -0.101863 0.086799 +v 0.054032 
-0.101863 0.086799 +v 0.053923 -0.105235 0.078532 +v 0.053923 -0.105235 0.078532 +v 0.053923 -0.105235 0.078532 +v 0.053923 -0.105235 0.077988 +v 0.053923 -0.105235 0.077988 +v 0.053923 -0.105235 0.077988 +v 0.053923 -0.105235 0.077988 +v 0.053815 -0.105344 0.077988 +v 0.053815 -0.106105 0.079511 +v 0.053815 -0.106105 0.079511 +v 0.053597 -0.106214 0.079511 +v 0.053597 -0.106214 0.079511 +v 0.053053 -0.107737 0.080055 +v 0.053053 -0.109804 0.080055 +v 0.053053 -0.109804 0.080055 +v 0.052836 -0.110021 0.080055 +v 0.052509 -0.110021 0.080055 +v 0.052509 -0.110021 0.080055 +v 0.052509 -0.110021 0.080055 +v 0.052401 -0.109912 0.080490 +v 0.052401 -0.109912 0.080490 +v 0.052292 -0.108934 0.086472 +v 0.052292 -0.108934 0.086472 +v 0.052292 -0.108934 0.086472 +v 0.013026 -0.108934 0.086472 +v 0.013026 -0.108934 0.086472 +v 0.013026 -0.108934 0.086472 +v -0.011013 -0.108934 0.086472 +v -0.011013 -0.108934 0.086472 +v -0.011013 -0.108934 0.086472 +v -0.011013 -0.108934 0.086472 +v -0.011013 -0.108934 0.086472 +v -0.052345 -0.108934 0.086472 +v -0.052345 -0.108934 0.086472 +v -0.052345 -0.108934 0.086472 +v -0.052345 -0.109912 0.080490 +v -0.052345 -0.109912 0.080490 +v -0.052345 -0.109912 0.080490 +v -0.052345 -0.109912 0.080490 +v -0.052563 -0.109804 0.080490 +v -0.052563 -0.109804 0.080490 +v -0.052454 -0.110021 0.080055 +v -0.052454 -0.110021 0.080055 +v -0.052454 -0.110021 0.080055 +v -0.052780 -0.110021 0.080055 +v -0.052780 -0.110021 0.080055 +v -0.053868 -0.110021 0.079946 +v -0.053107 -0.110130 0.079946 +v -0.053107 -0.110130 0.079946 +v -0.053107 -0.110130 0.079946 +v -0.049952 -0.110239 0.079729 +v -0.049952 -0.110239 0.079729 +v -0.049952 -0.110239 0.079729 +v -0.049952 -0.110239 0.079729 +v -0.066377 -0.111109 0.079729 +v -0.066377 -0.111109 0.079729 +v -0.066377 -0.111109 0.079729 +v -0.066377 -0.111109 0.079729 +v -0.066377 -0.111109 0.079729 +v -0.069640 -0.110783 0.079837 +v -0.069640 -0.110783 0.079837 +v -0.070836 -0.110674 0.080708 +v -0.070836 -0.110674 
0.080708 +v -0.073447 -0.109695 0.081143 +v -0.073447 -0.109695 0.081143 +v -0.078015 -0.106105 0.082665 +v -0.074861 -0.108716 0.081143 +v -0.075840 -0.108172 0.081904 +v -0.075840 -0.108172 0.081904 +v -0.073664 -0.109477 0.080816 +v -0.077145 -0.105018 0.089192 +v -0.077145 -0.105018 0.089192 +v -0.077362 -0.103930 0.090606 +v -0.077798 -0.103930 0.090171 +v -0.078450 -0.102734 0.090932 +v -0.078885 -0.101537 0.091802 +v -0.076492 -0.103495 0.091150 +v -0.076492 -0.103495 0.091150 +v -0.076492 -0.103495 0.091150 +v -0.076492 -0.103495 0.091150 +v -0.071271 -0.096969 0.094521 +v -0.071271 -0.096969 0.094521 +v -0.071271 -0.096969 0.094521 +v -0.071271 -0.096969 0.094521 +v -0.071271 -0.096969 0.094521 +v -0.072359 -0.094467 0.095827 +v -0.072359 -0.094467 0.095827 +v -0.072359 -0.094467 0.095827 +v -0.072359 -0.094467 0.095827 +v -0.073012 -0.091748 0.097241 +v -0.073012 -0.091748 0.097241 +v -0.073012 -0.091748 0.097241 +v -0.072794 -0.090116 0.098002 +v -0.073773 -0.052808 0.117363 +v -0.074534 -0.024745 0.131830 +v -0.073338 -0.024419 0.131286 +v -0.073447 -0.020938 0.132482 +v -0.073447 -0.020938 0.132482 +v -0.072794 -0.018654 0.131286 +v -0.072794 -0.018654 0.131286 +v -0.072794 -0.018654 0.131286 +v -0.072794 0.028226 0.131286 +v -0.072794 0.028226 0.131286 +v -0.072794 0.028226 0.131286 +v -0.072794 0.028226 0.131286 +v -0.072794 0.087288 0.131286 +v -0.072794 0.087288 0.131286 +v -0.073447 0.089464 0.132700 +v -0.073229 0.091965 0.132700 +v -0.071924 0.095555 0.132700 +v -0.071598 0.096751 0.133026 +v -0.071598 0.096751 0.133026 +v -0.070836 0.099579 0.133352 +v -0.070836 0.099579 0.133352 +v -0.070836 0.099579 0.133352 +v -0.070836 0.099579 0.133352 +v -0.070836 0.099579 0.133352 +v -0.070836 0.099579 0.133352 +v -0.068661 0.101320 0.133352 +v -0.068661 0.101320 0.133352 +v -0.068661 0.101320 0.133352 +v -0.068661 0.101320 0.133352 +v -0.066159 0.102625 0.133352 +v -0.066159 0.102625 0.133352 +v -0.066159 0.102625 0.133352 +v -0.066159 0.102625 0.133352 
+v -0.066159 0.102625 0.133352 +v -0.066159 0.102625 0.133352 +v -0.064310 0.108390 0.133352 +v -0.064310 0.108390 0.133352 +v -0.064310 0.108390 0.133352 +v -0.064310 0.108390 0.133352 +v -0.064310 0.108390 0.133352 +v -0.007967 0.108390 0.133352 +v -0.007967 0.108390 0.133352 +v -0.007967 0.108390 0.133352 +v -0.007967 0.108390 0.133352 +v 0.056208 0.108390 0.133352 +v 0.056208 0.108390 0.133352 +v 0.056208 0.108390 0.133352 +v 0.056208 0.108390 0.133352 +v 0.056208 0.108390 0.133352 +v 0.056208 0.108390 0.133352 +v 0.065779 0.108281 0.133352 +v 0.065779 0.108281 0.133352 +v 0.065779 0.108281 0.133352 +v 0.068716 0.107737 0.133352 +v 0.068716 0.107737 0.133352 +v 0.068716 0.107737 0.133352 +v 0.068716 0.107737 0.133352 +v 0.068716 0.107737 0.133352 +v 0.071544 0.106541 0.133352 +v 0.071544 0.106541 0.133352 +v 0.071544 0.106541 0.133352 +v 0.071544 0.106541 0.133352 +v 0.071544 0.106541 0.133352 +v 0.071544 0.106541 0.133352 +v 0.074699 0.105670 0.133135 +v 0.073720 0.107193 0.132591 +v 0.075786 0.105888 0.132374 +v 0.076983 0.105127 0.131612 +v 0.076983 0.105127 0.131612 +v 0.077853 0.105888 0.127044 +v 0.077853 0.105888 0.127044 +v 0.077744 0.105888 0.126609 +v 0.076765 0.106867 0.126609 +v 0.076765 0.106867 0.126609 +v 0.077962 0.105453 0.126391 +v 0.077962 0.105453 0.126391 +v 0.077200 0.104800 0.126609 +v 0.076874 0.105127 0.126609 +v 0.076874 0.105127 0.126609 +v 0.076983 0.104800 0.126717 +v 0.076983 0.104800 0.126717 +v 0.076983 0.104800 0.126717 +v 0.076983 0.104691 0.127153 +v 0.076983 0.104691 0.127153 +v 0.076983 0.104691 0.127153 +v 0.076983 0.104691 0.127153 +v 0.075786 0.103930 0.131286 +v 0.075786 0.103930 0.131286 +v 0.076548 0.104691 0.127044 +v 0.076548 0.104691 0.127044 +v 0.076548 0.104691 0.127044 +v 0.076548 0.104691 0.127044 +v 0.076330 0.103930 0.131395 +v 0.076330 0.103930 0.131395 +v 0.076330 0.103930 0.131395 +v 0.077200 0.102734 0.131395 +v 0.077200 0.102734 0.131395 +v 0.077200 0.102734 0.131395 +v 0.077200 0.102734 0.131395 +v 
0.079376 0.098818 0.131286 +v 0.079376 0.098818 0.131286 +v 0.080246 0.095990 0.131286 +v 0.080246 0.095990 0.131286 +v 0.080463 0.093053 0.131286 +v 0.080463 0.093053 0.131286 +v 0.080463 0.057268 0.131286 +v 0.080463 0.057268 0.131286 +v 0.080463 0.057268 0.131286 +v 0.080463 0.090769 0.131286 +v 0.080463 0.090769 0.131286 +v 0.080463 0.090769 0.131286 +v 0.081225 0.091530 0.126826 +v 0.081225 0.091530 0.126826 +v 0.081225 0.091530 0.126826 +v 0.081225 0.091530 0.126826 +v 0.081225 0.091530 0.126826 +v 0.081116 0.053678 0.127805 +v 0.081116 0.053678 0.127805 +v 0.081116 0.053678 0.127805 +v 0.081116 0.053678 0.127805 +v 0.081007 0.020068 0.128349 +v 0.081007 0.020068 0.128349 +v 0.081225 0.010061 0.126826 +v 0.081225 0.010061 0.126826 +v 0.081225 0.010061 0.126826 +v 0.081116 0.007560 0.127914 +v 0.081116 0.007560 0.127914 +v 0.081007 0.007777 0.128023 +v 0.081007 0.007777 0.128023 +v 0.081116 0.008539 0.127914 +v 0.081007 0.016696 0.128349 +v 0.080898 0.008430 0.128349 +v 0.080898 0.008430 0.128349 +v 0.080898 0.008430 0.128349 +v 0.080898 0.008430 0.128349 +v 0.080790 0.008321 0.128132 +v 0.080790 0.008321 0.128132 +v 0.080790 0.008321 0.128132 +v 0.080790 0.007886 0.128023 +v 0.080790 0.007886 0.128023 +v 0.080790 0.007668 0.128240 +v 0.080790 0.007668 0.128240 +v 0.081007 0.007451 0.128349 +v 0.074155 0.007668 0.128240 +v 0.074155 0.007668 0.128240 +v 0.074155 0.007668 0.128240 +v 0.074590 0.007668 0.131612 +v 0.074590 0.007668 0.131612 +v 0.075025 0.007668 0.131721 +v 0.075025 0.007668 0.131721 +v 0.075025 0.007342 0.132047 +v 0.075025 0.007342 0.132047 +v 0.075025 0.007342 0.132047 +v 0.075025 0.007342 0.132047 +v 0.075025 -0.012998 0.132047 +v 0.075025 -0.012998 0.132047 +v 0.075025 -0.012998 0.132047 +v 0.075025 -0.018654 0.132047 +v 0.075025 -0.018654 0.132047 +v 0.075025 -0.018654 0.132047 +v 0.074916 -0.020830 0.131830 +v 0.074916 -0.020830 0.131830 +v 0.074372 -0.020721 0.131612 +v 0.074263 -0.023984 0.130416 +v 0.074263 -0.023984 0.130416 +v 0.073176 
-0.054657 0.113991 +v 0.073176 -0.054657 0.113991 +v 0.073176 -0.054657 0.113991 +v 0.073176 -0.054657 0.113991 +v 0.073285 -0.054875 0.111381 +v 0.073285 -0.054875 0.111381 +v 0.073067 -0.054983 0.111163 +v 0.073067 -0.054983 0.111163 +v 0.073285 -0.055527 0.111163 +v 0.073285 -0.055527 0.111163 +v 0.073285 -0.055527 0.111163 +v 0.073285 -0.055527 0.111163 +v 0.073176 -0.055745 0.111381 +v 0.073176 -0.055745 0.111381 +v 0.073067 -0.055854 0.111381 +v 0.073067 -0.055854 0.111381 +v 0.073067 -0.055854 0.111381 +v 0.073176 -0.055962 0.113339 +v 0.073176 -0.055962 0.113339 +v 0.073176 -0.055962 0.113339 +v 0.073393 -0.057703 0.113121 +v 0.072523 -0.089464 0.096697 +v 0.072523 -0.089464 0.096697 +v 0.072088 -0.092292 0.095174 +v 0.071870 -0.091857 0.094630 +v 0.071870 -0.091857 0.094630 +v 0.071653 -0.091748 0.089736 +v 0.071653 -0.091748 0.089736 +v 0.070892 -0.094249 0.089736 +v 0.070892 -0.094249 0.089736 +v 0.069478 -0.096425 0.089192 +v 0.069478 -0.096425 0.089192 +v 0.067846 -0.098383 0.089192 +v 0.064692 -0.100667 0.089192 +v 0.064909 -0.100993 0.089736 +v 0.064909 -0.100993 0.089736 +v 0.063713 -0.101646 0.089844 +v 0.061211 -0.102407 0.089627 +v 0.058601 -0.102625 0.089518 +v 0.054358 -0.102299 0.089192 +v 0.054358 -0.102299 0.089192 +v 0.054358 -0.102299 0.089192 +v 0.054141 -0.102081 0.088865 +v 0.054141 -0.101755 0.088757 +v 0.054032 -0.101646 0.087016 +v 0.054032 -0.101646 0.087016 +v 0.054032 -0.101102 0.077988 +v 0.054032 -0.101102 0.077988 +v 0.053815 -0.100667 0.065262 +v 0.053815 -0.100667 0.065262 +v 0.053815 -0.100667 0.065262 +v 0.053815 -0.100667 0.065262 +v 0.053815 -0.100667 0.065262 +v 0.053815 -0.100667 0.065262 +v 0.053815 -0.103821 0.063087 +v 0.053815 -0.103821 0.063087 +v 0.053815 -0.103821 0.063087 +v 0.053815 -0.104256 0.063631 +v 0.053815 -0.104256 0.063631 +v 0.053815 -0.104256 0.063631 +v 0.053815 -0.104474 0.063957 +v 0.053815 -0.104474 0.063957 +v 0.053815 -0.104474 0.063957 +v 0.053488 -0.104691 0.063957 +v 0.053488 -0.104691 
0.063957 +v 0.053597 -0.105453 0.077988 +v 0.053053 -0.106214 0.079511 +v 0.053053 -0.106214 0.079511 +v 0.052836 -0.106867 0.080055 +v 0.052836 -0.106867 0.080055 +v 0.052836 -0.106105 0.079511 +v 0.052727 -0.105888 0.079729 +v 0.052727 -0.105888 0.079729 +v 0.052727 -0.105888 0.079729 +v 0.052727 -0.106758 0.080273 +v 0.052727 -0.106758 0.080273 +v 0.052727 -0.106758 0.080273 +v 0.052836 -0.107737 0.080164 +v 0.052836 -0.109695 0.080273 +v 0.052836 -0.109695 0.080273 +v 0.052618 -0.109804 0.080490 +v 0.052618 -0.108825 0.086472 +v 0.052292 -0.108607 0.087016 +v 0.052292 -0.108607 0.087016 +v 0.052292 -0.108390 0.087125 +v 0.052292 -0.108390 0.087125 +v 0.052292 -0.108390 0.087125 +v 0.013026 -0.108390 0.087125 +v 0.013026 -0.108390 0.087125 +v 0.013026 -0.108390 0.087125 +v -0.000027 -0.108607 0.087016 +v -0.017321 -0.108607 0.087016 +v -0.017321 -0.108607 0.087016 +v -0.017321 -0.108607 0.087016 +v -0.052345 -0.108716 0.086908 +v -0.052345 -0.108716 0.086908 +v -0.052345 -0.108716 0.086908 +v -0.052345 -0.108716 0.086908 +v -0.052563 -0.108825 0.086472 +v -0.052563 -0.108825 0.086472 +v -0.052672 -0.109586 0.080490 +v -0.052672 -0.109586 0.080490 +v -0.052672 -0.109586 0.080490 +v -0.052672 -0.109586 0.080490 +v -0.052672 -0.109586 0.080490 +v -0.052672 -0.109586 0.080490 +v -0.052672 -0.109586 0.080490 +v -0.053107 -0.109804 0.080055 +v -0.053107 -0.109804 0.080055 +v -0.052780 -0.107737 0.080490 +v -0.052780 -0.107737 0.080490 +v -0.052780 -0.107737 0.080490 +v -0.052780 -0.107737 0.080164 +v -0.053107 -0.106214 0.079511 +v -0.052889 -0.106867 0.080055 +v -0.053107 -0.107737 0.080055 +v -0.053651 -0.109804 0.080055 +v -0.053651 -0.109804 0.080055 +v -0.053868 -0.109695 0.080273 +v -0.053868 -0.109695 0.080273 +v -0.053868 -0.109695 0.080273 +v -0.053868 -0.109912 0.080055 +v -0.054194 -0.110021 0.080055 +v -0.054194 -0.110021 0.080055 +v -0.054194 -0.110021 0.080055 +v -0.053651 -0.110239 0.079729 +v -0.053651 -0.110239 0.079729 +v -0.067899 -0.110130 0.079729 
+v -0.067899 -0.110130 0.079729 +v -0.070727 -0.109586 0.080055 +v -0.072359 -0.109912 0.080273 +v -0.072359 -0.109912 0.080273 +v -0.073664 -0.109260 0.080599 +v -0.074861 -0.108607 0.080925 +v -0.077036 -0.106867 0.081795 +v -0.079755 -0.103386 0.083536 +v -0.078994 -0.104909 0.083318 +v -0.078015 -0.106105 0.082883 +v -0.078015 -0.106105 0.082883 +v -0.078015 -0.103930 0.089736 +v -0.078015 -0.103930 0.089736 +v -0.078559 -0.102734 0.090714 +v -0.079103 -0.101537 0.091585 +v -0.080299 -0.098927 0.092781 +v -0.079973 -0.098927 0.093107 +v -0.078885 -0.098709 0.093651 +v -0.078885 -0.098709 0.093651 +v -0.078885 -0.098709 0.093651 +v -0.078885 -0.098709 0.093651 +v -0.079429 -0.096099 0.094957 +v -0.079429 -0.096099 0.094957 +v -0.079429 -0.096099 0.094957 +v -0.079429 -0.096099 0.094957 +v -0.079429 -0.096099 0.094957 +v -0.079429 -0.096099 0.094957 +v -0.073120 -0.090225 0.098002 +v -0.073120 -0.090225 0.098002 +v -0.073120 -0.090225 0.098002 +v -0.073120 -0.090225 0.098002 +v -0.074099 -0.052808 0.117363 +v -0.074099 -0.052808 0.117363 +v -0.074099 -0.052808 0.117363 +v -0.074861 -0.024745 0.131830 +v -0.074861 -0.024745 0.131830 +v -0.074970 -0.022244 0.132809 +v -0.074970 -0.022244 0.132809 +v -0.074970 -0.022244 0.132809 +v -0.074970 -0.022244 0.132809 +v -0.073447 -0.018654 0.132700 +v -0.073447 -0.000054 0.132700 +v -0.073447 0.038559 0.132700 +v -0.073447 0.072169 0.132700 +v -0.074534 0.090878 0.133352 +v -0.073882 0.094902 0.133352 +v -0.073882 0.094902 0.133352 +v -0.073882 0.094902 0.133352 +v -0.072577 0.097404 0.133352 +v -0.072577 0.097404 0.133352 +v -0.072577 0.097404 0.133352 +v -0.072577 0.097404 0.133352 +v -0.072577 0.097404 0.133352 +v -0.076166 0.102734 0.133352 +v -0.076166 0.102734 0.133352 +v -0.076166 0.102734 0.133352 +v -0.076166 0.102734 0.133352 +v -0.074534 0.104474 0.133352 +v -0.074534 0.104474 0.133352 +v -0.074534 0.104474 0.133352 +v -0.074534 0.104474 0.133352 +v -0.071489 0.106541 0.133352 +v -0.071489 0.106541 0.133352 +v 
-0.071489 0.106541 0.133352 +v -0.071489 0.106541 0.133352 +v -0.068770 0.107737 0.133352 +v -0.068770 0.107737 0.133352 +v -0.068770 0.107737 0.133352 +v -0.065942 0.109695 0.132809 +v -0.065942 0.110021 0.132591 +v -0.064310 0.109804 0.132809 +v -0.064310 0.109804 0.132809 +v -0.056261 0.109804 0.132809 +v -0.007967 0.109804 0.132809 +v -0.000027 0.109477 0.133026 +v -0.000027 0.109477 0.133026 +v 0.022489 0.108716 0.133352 +v 0.064257 0.108716 0.133352 +v 0.067411 0.108716 0.133244 +v 0.067411 0.108716 0.133244 +v 0.070783 0.108716 0.132591 +v 0.071000 0.109151 0.131612 +v 0.071000 0.109151 0.131612 +v 0.074046 0.107628 0.131612 +v 0.074046 0.107628 0.131612 +v 0.074046 0.107628 0.131612 +v 0.074807 0.108390 0.127044 +v 0.074807 0.108390 0.127044 +v 0.074699 0.108172 0.126391 +v 0.074699 0.108172 0.126391 +v 0.074481 0.107846 0.126282 +v 0.075677 0.106758 0.126282 +v 0.075786 0.105997 0.126717 +v 0.075786 0.105997 0.126717 +v 0.075786 0.105997 0.126717 +v 0.075460 0.106432 0.126500 +v 0.076004 0.105888 0.126609 +v 0.076004 0.105888 0.126609 +v 0.076221 0.105670 0.126609 +v 0.076221 0.105670 0.126609 +v 0.076656 0.105344 0.126609 +v 0.076656 0.105344 0.126609 +v 0.076656 0.105344 0.126609 +v 0.076439 0.105127 0.126717 +v 0.076548 0.104800 0.126826 +v 0.076548 0.104800 0.126826 +v 0.075786 0.103930 0.127044 +v 0.075786 0.103930 0.127044 +v 0.075134 0.103277 0.126717 +v 0.075134 0.103277 0.126717 +v 0.075460 0.103495 0.131612 +v 0.075460 0.103495 0.131612 +v 0.075895 0.103495 0.131938 +v 0.075895 0.103495 0.131938 +v 0.075569 0.103495 0.131938 +v 0.075569 0.103495 0.131938 +v 0.075569 0.103495 0.131938 +v 0.076221 0.103821 0.131612 +v 0.076221 0.103821 0.131612 +v 0.078723 0.100232 0.131612 +v 0.078723 0.100232 0.131612 +v 0.078505 0.100123 0.131830 +v 0.079919 0.095990 0.131830 +v 0.080137 0.094467 0.131830 +v 0.080137 0.090769 0.131830 +v 0.080137 0.054657 0.131830 +v 0.080137 0.054657 0.131830 +v 0.079593 0.054548 0.132047 +v 0.079593 0.054548 0.132047 +v 
0.079593 0.054548 0.132047 +v 0.079811 0.054331 0.131721 +v 0.079811 0.054331 0.131721 +v 0.079811 0.054331 0.131721 +v 0.079811 0.054331 0.131721 +v 0.080137 0.054331 0.131395 +v 0.080137 0.054331 0.131395 +v 0.080137 0.054331 0.131395 +v 0.080137 0.054331 0.131395 +v 0.080355 0.054440 0.131286 +v 0.080355 0.054440 0.131286 +v 0.080355 0.054440 0.131286 +v 0.080355 0.054440 0.131286 +v 0.081007 0.054440 0.128132 +v 0.081007 0.054440 0.128132 +v 0.080790 0.054331 0.128349 +v 0.080790 0.054331 0.128349 +v 0.080790 0.054331 0.128349 +v 0.080790 0.054222 0.128132 +v 0.080790 0.054222 0.128132 +v 0.080790 0.053678 0.128132 +v 0.080790 0.053678 0.128132 +v 0.080790 0.053678 0.128132 +v 0.074155 0.053678 0.128132 +v 0.074155 0.053678 0.128132 +v 0.074155 0.053569 0.128240 +v 0.074155 0.053569 0.128240 +v 0.074372 0.053569 0.131395 +v 0.074372 0.053569 0.131395 +v 0.074372 0.053569 0.131395 +v 0.074372 0.053569 0.131395 +v 0.074372 0.053569 0.131395 +v 0.074372 0.053352 0.131721 +v 0.074372 0.053352 0.131721 +v 0.074372 0.053352 0.131721 +v 0.074372 0.053352 0.131721 +v 0.074372 0.008647 0.131721 +v 0.074372 0.008647 0.131721 +v 0.074372 0.008647 0.131721 +v 0.074372 0.008647 0.131721 +v 0.074590 0.008430 0.131612 +v 0.074590 0.008430 0.131612 +v 0.074590 0.008430 0.131612 +v 0.074155 0.008430 0.128240 +v 0.074155 0.008430 0.128240 +v 0.074155 0.008212 0.128023 +v 0.074155 0.008212 0.128023 +v 0.073937 0.007886 0.127914 +v 0.074155 0.007777 0.128132 +v 0.074155 0.007777 0.128132 +v 0.073937 0.007451 0.128240 +v 0.073937 0.007451 0.128240 +v 0.073937 0.007451 0.128240 +v 0.074046 0.007342 0.131177 +v 0.074046 0.007342 0.131177 +v 0.074046 0.007342 0.131177 +v 0.074372 0.007342 0.131721 +v 0.074372 0.007342 0.131721 +v 0.074372 -0.009952 0.131721 +v 0.074372 -0.018654 0.131721 +v 0.074372 -0.018654 0.131721 +v 0.074046 -0.020612 0.130960 +v 0.074046 -0.020612 0.130960 +v 0.074046 -0.023766 0.129981 +v 0.074046 -0.023766 0.129981 +v 0.074046 -0.023766 0.129981 +v 0.074046 
-0.023766 0.129981 +v 0.074046 -0.023766 0.129981 +v 0.074046 -0.023766 0.129981 +v 0.073067 -0.054657 0.111381 +v 0.073067 -0.054657 0.111381 +v 0.073067 -0.054657 0.111381 +v 0.073067 -0.054657 0.111381 +v 0.073067 -0.054657 0.111381 +v 0.073067 -0.054983 0.110946 +v 0.073067 -0.054983 0.110946 +v 0.073067 -0.055527 0.110946 +v 0.073067 -0.055527 0.110946 +v 0.073067 -0.055527 0.110946 +v 0.073067 -0.055527 0.110946 +v 0.072306 -0.089137 0.096153 +v 0.072306 -0.089137 0.096153 +v 0.072306 -0.089137 0.096153 +v 0.072306 -0.089137 0.096153 +v 0.072306 -0.089137 0.096153 +v 0.072306 -0.089137 0.096153 +v 0.072306 -0.089137 0.096153 +v 0.071870 -0.089137 0.089736 +v 0.071870 -0.089137 0.089736 +v 0.071870 -0.089137 0.089736 +v 0.071653 -0.089137 0.089192 +v 0.071653 -0.057703 0.089192 +v 0.071327 -0.057703 0.088974 +v 0.071327 -0.089137 0.088974 +v 0.069478 -0.090334 0.065262 +v 0.069478 -0.090334 0.065262 +v 0.069695 -0.087941 0.065262 +v 0.070892 -0.090443 0.088865 +v 0.070892 -0.090443 0.088865 +v 0.071000 -0.089137 0.088865 +v 0.071000 -0.089137 0.088865 +v 0.071435 -0.091748 0.089192 +v 0.070674 -0.094141 0.089192 +v 0.068934 -0.096099 0.088865 +v 0.068934 -0.096099 0.088865 +v 0.068934 -0.096099 0.088865 +v 0.067302 -0.097948 0.088865 +v 0.067302 -0.097948 0.088865 +v 0.065453 -0.099470 0.088865 +v 0.065453 -0.099470 0.088865 +v 0.062190 -0.101102 0.088865 +v 0.062190 -0.101102 0.088865 +v 0.061102 -0.101972 0.089192 +v 0.058601 -0.102299 0.089192 +v 0.054358 -0.101646 0.088865 +v 0.054358 -0.101646 0.088865 +v 0.054358 -0.101646 0.088865 +v 0.054358 -0.101646 0.088865 +v 0.054358 -0.101646 0.088865 +v 0.054358 -0.101537 0.087125 +v 0.054358 -0.101537 0.087125 +v 0.054141 -0.100449 0.067655 +v 0.054141 -0.100449 0.067655 +v 0.054141 -0.100449 0.067655 +v 0.053923 -0.100449 0.065262 +v 0.053923 -0.100449 0.065262 +v 0.053923 -0.100449 0.065262 +v 0.053815 -0.100558 0.063087 +v 0.053815 -0.100558 0.063087 +v 0.053815 -0.100558 0.063087 +v 0.053706 -0.103386 
0.062761 +v 0.053488 -0.103386 0.062652 +v 0.053706 -0.104256 0.063087 +v 0.053706 -0.104256 0.063087 +v 0.052944 -0.104474 0.063957 +v 0.052944 -0.104256 0.063087 +v 0.053271 -0.104691 0.063957 +v 0.053162 -0.105453 0.077988 +v 0.052836 -0.105779 0.079185 +v 0.052727 -0.105344 0.078967 +v 0.052727 -0.105344 0.078967 +v 0.052727 -0.105344 0.078967 +v 0.052727 -0.105344 0.078967 +v 0.052727 -0.103713 0.083536 +v 0.052727 -0.103713 0.083536 +v 0.052727 -0.103713 0.083536 +v 0.052727 -0.103713 0.083536 +v 0.052727 -0.103713 0.083536 +v 0.052727 -0.103713 0.083536 +v 0.052727 -0.109586 0.080490 +v 0.052727 -0.109586 0.080490 +v 0.052727 -0.109586 0.080490 +v 0.052727 -0.109586 0.080490 +v 0.052727 -0.109586 0.080490 +v 0.052727 -0.108390 0.086690 +v 0.052727 -0.108390 0.086690 +v 0.052727 -0.108390 0.086690 +v 0.052618 -0.108390 0.087016 +v 0.052292 -0.103604 0.089627 +v 0.052292 -0.103604 0.089627 +v 0.052292 -0.103604 0.089627 +v 0.020748 -0.103277 0.089736 +v 0.013134 -0.103604 0.089627 +v 0.013134 -0.103604 0.089627 +v 0.013134 -0.103604 0.089627 +v 0.013134 -0.103604 0.089627 +v -0.000027 -0.108390 0.087125 +v -0.000027 -0.108390 0.087125 +v -0.000027 -0.108390 0.087125 +v -0.000027 -0.108390 0.087125 +v -0.000027 -0.108390 0.087125 +v -0.052345 -0.108390 0.087125 +v -0.052345 -0.108390 0.087125 +v -0.052345 -0.108390 0.087125 +v -0.052345 -0.108390 0.087125 +v -0.052563 -0.108390 0.087016 +v -0.052672 -0.108390 0.086690 +v -0.052672 -0.108390 0.086690 +v -0.052672 -0.108390 0.086690 +v -0.052672 -0.106214 0.083427 +v -0.052672 -0.106214 0.083427 +v -0.052672 -0.106214 0.083427 +v -0.052672 -0.106214 0.083427 +v -0.052672 -0.106214 0.083427 +v -0.052780 -0.106323 0.080055 +v -0.052780 -0.106323 0.080055 +v -0.052780 -0.106323 0.080055 +v -0.052780 -0.106323 0.080055 +v -0.052780 -0.106323 0.080055 +v -0.052780 -0.105235 0.078532 +v -0.052780 -0.105235 0.078532 +v -0.052780 -0.105235 0.078532 +v -0.052889 -0.106105 0.079511 +v -0.053107 -0.105453 0.077988 +v 
-0.053107 -0.105453 0.077988 +v -0.053107 -0.104691 0.063957 +v -0.053433 -0.104691 0.063957 +v -0.053542 -0.105453 0.077988 +v -0.053542 -0.105453 0.077988 +v -0.053542 -0.105562 0.078423 +v -0.053542 -0.106214 0.079511 +v -0.053651 -0.107737 0.080055 +v -0.053868 -0.107737 0.080164 +v -0.053977 -0.109586 0.080490 +v -0.053977 -0.109586 0.080490 +v -0.053977 -0.109586 0.080490 +v -0.054086 -0.109804 0.080490 +v -0.054303 -0.109912 0.080490 +v -0.054303 -0.109912 0.080490 +v -0.066377 -0.110021 0.080055 +v -0.066377 -0.110021 0.080055 +v -0.066377 -0.110021 0.080055 +v -0.068443 -0.108607 0.086690 +v -0.068443 -0.108607 0.086690 +v -0.069313 -0.109695 0.080273 +v -0.069313 -0.109695 0.080273 +v -0.072903 -0.108498 0.080925 +v -0.072903 -0.108498 0.080925 +v -0.074317 -0.107846 0.080925 +v -0.074317 -0.107846 0.080925 +v -0.076384 -0.106214 0.081795 +v -0.078668 -0.104365 0.082774 +v -0.079212 -0.103060 0.083427 +v -0.080517 -0.102081 0.084188 +v -0.079755 -0.103713 0.084188 +v -0.079755 -0.103713 0.084188 +v -0.080299 -0.100014 0.091476 +v -0.080299 -0.100014 0.091476 +v -0.081278 -0.097078 0.092781 +v -0.081278 -0.097078 0.092781 +v -0.080734 -0.096207 0.094413 +v -0.079973 -0.093488 0.096262 +v -0.079973 -0.093488 0.096262 +v -0.081061 -0.093271 0.095935 +v -0.081061 -0.093271 0.095935 +v -0.081713 -0.092727 0.095065 +v -0.081713 -0.092727 0.095065 +v -0.081061 -0.058682 0.113665 +v -0.079973 -0.054331 0.116493 +v -0.079647 -0.054331 0.116602 +v -0.079647 -0.054331 0.116602 +v -0.079647 -0.054331 0.116602 +v -0.079647 -0.054331 0.116602 +v -0.079647 -0.024745 0.131830 +v -0.079647 -0.024745 0.131830 +v -0.079647 -0.023549 0.132374 +v -0.079647 -0.023549 0.132374 +v -0.079647 -0.021047 0.133135 +v -0.079647 -0.021047 0.133135 +v -0.079647 -0.021047 0.133135 +v -0.079647 -0.021047 0.133135 +v -0.074970 -0.018654 0.133352 +v -0.074970 -0.018654 0.133352 +v -0.074970 -0.013216 0.133352 +v -0.074970 -0.013216 0.133352 +v -0.074970 -0.013216 0.133352 +v -0.074970 
0.014521 0.133352 +v -0.074970 0.014521 0.133352 +v -0.074970 0.014521 0.133352 +v -0.074970 0.053896 0.133352 +v -0.074970 0.053896 0.133352 +v -0.074970 0.053896 0.133352 +v -0.074970 0.053896 0.133352 +v -0.074970 0.089464 0.133352 +v -0.074970 0.089464 0.133352 +v -0.074970 0.089464 0.133352 +v -0.074752 0.092292 0.133352 +v -0.074752 0.092292 0.133352 +v -0.074752 0.092292 0.133352 +v -0.074752 0.092292 0.133352 +v -0.078994 0.097513 0.133352 +v -0.078994 0.097513 0.133352 +v -0.078994 0.097513 0.133352 +v -0.078994 0.097513 0.133352 +v -0.078994 0.097513 0.133352 +v -0.078994 0.097513 0.133352 +v -0.077798 0.100232 0.133352 +v -0.077798 0.100232 0.133352 +v -0.077798 0.100232 0.133352 +v -0.078450 0.102516 0.132591 +v -0.076927 0.104365 0.132591 +v -0.077036 0.104583 0.132374 +v -0.076819 0.104909 0.132374 +v -0.075840 0.105888 0.132374 +v -0.075622 0.105670 0.132591 +v -0.073773 0.107193 0.132591 +v -0.069205 0.109260 0.132591 +v -0.069422 0.109804 0.131612 +v -0.069422 0.109804 0.131612 +v -0.066050 0.110456 0.131503 +v -0.066050 0.110456 0.131503 +v -0.064310 0.110565 0.131503 +v -0.064310 0.110565 0.131503 +v -0.064310 0.110565 0.131503 +v -0.064310 0.110565 0.131503 +v -0.056261 0.110565 0.131503 +v -0.056261 0.110565 0.131503 +v -0.056261 0.110565 0.131503 +v -0.056261 0.110565 0.131503 +v -0.007967 0.110021 0.132482 +v -0.007967 0.110021 0.132482 +v -0.000027 0.110021 0.132482 +v -0.000027 0.110021 0.132482 +v -0.000027 0.109804 0.132809 +v -0.000027 0.109804 0.132809 +v 0.056208 0.109804 0.132809 +v 0.065888 0.109695 0.132809 +v 0.065997 0.110021 0.132591 +v 0.065997 0.110021 0.132591 +v 0.067737 0.110239 0.131612 +v 0.067737 0.110239 0.131612 +v 0.067737 0.110239 0.131612 +v 0.067737 0.110239 0.131612 +v 0.071762 0.110021 0.127044 +v 0.071762 0.110021 0.127044 +v 0.071762 0.110021 0.127044 +v 0.071762 0.110021 0.127044 +v 0.073285 0.109042 0.126391 +v 0.073285 0.109042 0.126391 +v 0.073285 0.109042 0.126391 +v 0.071435 0.109042 0.126391 +v 0.072741 
0.108172 0.126500 +v 0.073502 0.107737 0.126717 +v 0.073502 0.107737 0.126717 +v 0.075569 0.106105 0.127153 +v 0.075569 0.106105 0.127153 +v 0.076004 0.105779 0.126717 +v 0.075786 0.105562 0.126826 +v 0.075460 0.104909 0.126717 +v 0.075460 0.103713 0.126717 +v 0.074590 0.103277 0.125847 +v 0.074263 0.102951 0.124651 +v 0.074263 0.102951 0.124651 +v 0.073937 0.103277 0.124651 +v 0.073937 0.103277 0.124651 +v 0.072958 0.102299 0.106595 +v 0.072958 0.102299 0.106595 +v 0.073285 0.102081 0.106595 +v 0.073285 0.101646 0.106595 +v 0.074263 0.102516 0.124760 +v 0.074263 0.102516 0.124760 +v 0.074699 0.102842 0.126065 +v 0.074699 0.102842 0.126065 +v 0.071000 0.099035 0.131612 +v 0.071000 0.099035 0.131612 +v 0.071218 0.099144 0.131938 +v 0.071435 0.098927 0.132047 +v 0.071435 0.098927 0.132047 +v 0.071435 0.098927 0.132047 +v 0.071435 0.098927 0.132047 +v 0.077309 0.101102 0.132047 +v 0.077309 0.101102 0.132047 +v 0.077309 0.101102 0.132047 +v 0.077309 0.101102 0.132047 +v 0.077309 0.101102 0.132047 +v 0.079049 0.097186 0.132047 +v 0.079049 0.097186 0.132047 +v 0.079049 0.097186 0.132047 +v 0.079049 0.097186 0.132047 +v 0.079593 0.093053 0.132047 +v 0.079593 0.093053 0.132047 +v 0.079593 0.093053 0.132047 +v 0.079593 0.093053 0.132047 +v 0.075025 0.089464 0.132047 +v 0.075025 0.089464 0.132047 +v 0.075025 0.089464 0.132047 +v 0.075025 0.089464 0.132047 +v 0.075025 0.073692 0.132047 +v 0.075025 0.073692 0.132047 +v 0.075025 0.073692 0.132047 +v 0.075025 0.054657 0.132047 +v 0.075025 0.054657 0.132047 +v 0.075025 0.054657 0.132047 +v 0.075025 0.054657 0.132047 +v 0.075025 0.054657 0.132047 +v 0.074699 0.054331 0.131721 +v 0.074699 0.054331 0.131721 +v 0.074699 0.054331 0.131721 +v 0.074372 0.054657 0.131721 +v 0.074372 0.054657 0.131721 +v 0.074372 0.054331 0.131177 +v 0.074372 0.054331 0.131177 +v 0.074372 0.054331 0.131177 +v 0.074372 0.054331 0.131177 +v 0.074155 0.054331 0.128240 +v 0.074155 0.054331 0.128240 +v 0.074155 0.054222 0.128132 +v 0.074155 0.054222 0.128132 
+v 0.073937 0.053896 0.127914 +v 0.073937 0.053461 0.127914 +v 0.073937 0.053461 0.127914 +v 0.073937 0.053461 0.127914 +v 0.073937 0.053352 0.128240 +v 0.073937 0.053352 0.128240 +v 0.074046 0.053352 0.131177 +v 0.074046 0.053352 0.131177 +v 0.074046 0.008647 0.131177 +v 0.074046 0.008647 0.131177 +v 0.073937 0.008647 0.128240 +v 0.073937 0.008647 0.128240 +v 0.073937 0.008539 0.127914 +v 0.073937 0.008539 0.127914 +v 0.073937 0.008539 0.127914 +v 0.073937 0.007560 0.127914 +v 0.073937 0.007560 0.127914 +v 0.073937 0.007560 0.127914 +v 0.073937 0.007560 0.127914 +v 0.074046 0.005275 0.131177 +v 0.074046 0.005275 0.131177 +v 0.074046 0.005275 0.131177 +v 0.074046 0.005275 0.131177 +v 0.074046 0.005275 0.131177 +v 0.074046 -0.018654 0.131177 +v 0.074046 -0.018654 0.131177 +v 0.074046 -0.018654 0.131177 +v 0.074046 -0.018654 0.131177 +v 0.074046 -0.018654 0.131177 +v 0.074046 -0.018654 0.131177 +v 0.071870 -0.057703 0.089736 +v 0.071870 -0.057703 0.089736 +v 0.071870 -0.057703 0.089736 +v 0.071870 -0.057703 0.089736 +v 0.071870 -0.057703 0.089736 +v 0.071870 -0.057703 0.089736 +v 0.071870 -0.057703 0.089736 +v 0.071653 -0.018654 0.089192 +v 0.071000 -0.057703 0.088865 +v 0.071000 -0.057703 0.088865 +v 0.071000 -0.057703 0.088865 +v 0.068716 -0.056506 0.045248 +v 0.068716 -0.056506 0.045248 +v 0.068716 -0.056506 0.045248 +v 0.068716 -0.056506 0.045248 +v 0.068716 -0.086853 0.045248 +v 0.068716 -0.086853 0.045248 +v 0.068716 -0.086853 0.045248 +v 0.068716 -0.086853 0.045248 +v 0.068716 -0.086853 0.045248 +v 0.068716 -0.086853 0.045248 +v 0.067846 -0.090443 0.044705 +v 0.067846 -0.090443 0.044705 +v 0.068172 -0.090443 0.045248 +v 0.068172 -0.090443 0.045248 +v 0.070021 -0.093923 0.088865 +v 0.070021 -0.093923 0.088865 +v 0.070021 -0.093923 0.088865 +v 0.067628 -0.094793 0.065262 +v 0.067085 -0.095990 0.067655 +v 0.066214 -0.096860 0.067655 +v 0.066214 -0.096860 0.067655 +v 0.064365 -0.098383 0.067655 +v 0.062081 -0.099470 0.065262 +v 0.058709 -0.100449 0.067655 +v 
0.058601 -0.101646 0.088865 +v 0.058601 -0.101646 0.088865 +v 0.055772 -0.100449 0.067655 +v 0.055772 -0.100449 0.067655 +v 0.054141 -0.100232 0.063087 +v 0.054141 -0.100232 0.063087 +v 0.054141 -0.100232 0.063087 +v 0.054141 -0.100232 0.063087 +v 0.053923 -0.100341 0.063087 +v 0.053706 -0.100341 0.062761 +v 0.053706 -0.100558 0.062761 +v 0.053706 -0.100558 0.062761 +v 0.052944 -0.103386 0.062761 +v 0.052836 -0.103821 0.063087 +v 0.052836 -0.103821 0.063087 +v 0.052836 -0.103821 0.063087 +v 0.052836 -0.104256 0.063631 +v 0.052836 -0.104256 0.063631 +v 0.052836 -0.104256 0.063631 +v 0.052836 -0.104256 0.063631 +v 0.052836 -0.104256 0.063631 +v 0.052836 -0.105344 0.077988 +v 0.052836 -0.105344 0.077988 +v 0.052836 -0.105344 0.077988 +v 0.052727 -0.103495 0.083318 +v 0.052727 -0.103495 0.083318 +v 0.052727 -0.103495 0.083318 +v 0.052727 -0.103495 0.083318 +v 0.052727 -0.103495 0.083318 +v 0.052727 -0.103060 0.087343 +v 0.052727 -0.103060 0.087343 +v 0.052727 -0.103060 0.087343 +v 0.052727 -0.103060 0.087343 +v 0.052727 -0.103386 0.089300 +v 0.052727 -0.103386 0.089300 +v 0.052727 -0.103386 0.089300 +v 0.052727 -0.103386 0.089300 +v 0.052618 -0.102734 0.089518 +v 0.052292 -0.102625 0.089518 +v 0.052292 -0.102625 0.089518 +v 0.013134 -0.102625 0.089518 +v -0.000027 -0.102951 0.089736 +v -0.000027 -0.102625 0.089518 +v -0.000027 -0.102625 0.089518 +v -0.000027 -0.102625 0.089518 +v -0.000027 -0.103604 0.089627 +v -0.000027 -0.103604 0.089627 +v -0.000027 -0.103604 0.089627 +v -0.000027 -0.103604 0.089627 +v -0.000027 -0.103604 0.089627 +v -0.052345 -0.103604 0.089627 +v -0.052345 -0.103604 0.089627 +v -0.052345 -0.103604 0.089627 +v -0.052563 -0.103495 0.089518 +v -0.052672 -0.103386 0.089300 +v -0.052672 -0.103386 0.089300 +v -0.052672 -0.103386 0.089300 +v -0.052672 -0.103386 0.089300 +v -0.052672 -0.103386 0.089300 +v -0.052672 -0.102625 0.087016 +v -0.052672 -0.101972 0.088539 +v -0.052672 -0.101972 0.088539 +v -0.052672 -0.101972 0.088539 +v -0.052672 -0.101972 
0.088539 +v -0.052672 -0.101863 0.086799 +v -0.052672 -0.101863 0.086799 +v -0.052672 -0.101863 0.086799 +v -0.052672 -0.102951 0.085058 +v -0.052672 -0.102951 0.085058 +v -0.052672 -0.102951 0.085058 +v -0.052672 -0.102951 0.085058 +v -0.052672 -0.102951 0.085058 +v -0.052672 -0.102951 0.085058 +v -0.052672 -0.102951 0.085058 +v -0.052780 -0.105235 0.077988 +v -0.052780 -0.105235 0.077988 +v -0.052780 -0.105235 0.077988 +v -0.052780 -0.105235 0.077988 +v -0.052889 -0.104474 0.063957 +v -0.052889 -0.104256 0.063631 +v -0.052889 -0.104256 0.063631 +v -0.052889 -0.104256 0.063631 +v -0.052998 -0.104256 0.063087 +v -0.053433 -0.104365 0.063087 +v -0.053759 -0.104474 0.063957 +v -0.053868 -0.105344 0.077988 +v -0.053868 -0.105344 0.077988 +v -0.053868 -0.105344 0.077988 +v -0.053868 -0.106105 0.079511 +v -0.053868 -0.106867 0.080055 +v -0.053977 -0.107193 0.080381 +v -0.053977 -0.106214 0.083427 +v -0.053977 -0.106214 0.083427 +v -0.053977 -0.108498 0.086472 +v -0.053977 -0.108498 0.086472 +v -0.054086 -0.108825 0.086472 +v -0.054086 -0.108607 0.086690 +v -0.054412 -0.108607 0.087016 +v -0.054412 -0.108607 0.087016 +v -0.054303 -0.108934 0.086472 +v -0.054303 -0.108934 0.086472 +v -0.054303 -0.108934 0.086472 +v -0.065506 -0.108934 0.086472 +v -0.065506 -0.108934 0.086472 +v -0.066920 -0.108498 0.087016 +v -0.069422 -0.107955 0.087343 +v -0.070836 -0.107846 0.087016 +v -0.070836 -0.107846 0.087016 +v -0.073120 -0.106649 0.087560 +v -0.073120 -0.106649 0.087560 +v -0.075078 -0.107084 0.081687 +v -0.075078 -0.107084 0.081687 +v -0.075078 -0.107084 0.081687 +v -0.075078 -0.107084 0.081687 +v -0.077145 -0.105235 0.082665 +v -0.077145 -0.105235 0.082665 +v -0.078124 -0.104148 0.082992 +v -0.079538 -0.101755 0.084297 +v -0.080734 -0.100449 0.084732 +v -0.081604 -0.099253 0.085602 +v -0.082148 -0.096316 0.087125 +v -0.081713 -0.099579 0.086255 +v -0.081713 -0.099579 0.086255 +v -0.082366 -0.096642 0.087669 +v -0.082366 -0.096642 0.087669 +v -0.081713 -0.094141 0.094086 +v 
-0.081713 -0.094141 0.094086 +v -0.081822 -0.092618 0.094739 +v -0.081822 -0.092618 0.094739 +v -0.081822 -0.092618 0.094739 +v -0.081822 -0.092618 0.094739 +v -0.081822 -0.058138 0.112577 +v -0.081822 -0.058138 0.112577 +v -0.081713 -0.053678 0.115297 +v -0.081713 -0.024093 0.130524 +v -0.081061 -0.024528 0.131395 +v -0.079973 -0.024745 0.131830 +v -0.081061 -0.020938 0.132591 +v -0.081061 -0.020938 0.132591 +v -0.081061 -0.018654 0.132809 +v -0.079647 -0.018654 0.133352 +v -0.079647 -0.018654 0.133352 +v -0.079647 -0.012889 0.133352 +v -0.079647 -0.012889 0.133352 +v -0.079647 -0.012889 0.133352 +v -0.079647 -0.012889 0.133352 +v -0.079647 -0.012889 0.133352 +v -0.079647 0.093053 0.133352 +v -0.079647 0.093053 0.133352 +v -0.079647 0.093053 0.133352 +v -0.079647 0.093053 0.133352 +v -0.079647 0.093053 0.133352 +v -0.081278 0.090769 0.132591 +v -0.081169 0.094685 0.132591 +v -0.081169 0.094685 0.132591 +v -0.080517 0.097948 0.132591 +v -0.079755 0.101320 0.131612 +v -0.079755 0.101320 0.131612 +v -0.077036 0.105127 0.131612 +v -0.077036 0.105127 0.131612 +v -0.075405 0.106541 0.131612 +v -0.075405 0.106541 0.131612 +v -0.072577 0.108498 0.131612 +v -0.072577 0.108498 0.131612 +v -0.071815 0.110021 0.127044 +v -0.071815 0.110021 0.127044 +v -0.068552 0.111000 0.127044 +v -0.068552 0.111000 0.127044 +v -0.065071 0.111326 0.127044 +v -0.065071 0.111326 0.127044 +v -0.065071 0.111326 0.127044 +v -0.065071 0.111326 0.127044 +v -0.057022 0.111326 0.127044 +v -0.057022 0.111326 0.127044 +v -0.057022 0.111326 0.127044 +v -0.057022 0.111326 0.127044 +v -0.000027 0.110565 0.131503 +v -0.000027 0.110565 0.131503 +v -0.000027 0.110565 0.131503 +v -0.000027 0.110565 0.131503 +v -0.000027 0.110565 0.131503 +v -0.000027 0.110565 0.131503 +v 0.056208 0.110565 0.131503 +v 0.056208 0.110565 0.131503 +v 0.056208 0.110565 0.131503 +v 0.064257 0.110565 0.131503 +v 0.064257 0.110565 0.131503 +v 0.065127 0.111326 0.127044 +v 0.065127 0.111326 0.127044 +v 0.065127 0.111326 0.127044 +v 
0.065127 0.111326 0.127044 +v 0.068499 0.111000 0.127044 +v 0.068499 0.111000 0.127044 +v 0.070130 0.110348 0.126391 +v 0.070130 0.110348 0.126391 +v 0.069913 0.109586 0.126391 +v 0.070783 0.109042 0.126717 +v 0.070783 0.109042 0.126717 +v 0.071218 0.107628 0.131286 +v 0.071218 0.107628 0.131286 +v 0.074807 0.105344 0.131395 +v 0.074807 0.105344 0.131395 +v 0.074807 0.105344 0.131395 +v 0.075677 0.105562 0.127044 +v 0.075677 0.105562 0.127044 +v 0.075242 0.105127 0.127044 +v 0.075351 0.105127 0.126826 +v 0.074590 0.104148 0.126391 +v 0.074155 0.103713 0.125847 +v 0.073720 0.103386 0.124651 +v 0.072741 0.102299 0.106595 +v 0.072741 0.102299 0.106595 +v 0.072741 0.102299 0.106595 +v 0.073176 0.101972 0.105942 +v 0.072958 0.101537 0.105398 +v 0.072741 0.101863 0.105398 +v 0.072197 0.100667 0.104963 +v 0.071653 0.101102 0.104963 +v 0.069913 0.099362 0.104963 +v 0.069913 0.099362 0.104963 +v 0.070348 0.098927 0.104963 +v 0.069804 0.098165 0.105072 +v 0.070456 0.098818 0.105290 +v 0.072632 0.100993 0.105398 +v 0.073176 0.101537 0.106051 +v 0.070239 0.098383 0.122367 +v 0.070239 0.098383 0.122367 +v 0.070783 0.098818 0.131395 +v 0.070783 0.098818 0.131395 +v 0.071218 0.098709 0.131938 +v 0.071218 0.098709 0.131938 +v 0.072197 0.097948 0.132047 +v 0.072197 0.097948 0.132047 +v 0.072197 0.097948 0.132047 +v 0.072197 0.097948 0.132047 +v 0.074046 0.094576 0.132047 +v 0.074046 0.094576 0.132047 +v 0.074046 0.094576 0.132047 +v 0.074699 0.092074 0.132047 +v 0.074699 0.092074 0.132047 +v 0.074699 0.092074 0.132047 +v 0.074372 0.089464 0.131721 +v 0.074372 0.089464 0.131721 +v 0.074372 0.065425 0.131721 +v 0.074046 0.054657 0.131177 +v 0.074046 0.054657 0.131177 +v 0.074046 0.054657 0.131177 +v 0.073937 0.054548 0.128240 +v 0.073937 0.054548 0.128240 +v 0.073937 0.054548 0.128240 +v 0.073937 0.054440 0.127914 +v 0.073937 0.054440 0.127914 +v 0.073937 0.054440 0.127914 +v 0.073937 0.054440 0.127914 +v 0.073611 0.052590 0.122911 +v 0.073611 0.052590 0.122911 +v 0.073611 0.052590 
0.122911 +v 0.073611 0.052590 0.122911 +v 0.073937 0.013216 0.128349 +v 0.071870 0.018437 0.089736 +v 0.071870 0.018437 0.089736 +v 0.071870 0.018437 0.089736 +v 0.071870 0.018437 0.089736 +v 0.071870 0.018437 0.089736 +v 0.071870 0.018437 0.089736 +v 0.071653 0.031489 0.089192 +v 0.071000 0.002121 0.088865 +v 0.071000 0.002121 0.088865 +v 0.071000 0.002121 0.088865 +v 0.071000 0.002121 0.088865 +v 0.068716 0.005819 0.045248 +v 0.068716 0.005819 0.045248 +v 0.068716 0.005819 0.045248 +v 0.068716 0.005819 0.045248 +v 0.068607 -0.056506 0.045031 +v 0.068607 -0.056506 0.045031 +v 0.068607 -0.086853 0.045031 +v 0.068607 -0.086853 0.045031 +v 0.068390 -0.056506 0.044705 +v 0.068390 -0.086853 0.044705 +v 0.068390 -0.086853 0.044705 +v 0.068390 -0.086853 0.044705 +v 0.068172 -0.086853 0.044487 +v 0.068172 -0.086853 0.044487 +v 0.068390 -0.088050 0.044705 +v 0.068390 -0.088050 0.044705 +v 0.067302 -0.090225 0.044487 +v 0.067302 -0.090225 0.044487 +v 0.067302 -0.090225 0.044487 +v 0.066432 -0.092292 0.044487 +v 0.066432 -0.092292 0.044487 +v 0.066432 -0.092292 0.044487 +v 0.066976 -0.092618 0.044705 +v 0.067193 -0.092727 0.045248 +v 0.067193 -0.092727 0.045248 +v 0.065018 -0.095664 0.045248 +v 0.065018 -0.095664 0.045248 +v 0.063169 -0.097186 0.045248 +v 0.063169 -0.097186 0.045248 +v 0.060993 -0.098383 0.045248 +v 0.060993 -0.098383 0.045248 +v 0.056316 -0.099362 0.045248 +v 0.056316 -0.099362 0.045248 +v 0.053923 -0.100232 0.062543 +v 0.053923 -0.100232 0.062543 +v 0.053923 -0.100232 0.062543 +v 0.052944 -0.100232 0.062434 +v 0.052944 -0.100232 0.062434 +v 0.052944 -0.100232 0.062434 +v 0.052944 -0.100341 0.062761 +v 0.052944 -0.100558 0.062761 +v 0.052944 -0.100558 0.062761 +v 0.052944 -0.100558 0.063087 +v 0.052944 -0.100558 0.063087 +v 0.052944 -0.100558 0.063087 +v 0.052836 -0.100667 0.065262 +v 0.052836 -0.100667 0.065262 +v 0.052836 -0.100667 0.065262 +v 0.052836 -0.100667 0.065262 +v 0.052727 -0.101646 0.083318 +v 0.052727 -0.101646 0.083318 +v 0.052727 -0.101646 
0.083318 +v 0.052727 -0.101646 0.083318 +v 0.052727 -0.101863 0.086799 +v 0.052727 -0.101863 0.086799 +v 0.052727 -0.101863 0.086799 +v 0.052618 -0.101102 0.077988 +v 0.052618 -0.101646 0.087016 +v 0.052618 -0.101755 0.088757 +v 0.052618 -0.101755 0.088757 +v 0.052727 -0.101972 0.088539 +v 0.052727 -0.101972 0.088539 +v 0.052727 -0.101972 0.088539 +v 0.052727 -0.101972 0.088539 +v 0.052618 -0.102299 0.089083 +v 0.052292 -0.102299 0.089192 +v 0.052292 -0.102299 0.089192 +v -0.000027 -0.102299 0.089192 +v -0.000027 -0.102299 0.089192 +v -0.000027 -0.102299 0.089192 +v -0.020693 -0.102407 0.089300 +v -0.052345 -0.102625 0.089518 +v -0.052345 -0.102625 0.089518 +v -0.052563 -0.102734 0.089518 +v -0.052563 -0.102299 0.089083 +v -0.052563 -0.101755 0.088757 +v -0.052563 -0.101755 0.088757 +v -0.052563 -0.101755 0.088757 +v -0.052563 -0.101646 0.087016 +v -0.052780 -0.101646 0.083318 +v -0.052780 -0.101646 0.083318 +v -0.052780 -0.101646 0.083318 +v -0.052780 -0.101646 0.083318 +v -0.052780 -0.101646 0.083318 +v -0.052780 -0.101646 0.083318 +v -0.052780 -0.101646 0.083318 +v -0.052889 -0.100667 0.065262 +v -0.052889 -0.100667 0.065262 +v -0.052889 -0.100667 0.065262 +v -0.052889 -0.100667 0.065262 +v -0.052889 -0.103821 0.063087 +v -0.052889 -0.103821 0.063087 +v -0.052889 -0.103821 0.063087 +v -0.052889 -0.103821 0.063087 +v -0.052889 -0.103821 0.063087 +v -0.052998 -0.103821 0.062869 +v -0.053651 -0.104256 0.063087 +v -0.053759 -0.104256 0.063631 +v -0.053759 -0.104256 0.063631 +v -0.053759 -0.104256 0.063631 +v -0.053977 -0.105127 0.077988 +v -0.053977 -0.105127 0.077988 +v -0.053977 -0.105127 0.077988 +v -0.053977 -0.105127 0.077988 +v -0.053977 -0.105127 0.077988 +v -0.053977 -0.105888 0.079729 +v -0.053977 -0.105888 0.079729 +v -0.053977 -0.105888 0.079729 +v -0.053977 -0.105888 0.079729 +v -0.053977 -0.104148 0.087886 +v -0.053977 -0.104148 0.087886 +v -0.053977 -0.104148 0.087886 +v -0.053977 -0.104148 0.087886 +v -0.053977 -0.108390 0.086690 +v -0.053977 
-0.108390 0.086690 +v -0.053977 -0.108390 0.086690 +v -0.054194 -0.108390 0.087125 +v -0.054194 -0.108390 0.087125 +v -0.054194 -0.108390 0.087125 +v -0.065506 -0.108390 0.087125 +v -0.065506 -0.108390 0.087125 +v -0.065506 -0.108390 0.087125 +v -0.065506 -0.108390 0.087125 +v -0.065506 -0.108390 0.087125 +v -0.068226 -0.108172 0.087234 +v -0.068226 -0.108172 0.087234 +v -0.068226 -0.108172 0.087234 +v -0.070510 -0.107411 0.087669 +v -0.070510 -0.107411 0.087669 +v -0.070510 -0.107411 0.087669 +v -0.070510 -0.107411 0.087669 +v -0.071815 -0.107193 0.087669 +v -0.073991 -0.105888 0.088322 +v -0.076166 -0.104148 0.088865 +v -0.076166 -0.104148 0.088865 +v -0.076166 -0.104148 0.088865 +v -0.078015 -0.104148 0.083209 +v -0.078015 -0.104148 0.083209 +v -0.079429 -0.101755 0.084515 +v -0.079429 -0.101755 0.084515 +v -0.080626 -0.099035 0.085711 +v -0.081061 -0.097621 0.086364 +v -0.081496 -0.097621 0.086146 +v -0.082366 -0.093379 0.088539 +v -0.082366 -0.093379 0.088539 +v -0.082583 -0.093488 0.088974 +v -0.082583 -0.093488 0.088974 +v -0.082583 -0.093488 0.088974 +v -0.082583 -0.093488 0.088974 +v -0.082583 -0.057594 0.107574 +v -0.082583 -0.057594 0.107574 +v -0.081822 -0.023875 0.130198 +v -0.081822 -0.023875 0.130198 +v -0.081822 -0.023875 0.130198 +v -0.081822 -0.023875 0.130198 +v -0.081061 -0.023331 0.131830 +v -0.081061 -0.023331 0.131830 +v -0.081496 -0.018654 0.132265 +v -0.081061 -0.012889 0.132809 +v -0.081061 0.029966 0.132809 +v -0.081061 0.065643 0.132809 +v -0.081278 0.065643 0.132591 +v -0.081822 0.090769 0.131503 +v -0.081822 0.090769 0.131503 +v -0.081713 0.094793 0.131503 +v -0.081713 0.094793 0.131503 +v -0.081713 0.094793 0.131503 +v -0.081713 0.094793 0.131503 +v -0.081061 0.098165 0.131612 +v -0.081061 0.098165 0.131612 +v -0.081278 0.100558 0.127044 +v -0.081278 0.100558 0.127044 +v -0.079647 0.103604 0.127044 +v -0.079647 0.103604 0.127044 +v -0.077798 0.105888 0.127044 +v -0.077798 0.105888 0.127044 +v -0.074861 0.108390 0.127044 +v -0.074861 
0.108390 0.127044 +v -0.073229 0.109042 0.126391 +v -0.073229 0.109042 0.126391 +v -0.070075 0.110348 0.126391 +v -0.070075 0.110348 0.126391 +v -0.066812 0.111000 0.126391 +v -0.066812 0.111000 0.126391 +v -0.065071 0.111109 0.126391 +v -0.065071 0.111109 0.126391 +v -0.065071 0.111109 0.126391 +v -0.057022 0.111109 0.126391 +v -0.057022 0.111109 0.126391 +v -0.057022 0.111109 0.126391 +v -0.000027 0.110891 0.126282 +v -0.000027 0.110891 0.126282 +v -0.012753 0.111109 0.126391 +v -0.012753 0.111109 0.126391 +v -0.012753 0.111109 0.126391 +v 0.014984 0.111109 0.126391 +v -0.007967 0.111326 0.127044 +v -0.007967 0.111326 0.127044 +v -0.007967 0.111326 0.127044 +v -0.007967 0.111326 0.127044 +v -0.007967 0.111326 0.127044 +v 0.056969 0.111326 0.127044 +v 0.056969 0.111326 0.127044 +v 0.056969 0.111326 0.127044 +v 0.056969 0.111326 0.127044 +v 0.065127 0.111109 0.126391 +v 0.065127 0.111109 0.126391 +v 0.065127 0.111109 0.126391 +v 0.066758 0.111000 0.126391 +v 0.066758 0.111000 0.126391 +v 0.066649 0.110130 0.126391 +v 0.067955 0.109804 0.126717 +v 0.067955 0.109804 0.126717 +v 0.069913 0.108172 0.131286 +v 0.069913 0.108172 0.131286 +v 0.071218 0.107520 0.131612 +v 0.071109 0.107411 0.131830 +v 0.072306 0.106432 0.131938 +v 0.074807 0.105235 0.131612 +v 0.074807 0.105235 0.131612 +v 0.074481 0.104583 0.131938 +v 0.074590 0.104583 0.131612 +v 0.074590 0.104583 0.131612 +v 0.074263 0.104148 0.126717 +v 0.073828 0.103713 0.126065 +v 0.073502 0.103277 0.124760 +v 0.073502 0.103277 0.124760 +v 0.072632 0.102299 0.106595 +v 0.072523 0.102190 0.106051 +v 0.072523 0.102190 0.106051 +v 0.071979 0.101646 0.105398 +v 0.071979 0.101646 0.105398 +v 0.071653 0.101211 0.105290 +v 0.071653 0.101211 0.105290 +v 0.069804 0.099471 0.105290 +v 0.069151 0.098709 0.105072 +v 0.069042 0.098492 0.104637 +v 0.069042 0.098492 0.104637 +v 0.069478 0.098165 0.104528 +v 0.069260 0.097621 0.104528 +v 0.069260 0.097621 0.104528 +v 0.069478 0.097839 0.108988 +v 0.069478 0.097839 0.108988 +v 
0.070021 0.098165 0.117907 +v 0.070021 0.098165 0.117907 +v 0.070565 0.098165 0.128240 +v 0.070565 0.098165 0.128240 +v 0.070783 0.098274 0.131177 +v 0.070783 0.098274 0.131177 +v 0.070783 0.098274 0.131177 +v 0.071000 0.098492 0.131830 +v 0.071000 0.098492 0.131830 +v 0.072414 0.096643 0.131830 +v 0.072414 0.096643 0.131830 +v 0.073502 0.094358 0.131830 +v 0.074155 0.091965 0.131721 +v 0.073828 0.091857 0.131177 +v 0.073828 0.091857 0.131177 +v 0.073828 0.091857 0.131177 +v 0.073828 0.091857 0.131177 +v 0.074046 0.087506 0.131177 +v 0.074046 0.087506 0.131177 +v 0.074046 0.087506 0.131177 +v 0.074046 0.087506 0.131177 +v 0.074046 0.087506 0.131177 +v 0.074046 0.087506 0.131177 +v 0.071870 0.085439 0.089736 +v 0.071870 0.085439 0.089736 +v 0.071870 0.085439 0.089736 +v 0.071870 0.085439 0.089736 +v 0.071653 0.072604 0.089192 +v 0.071000 0.085439 0.088865 +v 0.071000 0.085439 0.088865 +v 0.071000 0.085439 0.088865 +v 0.071000 0.053569 0.088865 +v 0.071000 0.053569 0.088865 +v 0.071000 0.053569 0.088865 +v 0.071000 0.053569 0.088865 +v 0.071000 0.053569 0.088865 +v 0.068716 0.070755 0.045248 +v 0.068716 0.070755 0.045248 +v 0.068716 0.070755 0.045248 +v 0.068390 0.040843 0.044705 +v 0.068390 0.063359 0.044705 +v 0.067737 0.040843 0.044487 +v 0.067737 0.040843 0.044487 +v 0.067737 0.040843 0.044487 +v 0.067737 0.040843 0.044487 +v 0.067411 0.075541 0.038070 +v 0.067411 0.075541 0.038070 +v 0.067411 0.075541 0.038070 +v 0.067411 0.075541 0.038070 +v 0.067411 0.075541 0.038070 +v 0.067411 0.075541 0.038070 +v 0.067411 0.075541 0.038070 +v 0.067411 0.045955 0.037852 +v 0.067411 0.045955 0.037852 +v 0.067411 0.045955 0.037852 +v 0.066323 0.036057 0.016098 +v 0.066323 0.036057 0.016098 +v 0.066323 0.036057 0.016098 +v 0.066323 0.036057 0.016098 +v 0.066323 0.036057 0.016098 +v 0.066323 0.036057 0.016098 +v 0.066323 0.036057 0.016098 +v 0.067411 0.030510 0.038070 +v 0.067411 0.030510 0.038070 +v 0.067411 0.030510 0.038070 +v 0.067411 0.030510 0.038070 +v 0.067411 0.030510 
0.038070 +v 0.067411 0.030510 0.038070 +v 0.067411 0.030510 0.038070 +v 0.067411 0.030510 0.038070 +v 0.067302 0.019198 0.035677 +v 0.067302 0.019198 0.035677 +v 0.067302 0.019198 0.035677 +v 0.067302 0.019198 0.035677 +v 0.067302 0.019198 0.035677 +v 0.067411 0.019524 0.038070 +v 0.067411 0.019524 0.038070 +v 0.067411 0.019524 0.038070 +v 0.067411 0.019524 0.038070 +v 0.067411 0.019524 0.038070 +v 0.067737 0.023222 0.044487 +v 0.067737 0.023222 0.044487 +v 0.067737 0.023222 0.044487 +v 0.067737 0.023222 0.044487 +v 0.068390 0.017349 0.044705 +v 0.068172 -0.056506 0.044487 +v 0.067737 -0.086853 0.044487 +v 0.067737 -0.086853 0.044487 +v 0.067737 -0.086853 0.044487 +v 0.065671 -0.087288 0.009137 +v 0.065671 -0.087288 0.009137 +v 0.065671 -0.087288 0.009137 +v 0.065018 -0.089355 0.009028 +v 0.065018 -0.089355 0.009028 +v 0.063713 -0.091095 0.007505 +v 0.063930 -0.091421 0.009028 +v 0.063930 -0.091421 0.009028 +v 0.062516 -0.093162 0.009028 +v 0.062516 -0.093162 0.009028 +v 0.064365 -0.095011 0.044487 +v 0.064365 -0.095011 0.044487 +v 0.064909 -0.095446 0.044705 +v 0.061972 -0.097621 0.044705 +v 0.059797 -0.098492 0.044705 +v 0.056316 -0.099035 0.044705 +v 0.056316 -0.099035 0.044705 +v 0.048376 -0.099035 0.044705 +v 0.048376 -0.099362 0.045248 +v 0.048376 -0.099362 0.045248 +v 0.048376 -0.099362 0.045248 +v 0.048376 -0.099362 0.045248 +v 0.048376 -0.099362 0.045248 +v 0.048376 -0.099362 0.045248 +v 0.048376 -0.099362 0.045248 +v 0.052509 -0.100232 0.063087 +v 0.052509 -0.100232 0.063087 +v 0.052509 -0.100232 0.063087 +v 0.052836 -0.100341 0.063087 +v 0.052727 -0.100449 0.065262 +v 0.052401 -0.101537 0.087125 +v 0.052401 -0.101537 0.087125 +v 0.052401 -0.101537 0.087125 +v 0.052292 -0.101646 0.088865 +v 0.052292 -0.101646 0.088865 +v 0.052292 -0.101646 0.088865 +v 0.052292 -0.101646 0.088865 +v 0.052292 -0.101863 0.088974 +v 0.052292 -0.101863 0.088974 +v 0.052292 -0.101863 0.088974 +v 0.013134 -0.101863 0.088974 +v 0.013134 -0.101863 0.088974 +v -0.000027 -0.101863 
0.088974 +v -0.000027 -0.101863 0.088974 +v -0.052345 -0.102299 0.089192 +v -0.052345 -0.102299 0.089192 +v -0.052345 -0.102299 0.089192 +v -0.052345 -0.102299 0.089192 +v -0.052345 -0.101646 0.088865 +v -0.052345 -0.101646 0.088865 +v -0.052345 -0.101646 0.088865 +v -0.052345 -0.101646 0.088865 +v -0.052345 -0.101537 0.087125 +v -0.052345 -0.101537 0.087125 +v -0.052345 -0.101537 0.087125 +v -0.052780 -0.100449 0.065262 +v -0.052780 -0.100449 0.065262 +v -0.052889 -0.100558 0.063087 +v -0.052889 -0.100558 0.063087 +v -0.052889 -0.100558 0.063087 +v -0.052998 -0.103386 0.062761 +v -0.052998 -0.103386 0.062761 +v -0.053651 -0.103386 0.062761 +v -0.053651 -0.103386 0.062761 +v -0.053759 -0.103821 0.063087 +v -0.053759 -0.103821 0.063087 +v -0.053759 -0.103821 0.063087 +v -0.053759 -0.100558 0.063087 +v -0.053759 -0.100558 0.063087 +v -0.053759 -0.100558 0.063087 +v -0.053759 -0.100667 0.065262 +v -0.053759 -0.100667 0.065262 +v -0.053759 -0.100667 0.065262 +v -0.053759 -0.100667 0.065262 +v -0.053759 -0.100667 0.065262 +v -0.053759 -0.100667 0.065262 +v -0.053977 -0.101863 0.086799 +v -0.053977 -0.101863 0.086799 +v -0.053977 -0.101863 0.086799 +v -0.053977 -0.101863 0.086799 +v -0.053977 -0.101863 0.086799 +v -0.053977 -0.101863 0.086799 +v -0.053977 -0.101972 0.088539 +v -0.053977 -0.101972 0.088539 +v -0.053977 -0.102516 0.088974 +v -0.053977 -0.103386 0.089300 +v -0.053977 -0.103386 0.089300 +v -0.053977 -0.103386 0.089300 +v -0.054086 -0.103495 0.089518 +v -0.054412 -0.103604 0.089627 +v -0.054412 -0.103604 0.089627 +v -0.054412 -0.103604 0.089627 +v -0.054412 -0.103604 0.089627 +v -0.058545 -0.103604 0.089627 +v -0.058545 -0.103604 0.089627 +v -0.058545 -0.103604 0.089627 +v -0.062787 -0.103060 0.089844 +v -0.062787 -0.103060 0.089844 +v -0.062787 -0.103060 0.089844 +v -0.062787 -0.103060 0.089844 +v -0.062787 -0.103060 0.089844 +v -0.065289 -0.102081 0.090388 +v -0.065289 -0.102081 0.090388 +v -0.065289 -0.102081 0.090388 +v -0.065289 -0.102081 0.090388 +v 
-0.066268 -0.101102 0.090823 +v -0.067573 -0.100667 0.091150 +v -0.067573 -0.100667 0.091150 +v -0.067573 -0.100667 0.091150 +v -0.072685 -0.106323 0.088213 +v -0.072685 -0.106323 0.088213 +v -0.072685 -0.106323 0.088213 +v -0.072685 -0.106323 0.088213 +v -0.072685 -0.106323 0.088213 +v -0.075622 -0.103821 0.089518 +v -0.075622 -0.103821 0.089518 +v -0.075622 -0.103821 0.089518 +v -0.075948 -0.104039 0.089300 +v -0.077145 -0.103060 0.089409 +v -0.077145 -0.103060 0.089409 +v -0.078559 -0.100667 0.090606 +v -0.078559 -0.100667 0.090606 +v -0.079973 -0.096642 0.092455 +v -0.079973 -0.096642 0.092455 +v -0.079973 -0.096642 0.092455 +v -0.079973 -0.096642 0.092455 +v -0.080843 -0.097730 0.086690 +v -0.080843 -0.097730 0.086690 +v -0.081496 -0.094793 0.087886 +v -0.081387 -0.093379 0.088757 +v -0.081387 -0.093379 0.088757 +v -0.081496 -0.093379 0.088539 +v -0.081496 -0.093379 0.088539 +v -0.082148 -0.093271 0.088539 +v -0.082148 -0.093271 0.088539 +v -0.082148 -0.057376 0.107030 +v -0.082366 -0.052590 0.109641 +v -0.082583 -0.021700 0.126065 +v -0.082583 -0.021700 0.126065 +v -0.082583 -0.021700 0.126065 +v -0.082583 -0.021700 0.126065 +v -0.082583 -0.020612 0.126717 +v -0.082583 -0.020612 0.126717 +v -0.081822 -0.020721 0.131286 +v -0.081822 -0.020721 0.131286 +v -0.081822 -0.012889 0.131503 +v -0.081822 -0.012889 0.131503 +v -0.081822 -0.012889 0.131503 +v -0.081822 0.000816 0.131503 +v -0.081822 0.000816 0.131503 +v -0.081822 0.000816 0.131503 +v -0.081822 0.056289 0.131503 +v -0.081822 0.056289 0.131503 +v -0.081822 0.056289 0.131503 +v -0.081822 0.056289 0.131503 +v -0.081822 0.056289 0.131503 +v -0.082583 0.091530 0.127044 +v -0.082583 0.091530 0.127044 +v -0.082583 0.091530 0.127044 +v -0.082475 0.095555 0.127044 +v -0.082475 0.095555 0.127044 +v -0.082475 0.095555 0.127044 +v -0.082257 0.097295 0.127044 +v -0.082257 0.097295 0.127044 +v -0.082257 0.097295 0.127044 +v -0.082257 0.097295 0.127044 +v -0.081061 0.100449 0.126391 +v -0.081061 0.100449 0.126391 +v 
-0.079429 0.103495 0.126391 +v -0.079103 0.103169 0.126282 +v -0.079103 0.103169 0.126282 +v -0.077906 0.105453 0.126391 +v -0.077798 0.105888 0.126609 +v -0.076710 0.106758 0.126391 +v -0.076710 0.106758 0.126391 +v -0.074426 0.107846 0.126282 +v -0.076057 0.105888 0.126609 +v -0.076057 0.105888 0.126609 +v -0.072794 0.108172 0.126500 +v -0.071380 0.109042 0.126391 +v -0.068335 0.109912 0.126391 +v -0.065071 0.110239 0.126391 +v -0.057022 0.110239 0.126391 +v -0.057022 0.110239 0.126391 +v -0.000027 0.110239 0.126391 +v -0.000027 0.110239 0.126391 +v -0.000027 0.110674 0.126282 +v -0.000027 0.110674 0.126282 +v -0.000027 0.110674 0.126282 +v 0.056969 0.111109 0.126391 +v 0.056969 0.111109 0.126391 +v 0.056969 0.111109 0.126391 +v 0.056969 0.111109 0.126391 +v 0.065127 0.110239 0.126391 +v 0.065127 0.110021 0.126826 +v 0.065127 0.110021 0.126826 +v 0.064257 0.109260 0.131286 +v 0.064257 0.109260 0.131286 +v 0.067085 0.108934 0.131286 +v 0.067085 0.108934 0.131286 +v 0.068499 0.108390 0.131830 +v 0.070892 0.106867 0.132047 +v 0.070892 0.106867 0.132047 +v 0.070892 0.106867 0.132047 +v 0.074263 0.104691 0.132047 +v 0.074263 0.104691 0.132047 +v 0.074263 0.104691 0.132047 +v 0.074263 0.104691 0.132047 +v 0.069913 0.100341 0.132047 +v 0.069913 0.100341 0.132047 +v 0.069913 0.100341 0.132047 +v 0.069695 0.100123 0.131938 +v 0.069695 0.100123 0.131938 +v 0.070130 0.100232 0.131938 +v 0.070239 0.100123 0.131612 +v 0.070239 0.100123 0.131612 +v 0.069804 0.099688 0.131177 +v 0.069804 0.099688 0.131177 +v 0.069260 0.098927 0.117907 +v 0.069260 0.098927 0.117907 +v 0.068716 0.098274 0.106812 +v 0.068716 0.098274 0.106812 +v 0.068607 0.098165 0.104528 +v 0.068607 0.098165 0.104528 +v 0.068499 0.097839 0.103658 +v 0.068499 0.097839 0.103658 +v 0.068499 0.097839 0.103658 +v 0.068934 0.097839 0.103984 +v 0.069042 0.097513 0.104093 +v 0.069151 0.097078 0.104093 +v 0.069151 0.097078 0.104093 +v 0.069369 0.097186 0.106812 +v 0.069369 0.097186 0.106812 +v 0.071544 0.097404 0.131177 
+v 0.071544 0.097404 0.131177 +v 0.073285 0.094250 0.131177 +v 0.073285 0.094250 0.131177 +v 0.073285 0.094250 0.131177 +v 0.073285 0.094250 0.131177 +v 0.073285 0.094250 0.131177 +v 0.071653 0.089899 0.089736 +v 0.071653 0.089899 0.089736 +v 0.071653 0.089899 0.089736 +v 0.071653 0.089899 0.089736 +v 0.071653 0.089899 0.089736 +v 0.071653 0.087397 0.089192 +v 0.070892 0.088485 0.088865 +v 0.070892 0.088485 0.088865 +v 0.070892 0.088485 0.088865 +v 0.069695 0.086092 0.065262 +v 0.068716 0.083155 0.045248 +v 0.068716 0.083155 0.045248 +v 0.068716 0.083155 0.045248 +v 0.068716 0.083155 0.045248 +v 0.068390 0.083155 0.044705 +v 0.067737 0.078260 0.044487 +v 0.067737 0.078260 0.044487 +v 0.067737 0.078260 0.044487 +v 0.067737 0.078260 0.044487 +v 0.067411 0.076411 0.037526 +v 0.067411 0.076411 0.037526 +v 0.067411 0.076411 0.037526 +v 0.067411 0.076411 0.037526 +v 0.067411 0.076411 0.037526 +v 0.067085 0.076411 0.031217 +v 0.067085 0.076411 0.031217 +v 0.067085 0.076411 0.031217 +v 0.067085 0.076411 0.031217 +v 0.067085 0.076411 0.031217 +v 0.067085 0.076411 0.031217 +v 0.067085 0.076411 0.031217 +v 0.067085 0.076411 0.031217 +v 0.066323 0.076411 0.016751 +v 0.066323 0.076411 0.016751 +v 0.066432 0.076411 0.018165 +v 0.066432 0.076411 0.018165 +v 0.066432 0.076411 0.018165 +v 0.066432 0.076411 0.018165 +v 0.066323 0.076194 0.016098 +v 0.066323 0.076194 0.016098 +v 0.066323 0.076194 0.016098 +v 0.066323 0.076194 0.016098 +v 0.066323 0.076194 0.016098 +v 0.066323 0.059008 0.015880 +v 0.066323 0.059008 0.015880 +v 0.066323 0.059008 0.015880 +v 0.066323 0.059008 0.015880 +v 0.066323 0.059008 0.015880 +v 0.066323 0.042584 0.015880 +v 0.066323 0.042584 0.015880 +v 0.066323 0.042584 0.015880 +v 0.066323 0.042584 0.015880 +v 0.066323 0.042584 0.015880 +v 0.066323 0.042584 0.015880 +v 0.066323 0.025072 0.016098 +v 0.066323 0.025072 0.016098 +v 0.066323 0.025072 0.016098 +v 0.066323 0.025072 0.016098 +v 0.066323 0.025072 0.015880 +v 0.066323 0.025072 0.015880 +v 0.066323 
0.025072 0.015880 +v 0.066323 0.025072 0.015880 +v 0.066323 0.025072 0.015880 +v 0.066323 0.025072 0.015880 +v 0.066323 0.019524 0.015880 +v 0.066323 0.019524 0.015880 +v 0.066323 0.019524 0.015880 +v 0.066323 0.018654 0.016751 +v 0.066323 0.018654 0.016751 +v 0.066323 0.018654 0.016751 +v 0.066323 0.018654 0.016751 +v 0.066323 0.018654 0.016751 +v 0.066323 0.018654 0.016751 +v 0.066323 0.018654 0.016751 +v 0.066323 0.018654 0.016751 +v 0.067302 0.018654 0.035677 +v 0.067302 0.018654 0.035677 +v 0.067302 0.018654 0.035677 +v 0.067302 0.018654 0.035677 +v 0.067411 0.018654 0.037199 +v 0.067411 0.018654 0.037199 +v 0.067411 0.018654 0.037199 +v 0.067411 0.018654 0.037199 +v 0.067411 0.018654 0.037199 +v 0.067737 0.009626 0.044487 +v 0.067737 0.009626 0.044487 +v 0.067737 0.009626 0.044487 +v 0.067737 0.009626 0.044487 +v 0.067737 0.009626 0.044487 +v 0.067737 -0.056506 0.044487 +v 0.067737 -0.056506 0.044487 +v 0.067737 -0.056506 0.044487 +v 0.067737 -0.056506 0.044487 +v 0.065888 -0.085004 0.009246 +v 0.065888 -0.085004 0.009246 +v 0.065344 -0.087071 0.007614 +v 0.063821 -0.088811 0.006200 +v 0.063821 -0.088811 0.006200 +v 0.059036 -0.087397 0.001305 +v 0.059036 -0.087397 0.001305 +v 0.061646 -0.092183 0.006200 +v 0.061646 -0.092183 0.006200 +v 0.060776 -0.094576 0.009028 +v 0.060776 -0.094576 0.009028 +v 0.061646 -0.096969 0.044487 +v 0.061646 -0.096969 0.044487 +v 0.059579 -0.097839 0.044487 +v 0.059579 -0.097839 0.044487 +v 0.056208 -0.098383 0.044487 +v 0.056208 -0.098383 0.044487 +v 0.056208 -0.098383 0.044487 +v 0.056208 -0.098383 0.044487 +v 0.056208 -0.098383 0.044487 +v 0.056208 -0.098383 0.044487 +v 0.056208 -0.098383 0.044487 +v 0.056208 -0.098383 0.044487 +v 0.047723 -0.096969 0.016751 +v 0.047723 -0.096969 0.016751 +v 0.047723 -0.096969 0.016751 +v 0.047723 -0.096969 0.016751 +v 0.047723 -0.096969 0.016751 +v 0.047723 -0.096969 0.016751 +v 0.043155 -0.097186 0.022407 +v 0.043155 -0.097186 0.022407 +v 0.043155 -0.097186 0.022407 +v 0.043155 -0.097186 
0.022407 +v 0.043155 -0.096969 0.018056 +v 0.043155 -0.096969 0.018056 +v 0.043155 -0.096969 0.018056 +v 0.043155 -0.096969 0.018056 +v 0.036955 -0.096969 0.018056 +v 0.036955 -0.096969 0.018056 +v 0.036955 -0.096969 0.018056 +v 0.036955 -0.096969 0.018056 +v 0.036955 -0.096969 0.018056 +v 0.036955 -0.096969 0.018056 +v 0.036955 -0.096969 0.018056 +v 0.040110 -0.096860 0.016098 +v 0.040110 -0.096860 0.016098 +v 0.040110 -0.096860 0.016098 +v 0.040110 -0.096860 0.016098 +v 0.040110 -0.096860 0.016098 +v 0.040110 -0.096860 0.016098 +v 0.033910 -0.096860 0.015880 +v 0.033910 -0.096860 0.015880 +v 0.033910 -0.096860 0.015880 +v 0.009110 -0.096860 0.015880 +v 0.009110 -0.096860 0.015880 +v 0.009110 -0.096860 0.015880 +v 0.009110 -0.096860 0.015880 +v 0.009110 -0.096860 0.015880 +v 0.006064 -0.096860 0.015880 +v 0.006064 -0.096860 0.015880 +v 0.006064 -0.096860 0.015880 +v 0.006064 -0.096860 0.015880 +v 0.006064 -0.096860 0.015880 +v -0.009163 -0.096860 0.015880 +v -0.009163 -0.096860 0.015880 +v -0.009163 -0.096860 0.015880 +v -0.009163 -0.096860 0.015880 +v -0.009163 -0.096860 0.015880 +v -0.009163 -0.096969 0.016751 +v -0.009163 -0.096969 0.016751 +v -0.009163 -0.096969 0.016751 +v -0.009163 -0.096969 0.016751 +v -0.009163 -0.096969 0.016751 +v -0.009163 -0.096969 0.016751 +v 0.009110 -0.097730 0.031217 +v 0.009110 -0.097730 0.031217 +v 0.009110 -0.097730 0.031217 +v 0.009110 -0.097730 0.031217 +v 0.009110 -0.097730 0.031217 +v 0.009110 -0.097730 0.031217 +v 0.009110 -0.097730 0.031217 +v 0.009110 -0.097730 0.031217 +v -0.009163 -0.097730 0.031217 +v -0.009163 -0.097730 0.031217 +v -0.009163 -0.097730 0.031217 +v -0.009163 -0.097730 0.031217 +v -0.009163 -0.097730 0.031217 +v -0.009163 -0.097730 0.031217 +v -0.009490 -0.097948 0.037199 +v -0.009490 -0.097948 0.037199 +v -0.009490 -0.097948 0.037199 +v -0.009490 -0.097948 0.037199 +v -0.009490 -0.097948 0.037199 +v -0.009490 -0.097948 0.037199 +v -0.009163 -0.098056 0.038070 +v -0.009163 -0.098056 0.038070 +v -0.009163 
-0.098056 0.038070 +v -0.009163 -0.098056 0.038070 +v -0.009163 -0.098056 0.038070 +v -0.009163 -0.098056 0.038070 +v -0.008728 -0.098383 0.044487 +v -0.008728 -0.098383 0.044487 +v -0.008728 -0.098383 0.044487 +v -0.008728 -0.098383 0.044487 +v -0.008728 -0.098383 0.044487 +v -0.006662 -0.098383 0.044487 +v -0.006662 -0.098383 0.044487 +v -0.006662 -0.098383 0.044487 +v -0.006662 -0.098383 0.044487 +v -0.006662 -0.098383 0.044487 +v 0.012264 -0.099035 0.044705 +v 0.012264 -0.099362 0.045248 +v 0.012264 -0.099362 0.045248 +v 0.012264 -0.099362 0.045248 +v 0.052509 -0.100341 0.065262 +v 0.052509 -0.100341 0.065262 +v 0.052509 -0.100341 0.065262 +v 0.052509 -0.100341 0.065262 +v 0.052509 -0.100341 0.065262 +v 0.052509 -0.100341 0.065262 +v -0.015037 -0.101646 0.088865 +v -0.015037 -0.101646 0.088865 +v -0.015037 -0.101646 0.088865 +v -0.015037 -0.101646 0.088865 +v -0.015037 -0.101646 0.088865 +v -0.015037 -0.101646 0.088865 +v -0.015037 -0.101646 0.088865 +v -0.052563 -0.100232 0.063087 +v -0.052563 -0.100232 0.063087 +v -0.052563 -0.100232 0.063087 +v -0.052563 -0.100232 0.063087 +v -0.052563 -0.100232 0.063087 +v -0.052563 -0.100232 0.063087 +v -0.052563 -0.100232 0.063087 +v -0.052780 -0.100341 0.063087 +v -0.052889 -0.100341 0.062761 +v -0.052998 -0.100558 0.062761 +v -0.052998 -0.100558 0.062761 +v -0.053651 -0.100558 0.062761 +v -0.053651 -0.100558 0.062761 +v -0.053651 -0.100558 0.062761 +v -0.053651 -0.100558 0.062761 +v -0.053759 -0.100341 0.062761 +v -0.053868 -0.100341 0.063087 +v -0.053977 -0.100341 0.065262 +v -0.053977 -0.100558 0.067655 +v -0.053977 -0.100558 0.067655 +v -0.054086 -0.101646 0.087016 +v -0.054086 -0.101646 0.087016 +v -0.054086 -0.101755 0.088757 +v -0.054086 -0.102299 0.089083 +v -0.054086 -0.102734 0.089518 +v -0.054303 -0.102625 0.089518 +v -0.054303 -0.102625 0.089518 +v -0.058545 -0.102625 0.089518 +v -0.061156 -0.102407 0.089627 +v -0.063657 -0.101646 0.089844 +v -0.064854 -0.101102 0.090062 +v -0.066920 -0.099797 0.090714 +v 
-0.067029 -0.100014 0.091041 +v -0.069640 -0.098709 0.092129 +v -0.069640 -0.098709 0.092129 +v -0.069640 -0.098709 0.092129 +v -0.069640 -0.098709 0.092129 +v -0.069640 -0.098709 0.092129 +v -0.069640 -0.098709 0.092129 +v -0.077145 -0.101755 0.090606 +v -0.077145 -0.101755 0.090606 +v -0.077145 -0.101755 0.090606 +v -0.078233 -0.100776 0.090932 +v -0.079647 -0.096969 0.092890 +v -0.079647 -0.096969 0.092890 +v -0.080517 -0.092509 0.094521 +v -0.080517 -0.092509 0.094521 +v -0.080517 -0.092509 0.094521 +v -0.080517 -0.092509 0.094521 +v -0.081278 -0.093488 0.088974 +v -0.081278 -0.093488 0.088974 +v -0.081278 -0.093488 0.088974 +v -0.081278 -0.093488 0.088974 +v -0.081278 -0.054113 0.109314 +v -0.081278 -0.054113 0.109314 +v -0.081278 -0.054113 0.109314 +v -0.081278 -0.054113 0.109314 +v -0.081387 -0.052699 0.109858 +v -0.081496 -0.057376 0.107139 +v -0.082366 -0.021591 0.125630 +v -0.082366 -0.021591 0.125630 +v -0.082366 -0.020394 0.126174 +v -0.082366 -0.018654 0.126391 +v -0.082583 -0.018654 0.126826 +v -0.082583 -0.018654 0.126826 +v -0.082583 -0.006689 0.127044 +v -0.082583 -0.006689 0.127044 +v -0.082583 -0.006689 0.127044 +v -0.082583 -0.006689 0.127044 +v -0.082583 -0.006689 0.127044 +v -0.082583 0.051612 0.127044 +v -0.082583 0.051612 0.127044 +v -0.082583 0.051612 0.127044 +v -0.082583 0.051612 0.127044 +v -0.082583 0.051612 0.127044 +v -0.082366 0.066296 0.126391 +v -0.082366 0.088376 0.126391 +v -0.082366 0.093814 0.126391 +v -0.082040 0.097186 0.126391 +v -0.082040 0.097186 0.126391 +v -0.080843 0.098600 0.126391 +v -0.079538 0.101646 0.126391 +v -0.078668 0.102951 0.126500 +v -0.076384 0.106432 0.126282 +v -0.076384 0.106432 0.126282 +v -0.076275 0.105670 0.126609 +v -0.076275 0.105670 0.126609 +v -0.075731 0.105562 0.126826 +v -0.075731 0.105562 0.126826 +v -0.075840 0.105997 0.126717 +v -0.075840 0.105997 0.126717 +v -0.075840 0.105997 0.126717 +v -0.075622 0.106105 0.127153 +v -0.075622 0.106105 0.127153 +v -0.074643 0.106867 0.126717 +v 
-0.074643 0.106867 0.126717 +v -0.072141 0.108390 0.126717 +v -0.072141 0.108390 0.126717 +v -0.069422 0.109477 0.126717 +v -0.069422 0.109477 0.126717 +v -0.065071 0.110021 0.126826 +v -0.065071 0.110021 0.126826 +v -0.057022 0.110021 0.126826 +v -0.057022 0.110021 0.126826 +v -0.057022 0.110021 0.126826 +v -0.057022 0.110021 0.126826 +v -0.018409 0.109804 0.127805 +v -0.018409 0.109804 0.127805 +v -0.018409 0.109804 0.127805 +v -0.018953 0.109804 0.127805 +v -0.018953 0.109804 0.127805 +v -0.016016 0.110021 0.126826 +v -0.016016 0.110021 0.126826 +v -0.016016 0.110021 0.126826 +v -0.016016 0.110021 0.126826 +v -0.016016 0.110021 0.126826 +v -0.016016 0.110021 0.126826 +v -0.016016 0.110021 0.126826 +v -0.016016 0.110021 0.126826 +v 0.019226 0.110130 0.126609 +v 0.019226 0.110130 0.126609 +v 0.056969 0.110130 0.126609 +v 0.056969 0.110130 0.126609 +v 0.056969 0.110130 0.126609 +v 0.056969 0.110130 0.126609 +v 0.056208 0.109260 0.131286 +v 0.056208 0.109260 0.131286 +v 0.056208 0.109260 0.131286 +v 0.056208 0.109260 0.131286 +v 0.056208 0.109260 0.131286 +v 0.019008 0.108716 0.131612 +v 0.019334 0.108934 0.131830 +v 0.019334 0.108934 0.131830 +v 0.019334 0.108934 0.131830 +v 0.019334 0.108607 0.131938 +v 0.019334 0.108607 0.131938 +v 0.056208 0.108934 0.131830 +v 0.056208 0.108934 0.131830 +v 0.056208 0.108934 0.131830 +v 0.064257 0.108934 0.131830 +v 0.068281 0.107846 0.132047 +v 0.068281 0.107846 0.132047 +v 0.068281 0.107846 0.132047 +v 0.068281 0.107846 0.132047 +v 0.067955 0.101755 0.132047 +v 0.067955 0.101755 0.132047 +v 0.067955 0.101755 0.132047 +v 0.067955 0.101755 0.132047 +v 0.067955 0.101755 0.132047 +v 0.067628 0.101211 0.131830 +v 0.069586 0.099906 0.131830 +v 0.069586 0.099906 0.131830 +v 0.069369 0.099688 0.131177 +v 0.069369 0.099688 0.131177 +v 0.069369 0.099688 0.131177 +v 0.069260 0.099579 0.128240 +v 0.069260 0.099579 0.128240 +v 0.068172 0.098165 0.104637 +v 0.068172 0.098165 0.104637 +v 0.068172 0.098165 0.104637 +v 0.068607 0.097621 
0.102897 +v 0.068607 0.097621 0.102897 +v 0.068607 0.097621 0.102897 +v 0.068607 0.097621 0.102897 +v 0.069695 0.094793 0.089736 +v 0.069695 0.094793 0.089736 +v 0.070892 0.092400 0.089736 +v 0.070892 0.092400 0.089736 +v 0.071435 0.089899 0.089192 +v 0.071435 0.089899 0.089192 +v 0.070456 0.090878 0.088865 +v 0.070456 0.090878 0.088865 +v 0.070456 0.090878 0.088865 +v 0.068607 0.086201 0.045248 +v 0.068607 0.086201 0.045248 +v 0.068390 0.086201 0.044705 +v 0.068172 0.085004 0.044487 +v 0.067737 0.085004 0.044487 +v 0.067737 0.085004 0.044487 +v 0.067737 0.085004 0.044487 +v 0.067737 0.085004 0.044487 +v 0.066432 0.083590 0.018056 +v 0.066432 0.083590 0.018056 +v 0.066432 0.083590 0.018056 +v 0.066432 0.083590 0.018056 +v 0.066432 0.083590 0.018056 +v 0.065453 0.086418 0.009137 +v 0.065453 0.086418 0.009137 +v 0.065888 0.083155 0.009245 +v 0.065888 0.083155 0.009245 +v 0.065888 0.083155 0.009245 +v 0.065888 0.081415 0.009245 +v 0.065888 0.081415 0.009245 +v 0.065888 0.081415 0.009245 +v 0.065888 0.081415 0.009245 +v 0.065888 0.081415 0.009245 +v 0.065888 0.052373 0.009245 +v 0.065888 0.052373 0.009245 +v 0.065888 0.052373 0.009245 +v 0.065888 0.052373 0.009245 +v 0.065888 0.052373 0.009245 +v 0.065888 0.017023 0.009245 +v 0.065888 0.017023 0.009245 +v 0.065888 0.017023 0.009245 +v 0.065888 0.017023 0.009245 +v 0.065888 0.017023 0.009245 +v 0.065888 0.017023 0.009245 +v 0.065888 0.017023 0.009245 +v 0.065888 0.017023 0.009245 +v 0.065888 0.017023 0.009245 +v 0.065888 -0.055527 0.009245 +v 0.065888 -0.055527 0.009245 +v 0.065888 -0.055527 0.009245 +v 0.064583 -0.055419 0.006309 +v 0.064583 -0.055419 0.006309 +v 0.064583 -0.084895 0.006309 +v 0.064583 -0.084895 0.006309 +v 0.059579 -0.084895 0.001305 +v 0.059579 -0.084895 0.001305 +v 0.058927 -0.084895 0.000761 +v 0.057404 -0.084895 0.000109 +v 0.057948 -0.087832 0.000761 +v 0.055772 -0.086636 0.000000 +v 0.055772 -0.086636 0.000000 +v 0.055772 -0.086636 0.000000 +v 0.055772 -0.088485 0.000326 +v 0.055772 -0.088485 
0.000326 +v 0.057622 -0.089029 0.001305 +v 0.057622 -0.089029 0.001305 +v 0.055881 -0.090007 0.001305 +v 0.055881 -0.090007 0.001305 +v 0.055881 -0.090007 0.001305 +v 0.058274 -0.094467 0.006200 +v 0.058274 -0.094467 0.006200 +v 0.059362 -0.094467 0.006853 +v 0.057730 -0.095990 0.009137 +v 0.057730 -0.095990 0.009137 +v 0.057730 -0.095990 0.009137 +v 0.054794 -0.096969 0.018056 +v 0.047615 -0.096860 0.016424 +v 0.047615 -0.096860 0.016424 +v 0.047615 -0.096860 0.016424 +v 0.047615 -0.096860 0.016424 +v 0.047615 -0.096860 0.016424 +v 0.047615 -0.096860 0.016424 +v 0.047615 -0.096860 0.016424 +v 0.046853 -0.096860 0.015880 +v 0.046853 -0.096860 0.015880 +v 0.046853 -0.096860 0.015880 +v 0.046527 -0.096534 0.009246 +v 0.046527 -0.096534 0.009246 +v 0.046527 -0.096534 0.009246 +v 0.038587 -0.096534 0.009246 +v -0.000027 -0.096534 0.009246 +v -0.000027 -0.096534 0.009246 +v -0.000027 -0.096534 0.009246 +v -0.000027 -0.096534 0.009246 +v -0.009707 -0.096534 0.009246 +v -0.009707 -0.096534 0.009246 +v -0.009707 -0.096534 0.009246 +v -0.009707 -0.096534 0.009246 +v -0.009707 -0.096534 0.009246 +v -0.009816 -0.096860 0.016098 +v -0.009816 -0.096860 0.016098 +v -0.009816 -0.096860 0.016098 +v -0.009816 -0.096860 0.016098 +v -0.009816 -0.096860 0.016098 +v -0.010034 -0.097186 0.022407 +v -0.010034 -0.097186 0.022407 +v -0.010034 -0.097186 0.022407 +v -0.010034 -0.097186 0.022407 +v -0.010034 -0.097186 0.022407 +v -0.010034 -0.097404 0.026758 +v -0.010034 -0.097404 0.026758 +v -0.010034 -0.097404 0.026758 +v -0.010034 -0.097404 0.026758 +v -0.010034 -0.097404 0.026758 +v -0.010034 -0.097404 0.026758 +v -0.010034 -0.097404 0.026758 +v -0.009816 -0.097730 0.031217 +v -0.009816 -0.097730 0.031217 +v -0.009816 -0.097730 0.031217 +v -0.010034 -0.097948 0.035568 +v -0.010034 -0.097948 0.035568 +v -0.010034 -0.097948 0.035568 +v -0.010034 -0.097948 0.035568 +v -0.010034 -0.097948 0.035568 +v -0.010034 -0.097948 0.035568 +v -0.010034 -0.098056 0.037526 +v -0.010034 -0.098056 0.037526 
+v -0.010034 -0.098056 0.037526 +v -0.010034 -0.098056 0.037526 +v -0.010034 -0.098056 0.037526 +v -0.010034 -0.098056 0.037526 +v -0.012535 -0.098383 0.044487 +v -0.012535 -0.098383 0.044487 +v -0.012535 -0.098383 0.044487 +v -0.006662 -0.099035 0.044705 +v -0.006662 -0.099035 0.044705 +v -0.012535 -0.099362 0.045248 +v -0.012535 -0.099362 0.045248 +v -0.012535 -0.099362 0.045248 +v -0.012535 -0.099362 0.045248 +v -0.012535 -0.099362 0.045248 +v -0.012535 -0.099362 0.045248 +v -0.048321 -0.099362 0.045248 +v -0.048321 -0.099362 0.045248 +v -0.048321 -0.099362 0.045248 +v -0.048321 -0.099362 0.045248 +v -0.048321 -0.099362 0.045248 +v -0.052780 -0.100232 0.062543 +v -0.052780 -0.100232 0.062543 +v -0.052780 -0.100232 0.062543 +v -0.053215 -0.100232 0.062325 +v -0.053215 -0.100232 0.062325 +v -0.053215 -0.100232 0.062325 +v -0.053977 -0.100232 0.062543 +v -0.053977 -0.100232 0.062543 +v -0.054194 -0.100232 0.063087 +v -0.054194 -0.100232 0.063087 +v -0.054194 -0.100232 0.063087 +v -0.054194 -0.100449 0.067655 +v -0.054194 -0.100449 0.067655 +v -0.054194 -0.100449 0.067655 +v -0.054303 -0.101537 0.087125 +v -0.054303 -0.101537 0.087125 +v -0.054303 -0.101537 0.087125 +v -0.054303 -0.101537 0.087125 +v -0.054303 -0.101646 0.088865 +v -0.054303 -0.101646 0.088865 +v -0.054303 -0.101646 0.088865 +v -0.054303 -0.102299 0.089192 +v -0.054303 -0.102299 0.089192 +v -0.054303 -0.102299 0.089192 +v -0.058545 -0.102299 0.089192 +v -0.061156 -0.101972 0.089192 +v -0.064854 -0.100993 0.089736 +v -0.064854 -0.100993 0.089736 +v -0.067029 -0.099579 0.090388 +v -0.067029 -0.099579 0.090388 +v -0.068008 -0.098709 0.090932 +v -0.068008 -0.098709 0.090932 +v -0.069857 -0.097078 0.092564 +v -0.071271 -0.096425 0.093325 +v -0.071271 -0.096425 0.093325 +v -0.071271 -0.096425 0.093325 +v -0.079212 -0.096751 0.093107 +v -0.079212 -0.096751 0.093107 +v -0.079212 -0.096751 0.093107 +v -0.079212 -0.096751 0.093107 +v -0.079212 -0.096751 0.093107 +v -0.079212 -0.096751 0.093107 +v -0.079647 
-0.092835 0.095174 +v -0.079647 -0.092835 0.095174 +v -0.079647 -0.092835 0.095174 +v -0.079647 -0.092835 0.095174 +v -0.079647 -0.092835 0.095174 +v -0.080190 -0.055962 0.113991 +v -0.080190 -0.055962 0.113991 +v -0.080190 -0.055962 0.113991 +v -0.080190 -0.055962 0.113991 +v -0.080190 -0.055962 0.113991 +v -0.080190 -0.055745 0.113991 +v -0.080190 -0.055745 0.113991 +v -0.080190 -0.055745 0.113991 +v -0.080517 -0.055962 0.113448 +v -0.080517 -0.055962 0.113448 +v -0.080517 -0.055962 0.113448 +v -0.080843 -0.055854 0.111272 +v -0.080734 -0.055636 0.111163 +v -0.080952 -0.054983 0.110946 +v -0.080843 -0.054983 0.111163 +v -0.080843 -0.054657 0.111381 +v -0.080843 -0.054657 0.111381 +v -0.080843 -0.054657 0.111381 +v -0.080517 -0.054657 0.114100 +v -0.080517 -0.054657 0.114100 +v -0.080517 -0.054657 0.114100 +v -0.080517 -0.054657 0.114100 +v -0.080517 -0.054657 0.114100 +v -0.080517 -0.054657 0.114100 +v -0.080190 -0.023984 0.130416 +v -0.080190 -0.023984 0.130416 +v -0.079864 -0.053026 0.115623 +v -0.080190 -0.054657 0.114644 +v -0.080190 -0.054657 0.114644 +v -0.080190 -0.054657 0.114644 +v -0.079755 -0.054875 0.114644 +v -0.079755 -0.054875 0.114644 +v -0.079647 -0.054875 0.114753 +v -0.079647 -0.054875 0.114753 +v -0.079647 -0.054875 0.114753 +v -0.079647 -0.054875 0.114753 +v -0.079973 -0.024093 0.130633 +v -0.079973 -0.024093 0.130633 +v -0.080190 -0.021917 0.131286 +v -0.080517 -0.023766 0.129981 +v -0.080517 -0.023766 0.129981 +v -0.080517 -0.023766 0.129981 +v -0.080517 -0.023766 0.129981 +v -0.080517 -0.023766 0.129981 +v -0.081387 -0.021700 0.125847 +v -0.081387 -0.021700 0.125847 +v -0.081387 -0.021700 0.125847 +v -0.081387 -0.021700 0.125847 +v -0.081387 -0.021700 0.125847 +v -0.081496 -0.021591 0.125630 +v -0.081496 -0.021591 0.125630 +v -0.081496 -0.021591 0.125630 +v -0.081496 -0.018654 0.126391 +v -0.081496 -0.018654 0.126391 +v -0.081278 -0.018654 0.126826 +v -0.081278 -0.018654 0.126826 +v -0.081496 -0.012889 0.126391 +v -0.081496 -0.012889 
0.126391 +v -0.081496 -0.000598 0.126391 +v -0.081496 -0.000598 0.126391 +v -0.081496 -0.000598 0.126391 +v -0.082366 -0.002121 0.126391 +v -0.082366 -0.002121 0.126391 +v -0.082366 -0.002121 0.126391 +v -0.082366 0.040952 0.126391 +v -0.082366 0.040952 0.126391 +v -0.081931 0.075759 0.126282 +v -0.081822 0.095446 0.126282 +v -0.081387 0.095446 0.126391 +v -0.081387 0.095446 0.126391 +v -0.080952 0.096860 0.126717 +v -0.080952 0.096860 0.126717 +v -0.080190 0.099797 0.126717 +v -0.080190 0.099797 0.126717 +v -0.078776 0.102516 0.126717 +v -0.078776 0.102516 0.126717 +v -0.077036 0.104691 0.127153 +v -0.077036 0.104691 0.127153 +v -0.076710 0.105018 0.126717 +v -0.076710 0.105344 0.126609 +v -0.076710 0.105344 0.126609 +v -0.076057 0.105453 0.126717 +v -0.076057 0.105453 0.126717 +v -0.075296 0.105127 0.126826 +v -0.075622 0.105562 0.127044 +v -0.075622 0.105562 0.127044 +v -0.074861 0.105344 0.131395 +v -0.074861 0.105344 0.131395 +v -0.074861 0.105344 0.131395 +v -0.073773 0.106214 0.131286 +v -0.073773 0.106214 0.131286 +v -0.071271 0.107628 0.131286 +v -0.071271 0.107628 0.131286 +v -0.068552 0.108607 0.131286 +v -0.068552 0.108607 0.131286 +v -0.064310 0.109260 0.131286 +v -0.064310 0.109260 0.131286 +v -0.056261 0.109260 0.131286 +v -0.056261 0.109260 0.131286 +v -0.056261 0.109260 0.131286 +v -0.056261 0.109260 0.131286 +v -0.019279 0.109260 0.131286 +v -0.019279 0.109260 0.131286 +v -0.019279 0.109260 0.131286 +v -0.019279 0.109695 0.128349 +v -0.019279 0.109695 0.128349 +v -0.019062 0.109695 0.128023 +v -0.018300 0.109695 0.128023 +v -0.018300 0.109695 0.128023 +v -0.018083 0.109804 0.128131 +v -0.017974 0.109260 0.131286 +v -0.017974 0.109260 0.131286 +v -0.017974 0.109260 0.131286 +v -0.017974 0.109260 0.131286 +v -0.017974 0.109151 0.131612 +v -0.017974 0.109151 0.131612 +v -0.017974 0.108934 0.131830 +v -0.017974 0.108934 0.131830 +v -0.017974 0.108934 0.131830 +v -0.017974 0.103713 0.132047 +v -0.017974 0.103713 0.132047 +v -0.018191 0.103713 0.131938 
+v -0.018191 0.103713 0.131938 +v -0.018083 0.108390 0.132047 +v -0.018083 0.108390 0.132047 +v -0.018300 0.108390 0.131721 +v -0.018300 0.108716 0.131612 +v 0.018138 0.108390 0.132047 +v 0.018138 0.108390 0.132047 +v 0.018029 0.108934 0.131830 +v 0.018029 0.108934 0.131830 +v 0.018029 0.108934 0.131830 +v 0.018029 0.109260 0.131286 +v 0.018029 0.109260 0.131286 +v 0.018029 0.109260 0.131286 +v 0.018029 0.109260 0.131286 +v 0.018029 0.109695 0.128349 +v 0.018029 0.109695 0.128349 +v 0.018029 0.109695 0.128349 +v 0.018355 0.109804 0.127805 +v 0.018355 0.109804 0.127805 +v 0.018355 0.109804 0.127805 +v 0.019008 0.109804 0.127805 +v 0.019008 0.109804 0.127805 +v 0.019334 0.109695 0.128349 +v 0.019334 0.109695 0.128349 +v 0.019334 0.109695 0.128349 +v 0.019334 0.109260 0.131286 +v 0.019334 0.109260 0.131286 +v 0.019334 0.109260 0.131286 +v 0.019008 0.108934 0.131286 +v 0.019117 0.108390 0.131938 +v 0.019117 0.108390 0.131938 +v 0.019334 0.108390 0.132047 +v 0.019334 0.108390 0.132047 +v 0.019334 0.108390 0.132047 +v 0.019334 0.108390 0.132047 +v 0.056208 0.108390 0.132047 +v 0.056208 0.108390 0.132047 +v 0.056208 0.108390 0.132047 +v 0.056208 0.108390 0.132047 +v 0.065671 0.108281 0.132047 +v 0.065671 0.108281 0.132047 +v 0.065671 0.108281 0.132047 +v 0.064474 0.103277 0.132047 +v 0.064474 0.103277 0.132047 +v 0.064474 0.103277 0.132047 +v 0.064474 0.103277 0.132047 +v 0.065453 0.102299 0.131830 +v 0.066432 0.101537 0.131177 +v 0.066432 0.101537 0.131177 +v 0.068390 0.100449 0.131177 +v 0.068390 0.100449 0.131177 +v 0.067520 0.098492 0.102897 +v 0.068064 0.096751 0.089736 +v 0.068064 0.096751 0.089736 +v 0.068064 0.096751 0.089736 +v 0.068064 0.096751 0.089736 +v 0.067846 0.096534 0.089192 +v 0.067846 0.096534 0.089192 +v 0.070130 0.093488 0.089192 +v 0.069478 0.093162 0.088865 +v 0.069478 0.093162 0.088865 +v 0.067737 0.089790 0.045248 +v 0.067737 0.089790 0.045248 +v 0.067737 0.089790 0.045248 +v 0.067737 0.089790 0.045248 +v 0.067846 0.088593 0.044705 +v 0.067302 
0.088376 0.044487 +v 0.067302 0.088376 0.044487 +v 0.063930 0.089572 0.009028 +v 0.063930 0.089572 0.009028 +v 0.064474 0.088485 0.008267 +v 0.064692 0.086092 0.006853 +v 0.065779 0.084134 0.008375 +v 0.064583 0.081306 0.006309 +v 0.064583 0.081306 0.006309 +v 0.064583 0.081306 0.006309 +v 0.064583 0.081306 0.006309 +v 0.064583 0.032577 0.006309 +v 0.064583 0.032577 0.006309 +v 0.064583 0.032577 0.006309 +v 0.064583 0.032577 0.006309 +v 0.064583 -0.011475 0.006309 +v 0.064583 -0.011475 0.006309 +v 0.064583 -0.011475 0.006309 +v 0.064583 -0.011475 0.006309 +v 0.059579 -0.055419 0.001305 +v 0.059579 -0.055419 0.001305 +v 0.059579 -0.055419 0.001305 +v 0.058927 -0.023766 0.000761 +v 0.057404 -0.009409 0.000109 +v 0.057404 -0.055419 0.000109 +v 0.056425 -0.085548 0.000000 +v 0.056425 -0.085548 0.000000 +v 0.056425 -0.085548 0.000000 +v 0.056425 -0.085548 0.000000 +v 0.056425 -0.085548 0.000000 +v 0.051313 -0.075650 0.000000 +v 0.051313 -0.075650 0.000000 +v 0.051313 -0.075650 0.000000 +v 0.051313 -0.075650 0.000000 +v 0.049681 -0.003644 0.000326 +v 0.048811 0.032577 0.000761 +v 0.048159 -0.050524 0.001305 +v 0.048159 -0.050524 0.001305 +v 0.048159 -0.050524 0.001305 +v 0.046309 -0.045412 0.003154 +v 0.046309 -0.045412 0.003154 +v 0.046309 -0.045412 0.003154 +v 0.046309 -0.045412 0.003154 +v 0.046309 0.070755 0.003154 +v 0.046309 0.070755 0.003154 +v 0.046309 0.070755 0.003154 +v 0.046309 0.071843 0.003154 +v 0.046309 0.071843 0.003154 +v 0.046309 0.071843 0.003154 +v 0.045330 0.073257 0.003589 +v 0.045330 0.073257 0.003589 +v 0.045330 0.073257 0.003589 +v 0.044569 0.074018 0.003589 +v 0.044569 0.074018 0.003589 +v 0.043155 0.074997 0.003154 +v 0.043155 0.074997 0.003154 +v 0.040436 0.074997 0.003154 +v 0.040436 0.074997 0.003154 +v 0.040436 0.074997 0.003154 +v 0.040436 0.074997 0.003154 +v -0.006009 0.074345 0.003698 +v -0.035703 0.071843 0.004460 +v -0.035703 0.071843 0.004460 +v -0.035703 0.071843 0.004460 +v -0.035703 0.071843 0.004460 +v -0.035703 0.071843 
0.004460 +v -0.035703 0.071843 0.004460 +v -0.035703 0.071843 0.004460 +v -0.043100 0.071843 0.004460 +v -0.043100 0.071843 0.004460 +v -0.043100 0.070755 0.004460 +v -0.043100 0.070755 0.004460 +v -0.043100 0.070755 0.004460 +v -0.043100 0.070755 0.004460 +v -0.043100 0.070755 0.004460 +v -0.043100 0.070755 0.004460 +v -0.043100 -0.049545 0.004460 +v -0.043100 -0.049545 0.004460 +v -0.043100 -0.049545 0.004460 +v -0.043970 0.011910 0.004351 +v -0.043970 -0.073801 0.004351 +v -0.044188 -0.076302 0.003481 +v -0.044188 -0.076302 0.003481 +v -0.044949 -0.078695 0.001305 +v -0.044949 -0.078695 0.001305 +v -0.037444 -0.078695 0.001305 +v -0.037444 -0.078695 0.001305 +v -0.005900 -0.081088 0.000109 +v 0.045004 -0.081850 0.000000 +v 0.045004 -0.081850 0.000000 +v 0.045004 -0.081850 0.000000 +v 0.055011 -0.086962 0.000000 +v 0.055011 -0.086962 0.000000 +v 0.055011 -0.086962 0.000000 +v 0.055011 -0.086962 0.000000 +v 0.055011 -0.086962 0.000000 +v 0.055011 -0.086962 0.000000 +v 0.054358 -0.087941 0.000109 +v 0.054358 -0.087941 0.000109 +v 0.054250 -0.090225 0.001305 +v 0.054250 -0.090225 0.001305 +v 0.054250 -0.090225 0.001305 +v 0.056316 -0.095011 0.006200 +v 0.056316 -0.095011 0.006200 +v 0.056316 -0.095011 0.006200 +v 0.056316 -0.095011 0.006200 +v 0.054358 -0.096534 0.009246 +v 0.054358 -0.096534 0.009246 +v 0.054358 -0.096534 0.009246 +v 0.046418 -0.095772 0.006961 +v -0.000027 -0.095772 0.006961 +v -0.046472 -0.096534 0.009246 +v -0.046472 -0.096534 0.009246 +v -0.046472 -0.096534 0.009246 +v -0.046472 -0.096534 0.009246 +v -0.046472 -0.096534 0.009246 +v -0.046472 -0.096534 0.009246 +v -0.046472 -0.096534 0.009246 +v -0.048321 -0.098383 0.044487 +v -0.048321 -0.098383 0.044487 +v -0.048321 -0.098383 0.044487 +v -0.048321 -0.098383 0.044487 +v -0.048321 -0.098383 0.044487 +v -0.048321 -0.098383 0.044487 +v -0.048321 -0.098383 0.044487 +v -0.048321 -0.099035 0.044705 +v -0.048321 -0.099035 0.044705 +v -0.048321 -0.099035 0.044705 +v -0.056261 -0.099035 0.044705 +v 
-0.056261 -0.099362 0.045248 +v -0.056261 -0.099362 0.045248 +v -0.058545 -0.101646 0.088865 +v -0.058545 -0.101646 0.088865 +v -0.058545 -0.101646 0.088865 +v -0.062135 -0.101102 0.088865 +v -0.062135 -0.101102 0.088865 +v -0.064745 -0.100667 0.089192 +v -0.068008 -0.098600 0.089736 +v -0.068008 -0.098600 0.089736 +v -0.069749 -0.096642 0.092020 +v -0.069749 -0.096642 0.092020 +v -0.071271 -0.094793 0.093869 +v -0.072359 -0.093814 0.094630 +v -0.072359 -0.093814 0.094630 +v -0.072359 -0.093814 0.094630 +v -0.072359 -0.093814 0.094630 +v -0.073120 -0.089572 0.096914 +v -0.073120 -0.089572 0.096914 +v -0.073120 -0.089572 0.096914 +v -0.073120 -0.089572 0.096914 +v -0.073120 -0.089572 0.096914 +v -0.072794 -0.089572 0.096806 +v -0.073664 -0.057703 0.113230 +v -0.073447 -0.055962 0.113991 +v -0.073447 -0.055962 0.113991 +v -0.073447 -0.055745 0.113883 +v -0.073664 -0.055636 0.113774 +v -0.073664 -0.055636 0.113774 +v -0.074099 -0.055745 0.114100 +v -0.074099 -0.055745 0.114100 +v -0.079647 -0.055636 0.114100 +v -0.079647 -0.055636 0.114100 +v -0.079647 -0.055636 0.114100 +v -0.080190 -0.055636 0.113339 +v -0.080190 -0.055636 0.113339 +v -0.080190 -0.055636 0.113339 +v -0.080517 -0.055636 0.111381 +v -0.080517 -0.055636 0.111381 +v -0.080626 -0.055527 0.111163 +v -0.080626 -0.055527 0.111163 +v -0.080626 -0.054983 0.111163 +v -0.080626 -0.054983 0.111163 +v -0.080626 -0.054875 0.111381 +v -0.080626 -0.054875 0.111381 +v -0.080626 -0.054875 0.111381 +v -0.080190 -0.054875 0.114100 +v -0.074099 -0.054875 0.114644 +v -0.074099 -0.054875 0.114644 +v -0.074099 -0.054766 0.114862 +v -0.074099 -0.054766 0.114862 +v -0.074099 -0.054766 0.114862 +v -0.073773 -0.054766 0.114753 +v -0.073773 -0.054766 0.114753 +v -0.074861 -0.024201 0.130633 +v -0.074861 -0.024201 0.130633 +v -0.074861 -0.024201 0.130633 +v -0.079647 -0.024201 0.130633 +v -0.079647 -0.024201 0.130633 +v -0.079647 -0.024201 0.130633 +v -0.079647 -0.024201 0.130633 +v -0.079647 -0.021917 0.131503 +v -0.079647 
-0.021917 0.131503 +v -0.079647 -0.018654 0.132047 +v -0.079647 -0.018654 0.132047 +v -0.080190 -0.018654 0.131721 +v -0.079647 -0.012998 0.132047 +v -0.079647 -0.012998 0.132047 +v -0.080190 -0.012998 0.131721 +v -0.080190 0.007342 0.131830 +v -0.080190 0.007342 0.131830 +v -0.079647 0.007451 0.132047 +v -0.079647 0.007451 0.132047 +v -0.079647 0.007451 0.132047 +v -0.079647 0.007451 0.132047 +v -0.074970 -0.012998 0.132047 +v -0.074970 -0.012998 0.132047 +v -0.074970 0.007342 0.132047 +v -0.074970 0.007342 0.132047 +v -0.074970 0.007342 0.132047 +v -0.074970 0.007342 0.132047 +v -0.074970 0.007668 0.131721 +v -0.079973 0.007668 0.131612 +v -0.079973 0.007668 0.131612 +v -0.080952 0.007451 0.128349 +v -0.080517 0.007342 0.131286 +v -0.080517 0.007342 0.131286 +v -0.080517 -0.018654 0.131286 +v -0.080517 -0.020721 0.131068 +v -0.080517 -0.020721 0.131068 +v -0.081278 -0.020503 0.126500 +v -0.081278 -0.020503 0.126500 +v -0.081278 -0.020503 0.126500 +v -0.080952 -0.020177 0.128132 +v -0.080952 -0.012998 0.128349 +v -0.081278 -0.012889 0.126826 +v -0.081278 -0.012889 0.126826 +v -0.081278 0.000925 0.126826 +v -0.081278 0.000925 0.126826 +v -0.081278 0.000925 0.126826 +v -0.081278 0.000925 0.126826 +v -0.081278 0.000925 0.126826 +v -0.080952 0.002121 0.128349 +v -0.081061 0.007451 0.128132 +v -0.081061 0.007451 0.128132 +v -0.081061 0.007451 0.128132 +v -0.081061 0.007777 0.127805 +v -0.081061 0.007777 0.127805 +v -0.081061 0.007777 0.127805 +v -0.081061 0.007777 0.127805 +v -0.080734 0.007668 0.128240 +v -0.080734 0.007668 0.128240 +v -0.080734 0.007668 0.128240 +v -0.080734 0.007668 0.128240 +v -0.080843 0.007777 0.128132 +v -0.080843 0.007777 0.128132 +v -0.080843 0.008321 0.128132 +v -0.080843 0.008321 0.128132 +v -0.081061 0.008321 0.127805 +v -0.081061 0.008321 0.127805 +v -0.081061 0.008321 0.127805 +v -0.081278 0.010061 0.126826 +v -0.081278 0.010061 0.126826 +v -0.081278 0.010061 0.126826 +v -0.081278 0.010061 0.126826 +v -0.081278 0.010061 0.126826 +v 
-0.081278 0.010061 0.126826 +v -0.081278 0.010061 0.126826 +v -0.081278 0.010061 0.126826 +v -0.081496 0.030293 0.126391 +v -0.081496 0.091530 0.126391 +v -0.081278 0.093814 0.126826 +v -0.081278 0.093814 0.126826 +v -0.081278 0.093814 0.126826 +v -0.080517 0.090769 0.131286 +v -0.080517 0.090769 0.131286 +v -0.080517 0.090769 0.131286 +v -0.080408 0.094467 0.131286 +v -0.080408 0.094467 0.131286 +v -0.079864 0.097404 0.131286 +v -0.079864 0.097404 0.131286 +v -0.078776 0.100232 0.131286 +v -0.078776 0.100232 0.131286 +v -0.077254 0.102734 0.131395 +v -0.077254 0.102734 0.131395 +v -0.076166 0.103930 0.131286 +v -0.076166 0.103930 0.131286 +v -0.076166 0.103930 0.131286 +v -0.076492 0.104691 0.127044 +v -0.076492 0.104691 0.127044 +v -0.076601 0.104800 0.126826 +v -0.076601 0.104800 0.126826 +v -0.076492 0.105127 0.126717 +v -0.076492 0.105127 0.126717 +v -0.076492 0.105127 0.126717 +v -0.075840 0.104474 0.126717 +v -0.074970 0.104365 0.126609 +v -0.074426 0.104148 0.126500 +v -0.074534 0.104474 0.126935 +v -0.074643 0.104583 0.131612 +v -0.074643 0.104583 0.131612 +v -0.074752 0.105235 0.131612 +v -0.074752 0.105235 0.131612 +v -0.072468 0.106867 0.131612 +v -0.071271 0.107520 0.131612 +v -0.071163 0.107411 0.131830 +v -0.067138 0.108607 0.131830 +v -0.064310 0.108934 0.131830 +v -0.064310 0.108934 0.131830 +v -0.056261 0.108934 0.131830 +v -0.019279 0.108934 0.131830 +v -0.019279 0.108934 0.131830 +v -0.019170 0.108390 0.131938 +v -0.019170 0.108390 0.131938 +v -0.019062 0.108716 0.131612 +v -0.019062 0.108934 0.131286 +v -0.019062 0.109477 0.128240 +v -0.019062 0.109477 0.128240 +v -0.018953 0.109586 0.128131 +v -0.018953 0.109586 0.128131 +v -0.018409 0.109586 0.128131 +v -0.018409 0.109586 0.128131 +v -0.018300 0.109477 0.128240 +v -0.018300 0.109477 0.128240 +v -0.018300 0.109477 0.128240 +v -0.018300 0.109477 0.128240 +v -0.018300 0.108934 0.131286 +v -0.018300 0.108934 0.131286 +v -0.018300 0.103169 0.131395 +v -0.018300 0.103169 0.131395 +v -0.017974 
0.103060 0.131721 +v -0.017974 0.103060 0.131721 +v -0.007967 0.103060 0.131721 +v -0.007967 0.103713 0.132047 +v 0.018029 0.103713 0.132047 +v 0.018029 0.103713 0.132047 +v 0.018247 0.103713 0.131721 +v 0.018247 0.103713 0.131721 +v 0.018247 0.108390 0.131830 +v 0.018247 0.108390 0.131830 +v 0.018247 0.108934 0.131286 +v 0.018247 0.108934 0.131286 +v 0.018355 0.109477 0.128240 +v 0.018355 0.109477 0.128240 +v 0.018355 0.109586 0.128131 +v 0.018355 0.109586 0.128131 +v 0.018899 0.109586 0.128131 +v 0.018899 0.109586 0.128131 +v 0.019008 0.109477 0.128240 +v 0.019008 0.109477 0.128240 +v 0.019008 0.102951 0.128240 +v 0.019008 0.102951 0.128240 +v 0.019008 0.103169 0.131395 +v 0.019008 0.103169 0.131395 +v 0.019117 0.103169 0.131721 +v 0.019117 0.103713 0.131938 +v 0.019117 0.103713 0.131938 +v 0.019334 0.103713 0.132047 +v 0.019334 0.103713 0.132047 +v 0.019334 0.103713 0.132047 +v 0.051748 0.103386 0.131938 +v 0.060776 0.103713 0.132047 +v 0.060776 0.103713 0.132047 +v 0.060776 0.103713 0.132047 +v 0.060776 0.103713 0.132047 +v 0.060776 0.103713 0.132047 +v 0.060776 0.103713 0.132047 +v 0.063169 0.102951 0.131721 +v 0.063169 0.102951 0.131721 +v 0.064257 0.102407 0.131177 +v 0.064257 0.102407 0.131177 +v 0.064257 0.102407 0.131177 +v 0.064257 0.102407 0.131177 +v 0.062407 0.100123 0.089736 +v 0.062407 0.100123 0.089736 +v 0.064909 0.099035 0.089736 +v 0.064909 0.099035 0.089736 +v 0.064800 0.098818 0.089192 +v 0.065453 0.097621 0.088865 +v 0.065453 0.097621 0.088865 +v 0.065453 0.097621 0.088865 +v 0.067302 0.096099 0.088865 +v 0.067302 0.096099 0.088865 +v 0.065018 0.093814 0.045248 +v 0.065018 0.093814 0.045248 +v 0.066541 0.091965 0.045248 +v 0.066541 0.091965 0.045248 +v 0.066976 0.090769 0.044705 +v 0.066976 0.090769 0.044705 +v 0.065779 0.091421 0.044487 +v 0.065779 0.091421 0.044487 +v 0.065779 0.091421 0.044487 +v 0.064365 0.093162 0.044487 +v 0.064365 0.093162 0.044487 +v 0.062516 0.091313 0.009028 +v 0.062516 0.091313 0.009028 +v 0.062734 0.089899 
0.006744 +v 0.063821 0.086962 0.006200 +v 0.063821 0.086962 0.006200 +v 0.064583 0.084025 0.006309 +v 0.064583 0.084025 0.006309 +v 0.059579 0.081306 0.001305 +v 0.059579 0.081306 0.001305 +v 0.059579 0.081306 0.001305 +v 0.059579 0.081306 0.001305 +v 0.059579 0.015935 0.001305 +v 0.059579 0.015935 0.001305 +v 0.059579 0.015935 0.001305 +v 0.059579 0.015935 0.001305 +v 0.057404 0.062162 0.000109 +v 0.056534 -0.023766 0.000000 +v 0.056534 -0.023766 0.000000 +v 0.056534 -0.023766 0.000000 +v 0.056534 -0.023766 0.000000 +v 0.051313 0.073692 -0.000000 +v 0.051313 0.073692 -0.000000 +v 0.051313 0.073692 -0.000000 +v 0.051313 0.073692 -0.000000 +v 0.049681 0.073692 0.000326 +v 0.048159 0.072604 0.001305 +v 0.048159 0.072604 0.001305 +v 0.048159 0.072604 0.001305 +v 0.048159 0.072604 0.001305 +v 0.048267 0.074997 0.000979 +v 0.048267 0.074997 0.000979 +v 0.047941 0.076302 0.000870 +v 0.047941 0.076302 0.000870 +v 0.047941 0.076302 0.000870 +v 0.046309 0.077064 0.000979 +v 0.046309 0.077064 0.000979 +v 0.042611 0.076846 0.001305 +v 0.042611 0.076846 0.001305 +v 0.042611 0.076846 0.001305 +v -0.000027 0.076846 0.001305 +v -0.000027 0.076846 0.001305 +v -0.000027 0.076846 0.001305 +v -0.000027 0.076846 0.001305 +v -0.000027 0.076846 0.001305 +v -0.006009 0.076846 0.001305 +v -0.006009 0.076846 0.001305 +v -0.006009 0.076846 0.001305 +v -0.006009 0.074997 0.003154 +v -0.006009 0.074997 0.003154 +v -0.006009 0.074997 0.003154 +v -0.006009 0.074997 0.003154 +v -0.006009 0.074997 0.003154 +v -0.006009 0.074997 0.003154 +v -0.035703 0.074345 0.003698 +v -0.043100 0.074997 0.003154 +v -0.043100 0.074997 0.003154 +v -0.043100 0.074997 0.003154 +v -0.043100 0.074345 0.003698 +v -0.043100 0.072713 0.004351 +v -0.043970 0.071843 0.004351 +v -0.045601 0.032033 0.003698 +v -0.045601 -0.049545 0.003698 +v -0.046254 -0.073801 0.003154 +v -0.046254 -0.073801 0.003154 +v -0.045710 -0.074780 0.003481 +v -0.045710 -0.074780 0.003481 +v -0.048321 -0.076846 0.000979 +v -0.048321 -0.076846 
0.000979 +v -0.047994 -0.078151 0.000870 +v -0.047994 -0.078151 0.000870 +v -0.046254 -0.078913 0.000979 +v -0.046254 -0.078913 0.000979 +v -0.044949 -0.079457 0.000761 +v -0.044949 -0.081088 0.000109 +v -0.047016 -0.081632 0.000000 +v -0.047016 -0.081632 0.000000 +v -0.044949 -0.081850 0.000000 +v -0.044949 -0.081850 0.000000 +v -0.044949 -0.081850 0.000000 +v -0.000027 -0.081850 0.000000 +v -0.000027 -0.081850 0.000000 +v -0.000027 -0.081850 0.000000 +v -0.000027 -0.081850 0.000000 +v 0.010742 -0.087071 0.000000 +v 0.010742 -0.087071 0.000000 +v 0.010742 -0.087071 0.000000 +v 0.010742 -0.087941 0.000109 +v 0.046418 -0.090225 0.001305 +v 0.046418 -0.090225 0.001305 +v 0.010850 -0.090225 0.001305 +v 0.010850 -0.090225 0.001305 +v 0.054250 -0.095228 0.006309 +v 0.054250 -0.095228 0.006309 +v -0.046363 -0.095228 0.006309 +v -0.046363 -0.095228 0.006309 +v -0.046363 -0.095228 0.006309 +v -0.054303 -0.095228 0.006309 +v -0.054303 -0.095228 0.006309 +v -0.054412 -0.096534 0.009246 +v -0.054412 -0.096534 0.009246 +v -0.056261 -0.098383 0.044487 +v -0.056261 -0.098383 0.044487 +v -0.056261 -0.098383 0.044487 +v -0.056261 -0.098709 0.044487 +v -0.056261 -0.098709 0.044487 +v -0.059850 -0.098492 0.044705 +v -0.059850 -0.098818 0.045248 +v -0.059850 -0.098818 0.045248 +v -0.059850 -0.100232 0.067655 +v -0.062243 -0.099579 0.067655 +v -0.064419 -0.100123 0.088865 +v -0.064419 -0.100123 0.088865 +v -0.067356 -0.097948 0.088865 +v -0.067356 -0.097948 0.088865 +v -0.064310 -0.098383 0.067655 +v -0.066268 -0.096860 0.067655 +v -0.066268 -0.096860 0.067655 +v -0.067029 -0.095990 0.067655 +v -0.067682 -0.094793 0.065262 +v -0.068878 -0.096099 0.088865 +v -0.068878 -0.096099 0.088865 +v -0.067791 -0.098383 0.089192 +v -0.070292 -0.095446 0.089736 +v -0.070292 -0.095446 0.089736 +v -0.071054 -0.094358 0.093216 +v -0.071054 -0.094358 0.093216 +v -0.072141 -0.092292 0.095174 +v -0.072141 -0.092292 0.095174 +v -0.072577 -0.089464 0.096697 +v -0.073338 -0.057703 0.113121 +v -0.073120 
-0.055962 0.113339 +v -0.073120 -0.055962 0.113339 +v -0.073120 -0.055962 0.113339 +v -0.073338 -0.055636 0.111381 +v -0.073338 -0.055636 0.111381 +v -0.073338 -0.055527 0.111163 +v -0.073338 -0.055527 0.111163 +v -0.073338 -0.055527 0.111163 +v -0.073012 -0.054983 0.110946 +v -0.073012 -0.054983 0.110946 +v -0.073012 -0.054983 0.110946 +v -0.073120 -0.055310 0.111055 +v -0.073120 -0.055310 0.111055 +v -0.073338 -0.055201 0.111163 +v -0.073338 -0.055201 0.111163 +v -0.073338 -0.055201 0.111163 +v -0.073338 -0.054875 0.111381 +v -0.073338 -0.054875 0.111381 +v -0.073556 -0.054875 0.114209 +v -0.073556 -0.054875 0.114209 +v -0.073447 -0.054657 0.114644 +v -0.073447 -0.054657 0.114644 +v -0.074317 -0.023984 0.130416 +v -0.074317 -0.023984 0.130416 +v -0.074970 -0.021917 0.131612 +v -0.074970 -0.021917 0.131612 +v -0.074970 -0.021917 0.131612 +v -0.074970 -0.021917 0.131612 +v -0.074970 -0.018654 0.132047 +v -0.074970 -0.018654 0.132047 +v -0.074317 -0.018654 0.131721 +v -0.074317 -0.012998 0.131721 +v -0.074317 0.007342 0.131721 +v -0.074317 0.007342 0.131721 +v -0.074426 0.007560 0.131721 +v -0.074426 0.007668 0.131395 +v -0.074426 0.007668 0.131395 +v -0.074426 0.007668 0.131395 +v -0.074208 0.007668 0.128240 +v -0.074208 0.007668 0.128240 +v -0.074208 0.007668 0.128240 +v -0.074208 0.007668 0.128240 +v -0.074208 0.007777 0.128132 +v -0.074208 0.007777 0.128132 +v -0.074208 0.008321 0.128132 +v -0.074208 0.008321 0.128132 +v -0.081061 0.008647 0.128132 +v -0.081061 0.008647 0.128132 +v -0.081061 0.008647 0.128132 +v -0.080952 0.008647 0.128349 +v -0.080952 0.008647 0.128349 +v -0.080952 0.053352 0.128349 +v -0.080952 0.053352 0.128349 +v -0.080952 0.053352 0.128349 +v -0.080952 0.053352 0.128349 +v -0.081061 0.053461 0.127914 +v -0.081061 0.053461 0.127914 +v -0.081061 0.053461 0.127914 +v -0.081061 0.053461 0.127914 +v -0.081278 0.056833 0.126826 +v -0.081278 0.056833 0.126826 +v -0.081278 0.056833 0.126826 +v -0.081278 0.056833 0.126826 +v -0.081278 0.056833 
0.126826 +v -0.081278 0.091530 0.126826 +v -0.081278 0.091530 0.126826 +v -0.081278 0.091530 0.126826 +v -0.081278 0.091530 0.126826 +v -0.081278 0.091530 0.126826 +v -0.080517 0.054657 0.131286 +v -0.080517 0.054657 0.131286 +v -0.080517 0.054657 0.131286 +v -0.080517 0.054657 0.131286 +v -0.079973 0.054331 0.131612 +v -0.080190 0.054657 0.131830 +v -0.080190 0.054657 0.131830 +v -0.080190 0.075541 0.131830 +v -0.080190 0.093053 0.131830 +v -0.079864 0.095990 0.131830 +v -0.078559 0.100123 0.131830 +v -0.078668 0.100232 0.131612 +v -0.076275 0.103821 0.131612 +v -0.076275 0.103821 0.131612 +v -0.075731 0.103821 0.131503 +v -0.075731 0.103821 0.131503 +v -0.075840 0.103930 0.127044 +v -0.075731 0.104039 0.126826 +v -0.075078 0.103604 0.126391 +v -0.074643 0.103277 0.125847 +v -0.074099 0.103713 0.125847 +v -0.073664 0.103386 0.124651 +v -0.073991 0.103277 0.124651 +v -0.073012 0.102299 0.106595 +v -0.072794 0.102299 0.106595 +v -0.072577 0.102299 0.106595 +v -0.073447 0.103277 0.124760 +v -0.073447 0.103277 0.124760 +v -0.073556 0.103386 0.125195 +v -0.073991 0.103930 0.126391 +v -0.070184 0.100123 0.131612 +v -0.070184 0.100123 0.131612 +v -0.070184 0.100232 0.131938 +v -0.074426 0.104583 0.131938 +v -0.074208 0.104691 0.132047 +v -0.074208 0.104691 0.132047 +v -0.074208 0.104691 0.132047 +v -0.074208 0.104691 0.132047 +v -0.070836 0.106867 0.132047 +v -0.070836 0.106867 0.132047 +v -0.070836 0.106867 0.132047 +v -0.066920 0.108063 0.132047 +v -0.066920 0.108063 0.132047 +v -0.066920 0.108063 0.132047 +v -0.066920 0.108063 0.132047 +v -0.064310 0.108390 0.132047 +v -0.064310 0.108390 0.132047 +v -0.064310 0.108390 0.132047 +v -0.064310 0.108390 0.132047 +v -0.064310 0.108390 0.132047 +v -0.019279 0.108390 0.132047 +v -0.019279 0.108390 0.132047 +v -0.019279 0.108390 0.132047 +v -0.019279 0.108390 0.132047 +v -0.019279 0.103713 0.132047 +v -0.019279 0.103713 0.132047 +v -0.019279 0.103713 0.132047 +v -0.052889 0.103386 0.131938 +v -0.019279 0.103060 0.131721 +v 
-0.019279 0.103060 0.131721 +v -0.019062 0.103277 0.131612 +v -0.019062 0.103277 0.131612 +v -0.019062 0.102951 0.128240 +v -0.019062 0.102951 0.128240 +v -0.019062 0.102951 0.128240 +v -0.018953 0.102951 0.128131 +v -0.018953 0.102951 0.128131 +v -0.018409 0.102951 0.128131 +v -0.018409 0.102951 0.128131 +v -0.018409 0.102951 0.128131 +v -0.018300 0.102951 0.128240 +v -0.018300 0.102951 0.128240 +v -0.017974 0.102842 0.131177 +v -0.017974 0.102842 0.131177 +v -0.017974 0.102842 0.131177 +v -0.007967 0.102842 0.131177 +v -0.007967 0.102842 0.131177 +v -0.007967 0.102842 0.131177 +v 0.018029 0.103060 0.131721 +v 0.018029 0.103060 0.131721 +v 0.018247 0.103060 0.131177 +v 0.018247 0.103060 0.131177 +v 0.018355 0.102951 0.128240 +v 0.018355 0.102951 0.128240 +v 0.018355 0.102951 0.128131 +v 0.018355 0.102951 0.128131 +v 0.018899 0.102951 0.128131 +v 0.018899 0.102951 0.128131 +v 0.019334 0.102734 0.128240 +v 0.019334 0.102734 0.128240 +v 0.019334 0.102734 0.128240 +v 0.019334 0.102842 0.131177 +v 0.019334 0.102842 0.131177 +v 0.019334 0.102842 0.131177 +v 0.019334 0.102842 0.131177 +v 0.019334 0.103060 0.131721 +v 0.019334 0.103060 0.131721 +v 0.060776 0.103060 0.131721 +v 0.060776 0.102842 0.131177 +v 0.060776 0.102842 0.131177 +v 0.058601 0.100667 0.089736 +v 0.058601 0.100667 0.089736 +v 0.058601 0.100667 0.089736 +v 0.058601 0.100667 0.089736 +v 0.050660 0.100449 0.089192 +v 0.058601 0.100449 0.089192 +v 0.058601 0.100449 0.089192 +v 0.062407 0.099797 0.089192 +v 0.063386 0.098818 0.088865 +v 0.063386 0.098818 0.088865 +v 0.063278 0.097186 0.067655 +v 0.062190 0.095990 0.045248 +v 0.062190 0.095990 0.045248 +v 0.064909 0.093597 0.044705 +v 0.062625 0.094576 0.044487 +v 0.062625 0.094576 0.044487 +v 0.059797 0.093271 0.009028 +v 0.059797 0.093271 0.009028 +v 0.060558 0.092400 0.007505 +v 0.061646 0.090334 0.006200 +v 0.061646 0.090334 0.006200 +v 0.058383 0.086418 0.001305 +v 0.058383 0.086418 0.001305 +v 0.059362 0.084569 0.001305 +v 0.059362 0.084569 0.001305 +v 
0.058165 0.083046 0.000326 +v 0.056425 0.083264 -0.000000 +v 0.056425 0.083264 -0.000000 +v 0.056425 0.083264 -0.000000 +v 0.056425 0.083264 -0.000000 +v 0.051095 0.075759 -0.000000 +v 0.051095 0.075759 -0.000000 +v 0.051095 0.075759 -0.000000 +v 0.051095 0.075759 -0.000000 +v 0.050008 0.077934 -0.000000 +v 0.050008 0.077934 -0.000000 +v 0.050008 0.077934 -0.000000 +v 0.049573 0.075323 0.000218 +v 0.047506 0.078151 0.000218 +v 0.047506 0.078151 0.000218 +v 0.045004 0.077608 0.000761 +v -0.000027 0.077608 0.000761 +v -0.000027 0.077608 0.000761 +v -0.005900 0.077608 0.000761 +v -0.037444 0.076846 0.001305 +v -0.037444 0.076846 0.001305 +v -0.037444 0.076846 0.001305 +v -0.037444 0.076846 0.001305 +v -0.044949 0.076846 0.001305 +v -0.044949 0.076846 0.001305 +v -0.044949 0.076846 0.001305 +v -0.044623 0.074018 0.003589 +v -0.044623 0.074018 0.003589 +v -0.044623 0.074018 0.003589 +v -0.045384 0.073257 0.003589 +v -0.045384 0.073257 0.003589 +v -0.046254 0.071843 0.003154 +v -0.046254 0.071843 0.003154 +v -0.046254 0.070755 0.003154 +v -0.046254 0.070755 0.003154 +v -0.046254 0.070755 0.003154 +v -0.046254 0.070755 0.003154 +v -0.046254 -0.049545 0.003154 +v -0.046254 -0.049545 0.003154 +v -0.046254 -0.049545 0.003154 +v -0.046254 -0.049545 0.003154 +v -0.048103 -0.050524 0.001305 +v -0.048103 -0.050524 0.001305 +v -0.048103 -0.050524 0.001305 +v -0.048103 -0.075650 0.001305 +v -0.048103 -0.075650 0.001305 +v -0.048103 -0.075650 0.001305 +v -0.049300 -0.078151 0.000218 +v -0.048321 -0.079565 0.000218 +v -0.049952 -0.079783 0.000000 +v -0.049952 -0.079783 0.000000 +v -0.049952 -0.079783 0.000000 +v -0.049191 -0.080653 0.000000 +v -0.049191 -0.080653 0.000000 +v -0.049191 -0.080653 0.000000 +v -0.049191 -0.080653 0.000000 +v -0.054521 -0.087071 0.000000 +v -0.054521 -0.087071 0.000000 +v -0.054521 -0.087071 0.000000 +v -0.054521 -0.087071 0.000000 +v -0.054521 -0.087071 0.000000 +v -0.017648 -0.087941 0.000109 +v -0.000027 -0.088811 0.000326 +v -0.000027 -0.089572 
0.000761 +v -0.000027 -0.089572 0.000761 +v -0.046363 -0.090225 0.001305 +v -0.046363 -0.090225 0.001305 +v -0.054303 -0.090225 0.001305 +v -0.054303 -0.090225 0.001305 +v -0.057240 -0.094793 0.006200 +v -0.057240 -0.094793 0.006200 +v -0.056370 -0.095990 0.007614 +v -0.056587 -0.096316 0.009137 +v -0.056587 -0.096316 0.009137 +v -0.058763 -0.095664 0.009028 +v -0.058763 -0.095664 0.009028 +v -0.059633 -0.097839 0.044487 +v -0.059633 -0.097839 0.044487 +v -0.062678 -0.096425 0.044487 +v -0.062678 -0.096425 0.044487 +v -0.062026 -0.097621 0.044705 +v -0.062135 -0.097839 0.045248 +v -0.062135 -0.097839 0.045248 +v -0.065071 -0.095664 0.045248 +v -0.065071 -0.095664 0.045248 +v -0.067247 -0.092727 0.045248 +v -0.067247 -0.092727 0.045248 +v -0.070075 -0.093923 0.088865 +v -0.070075 -0.093923 0.088865 +v -0.070075 -0.095337 0.089192 +v -0.071380 -0.093053 0.089736 +v -0.071380 -0.093053 0.089736 +v -0.071924 -0.091857 0.094630 +v -0.071924 -0.091857 0.094630 +v -0.071924 -0.091857 0.094630 +v -0.071924 -0.091857 0.094630 +v -0.072250 -0.089137 0.096153 +v -0.072250 -0.089137 0.096153 +v -0.072250 -0.089137 0.096153 +v -0.072250 -0.089137 0.096153 +v -0.073012 -0.055854 0.111381 +v -0.073012 -0.055854 0.111381 +v -0.073012 -0.055854 0.111381 +v -0.073012 -0.055854 0.111381 +v -0.073012 -0.055527 0.110946 +v -0.073012 -0.055527 0.110946 +v -0.073012 -0.055527 0.110946 +v -0.073012 -0.055527 0.110946 +v -0.072903 -0.052917 0.108118 +v -0.072903 -0.052917 0.108118 +v -0.072903 -0.052917 0.108118 +v -0.072903 -0.052917 0.108118 +v -0.072903 -0.052917 0.108118 +v -0.072903 -0.052917 0.108118 +v -0.073012 -0.054657 0.111163 +v -0.073012 -0.054657 0.111163 +v -0.073012 -0.054657 0.111163 +v -0.073012 -0.054657 0.111163 +v -0.073229 -0.054657 0.113991 +v -0.073229 -0.054657 0.113991 +v -0.073229 -0.054657 0.113991 +v -0.073229 -0.054657 0.113991 +v -0.073991 -0.023766 0.129981 +v -0.073991 -0.023766 0.129981 +v -0.073991 -0.023766 0.129981 +v -0.073991 -0.023766 0.129981 +v 
-0.074099 -0.021700 0.130742 +v -0.074099 -0.021700 0.130742 +v -0.074099 -0.021700 0.130742 +v -0.074317 -0.020721 0.131612 +v -0.074317 -0.020721 0.131612 +v -0.074099 -0.018654 0.131503 +v -0.074208 0.007342 0.131503 +v -0.074208 0.007342 0.131503 +v -0.074099 0.007342 0.131177 +v -0.074099 0.007342 0.131177 +v -0.074099 0.007342 0.131177 +v -0.073991 0.007451 0.128240 +v -0.073991 0.007451 0.128240 +v -0.073991 0.007451 0.128240 +v -0.073991 0.007668 0.128023 +v -0.073991 0.007668 0.128023 +v -0.073991 0.007668 0.128023 +v -0.073991 0.008321 0.128023 +v -0.073991 0.008321 0.128023 +v -0.073991 0.008430 0.128240 +v -0.073991 0.008430 0.128240 +v -0.073991 0.008430 0.128240 +v -0.073991 0.008430 0.128240 +v -0.074317 0.008430 0.131177 +v -0.074317 0.008430 0.131177 +v -0.074317 0.008430 0.131177 +v -0.074317 0.008647 0.131721 +v -0.074317 0.008647 0.131721 +v -0.074317 0.020068 0.131721 +v -0.074317 0.053352 0.131721 +v -0.074317 0.053352 0.131721 +v -0.074426 0.053569 0.131395 +v -0.074426 0.053569 0.131395 +v -0.080734 0.053569 0.128240 +v -0.080734 0.053569 0.128240 +v -0.080734 0.053569 0.128240 +v -0.080843 0.053678 0.128132 +v -0.080843 0.053678 0.128132 +v -0.080843 0.054222 0.128132 +v -0.080843 0.054222 0.128132 +v -0.080843 0.054222 0.128132 +v -0.080843 0.054222 0.128132 +v -0.080952 0.054331 0.128132 +v -0.080952 0.054331 0.128132 +v -0.081061 0.054222 0.127805 +v -0.081061 0.054222 0.127805 +v -0.081061 0.054222 0.127805 +v -0.080952 0.054548 0.128349 +v -0.080952 0.054548 0.128349 +v -0.080952 0.054548 0.128349 +v -0.080952 0.054548 0.128349 +v -0.080734 0.054331 0.128240 +v -0.080734 0.054331 0.128240 +v -0.080734 0.054331 0.128240 +v -0.080190 0.054331 0.131286 +v -0.079647 0.054331 0.131721 +v -0.079647 0.054331 0.131721 +v -0.074970 0.054331 0.131721 +v -0.074970 0.054440 0.131938 +v -0.079647 0.054548 0.132047 +v -0.079647 0.054548 0.132047 +v -0.079647 0.054548 0.132047 +v -0.079647 0.093053 0.132047 +v -0.079647 0.093053 0.132047 +v -0.079647 
0.093053 0.132047 +v -0.079647 0.093053 0.132047 +v -0.079647 0.093053 0.132047 +v -0.078994 0.097186 0.132047 +v -0.078994 0.097186 0.132047 +v -0.078994 0.097186 0.132047 +v -0.078994 0.097186 0.132047 +v -0.077362 0.101102 0.132047 +v -0.077362 0.101102 0.132047 +v -0.077362 0.101102 0.132047 +v -0.077362 0.101102 0.132047 +v -0.075622 0.103386 0.131938 +v -0.075622 0.103386 0.131938 +v -0.075622 0.103386 0.131938 +v -0.075405 0.103495 0.131612 +v -0.075405 0.103495 0.131612 +v -0.074861 0.103060 0.126391 +v -0.074534 0.102625 0.125630 +v -0.074317 0.102951 0.124651 +v -0.073229 0.102081 0.106595 +v -0.073012 0.102081 0.105942 +v -0.072794 0.101755 0.105398 +v -0.072141 0.100667 0.104963 +v -0.070401 0.098927 0.104963 +v -0.069966 0.099362 0.104963 +v -0.069205 0.098709 0.105072 +v -0.068770 0.098383 0.108988 +v -0.068770 0.098383 0.108988 +v -0.069205 0.098927 0.117907 +v -0.069205 0.098927 0.117907 +v -0.069857 0.099797 0.131395 +v -0.069857 0.099797 0.131395 +v -0.069857 0.099797 0.131395 +v -0.069531 0.099906 0.131830 +v -0.069531 0.099906 0.131830 +v -0.069966 0.100341 0.132047 +v -0.069966 0.100341 0.132047 +v -0.069966 0.100341 0.132047 +v -0.067899 0.101755 0.132047 +v -0.067899 0.101755 0.132047 +v -0.067899 0.101755 0.132047 +v -0.067899 0.101755 0.132047 +v -0.067899 0.101755 0.132047 +v -0.064419 0.103277 0.132047 +v -0.064419 0.103277 0.132047 +v -0.064419 0.103277 0.132047 +v -0.064419 0.103277 0.132047 +v -0.060721 0.103713 0.132047 +v -0.060721 0.103713 0.132047 +v -0.060721 0.103713 0.132047 +v -0.060721 0.103713 0.132047 +v -0.060721 0.103713 0.132047 +v -0.060721 0.103713 0.132047 +v -0.060721 0.103060 0.131721 +v -0.060721 0.103060 0.131721 +v -0.052889 0.103060 0.131721 +v -0.019279 0.102842 0.131177 +v -0.019279 0.102842 0.131177 +v -0.019279 0.102842 0.131177 +v -0.019279 0.102842 0.131177 +v -0.019279 0.102734 0.128240 +v -0.019279 0.102734 0.128240 +v -0.019279 0.102734 0.128240 +v -0.019170 0.102625 0.127914 +v -0.019170 0.102625 
0.127914 +v -0.019170 0.102625 0.127914 +v -0.019170 0.102625 0.127914 +v -0.018409 0.102625 0.127805 +v -0.018409 0.102625 0.127805 +v -0.018409 0.102625 0.127805 +v -0.018083 0.102734 0.128240 +v -0.018083 0.102734 0.128240 +v -0.018083 0.102734 0.128240 +v -0.018083 0.102734 0.128240 +v -0.018083 0.102734 0.128240 +v 0.018029 0.102842 0.131177 +v 0.018029 0.102842 0.131177 +v 0.018029 0.102842 0.131177 +v 0.018029 0.102842 0.131177 +v 0.018029 0.102842 0.131177 +v 0.018029 0.102734 0.128240 +v 0.018029 0.102734 0.128240 +v 0.018029 0.102734 0.128240 +v 0.018355 0.102625 0.127805 +v 0.018355 0.102625 0.127805 +v 0.018355 0.102625 0.127805 +v 0.019117 0.102625 0.127914 +v 0.019117 0.102625 0.127914 +v 0.019117 0.102625 0.127914 +v 0.019117 0.102625 0.127914 +v 0.052836 0.102842 0.131177 +v 0.052836 0.102842 0.131177 +v 0.052836 0.102842 0.131177 +v 0.052836 0.102842 0.131177 +v 0.052836 0.102842 0.131177 +v 0.052836 0.102842 0.131177 +v 0.050660 0.100667 0.089736 +v 0.050660 0.100667 0.089736 +v 0.050660 0.100667 0.089736 +v -0.000027 0.100449 0.089192 +v -0.000027 0.100449 0.089192 +v 0.050660 0.100123 0.088974 +v 0.058601 0.099688 0.088865 +v 0.058601 0.099688 0.088865 +v 0.058601 0.099688 0.088865 +v 0.058601 0.099688 0.088865 +v 0.059797 0.099688 0.088865 +v 0.059797 0.099688 0.088865 +v 0.061102 0.098057 0.067655 +v 0.059906 0.096860 0.045248 +v 0.059906 0.096860 0.045248 +v 0.061972 0.095772 0.044705 +v 0.060667 0.095664 0.044487 +v 0.060667 0.095664 0.044487 +v 0.056643 0.094467 0.009137 +v 0.056643 0.094467 0.009137 +v 0.056425 0.094032 0.007614 +v 0.058274 0.092618 0.006200 +v 0.058274 0.092618 0.006200 +v 0.058274 0.092618 0.006200 +v 0.056751 0.087723 0.001305 +v 0.056751 0.087723 0.001305 +v 0.056751 0.087723 0.001305 +v 0.057186 0.086744 0.000761 +v 0.055990 0.084460 -0.000000 +v 0.055990 0.084460 -0.000000 +v 0.055990 0.084460 -0.000000 +v 0.055990 0.084460 -0.000000 +v 0.049137 0.078695 -0.000000 +v 0.049137 0.078695 -0.000000 +v 0.045004 0.080001 
-0.000000 +v 0.045004 0.080001 -0.000000 +v -0.000027 0.080001 -0.000000 +v -0.000027 0.080001 -0.000000 +v -0.000027 0.080001 -0.000000 +v -0.000027 0.080001 -0.000000 +v -0.000027 0.080001 -0.000000 +v -0.000027 0.079239 0.000109 +v -0.000027 0.079239 0.000109 +v -0.044949 0.079239 0.000109 +v -0.044949 0.079239 0.000109 +v -0.046254 0.077064 0.000979 +v -0.046254 0.077064 0.000979 +v -0.047559 0.076738 0.000870 +v -0.047559 0.076738 0.000870 +v -0.047559 0.076738 0.000870 +v -0.048212 0.076955 0.000435 +v -0.048212 0.075759 0.000870 +v -0.048212 0.075759 0.000870 +v -0.048103 0.072604 0.001305 +v -0.048103 0.072604 0.001305 +v -0.048103 0.072604 0.001305 +v -0.048103 0.012019 0.001305 +v -0.048103 0.012019 0.001305 +v -0.048103 0.012019 0.001305 +v -0.048103 0.012019 0.001305 +v -0.050496 -0.050524 0.000109 +v -0.050496 -0.075650 0.000109 +v -0.050496 -0.075650 0.000109 +v -0.051040 -0.077608 0.000000 +v -0.051040 -0.077608 0.000000 +v -0.051040 -0.077608 0.000000 +v -0.051040 -0.077608 0.000000 +v -0.055717 -0.086636 0.000000 +v -0.055717 -0.086636 0.000000 +v -0.055717 -0.086636 0.000000 +v -0.055717 -0.086636 0.000000 +v -0.055717 -0.086636 0.000000 +v -0.054303 -0.087941 0.000109 +v -0.056805 -0.089572 0.001305 +v -0.056805 -0.089572 0.001305 +v -0.060829 -0.092944 0.006200 +v -0.060829 -0.092944 0.006200 +v -0.058545 -0.095337 0.007505 +v -0.060829 -0.094576 0.009028 +v -0.060829 -0.094576 0.009028 +v -0.062570 -0.093162 0.009028 +v -0.062570 -0.093162 0.009028 +v -0.064419 -0.095011 0.044487 +v -0.064419 -0.095011 0.044487 +v -0.064854 -0.095446 0.044705 +v -0.067029 -0.092618 0.044705 +v -0.067029 -0.092618 0.044705 +v -0.067899 -0.090443 0.044705 +v -0.068117 -0.090443 0.045248 +v -0.068117 -0.090443 0.045248 +v -0.068117 -0.090443 0.045248 +v -0.070727 -0.091639 0.088865 +v -0.070727 -0.091639 0.088865 +v -0.071054 -0.092944 0.089192 +v -0.071924 -0.089137 0.089736 +v -0.071924 -0.089137 0.089736 +v -0.071924 -0.089137 0.089736 +v -0.071924 -0.089137 
0.089736 +v -0.071924 -0.089137 0.089736 +v -0.071924 -0.089137 0.089736 +v -0.071924 -0.089137 0.089736 +v -0.071924 -0.089137 0.089736 +v -0.071924 -0.057703 0.089736 +v -0.071924 -0.057703 0.089736 +v -0.071924 -0.057703 0.089736 +v -0.071924 -0.057703 0.089736 +v -0.071924 -0.012563 0.089736 +v -0.071924 -0.012563 0.089736 +v -0.071924 -0.012563 0.089736 +v -0.071924 -0.012563 0.089736 +v -0.071924 -0.012563 0.089736 +v -0.071924 -0.012563 0.089736 +v -0.071924 -0.012563 0.089736 +v -0.071924 -0.012563 0.089736 +v -0.071924 -0.012563 0.089736 +v -0.074099 -0.018654 0.131177 +v -0.074099 -0.018654 0.131177 +v -0.074099 -0.018654 0.131177 +v -0.074099 -0.018654 0.131177 +v -0.074099 -0.018654 0.131177 +v -0.074099 -0.018654 0.131177 +v -0.074099 -0.018654 0.131177 +v -0.073882 0.007560 0.127914 +v -0.073882 0.007560 0.127914 +v -0.073882 0.007560 0.127914 +v -0.073882 0.007560 0.127914 +v -0.073882 0.007560 0.127914 +v -0.073882 0.008321 0.127805 +v -0.073882 0.008321 0.127805 +v -0.073882 0.008321 0.127805 +v -0.073991 0.008647 0.128240 +v -0.073991 0.008647 0.128240 +v -0.074099 0.008647 0.131177 +v -0.074099 0.008647 0.131177 +v -0.074099 0.008647 0.131177 +v -0.074099 0.008647 0.131177 +v -0.074099 0.053352 0.131177 +v -0.074099 0.053352 0.131177 +v -0.074099 0.053352 0.131177 +v -0.074099 0.053352 0.131177 +v -0.074208 0.053569 0.128240 +v -0.074208 0.053569 0.128240 +v -0.074208 0.053569 0.128240 +v -0.074208 0.053678 0.128132 +v -0.074208 0.053678 0.128132 +v -0.074208 0.053678 0.128132 +v -0.074208 0.054222 0.128132 +v -0.074208 0.054222 0.128132 +v -0.074208 0.054222 0.128132 +v -0.074208 0.054222 0.128132 +v -0.074208 0.054331 0.128240 +v -0.074208 0.054331 0.128240 +v -0.074317 0.054331 0.131177 +v -0.074317 0.054331 0.131177 +v -0.074534 0.054331 0.131612 +v -0.074317 0.054657 0.131721 +v -0.074317 0.054657 0.131721 +v -0.074970 0.054657 0.132047 +v -0.074970 0.054657 0.132047 +v -0.074970 0.054657 0.132047 +v -0.074970 0.054657 0.132047 +v -0.074970 
0.089464 0.132047 +v -0.074970 0.089464 0.132047 +v -0.074970 0.089464 0.132047 +v -0.074426 0.093271 0.132047 +v -0.074426 0.093271 0.132047 +v -0.074426 0.093271 0.132047 +v -0.074426 0.093271 0.132047 +v -0.073556 0.095772 0.132047 +v -0.073556 0.095772 0.132047 +v -0.073556 0.095772 0.132047 +v -0.073556 0.095772 0.132047 +v -0.071380 0.098927 0.132047 +v -0.071380 0.098927 0.132047 +v -0.071380 0.098927 0.132047 +v -0.071380 0.098927 0.132047 +v -0.071271 0.099144 0.131938 +v -0.071054 0.099035 0.131612 +v -0.071054 0.099035 0.131612 +v -0.070836 0.098818 0.131395 +v -0.070836 0.098818 0.131395 +v -0.074317 0.102516 0.124760 +v -0.073229 0.101646 0.106595 +v -0.073120 0.101537 0.106051 +v -0.072577 0.100993 0.105398 +v -0.070510 0.098818 0.105290 +v -0.069857 0.098165 0.105072 +v -0.069422 0.098165 0.104528 +v -0.069096 0.098492 0.104637 +v -0.068661 0.098165 0.104528 +v -0.068661 0.098165 0.104528 +v -0.068552 0.098709 0.113447 +v -0.068552 0.098709 0.113447 +v -0.069205 0.099579 0.128240 +v -0.069205 0.099579 0.128240 +v -0.069205 0.099579 0.128240 +v -0.069313 0.099688 0.131177 +v -0.069313 0.099688 0.131177 +v -0.069313 0.099688 0.131177 +v -0.067682 0.101211 0.131830 +v -0.065506 0.102299 0.131830 +v -0.063114 0.102951 0.131721 +v -0.060721 0.102842 0.131177 +v -0.060721 0.102842 0.131177 +v -0.060721 0.102842 0.131177 +v -0.060721 0.102842 0.131177 +v -0.052889 0.102842 0.131177 +v -0.052889 0.102842 0.131177 +v -0.052889 0.102842 0.131177 +v -0.052889 0.102842 0.131177 +v -0.052889 0.102842 0.131177 +v -0.052889 0.102842 0.131177 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v 0.013787 0.100667 0.089736 +v -0.000027 0.100558 0.089409 +v -0.007423 0.100558 0.089409 +v -0.007423 0.100449 0.089192 +v 
-0.007423 0.100449 0.089192 +v -0.000027 0.100123 0.088974 +v 0.050660 0.099688 0.088865 +v 0.050660 0.099688 0.088865 +v 0.050660 0.099688 0.088865 +v 0.050660 0.099688 0.088865 +v 0.057404 0.098600 0.067655 +v 0.057404 0.098600 0.067655 +v 0.056316 0.097404 0.045248 +v 0.056316 0.097404 0.045248 +v 0.056316 0.097404 0.045248 +v 0.056316 0.097404 0.045248 +v 0.056316 0.097404 0.045248 +v 0.059797 0.096642 0.044705 +v 0.058492 0.096316 0.044487 +v 0.058492 0.096316 0.044487 +v 0.054358 0.094685 0.009245 +v 0.054358 0.094685 0.009245 +v 0.054250 0.093379 0.006309 +v 0.054250 0.093379 0.006309 +v 0.054250 0.088376 0.001305 +v 0.054250 0.088376 0.001305 +v 0.054250 0.088376 0.001305 +v 0.054250 0.087723 0.000761 +v 0.054250 0.087723 0.000761 +v 0.037173 0.087723 0.000761 +v 0.054902 0.085113 -0.000000 +v 0.054902 0.085113 -0.000000 +v 0.054902 0.085113 -0.000000 +v 0.054902 0.085113 -0.000000 +v 0.054902 0.085113 -0.000000 +v -0.005900 0.085222 -0.000000 +v -0.005900 0.085222 -0.000000 +v -0.005900 0.085222 -0.000000 +v -0.005900 0.085222 -0.000000 +v -0.044949 0.080001 -0.000000 +v -0.044949 0.080001 -0.000000 +v -0.044949 0.080001 -0.000000 +v -0.044949 0.080001 -0.000000 +v -0.044949 0.080001 -0.000000 +v -0.047016 0.079783 -0.000000 +v -0.047016 0.079783 -0.000000 +v -0.047016 0.079783 -0.000000 +v -0.047559 0.078151 0.000218 +v -0.048973 0.077064 0.000218 +v -0.049626 0.073692 0.000326 +v -0.050496 0.019307 0.000109 +v -0.051258 -0.018654 0.000000 +v -0.051258 -0.018654 0.000000 +v -0.051258 -0.075650 0.000000 +v -0.051258 -0.075650 0.000000 +v -0.051258 -0.075650 0.000000 +v -0.051258 -0.075650 0.000000 +v -0.051258 -0.075650 0.000000 +v -0.056370 -0.085548 0.000000 +v -0.056370 -0.085548 0.000000 +v -0.056370 -0.085548 0.000000 +v -0.056370 -0.085548 0.000000 +v -0.056370 -0.085548 0.000000 +v -0.057349 -0.084895 0.000109 +v -0.057349 -0.084895 0.000109 +v -0.056914 -0.086527 0.000109 +v -0.057240 -0.088485 0.000761 +v -0.058436 -0.088267 0.001305 +v -0.058436 
-0.088267 0.001305 +v -0.059198 -0.086962 0.001305 +v -0.059198 -0.086962 0.001305 +v -0.062896 -0.090660 0.006200 +v -0.062896 -0.090660 0.006200 +v -0.062243 -0.092836 0.007505 +v -0.063331 -0.092292 0.009028 +v -0.063331 -0.092292 0.009028 +v -0.065071 -0.089355 0.009028 +v -0.065071 -0.089355 0.009028 +v -0.066377 -0.092292 0.044487 +v -0.066377 -0.092292 0.044487 +v -0.066377 -0.092292 0.044487 +v -0.067247 -0.090225 0.044487 +v -0.067247 -0.090225 0.044487 +v -0.068335 -0.088050 0.044705 +v -0.068117 -0.086853 0.044487 +v -0.068443 -0.086853 0.044705 +v -0.068661 -0.056506 0.045031 +v -0.068661 -0.056506 0.045031 +v -0.068661 -0.086853 0.045031 +v -0.068661 -0.086853 0.045248 +v -0.068661 -0.086853 0.045248 +v -0.068661 -0.086853 0.045248 +v -0.068661 -0.086853 0.045248 +v -0.070945 -0.089137 0.088865 +v -0.070945 -0.089137 0.088865 +v -0.070945 -0.089137 0.088865 +v -0.071380 -0.089137 0.088974 +v -0.071380 -0.089137 0.088974 +v -0.071598 -0.089137 0.089192 +v -0.071598 -0.089137 0.089192 +v -0.071815 -0.057703 0.089409 +v -0.071815 -0.057703 0.089409 +v -0.071815 -0.012563 0.089409 +v -0.071924 0.018437 0.089736 +v -0.071924 0.018437 0.089736 +v -0.071924 0.018437 0.089736 +v -0.073664 0.010170 0.122911 +v -0.073664 0.010170 0.122911 +v -0.073664 0.010170 0.122911 +v -0.073664 0.010170 0.122911 +v -0.073664 0.010170 0.122911 +v -0.073991 0.053352 0.128240 +v -0.073991 0.053352 0.128240 +v -0.073882 0.053461 0.127914 +v -0.073882 0.053461 0.127914 +v -0.073882 0.053896 0.127805 +v -0.073882 0.053896 0.127805 +v -0.073882 0.053896 0.127805 +v -0.073882 0.053896 0.127805 +v -0.073882 0.054440 0.127914 +v -0.073882 0.054440 0.127914 +v -0.073882 0.054440 0.127914 +v -0.073882 0.054440 0.127914 +v -0.073991 0.054548 0.128240 +v -0.073991 0.054548 0.128240 +v -0.073991 0.054548 0.128240 +v -0.074099 0.054657 0.131177 +v -0.074099 0.054657 0.131177 +v -0.074099 0.054657 0.131177 +v -0.074099 0.054657 0.131177 +v -0.074099 0.084787 0.131177 +v -0.074099 0.084787 
0.131177 +v -0.074099 0.084787 0.131177 +v -0.074317 0.087506 0.131721 +v -0.074317 0.090769 0.131721 +v -0.073882 0.093162 0.131830 +v -0.073012 0.095555 0.131830 +v -0.070945 0.098492 0.131830 +v -0.070945 0.098492 0.131830 +v -0.070727 0.098274 0.131177 +v -0.070727 0.098274 0.131177 +v -0.070727 0.098274 0.131177 +v -0.070619 0.098165 0.128240 +v -0.070619 0.098165 0.128240 +v -0.070619 0.098165 0.128240 +v -0.069966 0.098165 0.117907 +v -0.069966 0.098165 0.117907 +v -0.069531 0.097839 0.108988 +v -0.069531 0.097839 0.108988 +v -0.069205 0.097621 0.104528 +v -0.069205 0.097621 0.104528 +v -0.069096 0.097513 0.104093 +v -0.069096 0.097513 0.104093 +v -0.068987 0.097839 0.103984 +v -0.068552 0.097839 0.103658 +v -0.068552 0.097839 0.103658 +v -0.068117 0.098057 0.104093 +v -0.068117 0.098057 0.104093 +v -0.067464 0.098492 0.102897 +v -0.066377 0.101537 0.131177 +v -0.066377 0.101537 0.131177 +v -0.064201 0.102407 0.131177 +v -0.064201 0.102407 0.131177 +v -0.061156 0.100449 0.089736 +v -0.061156 0.100449 0.089736 +v -0.061156 0.100449 0.089736 +v -0.061156 0.100449 0.089736 +v -0.058654 0.100667 0.089736 +v -0.058654 0.100667 0.089736 +v -0.050714 0.100667 0.089736 +v -0.050714 0.100667 0.089736 +v -0.050714 0.100667 0.089736 +v -0.050714 0.100449 0.089192 +v -0.050714 0.100123 0.088974 +v -0.050605 0.099688 0.088865 +v -0.050605 0.099688 0.088865 +v -0.050605 0.099688 0.088865 +v -0.007423 0.099688 0.088865 +v -0.007423 0.099688 0.088865 +v -0.007423 0.099688 0.088865 +v -0.007423 0.099688 0.088865 +v -0.007423 0.099688 0.088865 +v -0.000027 0.097404 0.045248 +v -0.000027 0.097404 0.045248 +v -0.000027 0.097404 0.045248 +v -0.000027 0.097404 0.045248 +v -0.000027 0.097404 0.045248 +v 0.048376 0.097404 0.045248 +v 0.048376 0.097404 0.045248 +v 0.048376 0.097404 0.045248 +v 0.048376 0.097186 0.044705 +v 0.048376 0.097186 0.044705 +v 0.056316 0.097186 0.044705 +v 0.056316 0.097186 0.044705 +v 0.056208 0.096534 0.044487 +v 0.056208 0.096534 0.044487 +v 0.056208 
0.096534 0.044487 +v 0.046527 0.094685 0.009245 +v 0.046527 0.094685 0.009245 +v 0.046527 0.094685 0.009245 +v 0.046527 0.094685 0.009245 +v 0.046418 0.093379 0.006309 +v 0.046418 0.093379 0.006309 +v 0.046418 0.093379 0.006309 +v 0.046418 0.093379 0.006309 +v -0.000027 0.093923 0.006961 +v -0.000027 0.093923 0.006961 +v -0.000027 0.093379 0.006309 +v -0.000027 0.093379 0.006309 +v -0.000027 0.093379 0.006309 +v -0.000027 0.093379 0.006309 +v 0.042829 0.088376 0.001305 +v 0.042829 0.088376 0.001305 +v 0.042829 0.088376 0.001305 +v 0.042829 0.088376 0.001305 +v 0.042829 0.088376 0.001305 +v -0.000027 0.087723 0.000761 +v -0.036030 0.086092 0.000109 +v -0.054956 0.085113 -0.000000 +v -0.054956 0.085113 -0.000000 +v -0.054956 0.085113 -0.000000 +v -0.054956 0.085113 -0.000000 +v -0.054956 0.085113 -0.000000 +v -0.054956 0.085113 -0.000000 +v -0.049191 0.078695 -0.000000 +v -0.049191 0.078695 -0.000000 +v -0.049191 0.078695 -0.000000 +v -0.049191 0.078695 -0.000000 +v -0.050822 0.076302 -0.000000 +v -0.050822 0.076302 -0.000000 +v -0.050822 0.076302 -0.000000 +v -0.051258 0.073692 -0.000000 +v -0.051258 0.073692 -0.000000 +v -0.051258 0.073692 -0.000000 +v -0.056479 -0.005602 0.000000 +v -0.056479 -0.005602 0.000000 +v -0.056479 -0.005602 0.000000 +v -0.056479 -0.005602 0.000000 +v -0.057349 -0.050741 0.000109 +v -0.059633 -0.084895 0.001305 +v -0.059633 -0.084895 0.001305 +v -0.059633 -0.084895 0.001305 +v -0.064201 -0.087941 0.006200 +v -0.064201 -0.087941 0.006200 +v -0.064745 -0.089137 0.007505 +v -0.065398 -0.088376 0.009137 +v -0.065398 -0.088376 0.009137 +v -0.067791 -0.086853 0.044487 +v -0.067791 -0.086853 0.044487 +v -0.068117 -0.056506 0.044487 +v -0.068443 -0.056506 0.044705 +v -0.068443 -0.056506 0.044705 +v -0.068661 -0.056506 0.045248 +v -0.068661 -0.056506 0.045248 +v -0.068661 -0.056506 0.045248 +v -0.068661 -0.006689 0.045248 +v -0.068661 -0.006689 0.045248 +v -0.068661 -0.006689 0.045248 +v -0.068661 -0.006689 0.045248 +v -0.070945 -0.057703 0.088865 
+v -0.070945 -0.057703 0.088865 +v -0.070945 -0.057703 0.088865 +v -0.070945 -0.057703 0.088865 +v -0.070945 -0.057703 0.088865 +v -0.070945 -0.057703 0.088865 +v -0.071380 -0.057703 0.088974 +v -0.071380 -0.057703 0.088974 +v -0.071598 -0.057703 0.089192 +v -0.071598 -0.057703 0.089192 +v -0.071598 -0.009300 0.089192 +v -0.071598 0.031489 0.089192 +v -0.071924 0.051068 0.089736 +v -0.071924 0.051068 0.089736 +v -0.071924 0.085439 0.089736 +v -0.071924 0.085439 0.089736 +v -0.071924 0.085439 0.089736 +v -0.071924 0.085439 0.089736 +v -0.071924 0.085439 0.089736 +v -0.071924 0.085439 0.089736 +v -0.071924 0.085439 0.089736 +v -0.071924 0.085439 0.089736 +v -0.074099 0.089464 0.131177 +v -0.074099 0.089464 0.131177 +v -0.074099 0.089464 0.131177 +v -0.074099 0.089464 0.131177 +v -0.073229 0.094250 0.131177 +v -0.073229 0.094250 0.131177 +v -0.071489 0.097404 0.131177 +v -0.071489 0.097404 0.131177 +v -0.071489 0.097404 0.131177 +v -0.069205 0.097078 0.104637 +v -0.069205 0.097078 0.104637 +v -0.068661 0.097513 0.102897 +v -0.068661 0.097513 0.102897 +v -0.068661 0.097513 0.102897 +v -0.068661 0.097513 0.102897 +v -0.068008 0.096751 0.089736 +v -0.068008 0.096751 0.089736 +v -0.066050 0.098383 0.089736 +v -0.066050 0.098383 0.089736 +v -0.063657 0.099688 0.089736 +v -0.063657 0.099688 0.089736 +v -0.062352 0.099797 0.089192 +v -0.062352 0.099797 0.089192 +v -0.058654 0.100449 0.089192 +v -0.058545 0.099688 0.088865 +v -0.058545 0.099688 0.088865 +v -0.057349 0.098492 0.065262 +v -0.041577 0.097404 0.045248 +v -0.041577 0.097404 0.045248 +v -0.041577 0.097404 0.045248 +v -0.041577 0.097404 0.045248 +v -0.041577 0.097404 0.045248 +v -0.006662 0.097186 0.044705 +v -0.006662 0.097186 0.044705 +v -0.006662 0.097186 0.044705 +v 0.012264 0.097186 0.044705 +v 0.048376 0.096860 0.044487 +v 0.048376 0.096860 0.044487 +v 0.048376 0.096534 0.044487 +v 0.048376 0.096534 0.044487 +v 0.048376 0.096534 0.044487 +v 0.048376 0.096534 0.044487 +v -0.006662 0.096534 0.044487 +v -0.006662 
0.096534 0.044487 +v -0.006662 0.096534 0.044487 +v -0.006662 0.096534 0.044487 +v 0.013243 0.094685 0.009245 +v 0.013243 0.094685 0.009245 +v 0.013243 0.094685 0.009245 +v 0.013243 0.094685 0.009245 +v 0.013243 0.094685 0.009245 +v -0.000027 0.094250 0.007614 +v -0.046472 0.094685 0.009245 +v -0.046472 0.094685 0.009245 +v -0.046472 0.094685 0.009245 +v -0.046472 0.094576 0.008375 +v -0.038531 0.093923 0.006961 +v -0.046363 0.093379 0.006309 +v -0.046363 0.093379 0.006309 +v -0.046363 0.093379 0.006309 +v -0.046363 0.093379 0.006309 +v -0.042882 0.088376 0.001305 +v -0.042882 0.088376 0.001305 +v -0.042882 0.088376 0.001305 +v -0.042882 0.086962 0.000326 +v -0.054303 0.086092 0.000109 +v -0.054303 0.086092 0.000109 +v -0.055935 0.084460 -0.000000 +v -0.055935 0.084460 -0.000000 +v -0.055935 0.084460 -0.000000 +v -0.055935 0.084460 -0.000000 +v -0.056479 0.083264 -0.000000 +v -0.056479 0.083264 -0.000000 +v -0.057349 0.053243 0.000109 +v -0.059633 0.040626 0.001305 +v -0.059633 0.040626 0.001305 +v -0.059633 0.040626 0.001305 +v -0.059633 0.040626 0.001305 +v -0.059633 -0.055419 0.001305 +v -0.059633 -0.055419 0.001305 +v -0.059633 -0.055419 0.001305 +v -0.064636 -0.084895 0.006309 +v -0.064636 -0.084895 0.006309 +v -0.065180 -0.084895 0.006853 +v -0.065942 -0.085004 0.009246 +v -0.065942 -0.085004 0.009246 +v -0.065942 -0.055527 0.009245 +v -0.065942 -0.055527 0.009245 +v -0.065942 -0.055527 0.009245 +v -0.067791 -0.056506 0.044487 +v -0.067791 -0.056506 0.044487 +v -0.067791 -0.056506 0.044487 +v -0.067791 -0.056506 0.044487 +v -0.067791 -0.008430 0.044487 +v -0.067791 -0.008430 0.044487 +v -0.067791 -0.008430 0.044487 +v -0.067791 -0.008430 0.044487 +v -0.068443 0.002230 0.044705 +v -0.068443 0.002230 0.044705 +v -0.068661 0.014303 0.045248 +v -0.068661 0.014303 0.045248 +v -0.068661 0.014303 0.045248 +v -0.070945 -0.019633 0.088865 +v -0.070945 -0.019633 0.088865 +v -0.070945 -0.019633 0.088865 +v -0.070945 -0.019633 0.088865 +v -0.070945 -0.019633 0.088865 +v 
-0.070945 0.012019 0.088865 +v -0.070945 0.012019 0.088865 +v -0.070945 0.012019 0.088865 +v -0.070945 0.012019 0.088865 +v -0.071380 0.041278 0.088974 +v -0.070945 0.085439 0.088865 +v -0.070945 0.085439 0.088865 +v -0.071380 0.072604 0.088974 +v -0.071598 0.087397 0.089192 +v -0.071815 0.088593 0.089736 +v -0.071815 0.088593 0.089736 +v -0.071815 0.088593 0.089736 +v -0.071815 0.088593 0.089736 +v -0.071380 0.091204 0.089736 +v -0.071380 0.091204 0.089736 +v -0.070292 0.093597 0.089736 +v -0.070292 0.093597 0.089736 +v -0.069422 0.094576 0.089192 +v -0.067791 0.096534 0.089192 +v -0.064745 0.098818 0.089192 +v -0.062135 0.099144 0.088865 +v -0.062135 0.099144 0.088865 +v -0.062135 0.099144 0.088865 +v -0.060938 0.097948 0.065262 +v -0.059742 0.098274 0.065262 +v -0.056261 0.097404 0.045248 +v -0.056261 0.097404 0.045248 +v -0.056261 0.097186 0.044705 +v -0.048321 0.097186 0.044705 +v -0.006662 0.096860 0.044487 +v -0.006662 0.096860 0.044487 +v -0.006662 0.096860 0.044487 +v -0.006662 0.096860 0.044487 +v -0.048321 0.096534 0.044487 +v -0.048321 0.096534 0.044487 +v -0.048321 0.096534 0.044487 +v -0.048321 0.096534 0.044487 +v -0.048321 0.096534 0.044487 +v -0.054412 0.094685 0.009245 +v -0.054412 0.094685 0.009245 +v -0.054303 0.093923 0.006961 +v -0.054303 0.093379 0.006309 +v -0.054303 0.093379 0.006309 +v -0.054303 0.088376 0.001305 +v -0.054303 0.088376 0.001305 +v -0.054303 0.088376 0.001305 +v -0.057675 0.087179 0.001305 +v -0.057675 0.087179 0.001305 +v -0.056805 0.087723 0.001305 +v -0.056805 0.087723 0.001305 +v -0.056370 0.086309 0.000326 +v -0.057349 0.083046 0.000109 +v -0.059633 0.083046 0.001305 +v -0.059633 0.083046 0.001305 +v -0.059633 0.083046 0.001305 +v -0.064528 0.084025 0.006309 +v -0.064528 0.084025 0.006309 +v -0.064636 0.081306 0.006309 +v -0.064636 0.081306 0.006309 +v -0.064636 0.081306 0.006309 +v -0.064636 0.081306 0.006309 +v -0.064636 -0.023766 0.006309 +v -0.064636 -0.023766 0.006309 +v -0.064636 -0.023766 0.006309 +v -0.064636 
-0.023766 0.006309 +v -0.065180 -0.050850 0.006961 +v -0.065942 0.024419 0.009245 +v -0.065942 0.024419 0.009245 +v -0.065942 0.024419 0.009245 +v -0.065942 0.024419 0.009245 +v -0.065942 0.024419 0.009245 +v -0.067791 0.040843 0.044487 +v -0.067791 0.040843 0.044487 +v -0.067791 0.040843 0.044487 +v -0.067791 0.040843 0.044487 +v -0.068443 0.049762 0.044705 +v -0.068661 0.049762 0.045248 +v -0.068661 0.049762 0.045248 +v -0.068661 0.049762 0.045248 +v -0.068661 0.049762 0.045248 +v -0.068661 0.083155 0.045248 +v -0.068661 0.083155 0.045248 +v -0.068661 0.083155 0.045248 +v -0.070945 0.087288 0.088865 +v -0.070945 0.087288 0.088865 +v -0.071380 0.089899 0.089192 +v -0.071380 0.089899 0.089192 +v -0.070619 0.092292 0.089192 +v -0.068878 0.094250 0.088865 +v -0.068878 0.094250 0.088865 +v -0.067356 0.096099 0.088865 +v -0.067356 0.096099 0.088865 +v -0.066268 0.095011 0.067655 +v -0.065506 0.097621 0.088865 +v -0.065506 0.097621 0.088865 +v -0.063222 0.096969 0.065262 +v -0.059850 0.096860 0.045248 +v -0.059850 0.096860 0.045248 +v -0.058654 0.096969 0.044705 +v -0.056261 0.096860 0.044487 +v -0.056261 0.096534 0.044487 +v -0.056261 0.096534 0.044487 +v -0.056587 0.094467 0.009137 +v -0.056587 0.094467 0.009137 +v -0.056587 0.094467 0.009137 +v -0.057349 0.092944 0.006200 +v -0.057349 0.092944 0.006200 +v -0.060068 0.091639 0.006200 +v -0.060068 0.091639 0.006200 +v -0.062243 0.089572 0.006200 +v -0.062243 0.089572 0.006200 +v -0.062243 0.089572 0.006200 +v -0.058980 0.085548 0.001305 +v -0.058980 0.085548 0.001305 +v -0.064201 0.085983 0.006200 +v -0.064201 0.085983 0.006200 +v -0.064745 0.086092 0.006853 +v -0.065180 0.083046 0.006961 +v -0.065942 0.081415 0.009245 +v -0.065942 0.081415 0.009245 +v -0.065942 0.081415 0.009245 +v -0.065942 0.081415 0.009245 +v -0.067791 0.083155 0.044487 +v -0.067791 0.083155 0.044487 +v -0.067791 0.083155 0.044487 +v -0.068443 0.085004 0.044705 +v -0.068443 0.085004 0.044705 +v -0.068661 0.086201 0.045248 +v -0.068661 0.086201 
0.045248 +v -0.068661 0.086201 0.045248 +v -0.070401 0.090878 0.088865 +v -0.070401 0.090878 0.088865 +v -0.070401 0.090878 0.088865 +v -0.067791 0.093162 0.067655 +v -0.065071 0.093814 0.045248 +v -0.065071 0.093814 0.045248 +v -0.063222 0.095337 0.045248 +v -0.063222 0.095337 0.045248 +v -0.060938 0.096207 0.044705 +v -0.058436 0.096316 0.044487 +v -0.058436 0.096316 0.044487 +v -0.058436 0.096316 0.044487 +v -0.058763 0.093814 0.009028 +v -0.058763 0.093814 0.009028 +v -0.059415 0.092618 0.006853 +v -0.062243 0.090986 0.007505 +v -0.062243 0.090986 0.007505 +v -0.063440 0.087941 0.006200 +v -0.063440 0.087941 0.006200 +v -0.065071 0.087506 0.009028 +v -0.065071 0.087506 0.009028 +v -0.065833 0.084243 0.009137 +v -0.065833 0.084243 0.009137 +v -0.067791 0.085004 0.044487 +v -0.067791 0.085004 0.044487 +v -0.067791 0.085004 0.044487 +v -0.068226 0.087397 0.044705 +v -0.068117 0.088593 0.045248 +v -0.068117 0.088593 0.045248 +v -0.066594 0.091965 0.045248 +v -0.066594 0.091965 0.045248 +v -0.064854 0.093597 0.044705 +v -0.063005 0.095120 0.044705 +v -0.061700 0.095120 0.044487 +v -0.061700 0.095120 0.044487 +v -0.060829 0.092727 0.009028 +v -0.060829 0.092727 0.009028 +v -0.062570 0.091313 0.009028 +v -0.062570 0.091313 0.009028 +v -0.062570 0.091313 0.009028 +v -0.063984 0.089572 0.009028 +v -0.063984 0.089572 0.009028 +v -0.067247 0.088376 0.044487 +v -0.067247 0.088376 0.044487 +v -0.067464 0.089681 0.044705 +v -0.066377 0.091748 0.044705 +v -0.064419 0.093162 0.044487 +v -0.064419 0.093162 0.044487 +v -0.065833 0.091421 0.044487 +v -0.065833 0.091421 0.044487 +vn -0.9849 -0.0000 -0.1732 +vn 0.9849 -0.0000 -0.1733 +vn 0.6980 0.7098 -0.0948 +vn 0.4557 0.5362 -0.7105 +vn 0.7030 0.7030 -0.1073 +vn 0.1559 0.8274 -0.5396 +vn 0.6562 0.7501 -0.0821 +vn 0.0087 0.9945 -0.1047 +vn 0.0356 0.9946 -0.0977 +vn 0.0759 0.3796 -0.9220 +vn 0.8696 -0.0000 -0.4938 +vn 0.7016 -0.0000 -0.7126 +vn 0.6435 -0.0000 -0.7654 +vn 0.7015 -0.0000 -0.7127 +vn 0.1733 -0.0000 -0.9849 +vn 0.6433 
-0.0000 -0.7656 +vn 0.6303 0.1851 -0.7539 +vn 0.1828 0.9815 -0.0577 +vn -0.0000 1.0000 -0.0079 +vn -0.0000 1.0000 -0.0080 +vn -0.0188 0.9800 -0.1979 +vn -0.0889 0.1877 -0.9782 +vn -0.0000 -0.0000 -1.0000 +vn -0.0690 -0.9759 -0.2070 +vn -0.0889 -0.1876 -0.9782 +vn 0.0317 -0.9198 -0.3912 +vn 0.0889 -0.1877 -0.9782 +vn 0.6303 -0.1853 -0.7539 +vn 0.1375 -0.9898 -0.0367 +vn 0.9500 -0.1856 -0.2511 +vn 0.9391 -0.0000 -0.3435 +vn 0.9392 -0.0000 -0.3433 +vn 0.9392 -0.0000 -0.3434 +vn 0.9392 -0.0000 -0.3435 +vn 0.9500 0.1856 -0.2511 +vn 0.9630 0.1905 -0.1905 +vn 0.3734 0.9224 -0.0988 +vn -0.1074 -0.0359 -0.9936 +vn -0.1005 0.2008 -0.9745 +vn 0.1004 0.2009 -0.9745 +vn 1.0000 -0.0000 -0.0000 +vn 0.0079 -0.0000 -1.0000 +vn 0.0325 -0.0000 -0.9995 +vn -0.0000 -1.0000 -0.0000 +vn -0.0000 0.0944 -0.9955 +vn -0.0520 0.0521 -0.9973 +vn -0.0000 0.1838 -0.9830 +vn -0.6079 0.1581 -0.7781 +vn -0.3584 0.4479 -0.8191 +vn -0.4987 0.3197 -0.8056 +vn -0.3197 0.4987 -0.8056 +vn -0.5496 0.0110 -0.8354 +vn -0.4807 0.3037 -0.8226 +vn -0.1854 0.0093 -0.9826 +vn -0.3037 0.4807 -0.8226 +vn -0.0002 0.1837 -0.9830 +vn -0.0521 0.0520 -0.9973 +vn -0.0945 -0.0000 -0.9955 +vn -0.0000 0.3679 -0.9299 +vn -0.0000 0.1837 -0.9830 +vn -0.0000 0.5446 -0.8387 +vn -0.0000 0.6314 -0.7754 +vn 0.0112 0.6247 -0.7808 +vn -0.0001 0.5446 -0.8387 +vn -0.3196 0.4987 -0.8057 +vn 0.0111 0.6247 -0.7808 +vn -0.0112 0.7071 -0.7071 +vn -0.1711 0.5990 -0.7823 +vn -0.0923 0.5308 -0.8425 +vn -0.3234 0.3981 -0.8584 +vn -0.1274 0.2655 -0.9557 +vn -0.2151 0.1936 -0.9572 +vn -0.3583 0.4478 -0.8192 +vn -0.5717 0.1310 -0.8099 +vn -0.1838 -0.0000 -0.9830 +vn -0.5495 0.0110 -0.8354 +vn -0.6300 0.0225 -0.7763 +vn -0.6957 0.0113 -0.7182 +vn -0.6958 0.0112 -0.7182 +vn -0.6314 -0.0000 -0.7755 +vn -0.5446 -0.0000 -0.8387 +vn -0.0944 -0.0000 -0.9955 +vn -0.3679 -0.0000 -0.9299 +vn -0.1837 -0.0010 -0.9830 +vn -0.1837 -0.0001 -0.9830 +vn -0.0520 -0.0521 -0.9973 +vn -0.0000 -0.3679 -0.9299 +vn -0.0000 -0.0944 -0.9955 +vn -0.0521 -0.0520 -0.9973 +vn 
0.0520 0.0521 -0.9973 +vn -0.0000 0.1944 -0.9809 +vn -0.0112 0.6247 -0.7808 +vn -0.0000 0.6314 -0.7755 +vn -0.0000 0.7016 -0.7126 +vn -0.0000 0.7127 -0.7015 +vn -0.0000 0.7126 -0.7015 +vn -0.0260 0.0780 -0.9966 +vn 0.6113 -0.6113 -0.5026 +vn 0.6676 -0.6276 -0.4006 +vn 0.6390 -0.5439 -0.5439 +vn 0.3418 0.4304 -0.8354 +vn 0.6472 0.3567 -0.6737 +vn -0.0000 -0.9988 -0.0495 +vn -0.0000 -0.9988 -0.0499 +vn -0.0000 -0.9988 -0.0496 +vn -0.0000 -0.9988 -0.0494 +vn -0.0000 -0.9988 -0.0497 +vn -0.6434 -0.0001 -0.7655 +vn -0.6434 -0.0000 -0.7655 +vn -0.3434 -0.0000 -0.9392 +vn -0.1733 -0.0000 -0.9849 +vn -0.8696 -0.0000 -0.4938 +vn -0.7754 -0.0000 -0.6315 +vn -0.8466 -0.1935 -0.4959 +vn -0.6304 -0.1853 -0.7539 +vn -0.1828 -0.9815 -0.0577 +vn -0.0188 -0.9800 -0.1980 +vn -0.0888 -0.1877 -0.9782 +vn 0.0186 -0.9822 -0.1871 +vn 0.0870 -0.3700 -0.9250 +vn 0.0889 0.1877 -0.9782 +vn -0.0759 0.3796 -0.9220 +vn 0.0188 0.9800 -0.1981 +vn -0.0187 0.9822 -0.1871 +vn -0.6302 0.1853 -0.7540 +vn -0.1375 0.9898 -0.0367 +vn -0.6866 0.6991 -0.1997 +vn -0.9500 0.1856 -0.2511 +vn -0.9424 0.0101 -0.3344 +vn -0.9392 -0.0000 -0.3434 +vn -0.7756 -0.0000 -0.6313 +vn -0.9500 -0.1856 -0.2513 +vn -0.3734 -0.9224 -0.0988 +vn -0.9500 -0.1856 -0.2511 +vn -0.9630 -0.1905 -0.1905 +vn -0.6473 0.3566 -0.6737 +vn -0.3418 0.4304 -0.8354 +vn -0.7034 0.3126 -0.6383 +vn -0.6991 0.1998 -0.6866 +vn -0.7035 0.3126 -0.6383 +vn -0.3677 0.4057 -0.8368 +vn -0.3678 0.4057 -0.8368 +vn -0.1777 0.4496 -0.8754 +vn -0.6578 0.3421 -0.6711 +vn -0.0699 0.6757 -0.7339 +vn -0.6090 0.4738 -0.6361 +vn -0.1045 0.9880 -0.1140 +vn -0.9436 0.2552 -0.2109 +vn 0.7655 -0.0000 0.6434 +vn 0.6434 -0.0000 0.7655 +vn 0.5949 -0.0000 0.8038 +vn 0.0923 0.5308 0.8425 +vn 0.0110 0.5496 0.8354 +vn 0.0001 0.1837 0.9830 +vn 0.5572 -0.0000 0.8304 +vn 0.6314 -0.0000 0.7755 +vn 0.0520 -0.0521 0.9973 +vn 0.1854 -0.0093 0.9826 +vn 0.1838 -0.0000 0.9830 +vn 0.5495 -0.0110 0.8354 +vn 0.5496 -0.0110 0.8354 +vn 0.4808 -0.3037 0.8226 +vn 0.0110 -0.5495 0.8354 +vn 
0.1964 -0.5524 0.8101 +vn 0.0001 -0.1838 0.9830 +vn -0.0000 -0.1837 0.9830 +vn -0.0000 -0.0944 0.9955 +vn -0.0000 -0.0000 1.0000 +vn 0.0944 -0.0000 0.9955 +vn 0.1837 -0.0000 0.9830 +vn 0.5446 -0.0000 0.8387 +vn 0.5445 -0.0000 0.8388 +vn 0.6314 -0.0000 0.7754 +vn 0.6957 -0.0112 0.7182 +vn 0.4870 -0.3332 0.8074 +vn 0.6079 -0.1581 0.7781 +vn 0.4479 -0.3584 0.8191 +vn 0.2248 -0.5746 0.7870 +vn 0.1561 -0.1977 0.9678 +vn 0.2331 -0.4785 0.8466 +vn 0.2646 -0.5291 0.8063 +vn -0.0000 -0.6671 0.7450 +vn -0.0000 -0.3679 0.9299 +vn -0.0000 -0.5446 0.8387 +vn -0.0000 -0.1838 0.9830 +vn -0.0000 -0.5572 0.8304 +vn -0.0000 -0.1735 0.9848 +vn -0.0000 -0.1732 0.9849 +vn 0.0521 0.0520 0.9973 +vn 0.6781 0.0226 0.7346 +vn 0.7127 -0.0000 0.7015 +vn 0.7012 -0.0226 0.7126 +vn 0.7126 -0.0000 0.7015 +vn 0.6958 -0.0112 0.7182 +vn 0.6958 -0.0113 0.7182 +vn 0.6367 -0.0112 0.7710 +vn 0.5534 -0.1178 0.8245 +vn 0.0955 -0.0087 0.9954 +vn 0.0695 -0.0347 0.9970 +vn 0.3698 -0.2505 0.8947 +vn 0.0525 -0.0612 0.9967 +vn 0.0260 -0.0780 0.9966 +vn 0.0937 -0.2708 0.9581 +vn 0.2645 -0.5291 0.8063 +vn 0.0225 -0.6300 0.7763 +vn 0.0226 -0.6898 0.7237 +vn -0.0000 -0.7126 0.7016 +vn -0.0000 -0.7127 0.7015 +vn -0.0000 -0.7015 0.7127 +vn -0.0000 -0.7014 0.7128 +vn 0.0111 -0.6246 0.7808 +vn -0.2248 -0.5746 0.7869 +vn -0.0923 -0.5307 0.8425 +vn -0.1964 -0.5523 0.8102 +vn -0.0093 -0.1854 0.9826 +vn -0.0520 -0.0521 0.9973 +vn -0.1854 -0.0093 0.9826 +vn -0.1837 -0.0000 0.9830 +vn -0.0944 -0.0000 0.9955 +vn -0.1838 -0.0000 0.9830 +vn -0.1943 -0.0000 0.9809 +vn -0.0080 -0.0000 1.0000 +vn -0.0520 0.0521 0.9973 +vn -0.0000 0.1837 0.9830 +vn 0.0519 0.0523 0.9973 +vn 0.4808 0.3037 0.8226 +vn 0.4870 0.3332 0.8074 +vn 0.6957 0.0113 0.7182 +vn 0.5291 0.2645 0.8063 +vn 0.4994 0.0795 0.8627 +vn 0.6671 -0.0000 0.7450 +vn 0.7016 -0.0000 0.7126 +vn 0.0946 -0.0000 0.9955 +vn 0.0945 -0.0000 0.9955 +vn 0.6368 -0.0112 0.7710 +vn 0.0955 -0.0086 0.9954 +vn -0.1944 -0.0000 0.9809 +vn -0.0852 -0.0000 0.9964 +vn -0.1387 0.1387 0.9806 +vn 
-0.0622 0.0711 0.9955 +vn -0.4854 0.4066 0.7740 +vn -0.2945 0.5506 0.7811 +vn -0.2553 0.6510 0.7148 +vn -0.3266 0.6140 0.7185 +vn -0.0113 0.7071 0.7071 +vn -0.0225 0.6422 0.7662 +vn -0.0000 0.0945 0.9955 +vn -0.0000 -0.6313 0.7755 +vn -0.0000 -0.6314 0.7755 +vn -0.0000 -0.6314 0.7754 +vn -0.0000 -0.7015 0.7126 +vn -0.0112 -0.7070 0.7071 +vn -0.0226 -0.6898 0.7237 +vn -0.2248 -0.5746 0.7870 +vn -0.1711 -0.5989 0.7823 +vn -0.3583 -0.4480 0.8191 +vn -0.4870 -0.3332 0.8074 +vn -0.4808 -0.3037 0.8226 +vn -0.5496 -0.0110 0.8354 +vn -0.5446 -0.0000 0.8387 +vn -0.4808 0.3037 0.8225 +vn -0.2635 0.2635 0.9280 +vn -0.0103 0.3714 0.9284 +vn -0.0000 0.0944 0.9955 +vn -0.0104 0.3714 0.9284 +vn -0.0000 0.5446 0.8387 +vn -0.0000 0.6314 0.7754 +vn 0.2246 0.5746 0.7870 +vn 0.1964 0.5522 0.8102 +vn 0.2248 0.5746 0.7870 +vn 0.3584 0.4479 0.8191 +vn 0.2277 0.1951 0.9540 +vn 0.0525 0.0612 0.9967 +vn 0.0694 0.0347 0.9970 +vn -0.1946 -0.0000 0.9809 +vn -0.0945 -0.0000 0.9955 +vn -0.6368 0.0112 0.7709 +vn -0.4965 0.2483 0.8318 +vn -0.6140 0.3266 0.7185 +vn -0.5489 0.4418 0.7096 +vn -0.5831 0.3844 0.7157 +vn -0.4968 0.4968 0.7116 +vn -0.2838 0.7095 0.6450 +vn -0.5396 0.8274 0.1559 +vn -0.5431 0.5295 0.6517 +vn -0.6981 0.6981 0.1592 +vn -0.6995 0.7112 0.0700 +vn -0.5481 0.8336 0.0685 +vn -0.4684 0.8810 0.0669 +vn -0.2551 0.6376 0.7269 +vn -0.4531 0.8572 0.2449 +vn -0.1977 0.9677 0.1561 +vn -0.1939 0.9793 0.0582 +vn -0.2878 0.9557 0.0617 +vn -0.2830 0.9469 0.1525 +vn -0.0112 0.7608 0.6489 +vn -0.0000 0.7126 0.7015 +vn -0.0112 0.7071 0.7071 +vn -0.0000 0.6314 0.7755 +vn -0.0225 -0.6300 0.7763 +vn -0.1054 -0.5624 0.8201 +vn -0.0653 -0.1307 0.9893 +vn -0.1711 -0.5988 0.7824 +vn -0.3234 -0.3981 0.8584 +vn -0.2011 -0.1799 0.9629 +vn -0.3584 -0.4479 0.8191 +vn -0.4947 -0.1035 0.8629 +vn -0.5291 -0.2645 0.8063 +vn -0.6957 -0.0113 0.7182 +vn -0.7012 -0.0227 0.7126 +vn -0.6314 -0.0000 0.7754 +vn -0.7126 -0.0000 0.7016 +vn -0.4870 0.3332 0.8074 +vn -0.5291 0.2646 0.8063 +vn -0.3584 0.4479 0.8191 +vn 
-0.2248 0.5746 0.7869 +vn -0.1964 0.5524 0.8101 +vn 0.0112 0.6246 0.7809 +vn -0.0000 0.6313 0.7755 +vn -0.0000 0.5572 0.8304 +vn -0.0000 0.6315 0.7754 +vn -0.0000 0.7015 0.7126 +vn -0.0000 0.7127 0.7015 +vn 0.0226 0.6898 0.7237 +vn -0.0000 0.7015 0.7127 +vn 0.1711 0.5989 0.7823 +vn 0.3234 0.3981 0.8584 +vn 0.0653 0.1307 0.9893 +vn 0.0524 0.0614 0.9967 +vn 0.0260 0.0780 0.9966 +vn -0.0800 -0.0534 0.9954 +vn -0.4303 -0.3418 0.8355 +vn -0.0861 -0.0086 0.9963 +vn -0.0859 -0.0086 0.9963 +vn -0.6313 -0.0000 0.7756 +vn -0.6314 -0.0000 0.7755 +vn -0.7126 -0.0000 0.7015 +vn -0.7072 0.0113 0.7069 +vn -0.7071 0.0112 0.7071 +vn -0.6960 0.1200 0.7080 +vn -0.7586 0.1325 0.6379 +vn -0.6432 0.4154 0.6432 +vn -0.8725 0.4598 0.1652 +vn -0.8879 0.4551 0.0666 +vn -0.8248 0.5613 0.0687 +vn -0.8181 0.5534 0.1564 +vn -0.6900 0.6772 0.2556 +vn -0.5164 0.5707 0.6384 +vn -0.6260 0.7767 0.0696 +vn -0.1037 0.1037 0.9892 +vn -0.5163 0.5707 0.6386 +vn -0.0942 0.1131 0.9891 +vn -0.0560 0.1399 0.9886 +vn -0.1328 0.6637 0.7361 +vn -0.0226 0.7012 0.7126 +vn -0.0287 0.9771 0.2108 +vn -0.0254 0.9984 0.0508 +vn -0.0183 0.9876 0.1556 +vn -0.0150 0.9877 0.1555 +vn -0.0000 0.9883 0.1528 +vn -0.0000 0.8130 0.5823 +vn -0.0000 0.9027 0.4303 +vn -0.0000 0.9883 0.1527 +vn -0.0001 0.9027 0.4303 +vn -0.0000 0.7654 0.6435 +vn -0.0000 0.7126 0.7016 +vn -0.0000 -0.0946 0.9955 +vn -0.0260 -0.0780 0.9966 +vn -0.0525 -0.0612 0.9967 +vn -0.0694 -0.0347 0.9970 +vn -0.0955 -0.0086 0.9954 +vn -0.6367 -0.0112 0.7710 +vn -0.6902 -0.0000 0.7236 +vn -0.7015 -0.0000 0.7126 +vn -0.6957 0.0113 0.7182 +vn -0.6311 0.0818 0.7714 +vn -0.4785 0.2331 0.8466 +vn -0.2427 0.1372 0.9603 +vn -0.3234 0.3981 0.8584 +vn -0.0659 0.1412 0.9878 +vn -0.1432 0.5489 0.8235 +vn -0.1711 0.5989 0.7823 +vn -0.0112 0.7070 0.7071 +vn -0.0226 0.6898 0.7237 +vn -0.0000 0.6313 0.7756 +vn -0.0000 0.3802 0.9249 +vn -0.0000 0.1943 0.9809 +vn 0.0225 0.6300 0.7763 +vn 0.0225 0.6299 0.7763 +vn 0.1054 0.5623 0.8202 +vn -0.0000 -0.3802 0.9249 +vn -0.0000 -0.6313 
0.7756 +vn -0.1193 -0.6203 0.7753 +vn -0.3930 -0.4979 0.7731 +vn -0.5615 -0.2807 0.7784 +vn -0.7127 -0.0001 0.7015 +vn -0.7006 -0.0460 0.7121 +vn -0.7613 -0.0577 0.6458 +vn -0.7655 -0.0000 0.6434 +vn -0.9027 -0.0000 0.4304 +vn -0.9027 -0.0000 0.4303 +vn -0.9877 0.0183 0.1555 +vn -0.9469 0.2830 0.1524 +vn -0.9557 0.2878 0.0617 +vn -0.9189 0.3892 0.0649 +vn -0.8947 0.3698 0.2505 +vn -0.5785 0.2390 0.7799 +vn -0.5163 0.5708 0.6385 +vn -0.5786 0.2390 0.7798 +vn -0.2662 0.1997 0.9430 +vn -0.8336 0.5481 0.0685 +vn -0.8810 0.4684 0.0669 +vn -0.8572 0.4531 0.2449 +vn -0.5047 0.4383 0.7438 +vn -0.1150 0.1150 0.9867 +vn -0.4636 0.4636 0.7550 +vn -0.4700 0.8693 0.1528 +vn -0.3762 0.9243 0.0645 +vn -0.2877 0.9557 0.0617 +vn -0.0277 0.1570 0.9872 +vn -0.0091 0.1646 0.9863 +vn -0.0000 0.1630 0.9866 +vn -0.0000 0.9197 0.3926 +vn -0.0000 0.9988 0.0497 +vn -0.0000 0.9988 0.0495 +vn -0.0000 0.7655 0.6434 +vn 0.1861 0.6822 0.7071 +vn 0.0112 0.7069 0.7072 +vn 0.0112 0.7071 0.7071 +vn 0.0112 0.6368 0.7710 +vn -0.0000 0.0947 0.9955 +vn 0.1944 -0.0000 0.9809 +vn 0.0852 -0.0000 0.9964 +vn -0.0946 -0.0000 0.9955 +vn -0.0780 0.0260 0.9966 +vn -0.0612 0.0525 0.9967 +vn -0.0525 0.0612 0.9967 +vn -0.0260 0.0780 0.9966 +vn -0.0225 0.6300 0.7763 +vn -0.0000 -0.9684 0.2493 +vn -0.0000 -0.7655 0.6434 +vn -0.0017 -0.9027 0.4303 +vn -0.0000 -0.9026 0.4305 +vn -0.0000 -0.7126 0.7015 +vn -0.0112 -0.7071 0.7071 +vn -0.1329 -0.6887 0.7128 +vn -0.1861 -0.6822 0.7071 +vn -0.4418 -0.5489 0.7096 +vn -0.3843 -0.5832 0.7157 +vn -0.4968 -0.4968 0.7116 +vn -0.6140 -0.3266 0.7185 +vn -0.6510 -0.2553 0.7148 +vn -0.7062 -0.2696 0.6546 +vn -0.9703 -0.1858 0.1549 +vn -0.9877 -0.0183 0.1556 +vn -0.9877 -0.0183 0.1555 +vn -0.9883 -0.0000 0.1527 +vn -0.9882 -0.0000 0.1529 +vn -0.9988 -0.0000 0.0497 +vn -0.9984 0.0254 0.0508 +vn -0.7012 0.0226 0.7126 +vn -0.9771 0.0287 0.2108 +vn -0.6762 0.1328 0.7246 +vn -0.8947 0.3698 0.2506 +vn -0.1493 0.0466 0.9877 +vn -0.9557 0.2877 0.0617 +vn -0.9275 0.2863 0.2405 +vn -0.6278 
0.2691 0.7304 +vn -0.1341 0.0958 0.9863 +vn -0.8909 0.4509 0.0550 +vn -0.8290 0.5564 0.0568 +vn -0.7818 0.6209 0.0575 +vn -0.7059 0.7059 0.0579 +vn -0.7843 0.6033 0.1448 +vn -0.7221 0.6732 0.1591 +vn -0.7233 0.6471 0.2411 +vn -0.6345 0.1586 0.7565 +vn -0.7158 0.3123 0.6246 +vn -0.6198 0.4446 0.6467 +vn -0.5249 0.4845 0.6998 +vn -0.6598 0.6060 0.4444 +vn -0.6603 0.7111 0.2415 +vn -0.5615 0.7784 0.2809 +vn -0.5886 0.7764 0.2252 +vn -0.0966 0.1449 0.9847 +vn -0.3261 0.6000 0.7305 +vn -0.3261 0.6002 0.7304 +vn -0.4699 0.8694 0.1528 +vn -0.1067 0.6283 0.7706 +vn -0.2805 0.9495 0.1403 +vn -0.0191 0.9753 0.2200 +vn -0.0000 0.3555 0.9347 +vn -0.0000 0.9988 0.0498 +vn 0.0254 0.9984 0.0508 +vn 0.0091 0.9880 0.1542 +vn 0.0112 0.7608 0.6489 +vn 0.2130 0.7392 0.6389 +vn 0.0091 0.9880 0.1541 +vn 0.3764 0.9126 0.1597 +vn 0.5396 0.8274 0.1559 +vn 0.4154 0.6432 0.6432 +vn 0.3844 0.5831 0.7157 +vn 0.3266 0.6140 0.7185 +vn 0.2945 0.5506 0.7811 +vn 0.0112 0.6367 0.7710 +vn 0.2770 0.2655 0.9235 +vn 0.3197 -0.2055 0.9250 +vn 0.0622 -0.0711 0.9955 +vn -0.0000 -0.0852 0.9964 +vn 0.0112 -0.6367 0.7710 +vn 0.0112 -0.7071 0.7071 +vn -0.0000 -0.7124 0.7017 +vn 0.0112 -0.7608 0.6489 +vn -0.0000 -0.7656 0.6433 +vn 0.0372 -0.9867 0.1583 +vn 0.0373 -0.9867 0.1584 +vn -0.0000 -0.9883 0.1527 +vn -0.0000 -0.9883 0.1528 +vn -0.0000 -0.9026 0.4304 +vn -0.0091 -0.9880 0.1541 +vn -0.2130 -0.7393 0.6388 +vn -0.3764 -0.9126 0.1599 +vn -0.5396 -0.8274 0.1560 +vn -0.4154 -0.6433 0.6431 +vn -0.5431 -0.5295 0.6517 +vn -0.6981 -0.6981 0.1592 +vn -0.8219 -0.5439 0.1692 +vn -0.9126 -0.3764 0.1598 +vn -0.9189 -0.3892 0.0649 +vn -0.9793 -0.1939 0.0582 +vn -0.9936 -0.0994 0.0542 +vn -0.9984 -0.0254 0.0508 +vn -0.9988 -0.0000 0.0495 +vn -0.9988 -0.0000 0.0496 +vn -0.9764 -0.0000 0.2160 +vn -0.7016 -0.0000 0.7126 +vn -0.9771 0.0287 0.2109 +vn -0.7015 -0.0000 0.7127 +vn -0.1630 -0.0000 0.9866 +vn -0.1644 0.0091 0.9863 +vn -0.9216 0.0209 0.3875 +vn -0.9771 0.0287 0.2107 +vn -0.6637 0.1328 0.7361 +vn -0.6636 0.1327 
0.7362 +vn -0.1598 0.0470 0.9860 +vn -0.9571 0.2851 0.0509 +vn -0.9607 0.2730 0.0506 +vn -0.7034 -0.3126 0.6383 +vn -0.9733 -0.0695 0.2187 +vn -0.6877 -0.2985 0.6618 +vn -0.7080 -0.1200 0.6960 +vn -0.9886 0.0559 0.1399 +vn -0.9603 0.2427 0.1372 +vn -0.6843 0.0112 0.7291 +vn -0.8911 0.4281 0.1505 +vn -0.0768 -0.4059 0.9107 +vn -0.2539 -0.0710 0.9646 +vn -0.3038 0.1302 0.9438 +vn -0.5564 0.6514 0.5158 +vn -0.3616 0.8006 0.4778 +vn -0.0669 0.1625 0.9844 +vn -0.2408 0.7351 0.6338 +vn -0.3615 0.8006 0.4778 +vn -0.0820 0.7501 0.6562 +vn -0.0184 0.1659 0.9860 +vn -0.0000 0.1733 0.9849 +vn -0.0226 0.7013 0.7125 +vn -0.0000 0.9764 0.2160 +vn -0.0000 0.9197 0.3927 +vn -0.0000 0.9764 0.2159 +vn -0.0000 0.9739 0.2271 +vn 0.0091 0.1644 0.9863 +vn 0.0226 0.7012 0.7126 +vn 0.0287 0.9771 0.2108 +vn 0.1939 0.9793 0.0582 +vn 0.3762 0.9243 0.0645 +vn 0.6995 0.7112 0.0700 +vn 0.5481 0.8336 0.0685 +vn 0.6981 0.6981 0.1593 +vn 0.5431 0.5295 0.6517 +vn 0.4968 0.4968 0.7116 +vn 0.6140 0.3266 0.7185 +vn 0.5615 0.2807 0.7784 +vn 0.6369 0.0112 0.7709 +vn 0.6315 -0.0000 0.7754 +vn 0.6313 -0.0000 0.7755 +vn 0.4966 -0.2483 0.8317 +vn 0.4829 -0.3916 0.7833 +vn 0.2945 -0.5506 0.7811 +vn 0.3266 -0.6140 0.7185 +vn 0.2553 -0.6510 0.7148 +vn 0.4598 -0.8725 0.1652 +vn 0.2838 -0.7095 0.6450 +vn 0.2830 -0.9469 0.1524 +vn 0.2877 -0.9557 0.0617 +vn 0.0254 -0.9984 0.0508 +vn 0.1939 -0.9793 0.0582 +vn -0.0000 -0.9988 0.0497 +vn -0.0000 -0.9988 0.0495 +vn -0.0254 -0.9984 0.0508 +vn -0.2878 -0.9557 0.0617 +vn -0.3761 -0.9243 0.0645 +vn -0.5481 -0.8336 0.0685 +vn -0.6995 -0.7112 0.0700 +vn -0.8247 -0.5613 0.0687 +vn -0.8248 -0.5613 0.0687 +vn -0.8035 -0.5398 0.2511 +vn -0.8947 -0.3698 0.2505 +vn -0.6297 -0.2267 0.7430 +vn -0.9678 -0.0917 0.2343 +vn -0.6757 -0.0699 0.7339 +vn -0.9712 -0.0000 0.2383 +vn -0.9739 -0.0000 0.2270 +vn -0.9764 -0.0000 0.2159 +vn -0.9197 -0.0000 0.3926 +vn -0.1646 0.0091 0.9863 +vn -0.9954 0.0800 0.0533 +vn -0.9986 0.0168 0.0503 +vn -0.9903 -0.0273 0.1363 +vn -0.9747 -0.0796 0.2089 +vn 
-0.7004 -0.2983 0.6485 +vn -0.1899 -0.4391 0.8782 +vn -0.1775 -0.4496 0.8754 +vn -0.1019 -0.4404 0.8920 +vn -0.0891 -0.4230 0.9017 +vn -0.0000 -0.4557 0.8902 +vn 0.5553 -0.5011 0.6637 +vn 0.0672 -0.4817 0.8738 +vn 0.4877 -0.5825 0.6503 +vn 0.5581 -0.6262 0.5445 +vn 0.4154 -0.6432 0.6432 +vn 0.0444 -0.4992 0.8653 +vn 0.6414 -0.7184 0.2694 +vn 0.3417 -0.6966 0.6308 +vn 0.0331 -0.5078 0.8608 +vn -0.0000 -0.4557 0.8901 +vn -0.0760 -0.3796 0.9220 +vn -0.0315 -0.3783 0.9252 +vn -0.1423 0.0759 0.9869 +vn -0.2401 0.6063 0.7581 +vn -0.2400 0.6064 0.7581 +vn -0.0226 0.6897 0.7237 +vn -0.0226 0.7452 0.6664 +vn -0.0000 0.6902 0.7236 +vn -0.0000 0.7448 0.6673 +vn -0.0000 0.3679 0.9299 +vn -0.0000 0.3557 0.9346 +vn -0.0000 0.1631 0.9866 +vn -0.0000 0.7016 0.7126 +vn 0.0277 0.1569 0.9872 +vn 0.1327 0.6637 0.7361 +vn 0.1842 0.9535 0.2384 +vn 0.3698 0.8947 0.2505 +vn 0.3261 0.6000 0.7305 +vn 0.6900 0.6772 0.2557 +vn 0.4803 0.4803 0.7339 +vn 0.8571 0.4531 0.2450 +vn 0.8810 0.4684 0.0669 +vn 0.8725 0.4599 0.1651 +vn 0.7095 0.2838 0.6450 +vn 0.6510 0.2553 0.7148 +vn 0.7071 0.0113 0.7071 +vn 0.7071 0.0112 0.7071 +vn 0.9684 -0.0000 0.2493 +vn 0.7656 -0.0000 0.6433 +vn 0.7015 -0.0000 0.7126 +vn 0.7059 -0.0579 0.7059 +vn 0.6960 -0.1200 0.7080 +vn 0.6140 -0.3266 0.7185 +vn 0.5832 -0.3843 0.7157 +vn 0.5489 -0.4418 0.7096 +vn 0.4968 -0.4968 0.7116 +vn 0.5431 -0.5295 0.6517 +vn 0.6980 -0.6981 0.1594 +vn 0.6995 -0.7112 0.0700 +vn 0.4684 -0.8810 0.0669 +vn 0.2878 -0.9557 0.0617 +vn 0.4682 -0.8811 0.0669 +vn 0.4531 -0.8572 0.2449 +vn 0.1842 -0.9535 0.2384 +vn 0.1994 -0.6479 0.7352 +vn 0.0112 -0.6489 0.7608 +vn 0.0191 -0.9753 0.2199 +vn -0.0000 -0.6554 0.7553 +vn -0.0000 -0.9764 0.2160 +vn -0.0000 -0.4937 0.8696 +vn -0.0000 -0.5447 0.8386 +vn -0.0226 -0.7013 0.7125 +vn -0.0191 -0.9753 0.2199 +vn -0.0226 -0.7012 0.7126 +vn -0.0192 -0.9753 0.2200 +vn -0.1327 -0.6637 0.7361 +vn -0.2749 -0.9279 0.2520 +vn -0.0970 -0.3340 0.9376 +vn -0.3261 -0.6000 0.7305 +vn -0.5398 -0.8035 0.2511 +vn -0.4803 -0.4803 
0.7339 +vn -0.6900 -0.6772 0.2556 +vn -0.1319 -0.0753 0.9884 +vn -0.1569 -0.0277 0.9872 +vn -0.1629 -0.0000 0.9866 +vn -0.1631 -0.0000 0.9866 +vn -0.9739 -0.0000 0.2269 +vn -0.9197 0.0001 0.3927 +vn -0.7014 -0.0000 0.7128 +vn -0.1632 -0.0000 0.9866 +vn -0.9912 -0.0364 0.1273 +vn -0.9771 -0.0790 0.1976 +vn -0.9771 -0.0789 0.1974 +vn -0.9954 -0.0087 0.0952 +vn -0.6886 -0.1329 0.7128 +vn -0.1889 -0.4249 0.8853 +vn -0.0904 -0.4634 0.8815 +vn -0.0900 -0.4499 0.8885 +vn -0.0000 -0.4556 0.8902 +vn 0.0555 -0.4641 0.8840 +vn 0.1651 -0.4598 0.8725 +vn 0.6472 -0.3567 0.6737 +vn 0.6041 -0.4296 0.6712 +vn 0.6637 -0.5011 0.5553 +vn 0.5996 -0.5860 0.5451 +vn 0.6467 -0.6198 0.4446 +vn 0.5920 -0.6727 0.4440 +vn 0.6936 -0.6679 0.2699 +vn 0.7275 -0.6522 0.2132 +vn 0.6781 -0.7033 0.2135 +vn 0.6563 -0.7194 0.2272 +vn 0.6415 -0.3273 -0.6938 +vn 0.7345 -0.6771 0.0458 +vn 0.6381 -0.7585 -0.1326 +vn 0.5978 -0.8009 0.0338 +vn 0.5322 -0.8168 0.2228 +vn 0.4749 -0.8523 0.2192 +vn 0.4451 -0.8531 0.2723 +vn 0.1726 -0.6903 0.7026 +vn 0.0219 -0.5031 0.8639 +vn -0.0207 -0.3623 0.9318 +vn -0.0207 -0.3622 0.9319 +vn -0.1423 0.0760 0.9869 +vn -0.0208 0.3749 0.9268 +vn -0.0000 0.7553 0.6554 +vn -0.0000 0.6193 0.7852 +vn -0.0000 0.6788 0.7343 +vn -0.0000 0.3556 0.9346 +vn -0.0000 0.1734 0.9848 +vn 0.0191 0.9753 0.2202 +vn 0.0191 0.9753 0.2200 +vn 0.0659 0.1412 0.9878 +vn 0.1898 0.2903 0.9379 +vn 0.1036 0.1036 0.9892 +vn 0.2794 0.2012 0.9389 +vn 0.6000 0.3261 0.7305 +vn 0.9275 0.2863 0.2405 +vn 0.9557 0.2878 0.0617 +vn 0.9557 0.2877 0.0617 +vn 0.8810 0.4683 0.0669 +vn 0.9793 0.1939 0.0582 +vn 0.9580 0.2448 0.1491 +vn 0.9877 0.0183 0.1555 +vn 0.9883 -0.0000 0.1528 +vn 0.9847 -0.0965 0.1448 +vn 0.7585 -0.1324 0.6381 +vn 0.9469 -0.2830 0.1524 +vn 0.6432 -0.4154 0.6432 +vn 0.8219 -0.5439 0.1694 +vn 0.8248 -0.5613 0.0687 +vn 0.8247 -0.5613 0.0687 +vn 0.8035 -0.5398 0.2511 +vn 0.6900 -0.6772 0.2556 +vn 0.4618 -0.4486 0.7652 +vn 0.3261 -0.6000 0.7305 +vn 0.3260 -0.6001 0.7305 +vn 0.0753 -0.1318 0.9884 +vn 
0.0275 -0.1466 0.9888 +vn 0.0091 -0.1645 0.9863 +vn -0.0000 -0.1631 0.9866 +vn -0.0000 -0.1630 0.9866 +vn -0.0277 -0.1569 0.9872 +vn -0.0564 -0.1504 0.9870 +vn -0.0848 -0.1224 0.9888 +vn -0.1038 -0.1037 0.9892 +vn -0.8810 -0.4684 0.0669 +vn -0.8248 -0.5612 0.0687 +vn -0.8810 -0.4682 0.0679 +vn -0.9973 -0.0520 0.0521 +vn -0.9739 -0.0000 0.2271 +vn -0.0961 -0.0175 0.9952 +vn -0.0940 -0.2707 0.9581 +vn -0.1011 -0.4268 0.8987 +vn -0.1011 -0.4269 0.8986 +vn -0.0000 -0.4303 0.9027 +vn -0.0000 -0.4430 0.8965 +vn 0.0904 -0.4634 0.8815 +vn 0.1777 -0.4495 0.8754 +vn 0.6578 -0.3421 0.6710 +vn 0.9441 -0.1647 0.2854 +vn 0.9665 -0.1351 0.2183 +vn 0.9535 -0.1842 0.2385 +vn 0.8759 -0.4015 0.2677 +vn 0.7535 -0.6003 0.2682 +vn 0.7834 -0.5844 0.2114 +vn 0.7728 -0.5983 0.2119 +vn 0.8376 -0.5433 0.0566 +vn 0.7191 -0.1860 -0.6695 +vn 0.2575 0.3161 -0.9131 +vn 0.2728 0.2387 -0.9320 +vn 0.6415 -0.3271 -0.6939 +vn 0.2490 0.1733 -0.9529 +vn 0.2491 0.1733 -0.9528 +vn 0.5796 -0.6738 -0.4583 +vn 0.5101 -0.7115 -0.4833 +vn 0.4426 -0.8961 0.0323 +vn 0.3892 -0.8963 0.2123 +vn 0.3197 -0.9249 0.2057 +vn 0.3022 -0.9183 0.2558 +vn 0.1610 -0.9555 0.2470 +vn 0.1072 -0.7149 0.6910 +vn 0.0112 -0.7292 0.6842 +vn -0.0000 -0.4684 0.8835 +vn -0.0207 -0.3624 0.9318 +vn -0.0000 -0.1733 0.9849 +vn -0.0000 -0.3073 0.9516 +vn -0.0000 0.6786 0.7345 +vn -0.0000 0.6901 0.7237 +vn -0.0000 0.7448 0.6672 +vn 0.0226 0.6897 0.7237 +vn 0.0226 0.7453 0.6664 +vn 0.0226 0.7452 0.6664 +vn 0.0820 0.7501 0.6562 +vn 0.0184 0.1659 0.9860 +vn 0.1994 0.6479 0.7351 +vn 0.3698 0.8947 0.2506 +vn 0.5398 0.8034 0.2511 +vn 0.6260 0.7767 0.0696 +vn 0.8336 0.5481 0.0685 +vn 0.1318 0.0753 0.9884 +vn 0.6479 0.1994 0.7352 +vn 0.6480 0.1993 0.7351 +vn 0.9275 0.2862 0.2405 +vn 0.7012 0.0226 0.7126 +vn 0.9771 0.0287 0.2108 +vn 0.9984 0.0254 0.0508 +vn 0.9988 -0.0000 0.0497 +vn 0.9988 -0.0000 0.0496 +vn 0.9988 -0.0000 0.0495 +vn 0.9936 -0.0994 0.0542 +vn 0.9557 -0.2878 0.0617 +vn 0.9189 -0.3892 0.0649 +vn 0.8947 -0.3698 0.2505 +vn 0.6000 -0.3261 
0.7305 +vn 0.4617 -0.4486 0.7652 +vn 0.2351 -0.2462 0.9403 +vn 0.0754 -0.1318 0.9884 +vn 0.5481 -0.8336 0.0685 +vn 0.4022 -0.9132 0.0652 +vn 0.0191 -0.9753 0.2201 +vn -0.0000 -0.9764 0.2161 +vn -0.0000 -0.7016 0.7126 +vn -0.1939 -0.9793 0.0582 +vn -0.2877 -0.9557 0.0617 +vn -0.3762 -0.9243 0.0645 +vn -0.5398 -0.8035 0.2510 +vn -0.4804 -0.4804 0.7338 +vn -0.8034 -0.5399 0.2512 +vn -0.5661 -0.3686 0.7373 +vn -0.5685 -0.2526 0.7830 +vn -0.8947 -0.3698 0.2506 +vn -0.6890 -0.0459 0.7233 +vn -0.9678 -0.0917 0.2344 +vn -0.9712 -0.0000 0.2381 +vn -0.9898 -0.0000 0.1427 +vn -0.0000 -0.0242 0.9997 +vn -0.0000 -0.2721 0.9623 +vn -0.0000 -0.2722 0.9622 +vn 0.0938 -0.2708 0.9581 +vn 0.1011 -0.4269 0.8986 +vn 0.1889 -0.4249 0.8853 +vn 0.6681 -0.3275 0.6681 +vn 0.9384 -0.1546 0.3091 +vn 0.9644 -0.1141 0.2385 +vn 0.9656 -0.1246 0.2284 +vn 0.9842 -0.0868 0.1544 +vn 0.9830 -0.0973 0.1557 +vn 0.9853 -0.0765 0.1530 +vn 0.9844 0.0671 -0.1624 +vn 0.9834 0.0772 -0.1640 +vn 0.9998 -0.0161 -0.0081 +vn 0.9817 -0.1080 0.1571 +vn 0.9768 -0.1410 0.1611 +vn 0.9286 -0.3171 0.1925 +vn 0.9128 -0.3582 0.1964 +vn 0.8657 -0.4569 0.2044 +vn 0.9211 -0.3856 0.0535 +vn 0.7507 0.0341 -0.6597 +vn 0.7508 0.0339 -0.6597 +vn -0.3537 0.5633 -0.7467 +vn -0.5706 0.6386 -0.5163 +vn -0.2967 0.5934 -0.7482 +vn -0.2123 0.6120 -0.7618 +vn -0.1456 0.6429 -0.7520 +vn 0.4503 -0.4767 -0.7550 +vn 0.3806 -0.5250 -0.7613 +vn 0.1979 -0.9800 0.0188 +vn 0.1799 -0.9629 0.2010 +vn 0.0186 -0.9841 0.1764 +vn 0.0193 -0.9727 0.2312 +vn 0.0112 -0.7291 0.6843 +vn -0.0000 -0.7344 0.6787 +vn -0.0000 -0.5066 0.8622 +vn -0.0000 0.0325 0.9995 +vn 0.0082 0.0328 0.9994 +vn 0.0539 0.3985 0.9156 +vn 0.0539 0.3986 0.9155 +vn 0.2408 0.7351 0.6337 +vn 0.3615 0.8006 0.4779 +vn 0.0669 0.1627 0.9844 +vn 0.0966 0.1449 0.9847 +vn 0.1150 0.1150 0.9867 +vn 0.4748 0.4352 0.7649 +vn 0.6900 0.6772 0.2556 +vn 0.8571 0.4531 0.2449 +vn 0.6279 0.2691 0.7303 +vn 0.1555 0.0183 0.9877 +vn 0.3589 0.0103 0.9333 +vn 0.1645 0.0091 0.9863 +vn 0.3679 -0.0000 0.9299 +vn 
0.7015 -0.0000 0.7127 +vn 0.9764 -0.0000 0.2160 +vn 0.6902 -0.0000 0.7236 +vn 0.9739 -0.0000 0.2269 +vn 0.3555 -0.0000 0.9347 +vn 0.9712 -0.0000 0.2381 +vn 0.9678 -0.0917 0.2343 +vn 0.6637 -0.1328 0.7361 +vn 0.1319 -0.0753 0.9884 +vn 0.8810 -0.4684 0.0669 +vn 0.8034 -0.5398 0.2511 +vn 0.6900 -0.6772 0.2557 +vn 0.4803 -0.4803 0.7339 +vn 0.5398 -0.8035 0.2511 +vn 0.3698 -0.8947 0.2507 +vn 0.2135 -0.6781 0.7033 +vn 0.0226 -0.7013 0.7125 +vn 0.0192 -0.9753 0.2200 +vn 0.0102 -0.3590 0.9333 +vn -0.0000 -0.3679 0.9298 +vn -0.0191 -0.9753 0.2201 +vn -0.1327 -0.6638 0.7361 +vn -0.2749 -0.9278 0.2521 +vn -0.2551 -0.6376 0.7269 +vn -0.0848 -0.1225 0.9888 +vn -0.1036 -0.1037 0.9892 +vn -0.1037 -0.1037 0.9892 +vn -0.1225 -0.0848 0.9888 +vn -0.9557 -0.2878 0.0617 +vn -0.1491 -0.0466 0.9877 +vn -0.1570 -0.0277 0.9872 +vn -0.9888 -0.0275 0.1465 +vn -0.0947 -0.0000 0.9955 +vn 0.0963 -0.0175 0.9952 +vn 0.6663 -0.0226 0.7454 +vn 0.0947 -0.0000 0.9955 +vn 0.9392 -0.0000 0.3434 +vn 0.9379 -0.0102 0.3467 +vn 0.6636 -0.1327 0.7362 +vn 0.0938 -0.2707 0.9581 +vn 0.6638 -0.1328 0.7360 +vn 0.9646 -0.0711 0.2539 +vn 0.9843 -0.0473 0.1703 +vn 0.9835 0.0771 -0.1639 +vn 0.6445 0.3419 -0.6839 +vn 0.6445 0.3420 -0.6839 +vn 0.9844 0.0670 -0.1626 +vn 0.6443 0.3420 -0.6840 +vn 0.6445 0.3418 -0.6839 +vn 0.7266 0.1724 -0.6651 +vn 0.9720 -0.2333 0.0292 +vn -0.4683 0.5085 -0.7226 +vn -0.6760 0.5409 -0.5004 +vn -0.7391 0.5543 -0.3827 +vn -0.6388 0.6655 -0.3861 +vn -0.5831 0.7157 -0.3844 +vn -0.4057 0.8368 -0.3677 +vn -0.2768 0.8905 -0.3610 +vn -0.0819 0.6438 -0.7608 +vn -0.0821 0.6438 -0.7608 +vn 0.2123 -0.6120 -0.7618 +vn 0.0244 -0.9997 0.0081 +vn 0.0281 -0.9836 0.1780 +vn -0.0000 -0.9849 0.1732 +vn -0.0000 -0.9849 0.1733 +vn -0.0000 -0.9739 0.2271 +vn -0.0000 -0.8039 0.5947 +vn -0.0000 -0.4558 0.8901 +vn -0.0000 -0.8039 0.5948 +vn -0.0000 -0.3801 0.9249 +vn 0.0207 -0.3623 0.9318 +vn 0.0538 0.3984 0.9157 +vn 0.1423 0.0759 0.9869 +vn 0.2400 0.6064 0.7581 +vn 0.2408 0.7351 0.6338 +vn 0.3616 0.8006 0.4778 
+vn 0.5565 0.6514 0.5157 +vn 0.5615 0.7784 0.2808 +vn 0.5249 0.4845 0.6998 +vn 0.6598 0.6060 0.4443 +vn 0.6603 0.7111 0.2413 +vn 0.7222 0.6732 0.1591 +vn 0.5886 0.7764 0.2252 +vn 0.7059 0.7059 0.0579 +vn 0.8289 0.5565 0.0568 +vn 0.1341 0.0958 0.9863 +vn 0.1598 0.0470 0.9860 +vn 0.6637 0.1327 0.7361 +vn 0.9771 0.0288 0.2109 +vn 0.1631 -0.0000 0.9866 +vn 0.1630 -0.0000 0.9866 +vn 0.1569 -0.0277 0.9872 +vn 0.9793 -0.1939 0.0582 +vn 0.9278 -0.2749 0.2521 +vn 0.6337 -0.3564 0.6866 +vn 0.2903 -0.1898 0.9379 +vn 0.1037 -0.1037 0.9892 +vn 0.0753 -0.1319 0.9884 +vn 0.0277 -0.1570 0.9872 +vn -0.0000 -0.1629 0.9866 +vn -0.0000 -0.8304 0.5572 +vn -0.0000 -0.9197 0.3927 +vn -0.0000 -0.9739 0.2269 +vn -0.0000 -0.9898 0.1427 +vn -0.0000 -0.9988 0.0496 +vn -0.0277 -0.1568 0.9872 +vn -0.1080 -0.3241 0.9398 +vn -0.4684 -0.8810 0.0669 +vn -0.6995 -0.7112 0.0702 +vn -0.6995 -0.7112 0.0699 +vn -0.8725 -0.4599 0.1652 +vn -0.7157 -0.3844 0.5831 +vn -0.9428 -0.2960 0.1536 +vn -0.6824 -0.0578 0.7287 +vn -0.0769 -0.0085 0.9970 +vn 0.5947 -0.0000 0.8040 +vn 0.0853 -0.0000 0.9964 +vn 0.1733 -0.0000 0.9849 +vn 0.7655 -0.0000 0.6435 +vn 0.9654 -0.0000 0.2607 +vn 0.9654 -0.0000 0.2609 +vn 0.9654 -0.0000 0.2608 +vn 0.9849 -0.0000 0.1733 +vn 1.0000 -0.0000 -0.0079 +vn 0.9830 -0.0000 -0.1838 +vn 0.6428 0.1456 -0.7521 +vn 0.6414 0.3273 -0.6939 +vn -0.0550 0.4641 -0.8841 +vn -0.0552 0.4641 -0.8841 +vn -0.6336 0.3565 -0.6866 +vn -0.0553 0.4641 -0.8840 +vn -0.6364 0.3712 -0.6762 +vn -0.6364 0.3711 -0.6762 +vn -0.8725 0.2424 -0.4242 +vn -0.6363 0.3712 -0.6763 +vn -0.5634 0.4427 -0.6976 +vn -0.8732 0.3567 -0.3321 +vn -0.9379 0.2903 -0.1898 +vn -0.9286 0.3171 -0.1925 +vn -0.8322 0.5140 -0.2080 +vn -0.8230 0.5282 -0.2088 +vn -0.7275 0.6522 -0.2132 +vn -0.7155 0.6653 -0.2134 +vn -0.6781 0.7033 -0.2135 +vn -0.6563 0.7195 -0.2272 +vn -0.4605 0.8604 -0.2181 +vn -0.5464 0.8072 -0.2235 +vn -0.4034 0.8898 -0.2136 +vn -0.3197 0.9250 -0.2055 +vn -0.2532 0.9469 -0.1982 +vn -0.0890 0.9782 -0.1877 +vn -0.0750 0.9323 
-0.3537 +vn -0.0225 0.6175 -0.7862 +vn 0.2122 -0.6121 -0.7617 +vn 0.0226 -0.6664 -0.7453 +vn 0.0226 -0.6663 -0.7453 +vn -0.0002 0.3679 -0.9299 +vn -0.0000 -0.6671 -0.7450 +vn -0.0000 -0.6671 -0.7449 +vn -0.0000 -1.0000 0.0079 +vn -0.0000 -0.9739 0.2270 +vn -0.0108 -0.4985 0.8668 +vn 0.0316 -0.3784 0.9251 +vn 0.2732 0.1051 0.9562 +vn 0.6762 0.3712 0.6364 +vn 0.7248 0.2977 0.6213 +vn 0.7233 0.6471 0.2411 +vn 0.8414 0.5214 0.1422 +vn 0.8376 0.5434 0.0566 +vn 0.8291 0.5564 0.0556 +vn 0.9571 0.2851 0.0509 +vn 0.8376 0.5433 0.0566 +vn 0.9607 0.2730 0.0506 +vn 0.9986 0.0168 0.0503 +vn 0.1644 0.0091 0.9863 +vn 0.1644 0.0092 0.9863 +vn 0.9771 0.0287 0.2109 +vn 0.9984 -0.0254 0.0508 +vn 0.9954 -0.0800 0.0524 +vn 0.9954 -0.0800 0.0533 +vn 0.9678 -0.0917 0.2345 +vn 0.1102 -0.0643 0.9918 +vn 0.6995 -0.7112 0.0699 +vn 0.1939 -0.9794 0.0572 +vn -0.0000 -0.9911 0.1330 +vn -0.0000 -0.9764 0.2159 +vn -0.0255 -0.9984 0.0508 +vn -0.1940 -0.9793 0.0582 +vn -0.2830 -0.9469 0.1525 +vn -0.4598 -0.8725 0.1652 +vn -0.3844 -0.7157 0.5831 +vn -0.5728 -0.5728 0.5864 +vn -0.4366 -0.2994 0.8484 +vn -0.6000 -0.3261 0.7305 +vn -0.6479 -0.1994 0.7352 +vn -0.6824 -0.0579 0.7287 +vn -0.0875 -0.0262 0.9958 +vn 0.4905 0.0557 0.8696 +vn 0.7409 0.1457 0.6556 +vn 0.9581 0.0937 0.2708 +vn 0.9782 0.0889 0.1878 +vn 0.9768 0.0997 0.1894 +vn 0.9602 0.1920 0.2027 +vn 0.0161 0.0081 -0.9998 +vn 0.6436 0.1856 -0.7425 +vn 0.6582 0.0577 -0.7506 +vn 0.9816 0.1888 0.0282 +vn 0.9952 0.0960 0.0173 +vn 1.0000 -0.0000 -0.0080 +vn 1.0000 -0.0000 -0.0081 +vn 0.9830 -0.0000 -0.1837 +vn 0.6422 0.0225 -0.7662 +vn -0.6422 0.0226 -0.7662 +vn -0.6415 0.3272 -0.6938 +vn -0.0554 0.4641 -0.8841 +vn -0.6337 0.3565 -0.6866 +vn -0.6415 0.3273 -0.6938 +vn -0.8758 0.2159 -0.4318 +vn -0.8757 0.2159 -0.4319 +vn -0.8725 0.2424 -0.4243 +vn -0.9469 0.1524 -0.2830 +vn -0.9441 0.1647 -0.2855 +vn -0.9441 0.1647 -0.2856 +vn -0.9287 0.3170 -0.1925 +vn -0.9817 0.1080 -0.1571 +vn -0.9830 0.0974 -0.1557 +vn -0.6445 0.3419 -0.6839 +vn -0.6200 0.3562 
-0.6991 +vn -0.1651 0.4598 -0.8725 +vn -0.6473 0.3567 -0.6736 +vn -0.6040 0.4296 -0.6713 +vn -0.9398 0.1659 -0.2987 +vn -0.9398 0.1659 -0.2986 +vn -0.8796 0.3542 -0.3176 +vn -0.6041 0.4296 -0.6712 +vn -0.8795 0.3543 -0.3177 +vn -0.5157 0.5564 -0.6515 +vn -0.7645 0.5572 -0.3240 +vn -0.4877 0.5825 -0.6503 +vn -0.6681 0.6681 -0.3276 +vn -0.4591 0.6076 -0.6481 +vn -0.6140 0.7186 -0.3266 +vn -0.0923 0.5309 -0.8424 +vn -0.3417 0.6966 -0.6308 +vn -0.4870 0.8074 -0.3332 +vn -0.2687 0.7421 -0.6141 +vn -0.3567 0.8732 -0.3321 +vn -0.1193 0.6203 -0.7752 +vn -0.2305 0.9220 -0.3112 +vn -0.0697 0.6387 -0.7663 +vn -0.0226 0.6664 -0.7452 +vn -0.0198 0.9606 -0.2773 +vn -0.0186 0.9841 -0.1764 +vn -0.0281 0.9836 -0.1780 +vn 0.3751 0.4655 -0.8017 +vn 0.1994 0.6479 -0.7351 +vn 0.6993 0.4708 -0.5379 +vn 0.1828 0.9569 -0.2258 +vn 0.6993 0.4707 -0.5379 +vn 0.1829 0.9568 -0.2258 +vn 0.7026 0.6903 -0.1726 +vn 0.7070 0.6823 -0.1860 +vn 0.7497 0.1720 -0.6390 +vn 0.0001 0.9849 -0.1733 +vn -0.0000 0.9849 -0.1732 +vn -0.0000 0.9849 -0.1733 +vn -0.0203 0.9454 -0.3254 +vn -0.0112 0.6608 -0.7505 +vn -0.0000 0.6671 -0.7450 +vn -0.0000 0.6672 -0.7448 +vn -0.0000 -0.6672 -0.7449 +vn -0.0001 -0.9867 -0.1627 +vn -0.0000 -0.9866 -0.1630 +vn -0.0000 -0.9866 -0.1629 +vn -0.0000 -0.9739 0.2272 +vn -0.0193 -0.9727 0.2312 +vn -0.0112 -0.7291 0.6843 +vn -0.0112 -0.7292 0.6842 +vn -0.1072 -0.7149 0.6910 +vn -0.1997 -0.6866 0.6991 +vn -0.0330 -0.4947 0.8684 +vn 0.0645 -0.3761 0.9243 +vn 0.2485 -0.1036 0.9631 +vn 0.3064 -0.1423 0.9412 +vn 0.6213 0.1584 0.7674 +vn 0.8933 0.3811 0.2384 +vn 0.8414 0.5213 0.1423 +vn 0.9603 0.2428 0.1372 +vn 0.8933 0.3811 0.2383 +vn 0.9702 0.1123 0.2145 +vn 0.9903 -0.0272 0.1363 +vn 0.9734 -0.0695 0.2185 +vn 0.9747 -0.0796 0.2089 +vn 0.9912 -0.0364 0.1273 +vn 0.9739 -0.0000 0.2270 +vn 0.9712 -0.0000 0.2382 +vn 0.6637 -0.1327 0.7361 +vn 0.3408 -0.0310 0.9396 +vn 0.1490 -0.0466 0.9877 +vn 0.6981 -0.6981 0.1593 +vn 0.5439 -0.8219 0.1692 +vn 0.3844 -0.7157 0.5831 +vn 0.2830 -0.9469 0.1525 
+vn 0.1997 -0.6739 0.7113 +vn 0.0273 -0.9903 0.1363 +vn 0.0272 -0.9903 0.1363 +vn -0.0000 -0.1945 0.9809 +vn -0.0226 -0.7014 0.7124 +vn -0.0272 -0.9903 0.1363 +vn -0.0314 -0.3658 0.9301 +vn -0.1994 -0.6479 0.7351 +vn -0.0273 -0.9903 0.1363 +vn -0.2404 -0.2863 0.9275 +vn -0.1640 -0.0772 0.9834 +vn 0.0787 0.0350 0.9963 +vn 0.3011 0.1183 0.9462 +vn 0.4905 0.0558 0.8696 +vn 0.7407 0.1457 0.6559 +vn 0.6971 0.2840 0.6583 +vn 0.9138 0.2812 0.2931 +vn 0.9139 0.2812 0.2929 +vn 0.9353 0.2817 0.2141 +vn 0.8981 0.3782 0.2245 +vn 0.9264 0.3727 0.0532 +vn 0.6240 0.2547 -0.7387 +vn -0.5471 -0.2799 -0.7889 +vn -0.5989 -0.1711 -0.7824 +vn -0.6333 -0.0576 -0.7717 +vn 0.6434 -0.0000 -0.7655 +vn 0.6435 -0.0000 -0.7655 +vn -0.6422 0.0225 -0.7662 +vn -0.9466 0.0101 -0.3223 +vn -0.9464 0.0957 -0.3085 +vn -0.8756 0.2159 -0.4320 +vn -0.9852 0.0765 -0.1531 +vn -0.9853 0.0765 -0.1530 +vn -0.9853 0.0766 -0.1530 +vn -0.9830 0.0973 -0.1557 +vn -0.9428 0.1535 -0.2960 +vn -0.9760 -0.1207 -0.1811 +vn -0.6539 0.1070 -0.7490 +vn -0.1679 -0.9654 -0.1994 +vn -0.0442 -0.8728 -0.4861 +vn -0.1005 0.2009 -0.9744 +vn 0.1074 -0.0358 -0.9936 +vn 0.0109 -0.5113 -0.8593 +vn 0.0304 -0.9522 -0.3039 +vn 0.3506 -0.5195 -0.7792 +vn 0.1005 0.2007 -0.9745 +vn -0.0000 0.4557 -0.8901 +vn -0.1652 0.4598 -0.8725 +vn -0.1528 0.4699 -0.8694 +vn -0.1411 0.4937 -0.8581 +vn 0.5291 -0.2646 -0.8063 +vn 0.6251 -0.1322 -0.7693 +vn 0.1557 0.3223 -0.9337 +vn 0.1833 0.3552 -0.9166 +vn -0.0000 0.4557 -0.8902 +vn -0.0685 0.5481 -0.8336 +vn -0.0450 0.5516 -0.8329 +vn -0.0222 0.5546 -0.8318 +vn -0.0112 0.6609 -0.7504 +vn -0.0000 0.9589 -0.2839 +vn -0.0000 0.6553 -0.7554 +vn -0.0000 0.5572 -0.8304 +vn -0.0000 0.4556 -0.8902 +vn 0.3750 0.4655 -0.8017 +vn 0.7025 0.2555 -0.6642 +vn 0.1846 0.3690 -0.9109 +vn 0.6964 -0.4285 -0.5756 +vn 0.9954 -0.0533 -0.0800 +vn 0.6963 -0.4285 -0.5758 +vn 0.7026 0.2555 -0.6641 +vn 0.9782 0.1198 -0.1697 +vn 0.9814 0.1828 -0.0577 +vn 0.9782 0.1699 -0.1198 +vn 0.9782 0.1697 -0.1198 +vn 0.7608 0.0112 -0.6489 +vn 
0.7067 -0.0342 -0.7067 +vn 0.9994 0.0082 -0.0328 +vn 0.9816 -0.0283 -0.1889 +vn 0.9279 -0.2520 -0.2749 +vn 0.7149 -0.2553 -0.6510 +vn 0.0985 -0.0448 -0.9941 +vn 0.1133 -0.0000 -0.9936 +vn 0.0559 0.1400 -0.9886 +vn -0.0000 0.8467 -0.5321 +vn -0.0000 0.6671 -0.7449 +vn -0.0224 0.8051 -0.5927 +vn -0.0000 0.9435 -0.3313 +vn -0.0000 0.8767 -0.4811 +vn -0.0000 -0.6672 -0.7448 +vn -0.0000 -1.0000 0.0078 +vn -0.0186 -0.9841 0.1764 +vn -0.0281 -0.9836 0.1780 +vn -0.1799 -0.9629 0.2011 +vn -0.1610 -0.9555 0.2471 +vn -0.3022 -0.9183 0.2559 +vn -0.5181 -0.8088 0.2781 +vn -0.3417 -0.6966 0.6309 +vn -0.0444 -0.4993 0.8653 +vn 0.0768 -0.4058 0.9107 +vn 0.6843 0.0112 0.7292 +vn 0.6843 0.0112 0.7291 +vn 0.7080 -0.1200 0.6960 +vn 0.6877 -0.2985 0.6618 +vn 0.7034 -0.3126 0.6383 +vn 0.9771 -0.0790 0.1974 +vn 0.9924 -0.0361 0.1173 +vn 0.9988 -0.0000 0.0494 +vn 0.1632 -0.0000 0.9866 +vn 0.9945 -0.0896 0.0538 +vn 0.9936 -0.0993 0.0542 +vn 0.9557 -0.2877 0.0617 +vn 0.9660 -0.2516 0.0604 +vn 0.9659 -0.2516 0.0604 +vn 0.9428 -0.2960 0.1536 +vn 0.8725 -0.4599 0.1651 +vn 0.7157 -0.3844 0.5831 +vn 0.5728 -0.5728 0.5864 +vn 0.3381 -0.4007 0.8515 +vn 0.1670 -0.3119 0.9353 +vn 0.0633 -0.3377 0.9391 +vn -0.0000 -0.1943 0.9810 +vn -0.0000 -0.1038 0.9946 +vn -0.0000 -0.1835 0.9830 +vn -0.0000 -0.3678 0.9299 +vn -0.0000 -0.0947 0.9955 +vn -0.0313 -0.3657 0.9302 +vn -0.0174 -0.0960 0.9952 +vn -0.0674 -0.1734 0.9825 +vn -0.0675 -0.1735 0.9825 +vn -0.0533 -0.0800 0.9954 +vn -0.0622 -0.0711 0.9955 +vn 0.0705 0.0530 0.9961 +vn 0.6159 0.4150 0.6696 +vn 0.5540 0.4864 0.6757 +vn 0.4864 0.5540 0.6756 +vn 0.6126 0.5854 0.5311 +vn 0.6261 0.5581 0.5445 +vn 0.7857 0.5281 0.3220 +vn 0.8103 0.5360 0.2369 +vn 0.8290 0.5564 0.0565 +vn 0.5661 0.3686 -0.7373 +vn 0.0162 0.0162 -0.9997 +vn -0.6872 -0.4716 -0.5526 +vn -0.8291 -0.5073 -0.2351 +vn -0.8795 -0.3176 -0.3544 +vn -0.9237 -0.1577 -0.3492 +vn -0.9453 -0.0203 -0.3254 +vn -0.6433 -0.0000 -0.7656 +vn -0.9476 -0.0000 -0.3194 +vn -0.9849 -0.0000 -0.1733 +vn -0.9842 
0.0473 -0.1704 +vn -0.9384 0.1546 -0.3091 +vn -0.9625 0.1255 -0.2406 +vn -0.9624 0.1255 -0.2408 +vn -0.9606 0.0506 -0.2733 +vn -0.6902 -0.0000 -0.7236 +vn -0.9629 0.0298 -0.2682 +vn -0.9553 -0.0000 -0.2955 +vn -0.9500 -0.1857 -0.2511 +vn -0.6303 -0.1854 -0.7539 +vn -0.1827 -0.9815 -0.0577 +vn -0.0186 -0.9822 -0.1869 +vn -0.0187 -0.9822 -0.1871 +vn -0.0759 -0.3797 -0.9220 +vn -0.1734 -0.0000 -0.9848 +vn -0.1871 0.0187 -0.9822 +vn -0.1891 0.1784 -0.9656 +vn -0.1888 0.4249 -0.8853 +vn -0.1775 0.4496 -0.8754 +vn -0.0107 0.4600 -0.8879 +vn -0.0000 0.4430 -0.8965 +vn 0.1775 0.4495 -0.8755 +vn 0.1899 0.4391 -0.8781 +vn 0.3847 0.4230 -0.8204 +vn 0.5965 0.7614 -0.2538 +vn 0.6611 0.4588 -0.5937 +vn -0.0160 0.9999 0.0002 +vn 0.3369 0.5312 -0.7774 +vn 0.0112 0.5748 -0.8182 +vn 0.0111 0.5748 -0.8182 +vn -0.0699 0.6756 -0.7339 +vn -0.0160 0.9999 -0.0001 +vn -0.0000 0.9936 -0.1133 +vn -0.1711 0.9765 -0.1308 +vn -0.8938 0.4180 -0.1625 +vn -0.1726 0.6903 -0.7026 +vn -0.8790 0.3052 -0.3663 +vn -0.6649 -0.3129 -0.6782 +vn -0.0000 0.3802 -0.9249 +vn -0.0000 0.6902 -0.7236 +vn -0.0000 -0.6789 -0.7343 +vn -0.1860 -0.6822 -0.7071 +vn -0.9727 -0.1534 -0.1741 +vn -0.9727 -0.1536 -0.1741 +vn -0.1711 -0.9765 -0.1309 +vn -0.0000 -1.0000 -0.0080 +vn -0.0000 -1.0000 -0.0079 +vn -0.0000 -0.9946 -0.1038 +vn 0.1903 -0.9810 -0.0381 +vn 0.6979 0.0947 -0.7099 +vn 0.6681 0.3275 -0.6681 +vn 0.1776 0.4496 -0.8754 +vn 0.3696 0.4206 -0.8285 +vn 0.3677 0.4057 -0.8368 +vn 0.1775 0.4496 -0.8754 +vn 0.2023 0.4284 -0.8806 +vn 0.1856 0.3828 -0.9050 +vn 0.6943 0.0578 -0.7174 +vn 0.9269 -0.2975 -0.2289 +vn 0.8490 -0.4676 -0.2461 +vn 0.7387 -0.6241 -0.2547 +vn 0.8289 -0.5565 -0.0568 +vn 0.8376 -0.5434 -0.0566 +vn 0.7059 -0.7059 -0.0579 +vn 0.7121 -0.7006 -0.0460 +vn 0.6282 -0.7767 -0.0457 +vn 0.6642 -0.7025 -0.2555 +vn 0.6611 -0.5937 -0.4587 +vn 0.1051 0.2732 -0.9562 +vn 0.0200 0.2889 -0.9571 +vn 0.0020 0.2954 -0.9554 +vn -0.0000 -0.5946 -0.8040 +vn 0.1837 -0.5760 -0.7965 +vn 0.1838 -0.5760 -0.7965 +vn 0.9749 
-0.1625 -0.1523 +vn 0.9999 0.0080 -0.0080 +vn 0.9283 -0.3693 -0.0422 +vn 0.9328 -0.3470 -0.0976 +vn 0.7163 -0.6929 -0.0822 +vn 0.7539 -0.5026 -0.4232 +vn 0.7539 -0.5025 -0.4233 +vn 0.0946 -0.6859 -0.7215 +vn -0.0986 -0.0448 -0.9941 +vn -0.1133 -0.0000 -0.9936 +vn -0.1160 0.1256 -0.9853 +vn -0.1444 0.7944 -0.5900 +vn -0.1302 0.9438 -0.3038 +vn -0.0101 0.9466 -0.3223 +vn 0.1906 0.9630 -0.1905 +vn 0.1302 0.9438 -0.3038 +vn 0.7026 0.6903 -0.1727 +vn 0.1444 0.7945 -0.5899 +vn -0.0341 0.7612 -0.6476 +vn 0.1445 0.7944 -0.5899 +vn -0.0000 0.8129 -0.5823 +vn 0.0224 0.8051 -0.5927 +vn -0.0000 0.8131 -0.5822 +vn 0.0101 0.9466 -0.3223 +vn -0.0000 0.6672 -0.7449 +vn -0.0225 -0.6664 -0.7452 +vn -0.0226 -0.6663 -0.7453 +vn -0.0244 -0.9997 0.0080 +vn -0.0244 -0.9997 0.0081 +vn -0.1988 -0.6212 -0.7581 +vn -0.2773 -0.9606 0.0197 +vn -0.2532 -0.9469 0.1982 +vn -0.3197 -0.9250 0.2055 +vn -0.3892 -0.8964 0.2123 +vn -0.7706 -0.6283 -0.1067 +vn -0.5795 -0.6739 -0.4582 +vn -0.5979 -0.8009 0.0338 +vn -0.5101 -0.7115 -0.4833 +vn -0.4427 -0.8961 0.0324 +vn -0.5322 -0.8168 0.2228 +vn -0.5464 -0.8072 0.2235 +vn -0.6781 -0.7033 0.2135 +vn -0.6414 -0.7184 0.2694 +vn -0.4154 -0.6432 0.6432 +vn -0.5919 -0.6727 0.4440 +vn -0.5581 -0.6262 0.5445 +vn -0.4877 -0.5825 0.6503 +vn -0.0558 -0.4906 0.8696 +vn 0.0891 -0.4230 0.9018 +vn 0.0891 -0.4230 0.9017 +vn 0.0901 -0.4499 0.8885 +vn 0.1775 -0.4496 0.8754 +vn 0.1899 -0.4391 0.8781 +vn 0.7004 -0.2983 0.6485 +vn 0.6886 -0.1329 0.7128 +vn 0.9895 -0.0090 0.1441 +vn 0.9897 -0.0000 0.1429 +vn 0.9847 -0.0965 0.1449 +vn 0.6898 -0.0226 0.7237 +vn 0.6763 -0.1328 0.7246 +vn 0.9428 -0.2960 0.1535 +vn 0.7156 -0.3844 0.5832 +vn 0.4366 -0.2994 0.8484 +vn 0.0623 -0.0711 0.9955 +vn 0.1670 -0.3118 0.9354 +vn 0.0533 -0.0800 0.9954 +vn 0.0674 -0.1734 0.9825 +vn 0.0617 0.0617 0.9962 +vn 0.2928 0.3781 0.8782 +vn 0.4151 0.6160 0.6695 +vn 0.5581 0.6262 0.5445 +vn 0.6811 0.6549 0.3274 +vn 0.6988 0.6734 0.2415 +vn 0.6988 0.6734 0.2414 +vn 0.7226 0.6877 0.0699 +vn 0.6467 0.6198 
-0.4446 +vn 0.5920 0.6727 -0.4439 +vn 0.4830 0.3916 -0.7832 +vn -0.6390 -0.5300 -0.5574 +vn -0.6113 -0.6113 -0.5026 +vn -0.6676 -0.6276 -0.4006 +vn -0.7506 -0.6255 -0.2127 +vn -0.8198 -0.5217 -0.2360 +vn -0.7467 -0.6202 -0.2404 +vn 0.5694 -0.8201 -0.0569 +vn 0.5763 -0.8044 -0.1440 +vn -0.5573 -0.7645 -0.3239 +vn -0.5685 -0.7829 -0.2526 +vn -0.7467 -0.6201 -0.2405 +vn -0.7798 -0.5786 -0.2390 +vn -0.9101 -0.3501 -0.2217 +vn -0.9101 -0.3500 -0.2217 +vn -0.9629 -0.1799 -0.2010 +vn -0.9654 -0.1679 -0.1994 +vn -0.9836 -0.0281 -0.1780 +vn -0.9841 -0.0186 -0.1764 +vn -0.9476 -0.0000 -0.3193 +vn -0.9477 -0.0000 -0.3193 +vn -0.6075 -0.2971 -0.7367 +vn -0.9746 0.0290 -0.2220 +vn -0.0105 0.4472 -0.8944 +vn -0.0106 0.4471 -0.8944 +vn -0.6074 -0.2972 -0.7367 +vn -0.1668 -0.0981 -0.9811 +vn -0.3732 -0.9160 -0.1470 +vn -0.0000 -0.9946 -0.1037 +vn 0.0188 -0.9800 -0.1979 +vn -0.0000 0.0242 -0.9997 +vn 0.6838 0.1200 -0.7198 +vn 0.1772 0.1770 -0.9681 +vn 0.6844 0.2841 -0.6715 +vn 0.1868 0.3968 -0.8987 +vn 0.6909 0.3129 -0.6517 +vn 0.6909 0.3128 -0.6518 +vn 0.9721 0.0802 -0.2205 +vn 0.9737 0.1927 -0.1217 +vn 0.5966 0.7613 -0.2538 +vn -0.0080 0.9999 -0.0080 +vn 0.1796 0.9776 -0.1097 +vn -0.0000 0.9946 -0.1039 +vn 0.1845 0.3690 -0.9109 +vn 0.7747 0.4202 -0.4726 +vn 0.3277 -0.3277 -0.8861 +vn -0.0000 -0.7016 -0.7126 +vn -0.0000 -0.6788 -0.7343 +vn -0.0000 -0.7015 -0.7127 +vn -0.0000 -0.9946 -0.1039 +vn 0.9805 -0.1844 -0.0679 +vn 0.7079 -0.6960 -0.1203 +vn 0.9703 -0.1858 -0.1548 +vn 0.9734 0.0903 -0.2107 +vn 0.6909 0.3129 -0.6518 +vn 0.6877 0.2985 -0.6618 +vn 0.6877 0.2984 -0.6618 +vn 0.7191 0.1860 -0.6695 +vn 0.1857 0.3829 -0.9049 +vn 0.6943 0.0579 -0.7174 +vn 0.9702 -0.1123 -0.2146 +vn 0.9269 -0.2974 -0.2289 +vn 0.9802 -0.1922 -0.0480 +vn 0.9264 -0.3727 -0.0532 +vn 0.9779 -0.2033 -0.0484 +vn 0.9211 -0.3856 -0.0535 +vn 0.8376 -0.5433 -0.0566 +vn 0.8290 -0.5564 -0.0568 +vn 0.8996 -0.3669 -0.2368 +vn 0.8103 -0.5360 -0.2369 +vn 0.5688 -0.3836 -0.7275 +vn 0.4953 -0.4819 -0.7228 +vn 0.6822 
-0.6949 -0.2275 +vn 0.3266 -0.6140 -0.7186 +vn 0.4777 -0.8296 -0.2891 +vn 0.5695 -0.8200 -0.0570 +vn 0.3712 -0.6761 -0.6365 +vn 0.0509 0.2850 -0.9572 +vn 0.1586 -0.6343 -0.7566 +vn 0.0224 -0.5925 -0.8053 +vn -0.0000 -0.7553 -0.6554 +vn 0.1856 -0.7426 -0.6435 +vn 0.6715 -0.2841 -0.6843 +vn 0.9866 -0.1246 -0.1054 +vn 0.7538 -0.5026 -0.4232 +vn 0.7120 -0.7006 -0.0459 +vn 0.9822 -0.1869 -0.0187 +vn 0.9822 -0.1871 -0.0187 +vn 0.9932 -0.1153 -0.0178 +vn 0.9816 -0.1886 -0.0283 +vn 0.9243 -0.3761 -0.0645 +vn 0.9243 -0.3762 -0.0645 +vn 0.0966 -0.9847 -0.1448 +vn 0.0935 -0.9903 -0.1028 +vn 0.0946 -0.6860 -0.7214 +vn -0.0946 -0.6860 -0.7214 +vn -0.0946 -0.6859 -0.7215 +vn -0.7149 -0.2553 -0.6510 +vn -0.7115 -0.4833 -0.5101 +vn -0.9820 -0.1179 -0.1473 +vn -0.9997 -0.0000 -0.0242 +vn -0.9997 -0.0000 -0.0243 +vn -0.7067 -0.0342 -0.7067 +vn -0.7655 -0.0000 -0.6434 +vn -0.7497 0.1721 -0.6390 +vn -0.7026 0.6903 -0.1726 +vn -0.1905 0.9630 -0.1905 +vn -0.1828 0.9569 -0.2258 +vn -0.1996 0.7234 -0.6610 +vn -0.0000 0.5571 -0.8304 +vn -0.0000 0.6554 -0.7553 +vn -0.0000 0.5571 -0.8305 +vn -0.0000 0.9589 -0.2838 +vn -0.0000 0.9588 -0.2839 +vn -0.0000 0.6555 -0.7552 +vn -0.0000 0.8040 -0.5946 +vn 0.2114 0.7835 -0.5844 +vn 0.7026 0.6904 -0.1726 +vn 0.1159 0.1256 -0.9853 +vn 0.9810 -0.0000 -0.1943 +vn 0.9864 0.0091 -0.1644 +vn 0.0947 -0.6860 -0.7214 +vn 0.7232 -0.2411 -0.6472 +vn 0.1136 -0.0000 -0.9935 +vn -0.1134 -0.0000 -0.9935 +vn -0.0560 0.1400 -0.9886 +vn -0.7608 0.0111 -0.6489 +vn -0.2677 0.5863 -0.7646 +vn -0.1303 0.9438 -0.3038 +vn -0.0000 0.9435 -0.3314 +vn -0.0000 0.6788 -0.7343 +vn -0.0000 0.6787 -0.7344 +vn -0.0225 -0.6664 -0.7453 +vn 0.0112 0.6608 -0.7505 +vn -0.1988 -0.6213 -0.7579 +vn 0.0225 0.6176 -0.7862 +vn 0.0819 0.6437 -0.7609 +vn -0.3806 -0.5251 -0.7612 +vn -0.3806 -0.5250 -0.7613 +vn -0.4503 -0.4767 -0.7550 +vn -0.5195 -0.4396 -0.7327 +vn -0.6415 -0.3273 -0.6938 +vn -0.7427 -0.1198 -0.6588 +vn -0.8201 -0.5624 -0.1055 +vn -0.7345 -0.6771 0.0459 +vn -0.6563 -0.7194 0.2272 
+vn -0.7275 -0.6522 0.2132 +vn -0.6936 -0.6679 0.2697 +vn -0.6467 -0.6198 0.4445 +vn -0.5996 -0.5860 0.5451 +vn -0.6637 -0.5011 0.5553 +vn -0.5553 -0.5012 0.6637 +vn -0.0673 -0.4817 0.8738 +vn 0.0900 -0.4499 0.8885 +vn 0.9898 -0.0000 0.1427 +vn 0.0881 -0.0353 0.9955 +vn 0.0800 -0.0534 0.9954 +vn -0.0260 0.0781 0.9966 +vn -0.0576 0.6334 0.7717 +vn -0.0697 0.7664 0.6386 +vn -0.0112 0.6489 0.7608 +vn -0.0000 0.6553 0.7553 +vn -0.0000 0.0852 0.9964 +vn -0.0000 0.6554 0.7553 +vn -0.0000 0.5065 0.8622 +vn -0.0000 0.5065 0.8623 +vn -0.0000 0.0851 0.9964 +vn 0.0629 0.3250 0.9436 +vn 0.0629 0.3251 0.9436 +vn 0.2840 0.6971 0.6584 +vn 0.0438 0.0700 0.9966 +vn 0.3663 0.8790 0.3053 +vn 0.5281 0.7857 0.3221 +vn 0.5361 0.8103 0.2368 +vn 0.5360 0.8103 0.2369 +vn 0.5564 0.8290 0.0568 +vn 0.3687 0.5661 -0.7373 +vn 0.5920 0.6727 -0.4438 +vn 0.0163 0.0163 -0.9997 +vn -0.1982 -0.2532 -0.9469 +vn -0.6020 -0.6823 -0.4148 +vn -0.5157 -0.6514 -0.5566 +vn -0.5976 -0.6248 -0.5025 +vn -0.5148 -0.5421 -0.6641 +vn -0.5295 -0.5431 -0.6517 +vn 0.0716 -0.0717 -0.9948 +vn -0.5449 -0.5721 -0.6130 +vn 0.1990 -0.6347 -0.7467 +vn -0.5431 -0.6517 -0.5295 +vn -0.5947 -0.7137 -0.3701 +vn -0.5684 -0.7830 -0.2526 +vn 0.5763 -0.8044 -0.1441 +vn 0.5276 -0.5139 -0.6764 +vn 0.7030 -0.7030 -0.1074 +vn 0.7214 -0.6860 -0.0947 +vn 0.7030 -0.7030 -0.1072 +vn 0.7071 -0.7070 -0.0112 +vn 0.7215 -0.6860 -0.0946 +vn 0.7390 -0.6687 -0.0821 +vn -0.1094 -0.3502 -0.9303 +vn 0.4558 -0.5362 -0.7105 +vn -0.4856 -0.6879 -0.5395 +vn 0.6865 -0.6992 -0.1998 +vn -0.7054 -0.5856 -0.3994 +vn -0.7406 -0.5422 -0.3969 +vn -0.6517 -0.5295 -0.5431 +vn -0.7612 -0.3807 -0.5250 +vn -0.7406 -0.5423 -0.3968 +vn -0.8663 -0.3341 -0.3714 +vn -0.7613 -0.3807 -0.5249 +vn -0.5614 -0.2807 -0.7785 +vn -0.6202 -0.1193 -0.7753 +vn -0.9203 -0.1704 -0.3522 +vn -0.6335 -0.0576 -0.7716 +vn -0.9411 -0.0205 -0.3376 +vn -0.9435 -0.0000 -0.3314 +vn -0.9435 -0.0000 -0.3313 +vn -0.6435 -0.0000 -0.7655 +vn -0.6303 0.1854 -0.7539 +vn -0.1374 0.9898 -0.0366 +vn 
-0.3522 0.9203 -0.1704 +vn -0.1375 0.9898 -0.0366 +vn -0.9681 0.1873 -0.1666 +vn -0.9681 0.1874 -0.1664 +vn -0.9681 0.1871 -0.1666 +vn -0.3522 0.9203 -0.1705 +vn -0.6739 0.4582 -0.5796 +vn -0.1725 -0.6903 -0.7026 +vn -0.9436 -0.0629 -0.3251 +vn -0.9631 -0.1904 -0.1904 +vn -0.1727 -0.6903 -0.7026 +vn -0.0000 -0.7014 -0.7128 +vn 0.0001 -0.7014 -0.7127 +vn 0.0001 -0.9946 -0.1038 +vn 0.1904 -0.9810 -0.0381 +vn 0.6778 -0.1725 -0.7147 +vn 0.1838 -0.0000 -0.9830 +vn 0.1854 -0.0092 -0.9826 +vn 0.6902 -0.0000 -0.7237 +vn 0.6902 -0.0000 -0.7236 +vn 0.6739 0.1996 -0.7113 +vn 0.1288 0.9805 -0.1486 +vn -0.0000 0.9946 -0.1038 +vn -0.0000 0.9924 -0.1230 +vn -0.0000 0.3678 -0.9299 +vn -0.0000 -0.0945 -0.9955 +vn 0.7500 -0.0820 -0.6563 +vn 0.2412 -0.7112 -0.6603 +vn 0.1795 -0.9776 -0.1097 +vn 0.9805 -0.1845 -0.0679 +vn 0.9724 -0.1841 -0.1433 +vn 0.1286 -0.9805 -0.1485 +vn 0.1839 -0.0000 -0.9830 +vn 0.7070 0.0112 -0.7071 +vn 0.1873 0.0187 -0.9821 +vn 0.1871 0.0187 -0.9822 +vn 0.9711 0.0393 -0.2356 +vn 0.9733 0.0695 -0.2185 +vn 0.9988 -0.0000 -0.0495 +vn 0.9805 0.1844 -0.0678 +vn 0.9805 0.1844 -0.0679 +vn 0.7746 0.4202 -0.4727 +vn 0.9734 0.0696 -0.2185 +vn 0.3278 -0.3277 -0.8861 +vn 0.9734 -0.0695 -0.2185 +vn 0.9988 -0.0000 -0.0496 +vn 0.9734 0.0695 -0.2185 +vn 0.9941 -0.0985 -0.0448 +vn 0.9936 -0.0994 -0.0542 +vn 0.9720 -0.0292 -0.2333 +vn 0.6898 -0.0226 -0.7236 +vn 0.3556 -0.0000 -0.9346 +vn 0.3590 -0.0103 -0.9333 +vn 0.9793 -0.1939 -0.0582 +vn 0.9984 -0.0254 -0.0508 +vn 0.1630 -0.0000 -0.9866 +vn 0.1660 -0.0184 -0.9860 +vn 0.6763 -0.1328 -0.7246 +vn 0.9540 -0.1951 -0.2277 +vn 0.6414 -0.2694 -0.7184 +vn 0.1437 -0.0862 -0.9859 +vn 0.1436 -0.0863 -0.9859 +vn 0.1150 -0.1150 -0.9867 +vn 0.0965 -0.1448 -0.9847 +vn 0.0473 -0.1704 -0.9842 +vn 0.1327 -0.6637 -0.7361 +vn 0.0112 -0.6608 -0.7505 +vn -0.0000 -0.1631 -0.9866 +vn -0.0000 -0.1629 -0.9866 +vn 0.2697 -0.7061 -0.6547 +vn 0.2694 -0.7061 -0.6548 +vn 0.7121 -0.7006 -0.0459 +vn 0.2033 -0.9779 -0.0484 +vn 0.1923 -0.9802 -0.0481 +vn 
0.7122 -0.7005 -0.0459 +vn 0.9776 -0.1798 -0.1097 +vn 0.9809 -0.0000 -0.1945 +vn 0.9814 -0.0578 -0.1829 +vn 0.6995 -0.0700 -0.7112 +vn 0.0926 -0.0927 -0.9914 +vn 0.6999 -0.4845 -0.5248 +vn 0.9243 -0.3761 -0.0646 +vn 0.0706 -0.9677 -0.2419 +vn 0.5449 -0.5721 -0.6130 +vn -0.9243 -0.3762 -0.0645 +vn -0.6882 -0.4993 -0.5263 +vn -0.0966 -0.9847 -0.1448 +vn -0.0935 -0.9903 -0.1028 +vn -0.7071 -0.5736 -0.4135 +vn -0.9823 -0.1655 -0.0876 +vn -1.0000 -0.0000 -0.0079 +vn -1.0000 -0.0000 -0.0080 +vn -1.0000 -0.0000 -0.0081 +vn -0.9782 0.1698 -0.1198 +vn -0.9823 0.1812 -0.0477 +vn -0.9797 0.1089 -0.1683 +vn -0.6991 0.3562 -0.6200 +vn -0.1807 0.5060 -0.8434 +vn -0.1845 0.3690 -0.9109 +vn -0.0000 0.1132 -0.9936 +vn -0.0000 0.2953 -0.9554 +vn -0.0000 0.2955 -0.9553 +vn 0.0008 0.8039 -0.5948 +vn 0.1808 0.5060 -0.8434 +vn 0.2114 0.7834 -0.5845 +vn 0.6991 0.3562 -0.6200 +vn 0.9998 0.0081 -0.0163 +vn 0.9286 -0.1925 -0.3171 +vn 0.9835 -0.1719 -0.0573 +vn 0.7333 -0.5068 -0.4533 +vn 0.0934 -0.9903 -0.1029 +vn 0.3700 -0.9192 -0.1345 +vn -0.0935 -0.9903 -0.1027 +vn -0.0934 -0.9903 -0.1028 +vn -0.1361 -0.9635 -0.2305 +vn -0.0987 -0.0448 -0.9941 +vn -0.7608 0.0112 -0.6489 +vn -0.9782 0.1697 -0.1198 +vn -0.7070 0.6823 -0.1862 +vn -0.1906 0.9630 -0.1905 +vn 0.0281 0.9836 -0.1780 +vn -0.0000 0.9436 -0.3312 +vn 0.1799 0.9629 -0.2011 +vn 0.1461 0.9216 -0.3597 +vn 0.3466 0.8666 -0.3589 +vn 0.1456 0.6429 -0.7520 +vn 0.2123 0.6120 -0.7619 +vn -0.2770 0.2655 -0.9235 +vn 0.0327 0.4686 -0.8828 +vn -0.7560 -0.0458 -0.6529 +vn -0.8376 -0.5433 0.0565 +vn -0.7728 -0.5983 0.2118 +vn -0.8733 -0.4426 0.2034 +vn -0.8428 -0.4710 0.2605 +vn -0.9123 -0.3275 0.2459 +vn -0.6041 -0.4296 0.6712 +vn -0.0555 -0.4641 0.8840 +vn -0.1651 -0.4599 0.8725 +vn -0.6473 -0.3567 0.6737 +vn -0.9441 -0.1647 0.2855 +vn -0.6472 -0.3567 0.6737 +vn -0.6578 -0.3421 0.6710 +vn -0.1777 -0.4496 0.8754 +vn -0.0905 -0.4634 0.8815 +vn -0.0552 -0.4641 0.8840 +vn -0.0000 -0.2720 0.9623 +vn 0.0962 -0.0175 0.9952 +vn 0.0960 -0.0175 0.9952 +vn 
-0.0788 0.0350 0.9963 +vn -0.0807 0.0628 0.9948 +vn -0.6160 0.4151 0.6695 +vn -0.5540 0.4863 0.6757 +vn -0.6262 0.5581 0.5445 +vn -0.6127 0.5854 0.5310 +vn -0.5580 0.6262 0.5445 +vn -0.4864 0.5540 0.6757 +vn -0.0529 0.0705 0.9961 +vn -0.4151 0.6160 0.6695 +vn -0.0438 0.0700 0.9966 +vn -0.2132 0.7274 0.6522 +vn -0.2812 0.9138 0.2932 +vn -0.0937 0.9581 0.2708 +vn -0.0195 0.9670 0.2541 +vn -0.0195 0.9670 0.2540 +vn -0.0000 0.9684 0.2495 +vn -0.0000 0.8304 0.5572 +vn -0.0000 0.7755 0.6314 +vn 0.0112 0.6489 0.7608 +vn 0.0696 0.7664 0.6386 +vn 0.0697 0.7663 0.6386 +vn 0.1899 0.9379 0.2903 +vn 0.1799 0.9629 0.2011 +vn 0.3641 0.9042 0.2232 +vn 0.3781 0.8981 0.2245 +vn 0.3782 0.8981 0.2245 +vn 0.3856 0.9211 0.0534 +vn 0.3114 0.5968 -0.7395 +vn -0.2936 -0.5360 -0.7915 +vn -0.3977 -0.7292 -0.5569 +vn -0.4870 -0.8073 -0.3332 +vn -0.4929 -0.8380 -0.2341 +vn -0.6120 -0.7619 -0.2123 +vn -0.6064 -0.7580 -0.2400 +vn -0.7691 -0.5926 -0.2395 +vn -0.6889 -0.6228 -0.3709 +vn -0.7857 -0.1191 -0.6071 +vn -0.6259 -0.5715 -0.5307 +vn -0.5586 -0.5586 -0.6132 +vn -0.2976 0.2290 -0.9268 +vn -0.0355 0.0977 -0.9946 +vn 0.5352 -0.1429 -0.8325 +vn 0.7391 -0.6686 -0.0821 +vn 0.6372 0.4880 -0.5965 +vn 0.7647 0.6332 -0.1195 +vn 0.6202 0.7753 -0.1193 +vn 0.3764 0.9126 -0.1597 +vn 0.6203 0.7752 -0.1193 +vn 0.7608 0.6258 -0.1718 +vn 0.7763 -0.6300 -0.0225 +vn 0.7761 -0.6302 -0.0225 +vn 0.7505 -0.6583 -0.0577 +vn 0.7506 -0.6582 -0.0577 +vn 0.7071 -0.7071 -0.0112 +vn 0.7071 -0.6822 -0.1860 +vn 0.5489 -0.4418 -0.7096 +vn 0.1377 -0.1277 -0.9822 +vn -0.1245 -0.1054 -0.9866 +vn -0.2553 -0.2109 -0.9436 +vn -0.1330 -0.0855 -0.9874 +vn -0.1329 -0.0855 -0.9874 +vn -0.1583 -0.0373 -0.9867 +vn 0.0424 0.3820 -0.9232 +vn 0.0484 0.9779 -0.2033 +vn 0.0425 0.3820 -0.9232 +vn 0.0482 0.9779 -0.2033 +vn 0.6695 0.1860 -0.7191 +vn 0.3797 0.9220 -0.0759 +vn -0.0000 0.7015 -0.7127 +vn 0.1858 0.6823 -0.7071 +vn 0.1860 -0.6823 -0.7070 +vn 0.7112 -0.0699 -0.6995 +vn 0.9227 -0.2422 -0.3000 +vn 0.3727 -0.9264 -0.0532 +vn 0.9724 
-0.1841 -0.1432 +vn 0.6903 -0.0000 -0.7236 +vn 0.9987 -0.0083 -0.0501 +vn 0.9971 -0.0000 -0.0761 +vn 0.9724 0.1842 -0.1433 +vn 0.1795 0.9776 -0.1097 +vn 0.9805 0.1844 -0.0680 +vn 0.9227 0.2422 -0.2999 +vn 0.9227 0.2423 -0.2999 +vn 0.7501 -0.0820 -0.6562 +vn 0.9227 -0.2423 -0.2999 +vn 0.9227 -0.2422 -0.2999 +vn 0.9712 -0.0000 -0.2382 +vn 0.9739 -0.0000 -0.2271 +vn 0.9764 -0.0000 -0.2160 +vn 0.9764 -0.0000 -0.2161 +vn 0.9712 -0.0000 -0.2381 +vn 0.1632 -0.0000 -0.9866 +vn 0.6611 -0.1995 -0.7233 +vn 0.9771 -0.0288 -0.2108 +vn 0.9275 -0.2862 -0.2405 +vn 0.9793 -0.1940 -0.0582 +vn 0.9557 -0.2877 -0.0617 +vn 0.9189 -0.3892 -0.0649 +vn 0.9189 -0.3891 -0.0649 +vn 0.1625 -0.0669 -0.9844 +vn 0.8248 -0.5613 -0.0687 +vn 0.7663 -0.6386 -0.0697 +vn 0.6995 -0.7112 -0.0700 +vn 0.5481 -0.8336 -0.0685 +vn 0.2878 -0.9557 -0.0617 +vn 0.3762 -0.9243 -0.0645 +vn 0.0994 -0.9936 -0.0542 +vn 0.0254 -0.9984 -0.0508 +vn -0.0000 -0.1632 -0.9866 +vn 0.1845 -0.9805 -0.0679 +vn 0.1846 -0.9805 -0.0680 +vn 0.6960 -0.7080 -0.1200 +vn 0.2124 -0.7618 -0.6120 +vn 0.6678 -0.2698 -0.6937 +vn 0.6957 0.0112 -0.7182 +vn -0.6995 -0.0700 -0.7112 +vn -0.9814 -0.0577 -0.1831 +vn -1.0000 -0.0002 -0.0080 +vn -0.9932 -0.1153 -0.0177 +vn -0.9283 -0.3694 -0.0422 +vn -0.7163 -0.6929 -0.0822 +vn -1.0000 -0.0001 -0.0079 +vn -1.0000 0.0001 -0.0080 +vn -0.9815 0.0777 -0.1749 +vn -0.6963 -0.4285 -0.5758 +vn -0.1711 -0.5989 -0.7823 +vn -0.0022 -0.6070 -0.7947 +vn -0.0001 -0.2607 -0.9654 +vn -0.0000 -0.5947 -0.8039 +vn -0.0000 -0.5447 -0.8386 +vn 0.7025 0.2554 -0.6643 +vn 0.9815 0.0777 -0.1751 +vn 0.9997 0.0162 -0.0162 +vn 0.8367 -0.5467 -0.0335 +vn 0.9284 -0.3692 -0.0422 +vn 0.6882 -0.4993 -0.5263 +vn -0.0946 -0.6860 -0.7215 +vn -0.7148 -0.6910 -0.1074 +vn -0.7545 -0.4898 -0.4368 +vn -0.9999 -0.0000 -0.0160 +vn -1.0000 0.0001 -0.0079 +vn -0.7026 0.6903 -0.1727 +vn -0.9123 0.3276 -0.2458 +vn -0.1994 0.6479 -0.7351 +vn -0.1829 0.9569 -0.2258 +vn 0.0186 0.9841 -0.1764 +vn 0.0198 0.9606 -0.2773 +vn 0.1546 0.9383 -0.3092 +vn 
0.0226 0.6663 -0.7453 +vn 0.0697 0.6387 -0.7663 +vn 0.2992 0.8978 -0.3232 +vn 0.3334 0.9198 -0.2070 +vn 0.3892 0.8964 -0.2123 +vn 0.4749 0.8523 -0.2192 +vn 0.5322 0.8168 -0.2228 +vn 0.4530 0.8025 -0.3883 +vn 0.5833 0.7157 -0.3842 +vn 0.5295 0.6517 -0.5431 +vn 0.6127 0.5854 -0.5310 +vn -0.1989 0.3861 -0.9008 +vn -0.7427 0.1198 -0.6588 +vn -0.7027 0.2554 -0.6641 +vn -0.9509 -0.3067 0.0408 +vn -0.9008 -0.3860 0.1989 +vn -0.9379 -0.2903 0.1898 +vn -0.9529 -0.2490 0.1732 +vn -0.9768 -0.1410 0.1611 +vn -0.9535 -0.1842 0.2385 +vn -0.9665 -0.1351 0.2182 +vn -0.9665 -0.1351 0.2183 +vn -0.9656 -0.1246 0.2284 +vn -0.9398 -0.1659 0.2986 +vn -0.9384 -0.1545 0.3091 +vn -0.6681 -0.3275 0.6681 +vn -0.0984 -0.3602 0.9277 +vn -0.6636 -0.1328 0.7362 +vn -0.0990 -0.1781 0.9790 +vn -0.6663 -0.0226 0.7454 +vn -0.6637 -0.1319 0.7363 +vn -0.0963 -0.0175 0.9952 +vn -0.0991 -0.1780 0.9790 +vn -0.7655 -0.0000 0.6435 +vn -0.0853 -0.0000 0.9964 +vn -0.7608 0.0819 0.6438 +vn -0.0868 0.0173 0.9961 +vn -0.7608 0.0820 0.6437 +vn -0.0866 0.0173 0.9961 +vn -0.7194 0.2272 0.6564 +vn -0.8415 0.4396 0.3142 +vn -0.6938 0.6415 0.3273 +vn -0.5859 0.7421 0.3256 +vn -0.4396 0.8415 0.3141 +vn -0.4497 0.8628 0.2309 +vn -0.2817 0.9353 0.2141 +vn -0.3781 0.8981 0.2245 +vn -0.1920 0.9602 0.2027 +vn -0.0889 0.9782 0.1877 +vn -0.0281 0.9836 0.1780 +vn -0.0186 0.9841 0.1764 +vn -0.0000 0.9849 0.1733 +vn -0.0000 0.9684 0.2494 +vn -0.0000 0.7755 0.6313 +vn -0.0000 0.9684 0.2493 +vn 0.0195 0.9670 0.2541 +vn 0.1898 0.9379 0.2903 +vn 0.0186 0.9841 0.1764 +vn 0.1799 0.9629 0.2010 +vn 0.0281 0.9836 0.1780 +vn 0.1920 0.9602 0.2027 +vn 0.1888 0.9816 0.0283 +vn 0.3856 0.9211 0.0536 +vn 0.1857 0.6435 -0.7426 +vn 0.3113 0.5969 -0.7395 +vn 0.1857 0.6436 -0.7425 +vn -0.1843 -0.5897 -0.7863 +vn -0.3134 -0.8921 -0.3256 +vn -0.3500 -0.9101 -0.2217 +vn -0.4066 -0.8849 -0.2272 +vn -0.5925 -0.7691 -0.2395 +vn -0.8798 -0.4098 -0.2409 +vn -0.8141 0.5627 -0.1437 +vn -0.6980 0.7098 -0.0947 +vn -0.7030 0.7030 -0.1073 +vn -0.4291 0.5900 
-0.6839 +vn 0.0695 0.6260 -0.7767 +vn 0.1725 0.7148 -0.6777 +vn -0.0087 0.9954 -0.0952 +vn -0.3666 0.9273 -0.0755 +vn 0.6963 0.5758 -0.4285 +vn 0.8326 -0.1427 -0.5352 +vn 0.4117 0.5447 -0.7306 +vn -0.4557 0.5362 -0.7105 +vn -0.4833 0.5101 -0.7115 +vn 0.5249 -0.4843 -0.6999 +vn 0.7329 -0.6738 -0.0946 +vn 0.7098 -0.6980 -0.0948 +vn 0.7378 -0.6664 -0.1072 +vn 0.7717 -0.6334 -0.0576 +vn 0.8242 -0.5645 -0.0451 +vn 0.4000 0.1000 -0.9110 +vn 0.2840 0.2272 -0.9315 +vn 0.1473 0.1179 -0.9820 +vn 0.1461 0.1071 -0.9834 +vn 0.1780 0.0281 -0.9836 +vn 0.1839 -0.0000 -0.9829 +vn 0.6123 0.0111 -0.7906 +vn 0.6124 0.0111 -0.7905 +vn 0.3856 0.9211 -0.0536 +vn 0.7113 -0.0699 -0.6994 +vn 0.9988 -0.0001 -0.0495 +vn 0.9979 0.0256 -0.0597 +vn 0.9739 -0.0000 -0.2270 +vn 0.9764 -0.0000 -0.2162 +vn 0.9197 -0.0000 -0.3927 +vn 0.9771 -0.0287 -0.2108 +vn 0.9216 -0.0210 -0.3875 +vn 0.9216 -0.0209 -0.3876 +vn 0.7015 -0.0000 -0.7126 +vn 0.7071 -0.0112 -0.7070 +vn 0.3714 -0.0104 -0.9284 +vn 0.7070 -0.0112 -0.7071 +vn 0.6877 -0.0700 -0.7226 +vn 0.6878 -0.0699 -0.7226 +vn 0.9771 -0.0288 -0.2107 +vn 0.6610 -0.1995 -0.7234 +vn 0.1597 -0.0470 -0.9860 +vn 0.1436 -0.0861 -0.9859 +vn 0.1436 -0.0862 -0.9859 +vn 0.6000 -0.3261 -0.7305 +vn 0.8572 -0.4531 -0.2449 +vn 0.8810 -0.4684 -0.0669 +vn 0.7047 -0.7047 -0.0822 +vn 0.2422 -0.9227 -0.2999 +vn -0.0695 -0.9733 -0.2186 +vn -0.4845 -0.6998 -0.5249 +vn -0.6822 -0.1861 -0.7071 +vn -0.9809 0.0024 -0.1943 +vn -0.7120 -0.7007 -0.0459 +vn -0.9822 -0.1871 -0.0187 +vn -0.7121 -0.7006 -0.0459 +vn -0.9821 -0.1872 -0.0187 +vn -0.7650 -0.4748 -0.4352 +vn -0.9838 -0.1269 -0.1266 +vn -0.9838 -0.1267 -0.1267 +vn -0.7228 -0.4953 -0.4819 +vn -0.1857 -0.7425 -0.6436 +vn -0.0000 -0.6902 -0.7236 +vn -0.0000 -0.5446 -0.8387 +vn 0.0001 -0.5946 -0.8040 +vn -0.0000 0.2955 -0.9554 +vn -0.0000 0.2954 -0.9554 +vn 0.1712 -0.5989 -0.7823 +vn 0.7228 -0.4819 -0.4953 +vn 0.9838 -0.1266 -0.1267 +vn 0.7650 -0.4747 -0.4352 +vn 0.7650 -0.4748 -0.4352 +vn 0.9838 -0.1269 -0.1266 +vn 0.6935 -0.2698 
-0.6680 +vn -0.9932 -0.1153 -0.0178 +vn -0.9816 -0.1890 -0.0283 +vn -0.8290 -0.5563 -0.0568 +vn -0.9815 -0.1750 -0.0778 +vn -0.9320 -0.2387 -0.2728 +vn -0.9999 0.0080 -0.0080 +vn -0.9797 0.1090 -0.1683 +vn -0.3750 0.4655 -0.8017 +vn -0.0000 0.6553 -0.7553 +vn -0.0000 0.5698 -0.8218 +vn 0.0222 0.5547 -0.8317 +vn 0.0450 0.5516 -0.8329 +vn 0.2263 0.7544 -0.6161 +vn 0.4277 0.8427 -0.3270 +vn 0.3124 0.7158 -0.6245 +vn 0.6140 0.7185 -0.3266 +vn 0.6781 0.7033 -0.2135 +vn 0.6563 0.7194 -0.2272 +vn 0.7155 0.6652 -0.2134 +vn 0.6389 0.6655 -0.3860 +vn 0.7391 0.5543 -0.3827 +vn 0.6882 0.4993 -0.5264 +vn 0.5102 0.4833 -0.7114 +vn -0.1258 0.4345 -0.8918 +vn -0.6444 0.3419 -0.6840 +vn -0.9844 0.0669 -0.1627 +vn -0.9877 -0.1553 0.0183 +vn -0.9817 -0.1080 0.1571 +vn -0.9830 -0.0973 0.1557 +vn -0.9852 -0.0765 0.1531 +vn -0.9853 -0.0765 0.1530 +vn -0.9644 -0.1140 0.2385 +vn -0.6678 -0.2697 0.6938 +vn -0.6680 -0.2697 0.6936 +vn -0.8696 -0.0000 0.4938 +vn -0.6671 -0.0000 0.7449 +vn -0.6553 -0.0000 0.7554 +vn -0.6434 -0.0000 0.7655 +vn -0.9654 -0.0000 0.2607 +vn -0.9581 0.0937 0.2708 +vn -0.9138 0.2812 0.2930 +vn -0.9353 0.2817 0.2141 +vn -0.8628 0.4496 0.2309 +vn -0.8981 0.3781 0.2245 +vn -0.8103 0.5360 0.2369 +vn -0.6988 0.6734 0.2413 +vn -0.6988 0.6734 0.2414 +vn -0.6064 0.7581 0.2400 +vn -0.5360 0.8103 0.2369 +vn -0.5564 0.8290 0.0568 +vn -0.3856 0.9211 0.0536 +vn -0.3114 0.5968 -0.7395 +vn -0.1856 0.6435 -0.7426 +vn -0.1888 0.9816 0.0283 +vn -0.0577 0.6582 -0.7506 +vn -0.0244 0.9997 -0.0083 +vn -0.0225 0.6176 -0.7862 +vn -0.0000 0.6193 -0.7852 +vn -0.0000 0.3434 -0.9392 +vn -0.0000 0.6194 -0.7851 +vn -0.0000 0.9997 -0.0242 +vn 0.0244 0.9997 -0.0082 +vn 0.0225 0.6177 -0.7861 +vn 0.0244 0.9997 -0.0083 +vn 0.0577 0.6582 -0.7506 +vn 0.0579 0.6582 -0.7506 +vn -0.0576 -0.6334 -0.7717 +vn -0.1442 -0.9319 -0.3328 +vn -0.1799 -0.9629 -0.2011 +vn -0.3361 -0.9157 -0.2202 +vn -0.3216 -0.8658 -0.3834 +vn -0.3932 -0.8370 -0.3805 +vn -0.3656 -0.7704 -0.5223 +vn -0.5713 -0.7175 -0.3986 +vn -0.2662 
-0.5579 -0.7861 +vn -0.1647 -0.2856 -0.9441 +vn -0.5150 -0.6640 -0.5421 +vn -0.5362 0.4557 -0.7105 +vn -0.6761 -0.5003 -0.5409 +vn -0.7071 0.6822 -0.1861 +vn -0.7071 0.7070 -0.0112 +vn -0.6808 0.7278 -0.0822 +vn -0.6582 0.7506 -0.0578 +vn -0.6422 0.7662 -0.0226 +vn -0.7070 0.7071 -0.0112 +vn -0.6300 0.7763 -0.0225 +vn -0.6459 0.7612 -0.0577 +vn -0.6459 0.7612 -0.0576 +vn -0.6789 0.7263 -0.1071 +vn -0.4834 0.5102 -0.7114 +vn -0.6910 0.7148 -0.1072 +vn -0.3397 0.7445 -0.5747 +vn -0.3396 0.7446 -0.5747 +vn 0.6260 0.0696 -0.7767 +vn 0.7292 -0.6835 -0.0342 +vn 0.8232 -0.5673 -0.0223 +vn 0.8231 -0.5675 -0.0223 +vn 0.8505 0.5216 -0.0681 +vn 0.8486 0.5020 -0.1673 +vn 0.6246 0.3123 -0.7158 +vn 0.5322 0.4258 -0.7318 +vn 0.5778 0.3545 -0.7352 +vn 0.1462 0.1071 -0.9834 +vn 0.1719 0.0573 -0.9834 +vn 0.5776 0.3545 -0.7353 +vn 0.6337 0.2408 -0.7351 +vn 0.6714 0.1199 -0.7313 +vn 0.9555 0.1610 -0.2471 +vn 0.9555 0.1611 -0.2470 +vn 0.9987 0.0004 -0.0501 +vn 0.9988 0.0001 -0.0495 +vn 0.9988 -0.0000 -0.0497 +vn 0.3679 -0.0000 -0.9299 +vn 0.3714 -0.0103 -0.9284 +vn 0.1629 -0.0000 -0.9866 +vn 0.9519 -0.3000 -0.0621 +vn 0.9519 -0.3001 -0.0621 +vn 0.9126 -0.3764 -0.1597 +vn 0.8181 -0.5534 -0.1566 +vn 0.6995 -0.7112 -0.0699 +vn 0.1036 -0.1036 -0.9892 +vn 0.4803 -0.4803 -0.7339 +vn 0.6900 -0.6772 -0.2556 +vn 0.5398 -0.8034 -0.2513 +vn 0.3261 -0.6000 -0.7305 +vn 0.3698 -0.8947 -0.2507 +vn 0.1995 -0.6610 -0.7234 +vn 0.0288 -0.9771 -0.2109 +vn 0.0112 -0.7071 -0.7071 +vn 0.0112 -0.7070 -0.7071 +vn -0.0000 -0.9764 -0.2161 +vn -0.1845 -0.9805 -0.0680 +vn -0.6910 -0.7149 -0.1072 +vn -0.9776 -0.1798 -0.1097 +vn -0.9821 -0.1873 -0.0187 +vn -0.2035 -0.9779 -0.0484 +vn -0.2555 -0.7025 -0.6643 +vn -0.2555 -0.7025 -0.6642 +vn -0.7649 -0.4749 -0.4352 +vn -0.1828 -0.2258 -0.9569 +vn -0.0000 -0.2381 -0.9712 +vn -0.0000 -0.2382 -0.9712 +vn -0.0000 -0.2383 -0.9712 +vn -0.0000 -0.6901 -0.7237 +vn 0.3557 -0.6060 -0.7115 +vn 0.1828 -0.2258 -0.9569 +vn 0.7649 -0.4748 -0.4353 +vn 0.2555 -0.7025 -0.6642 +vn 0.2035 
-0.9779 -0.0484 +vn 0.7120 -0.7007 -0.0459 +vn 0.9821 -0.1872 -0.0187 +vn 0.9776 -0.1797 -0.1097 +vn 0.9809 -0.0000 -0.1944 +vn 0.9814 -0.0577 -0.1829 +vn 0.6995 -0.0699 -0.7112 +vn -0.9822 -0.0187 -0.1871 +vn -0.9612 0.1149 -0.2507 +vn -0.9945 0.0538 -0.0896 +vn -0.7025 0.2554 -0.6643 +vn -0.1845 0.3689 -0.9110 +vn -0.2585 0.0930 -0.9615 +vn -0.1051 0.2732 -0.9562 +vn -0.1423 0.3064 -0.9412 +vn 0.0685 0.5481 -0.8336 +vn 0.0923 0.5308 -0.8424 +vn 0.3860 0.6655 -0.6389 +vn 0.6681 0.6681 -0.3276 +vn 0.7275 0.6522 -0.2132 +vn 0.8322 0.5140 -0.2080 +vn 0.8230 0.5282 -0.2088 +vn 0.9379 0.2903 -0.1898 +vn 0.9286 0.3171 -0.1925 +vn 0.8732 0.3567 -0.3321 +vn 0.5998 0.3999 -0.6931 +vn 0.8726 0.2423 -0.4241 +vn 0.6364 0.3712 -0.6762 +vn -0.3696 0.4206 -0.8286 +vn -0.6445 0.3420 -0.6839 +vn -0.3564 0.4330 -0.8279 +vn -0.9844 0.0669 -0.1626 +vn -0.9835 0.0771 -0.1639 +vn -0.9834 0.0773 -0.1639 +vn -0.9843 -0.0473 0.1703 +vn -0.9849 -0.0375 0.1688 +vn -0.9644 -0.1141 0.2387 +vn -0.9640 -0.0502 0.2612 +vn -0.9654 -0.0000 0.2608 +vn -0.9849 -0.0000 0.1733 +vn -0.9782 0.0889 0.1877 +vn -0.9768 0.0997 0.1894 +vn -0.9602 0.1921 0.2027 +vn -0.9816 0.1888 0.0283 +vn -0.9264 0.3727 0.0532 +vn -0.6241 0.2547 -0.7387 +vn -0.5661 0.3687 -0.7373 +vn -0.0163 0.0163 -0.9997 +vn -0.5067 0.4533 -0.7334 +vn -0.7227 0.6877 0.0698 +vn -0.6468 0.6197 -0.4446 +vn -0.4533 0.5067 -0.7333 +vn 0.5976 -0.6248 -0.5025 +vn 0.3977 -0.7292 -0.5569 +vn 0.2383 -0.5645 -0.7903 +vn -0.1856 0.6436 -0.7425 +vn 0.1193 -0.6201 -0.7754 +vn -0.0577 0.6584 -0.7505 +vn -0.0225 0.6177 -0.7861 +vn 0.0112 -0.6367 -0.7710 +vn -0.0000 -0.6434 -0.7655 +vn -0.0000 0.6194 -0.7850 +vn -0.0000 0.6192 -0.7852 +vn 0.0001 -0.6434 -0.7655 +vn -0.0000 -0.6435 -0.7655 +vn -0.0000 -0.0079 -1.0000 +vn -0.0112 -0.6367 -0.7710 +vn -0.0206 -0.9365 -0.3500 +vn -0.0281 -0.9836 -0.1782 +vn -0.0186 -0.9841 -0.1764 +vn -0.1577 -0.9237 -0.3492 +vn -0.1581 -0.6079 -0.7781 +vn -0.0664 -0.1519 -0.9862 +vn -0.0950 -0.1235 -0.9878 +vn -0.1957 0.0685 
-0.9783 +vn -0.0685 0.1957 -0.9783 +vn 0.0886 0.4097 -0.9079 +vn -0.4558 0.5362 -0.7105 +vn -0.6822 0.7071 -0.1861 +vn -0.7071 0.7071 -0.0112 +vn -0.6789 0.7264 -0.1072 +vn -0.6362 0.7658 -0.0943 +vn 0.1706 0.9203 -0.3521 +vn 0.1705 0.9203 -0.3522 +vn 0.6255 0.5167 -0.5847 +vn 0.9403 -0.2462 -0.2351 +vn 0.8249 -0.5576 -0.0930 +vn 0.8201 0.5624 -0.1054 +vn 0.8421 0.5349 -0.0683 +vn 0.7558 0.6511 -0.0698 +vn 0.7767 0.6260 -0.0696 +vn 0.7967 0.6004 -0.0693 +vn 0.7502 0.6103 -0.2543 +vn 0.7831 0.5683 -0.2526 +vn 0.9295 0.3632 -0.0641 +vn 0.9068 0.3414 -0.2473 +vn 0.9295 0.3631 -0.0641 +vn 0.9793 0.1939 -0.0582 +vn 0.9815 0.1826 -0.0577 +vn 0.9535 0.1842 -0.2384 +vn 0.1660 0.0184 -0.9860 +vn 0.9936 0.0993 -0.0542 +vn 0.9984 0.0254 -0.0508 +vn 0.9988 0.0002 -0.0495 +vn 0.9892 -0.0181 -0.1455 +vn 0.9703 -0.1858 -0.1549 +vn 0.8938 -0.1625 -0.4179 +vn 0.7095 -0.2838 -0.6450 +vn 0.6140 -0.3266 -0.7185 +vn 0.6510 -0.2553 -0.7148 +vn 0.4968 -0.4968 -0.7116 +vn 0.5431 -0.5295 -0.6517 +vn 0.6980 -0.6981 -0.1594 +vn 0.5396 -0.8274 -0.1559 +vn 0.4684 -0.8810 -0.0669 +vn 0.0765 -0.1531 -0.9852 +vn -0.0000 -0.1630 -0.9866 +vn -0.1922 -0.9802 -0.0481 +vn -0.0000 -0.1038 -0.9946 +vn 0.6910 -0.7148 -0.1072 +vn 0.3976 -0.8206 -0.4104 +vn 0.6823 -0.1861 -0.7070 +vn 0.6957 0.0112 -0.7183 +vn -0.6996 -0.0697 -0.7111 +vn -0.9809 -0.0000 -0.1945 +vn -0.5446 -0.7306 -0.4118 +vn -0.6499 -0.3714 -0.6631 +vn -0.6959 -0.7080 -0.1200 +vn -0.9776 -0.1797 -0.1097 +vn -0.3727 -0.9264 -0.0533 +vn -0.7121 -0.7006 -0.0460 +vn -1.0000 0.0002 -0.0079 +vn -0.9866 -0.1246 -0.1054 +vn -0.7539 -0.5026 -0.4232 +vn -0.7704 -0.2954 -0.5650 +vn -0.1838 -0.5759 -0.7966 +vn -0.0224 -0.5925 -0.8053 +vn -0.1586 -0.6345 -0.7565 +vn -0.0509 0.2851 -0.9572 +vn -0.3712 -0.6762 -0.6364 +vn -0.6611 -0.5937 -0.4587 +vn -0.5291 -0.2645 -0.8063 +vn -0.1693 0.3386 -0.9256 +vn 0.1166 0.5127 -0.8506 +vn 0.4877 0.5825 -0.6502 +vn 0.7645 0.5572 -0.3241 +vn 0.8795 0.3543 -0.3176 +vn 0.5811 0.4731 -0.6622 +vn 0.5812 0.4730 -0.6622 
+vn 0.8796 0.3543 -0.3176 +vn 0.6473 0.3566 -0.6737 +vn 0.8795 0.3543 -0.3177 +vn 0.9398 0.1660 -0.2986 +vn 0.9830 0.0974 -0.1557 +vn 0.9817 0.1080 -0.1571 +vn 0.9441 0.1647 -0.2856 +vn 0.9441 0.1647 -0.2855 +vn 0.8725 0.2424 -0.4242 +vn 0.9469 0.1524 -0.2832 +vn 0.8757 0.2159 -0.4319 +vn 0.6363 0.3712 -0.6763 +vn 0.6337 0.3565 -0.6865 +vn -0.6414 0.3273 -0.6939 +vn -0.9834 0.0771 -0.1640 +vn -0.6479 0.1994 -0.7352 +vn -0.9829 -0.0000 -0.1839 +vn -0.9849 -0.0000 0.1732 +vn -1.0000 -0.0000 -0.0082 +vn -0.6435 -0.0000 -0.7654 +vn -0.9952 0.0961 0.0174 +vn -0.6510 0.1326 -0.7474 +vn -0.9816 0.1888 0.0282 +vn -0.6242 0.2547 -0.7386 +vn -0.6508 0.1326 -0.7476 +vn 0.5989 -0.1711 -0.7824 +vn 0.5471 -0.2799 -0.7889 +vn -0.0162 0.0161 -0.9997 +vn 0.6872 -0.4716 -0.5526 +vn -0.0162 0.0162 -0.9997 +vn 0.5150 -0.5421 -0.6641 +vn 0.5586 -0.5586 -0.6131 +vn 0.6260 -0.5715 -0.5306 +vn 0.6303 0.1854 -0.7539 +vn 0.6889 -0.6228 -0.3709 +vn 0.7690 -0.5927 -0.2395 +vn 0.6064 -0.7581 -0.2400 +vn 0.6120 -0.7619 -0.2121 +vn 0.5645 -0.7903 -0.2383 +vn 0.6020 -0.6823 -0.4147 +vn 0.5603 -0.7558 -0.3388 +vn 0.3981 -0.8584 -0.3236 +vn 0.2321 -0.9168 -0.3250 +vn 0.0206 -0.9365 -0.3500 +vn -0.0000 -0.9392 -0.3434 +vn 0.0093 -0.9805 -0.1961 +vn -0.0000 -0.9849 -0.1733 +vn -0.0000 -0.8622 -0.5065 +vn 0.0013 -0.6435 -0.7654 +vn 0.0001 -0.8623 -0.5064 +vn -0.0000 -0.0082 -1.0000 +vn -0.0000 -0.8623 -0.5064 +vn 0.9966 -0.0437 -0.0699 +vn 0.1854 -0.6303 -0.7539 +vn 0.1939 -0.3307 -0.9236 +vn -0.0000 -0.3434 -0.9392 +vn 0.0005 -0.6434 -0.7655 +vn -0.0000 -0.6436 -0.7654 +vn -0.0000 -0.9391 -0.3435 +vn -0.0000 -0.6435 -0.7654 +vn -0.0112 -0.6489 -0.7608 +vn -0.0205 -0.9411 -0.3376 +vn -0.0372 -0.1584 -0.9867 +vn 0.1080 0.1571 -0.9817 +vn 0.2027 0.2927 -0.9345 +vn 0.0869 0.1544 -0.9842 +vn 0.3403 0.5890 -0.7330 +vn 0.4107 0.5300 -0.7419 +vn 0.2981 0.6349 -0.7128 +vn -0.5800 0.8143 -0.0223 +vn 0.4879 0.8569 -0.1666 +vn 0.4949 0.8663 -0.0675 +vn -0.5674 0.8231 -0.0222 +vn 0.5083 0.8585 -0.0678 +vn -0.6728 
0.7398 -0.0112 +vn -0.5662 0.8203 -0.0809 +vn 0.1088 0.9352 -0.3371 +vn 0.5299 0.8311 -0.1686 +vn 0.7339 0.6757 -0.0700 +vn 0.6687 0.7390 -0.0821 +vn 0.7500 0.6563 -0.0820 +vn 0.8248 0.5613 -0.0687 +vn 0.9189 0.3892 -0.0649 +vn 0.9295 0.3633 -0.0641 +vn 0.9793 0.1940 -0.0582 +vn 0.8947 0.3698 -0.2505 +vn 0.9535 0.1842 -0.2385 +vn 0.6763 0.1329 -0.7246 +vn 0.6764 0.1328 -0.7245 +vn 0.1659 0.0184 -0.9860 +vn 0.1612 0.0569 -0.9853 +vn 0.9557 0.2877 -0.0617 +vn 0.9936 0.0994 -0.0542 +vn 0.9678 0.0917 -0.2344 +vn 0.6877 0.0699 -0.7226 +vn 0.3590 0.0103 -0.9333 +vn 0.9988 -0.0002 -0.0496 +vn 0.9986 0.0168 -0.0504 +vn 0.9986 0.0168 -0.0503 +vn 0.9988 0.0003 -0.0495 +vn 0.9897 -0.0000 -0.1429 +vn 0.7655 -0.0000 -0.6434 +vn 0.7710 -0.0112 -0.6367 +vn 0.7071 -0.0112 -0.7071 +vn 0.6368 -0.0112 -0.7709 +vn 0.5615 -0.2807 -0.7784 +vn 0.5572 -0.0000 -0.8304 +vn 0.1943 -0.0000 -0.9809 +vn 0.4304 -0.3418 -0.8354 +vn 0.0623 -0.0711 -0.9955 +vn 0.1461 -0.3596 -0.9216 +vn 0.3930 -0.4979 -0.7731 +vn 0.4418 -0.5489 -0.7096 +vn 0.1998 -0.6739 -0.7113 +vn 0.2553 -0.6510 -0.7148 +vn 0.2838 -0.7095 -0.6450 +vn 0.3694 -0.7256 -0.5805 +vn 0.2830 -0.9469 -0.1526 +vn 0.2877 -0.9557 -0.0617 +vn 0.0520 -0.9973 -0.0520 +vn 0.0083 -0.9987 -0.0499 +vn -0.0029 -0.9988 -0.0487 +vn -0.0000 -0.9971 -0.0761 +vn -0.0001 -0.9988 -0.0496 +vn -0.0000 -0.9988 -0.0491 +vn -0.0000 -0.9988 -0.0492 +vn 0.1845 -0.9805 -0.0680 +vn -0.0315 -0.9251 -0.3785 +vn -0.6500 -0.3713 -0.6631 +vn -0.2422 -0.9227 -0.2999 +vn -0.3856 -0.9211 -0.0536 +vn -0.2034 -0.9779 -0.0484 +vn -0.2697 -0.7061 -0.6547 +vn -0.3760 -0.3636 -0.8523 +vn -0.0226 -0.7453 -0.6663 +vn -0.0112 -0.6608 -0.7505 +vn -0.1327 -0.6637 -0.7361 +vn -0.4777 -0.8296 -0.2891 +vn -0.6642 -0.7026 -0.2555 +vn -0.7387 -0.6241 -0.2547 +vn -0.6562 -0.0820 -0.7501 +vn -0.1964 0.3582 -0.9128 +vn 0.1527 0.4699 -0.8694 +vn 0.1528 0.4699 -0.8694 +vn 0.1652 0.4599 -0.8725 +vn 0.3417 0.4304 -0.8355 +vn 0.1652 0.4598 -0.8725 +vn 0.6198 0.3562 -0.6992 +vn 0.6444 0.3420 
-0.6840 +vn 0.5721 -0.5449 -0.6130 +vn 0.6539 0.1070 -0.7490 +vn 0.9760 -0.1207 -0.1811 +vn 0.9428 0.1535 -0.2960 +vn 0.9398 0.1659 -0.2987 +vn 0.9852 0.0765 -0.1531 +vn 0.9830 0.0973 -0.1557 +vn 0.9853 0.0765 -0.1530 +vn 0.7390 -0.3828 -0.5544 +vn 0.9782 0.0890 -0.1877 +vn 0.6637 0.5011 -0.5554 +vn 0.9596 0.2239 -0.1705 +vn 0.9852 0.0766 -0.1531 +vn 0.9852 0.0767 -0.1530 +vn 0.7035 0.3126 -0.6383 +vn 0.8853 0.4250 -0.1888 +vn 0.6091 0.4737 -0.6361 +vn 0.0302 0.9560 -0.2918 +vn 0.6578 0.3421 -0.6710 +vn 0.1777 0.4496 -0.8754 +vn 0.6991 0.1997 -0.6866 +vn 0.9624 0.1256 -0.2407 +vn 0.9624 0.1268 -0.2402 +vn 0.8758 0.2159 -0.4318 +vn 0.6415 0.3273 -0.6938 +vn 0.6424 0.0225 -0.7660 +vn 0.9466 0.0101 -0.3223 +vn 0.6423 0.0226 -0.7661 +vn 0.6436 -0.0000 -0.7654 +vn 0.6436 0.0001 -0.7654 +vn -0.6435 0.0003 -0.7655 +vn -0.0079 -0.0000 -1.0000 +vn -0.0161 0.0081 -0.9998 +vn 0.6334 -0.0576 -0.7717 +vn 0.6335 -0.0576 -0.7716 +vn 0.9236 -0.1577 -0.3493 +vn 0.8795 -0.3176 -0.3543 +vn 0.7993 -0.4899 -0.3481 +vn 0.8198 -0.5217 -0.2360 +vn 0.7507 -0.6255 -0.2127 +vn 0.7467 -0.6201 -0.2405 +vn 0.5683 -0.7831 -0.2526 +vn 0.5947 -0.7137 -0.3701 +vn 0.1063 -0.7912 -0.6023 +vn 0.5431 -0.6517 -0.5295 +vn 0.5295 -0.5431 -0.6517 +vn 0.1659 -0.0185 -0.9860 +vn 0.0717 0.0717 -0.9948 +vn 0.5275 0.5140 -0.6765 +vn 0.4845 0.5249 -0.6998 +vn 0.8141 0.5627 -0.1436 +vn 0.8798 -0.4098 -0.2410 +vn 0.5926 -0.7691 -0.2396 +vn 0.4209 -0.8779 -0.2285 +vn 0.4066 -0.8849 -0.2272 +vn 0.2553 -0.9436 -0.2109 +vn 0.0281 -0.9836 -0.1780 +vn 0.0186 -0.9841 -0.1764 +vn -0.0000 -0.9849 -0.1732 +vn -0.1903 -0.9631 -0.1905 +vn -0.0629 -0.9436 -0.3250 +vn -0.4583 -0.6738 -0.5796 +vn 0.2129 -0.7392 -0.6389 +vn 0.0093 -0.9805 -0.1962 +vn 0.2130 -0.7391 -0.6390 +vn 0.0094 -0.9805 -0.1961 +vn 0.0092 -0.9846 -0.1748 +vn -0.0000 -0.8696 -0.4938 +vn -0.0001 -0.8696 -0.4938 +vn -0.0000 -0.4811 -0.8767 +vn 0.1879 0.0890 -0.9781 +vn 0.6995 0.0700 -0.7112 +vn 0.3699 -0.0871 -0.9250 +vn 0.9995 -0.0000 -0.0326 +vn 0.9990 
-0.0166 -0.0417 +vn -0.0000 -0.0852 -0.9964 +vn -0.0000 -0.0761 -0.9971 +vn -0.0000 -0.6433 -0.7656 +vn -0.0000 -0.4810 -0.8767 +vn -0.0000 -0.9435 -0.3314 +vn 0.1856 -0.9500 -0.2511 +vn 0.9999 -0.0080 -0.0080 +vn 0.1879 -0.0889 -0.9782 +vn -0.0000 -0.1733 -0.9849 +vn -0.0002 -0.1733 -0.9849 +vn 0.0473 0.1706 -0.9842 +vn 0.2408 0.6337 -0.7351 +vn 0.4129 0.8743 -0.2552 +vn 0.5541 0.7934 -0.2519 +vn 0.5825 0.7724 -0.2533 +vn 0.6133 0.7868 -0.0694 +vn 0.6132 0.7868 -0.0694 +vn 0.5874 0.8063 -0.0691 +vn 0.6260 0.7767 -0.0696 +vn 0.6183 0.7817 -0.0817 +vn 0.6311 0.7714 -0.0818 +vn 0.6686 0.7391 -0.0821 +vn 0.6995 0.7112 -0.0699 +vn 0.6995 0.7112 -0.0700 +vn 0.6900 0.6772 -0.2557 +vn 0.8034 0.5398 -0.2513 +vn 0.4803 0.4803 -0.7339 +vn 0.6001 0.3261 -0.7305 +vn 0.1437 0.0861 -0.9859 +vn 0.8810 0.4684 -0.0669 +vn 0.9557 0.2878 -0.0617 +vn 0.9189 0.3891 -0.0649 +vn 0.6610 0.1995 -0.7234 +vn 0.1597 0.0470 -0.9860 +vn 0.9961 0.0705 -0.0529 +vn 0.8181 0.5534 -0.1564 +vn 0.9580 0.2448 -0.1491 +vn 0.8572 0.4531 -0.2449 +vn 0.7870 0.2248 -0.5746 +vn 0.9693 0.0808 -0.2322 +vn 0.9903 0.0273 -0.1363 +vn 0.9898 -0.0000 -0.1427 +vn 0.6314 -0.0000 -0.7754 +vn 0.1944 -0.0000 -0.9809 +vn 0.0944 -0.0000 -0.9955 +vn -0.2723 -0.0000 -0.9622 +vn -0.5241 -0.0109 -0.8516 +vn -0.6313 -0.0000 -0.7755 +vn -0.7126 -0.0000 -0.7015 +vn -0.6314 -0.0000 -0.7754 +vn -0.4987 -0.3195 -0.8057 +vn -0.4808 -0.3037 -0.8226 +vn -0.3037 -0.4808 -0.8226 +vn 0.0111 -0.6246 -0.7808 +vn -0.0000 -0.6314 -0.7754 +vn 0.1837 -0.0000 -0.9830 +vn 0.1854 0.0093 -0.9826 +vn 0.0521 0.0520 -0.9973 +vn -0.0000 0.1943 -0.9809 +vn 0.1964 0.5523 -0.8102 +vn -0.0111 0.6246 -0.7808 +vn 0.2249 0.5746 -0.7870 +vn 0.0113 0.7071 -0.7071 +vn 0.0226 0.6898 -0.7237 +vn -0.0000 0.0945 -0.9955 +vn -0.0000 -0.1943 -0.9809 +vn -0.0000 -0.1944 -0.9809 +vn 0.1460 -0.3597 -0.9216 +vn 0.0112 -0.6368 -0.7709 +vn 0.1847 -0.6034 -0.7758 +vn 0.1997 -0.6739 -0.7113 +vn 0.1200 -0.6960 -0.7080 +vn 0.1324 -0.7586 -0.6380 +vn 0.2830 -0.9469 -0.1524 +vn 
0.0182 -0.9892 -0.1454 +vn 0.0084 -0.9987 -0.0499 +vn -0.0000 -0.8218 -0.5698 +vn -0.0000 -0.9898 -0.1427 +vn -0.0000 -0.9897 -0.1429 +vn 0.0001 -0.9988 -0.0499 +vn -0.0000 -0.9764 -0.2162 +vn -0.0112 -0.7071 -0.7071 +vn -0.0287 -0.9771 -0.2108 +vn -0.0254 -0.9984 -0.0508 +vn -0.0000 -0.6670 -0.7451 +vn -0.0473 -0.1705 -0.9842 +vn -0.3266 -0.6140 -0.7185 +vn -0.6822 -0.6948 -0.2276 +vn -0.5124 -0.8576 -0.0446 +vn -0.6282 -0.7767 -0.0457 +vn -0.7059 -0.7059 -0.0579 +vn -0.8376 -0.5433 -0.0566 +vn -0.8490 -0.4676 -0.2461 +vn -0.6943 0.0579 -0.7174 +vn -0.3713 0.3342 -0.8663 +vn -0.3677 0.4059 -0.8367 +vn -0.3696 0.4206 -0.8285 +vn -0.6681 0.3275 -0.6681 +vn -0.6980 0.0946 -0.7098 +vn -0.5863 -0.5590 -0.5863 +vn -0.1785 -0.9656 -0.1889 +vn -0.0566 -0.8376 -0.5434 +vn 0.0442 -0.8728 -0.4861 +vn 0.5722 -0.5449 -0.6129 +vn 0.5722 -0.5448 -0.6130 +vn 0.1939 -0.9793 -0.0582 +vn 0.3768 -0.9197 -0.1108 +vn 0.1860 -0.6822 -0.7071 +vn 0.1725 0.6777 -0.7148 +vn 0.1711 0.9765 -0.1309 +vn 0.8852 0.4251 -0.1889 +vn 0.1712 0.9765 -0.1309 +vn 0.0081 0.9999 -0.0080 +vn -0.0000 0.9955 -0.0944 +vn -0.0182 0.9892 -0.1454 +vn -0.0110 0.5749 -0.8181 +vn -0.3369 0.5312 -0.7774 +vn -0.3826 0.4081 -0.8289 +vn -0.1899 0.4391 -0.8781 +vn -0.1776 0.4496 -0.8754 +vn 0.1888 0.4250 -0.8853 +vn 0.1888 0.4249 -0.8853 +vn 0.1871 0.2640 -0.9462 +vn 0.6671 -0.0000 -0.7450 +vn 0.1734 -0.0000 -0.9849 +vn -0.0889 -0.1877 -0.9782 +vn 0.0760 -0.3796 -0.9220 +vn -0.0189 -0.9800 -0.1979 +vn -0.0000 -0.9997 -0.0243 +vn 0.0759 -0.3797 -0.9220 +vn 0.6303 -0.1854 -0.7539 +vn 0.0250 -0.9988 -0.0419 +vn 0.9630 -0.1905 -0.1905 +vn 0.3735 -0.9223 -0.0988 +vn 0.9500 -0.1856 -0.2510 +vn 0.9435 0.0026 -0.3313 +vn 0.9622 -0.0000 -0.2724 +vn 0.9606 0.0506 -0.2731 +vn 0.9852 0.0765 -0.1533 +vn 0.9850 0.0375 -0.1686 +vn 0.9843 0.0473 -0.1703 +vn 0.9464 0.0957 -0.3084 +vn 0.9842 0.0473 -0.1704 +vn 0.9477 -0.0000 -0.3192 +vn 0.9848 -0.0000 -0.1735 +vn 0.9849 -0.0000 -0.1731 +vn 0.3731 -0.9225 -0.0988 +vn 0.9563 -0.1719 
-0.2364 +vn 0.2615 -0.2501 -0.9322 +vn 0.9436 -0.0629 -0.3251 +vn 0.1725 0.6778 -0.7148 +vn 0.8667 0.0913 -0.4904 +vn -0.6436 -0.0001 -0.7654 +vn 0.9454 -0.0203 -0.3253 +vn 0.9453 -0.0203 -0.3254 +vn 0.9836 -0.0281 -0.1780 +vn 0.9782 -0.0889 -0.1877 +vn 0.9629 -0.1799 -0.2010 +vn 0.9436 -0.2553 -0.2109 +vn 0.9101 -0.3501 -0.2217 +vn 0.8779 -0.4209 -0.2285 +vn 0.7798 -0.5786 -0.2390 +vn 0.7467 -0.6202 -0.2405 +vn 0.3954 -0.8650 -0.3090 +vn -0.5763 -0.8044 -0.1441 +vn -0.4006 -0.6275 -0.6677 +vn -0.0717 -0.0718 -0.9948 +vn 0.0555 -0.1296 -0.9900 +vn -0.0977 -0.0355 -0.9946 +vn -0.0717 -0.0717 -0.9948 +vn -0.1925 0.3171 -0.9286 +vn 0.2532 0.7724 -0.5825 +vn 0.6860 0.7214 -0.0947 +vn 0.7071 0.7071 -0.0112 +vn 0.7071 0.6822 -0.1861 +vn 0.6760 -0.5005 -0.5408 +vn 0.5713 -0.7175 -0.3986 +vn 0.5275 -0.7518 -0.3957 +vn 0.5149 -0.6640 -0.5422 +vn 0.4258 -0.7317 -0.5323 +vn 0.3931 -0.8371 -0.3805 +vn 0.3656 -0.7704 -0.5223 +vn 0.2663 -0.5579 -0.7860 +vn 0.2367 -0.8996 -0.3671 +vn 0.1067 -0.6283 -0.7706 +vn 0.0205 -0.9411 -0.3376 +vn 0.0112 -0.6489 -0.7608 +vn 0.0112 -0.6488 -0.7609 +vn -0.0000 -0.6554 -0.7553 +vn -0.1852 -0.6303 -0.7539 +vn -0.9999 -0.0080 -0.0080 +vn -0.3751 -0.9026 -0.2110 +vn -0.9768 -0.1610 -0.1410 +vn -0.6857 -0.1589 -0.7103 +vn 0.6858 -0.1592 -0.7102 +vn 0.9224 -0.3733 -0.0991 +vn 0.9224 -0.3734 -0.0988 +vn 0.1856 -0.9500 -0.2512 +vn 0.1926 -0.8550 -0.4816 +vn 0.9810 0.1905 -0.0380 +vn 0.1861 0.6822 -0.7071 +vn -0.9800 0.0188 -0.1980 +vn -0.1877 0.0889 -0.9782 +vn -0.9198 -0.0317 -0.3912 +vn -0.3700 -0.0870 -0.9250 +vn -0.1853 -0.6303 -0.7540 +vn -0.9814 -0.1828 -0.0577 +vn -0.1856 -0.9500 -0.2511 +vn -0.9224 -0.3734 -0.0988 +vn -0.1905 -0.9630 -0.1905 +vn -0.6858 -0.1592 -0.7101 +vn -0.0629 -0.9436 -0.3251 +vn 0.2501 -0.2615 -0.9322 +vn 0.0629 -0.9436 -0.3250 +vn 0.1906 -0.9630 -0.1905 +vn 0.9224 -0.3733 -0.0988 +vn 0.9946 -0.0000 -0.1038 +vn 0.6882 0.4993 -0.5263 +vn 0.9809 0.1905 -0.0381 +vn 0.1878 0.0890 -0.9782 +vn 0.0001 0.1944 -0.9809 +vn 0.1199 
0.6714 -0.7313 +vn 0.2482 0.9364 -0.2482 +vn 0.2516 0.9659 -0.0604 +vn 0.4286 0.9011 -0.0660 +vn 0.2877 0.9557 -0.0617 +vn 0.4684 0.8810 -0.0669 +vn 0.6900 0.6772 -0.2556 +vn 0.4803 0.4805 -0.7338 +vn 0.4531 0.8572 -0.2449 +vn 0.3261 0.6001 -0.7304 +vn 0.0959 0.1341 -0.9863 +vn 0.0958 0.1342 -0.9863 +vn 0.1037 0.1037 -0.9892 +vn 0.5758 0.2815 -0.7676 +vn 0.5757 0.2815 -0.7677 +vn 0.1341 0.0958 -0.9863 +vn 0.6981 0.6981 -0.1593 +vn 0.6386 0.5163 -0.5706 +vn 0.7095 0.2838 -0.6450 +vn 0.7613 0.0577 -0.6458 +vn 0.7006 0.0460 -0.7121 +vn 0.7071 0.0112 -0.7071 +vn 0.6314 -0.0000 -0.7755 +vn -0.7015 -0.0000 -0.7126 +vn -0.7016 -0.0000 -0.7126 +vn -0.6957 -0.0112 -0.7182 +vn -0.7071 0.0112 -0.7071 +vn -0.6079 -0.1581 -0.7781 +vn -0.4987 -0.3197 -0.8056 +vn -0.4479 -0.3584 -0.8191 +vn -0.3197 -0.4987 -0.8056 +vn -0.0000 -0.7126 -0.7015 +vn -0.0111 -0.6247 -0.7808 +vn 0.0110 -0.5496 -0.8354 +vn 0.0520 -0.0520 -0.9973 +vn 0.5446 -0.0000 -0.8387 +vn 0.6313 -0.0000 -0.7755 +vn 0.5522 0.1964 -0.8102 +vn 0.6079 0.1581 -0.7781 +vn 0.5746 0.2248 -0.7869 +vn 0.4479 0.3584 -0.8191 +vn 0.1712 0.5989 -0.7823 +vn 0.1564 0.5534 -0.8181 +vn 0.0225 0.6300 -0.7763 +vn 0.0260 0.0780 -0.9966 +vn -0.0000 -0.6313 -0.7755 +vn -0.0000 -0.7015 -0.7126 +vn -0.0000 -0.7656 -0.6433 +vn -0.0000 -0.7656 -0.6434 +vn -0.0112 -0.7710 -0.6368 +vn -0.0000 -0.7655 -0.6434 +vn -0.0181 -0.9892 -0.1455 +vn -0.0000 -0.3678 -0.9299 +vn -0.0103 -0.3714 -0.9284 +vn -0.1995 -0.6610 -0.7234 +vn -0.2862 -0.9275 -0.2405 +vn -0.2878 -0.9557 -0.0617 +vn -0.1939 -0.9793 -0.0582 +vn -0.2877 -0.9557 -0.0617 +vn -0.3762 -0.9243 -0.0645 +vn -0.4684 -0.8810 -0.0669 +vn -0.0862 -0.1436 -0.9859 +vn -0.6995 -0.7112 -0.0700 +vn -0.5481 -0.8336 -0.0685 +vn -0.7664 -0.6386 -0.0697 +vn -0.8248 -0.5613 -0.0687 +vn -0.8248 -0.5613 -0.0688 +vn -0.1436 -0.0861 -0.9859 +vn -0.1150 -0.1150 -0.9867 +vn -0.4953 -0.4819 -0.7228 +vn -0.8548 -0.4641 -0.2322 +vn -0.8840 -0.4641 -0.0552 +vn -0.9264 -0.3727 -0.0532 +vn -0.9269 -0.2975 -0.2289 +vn 
-0.7191 0.1860 -0.6696 +vn -0.1777 0.4495 -0.8754 +vn -0.7192 0.1859 -0.6695 +vn -0.6877 0.2985 -0.6618 +vn -0.6909 0.3129 -0.6518 +vn -0.9733 0.0903 -0.2109 +vn -0.9703 -0.1857 -0.1548 +vn -0.1796 -0.9776 -0.1098 +vn 0.0002 -1.0000 -0.0079 +vn -0.0000 -0.7014 -0.7127 +vn -0.9893 0.0462 -0.1387 +vn -0.6995 -0.0699 -0.7112 +vn -0.0578 0.6943 -0.7173 +vn -0.0000 -0.7016 -0.7125 +vn -0.1879 0.0889 -0.9781 +vn 0.0003 0.0947 -0.9955 +vn -0.0000 0.6901 -0.7237 +vn -0.1888 0.9816 -0.0283 +vn -0.6611 0.4587 -0.5937 +vn -0.6910 0.3128 -0.6517 +vn -0.6844 0.2841 -0.6715 +vn -0.1868 0.3968 -0.8987 +vn -0.1828 0.2258 -0.9569 +vn -0.0000 0.2722 -0.9622 +vn -0.0000 0.2721 -0.9623 +vn -0.1857 0.0093 -0.9826 +vn -0.1839 -0.0000 -0.9830 +vn -0.7070 0.0112 -0.7071 +vn -0.7015 -0.0000 -0.7127 +vn -0.6778 -0.1726 -0.7147 +vn -0.0890 -0.1877 -0.9782 +vn -0.4993 -0.6883 -0.5263 +vn -0.0001 -0.9946 -0.1036 +vn -0.0001 0.7014 -0.7127 +vn 0.3733 0.9224 -0.0989 +vn 0.9563 0.1718 -0.2364 +vn 0.9849 -0.0000 -0.1732 +vn 0.9849 -0.0000 -0.1734 +vn 0.0841 0.9901 -0.1122 +vn 0.9026 0.3752 -0.2110 +vn 0.8669 0.0108 -0.4984 +vn 0.9435 -0.0000 -0.3313 +vn 0.9350 -0.0860 -0.3440 +vn 0.6202 -0.1193 -0.7753 +vn 0.8946 -0.2505 -0.3700 +vn 0.8289 -0.4081 -0.3826 +vn 0.7613 -0.3806 -0.5250 +vn 0.7406 -0.5422 -0.3970 +vn 0.6517 -0.5295 -0.5431 +vn 0.7055 -0.5856 -0.3992 +vn 0.4856 -0.6879 -0.5395 +vn -0.5745 -0.8156 -0.0689 +vn -0.7071 -0.7071 -0.0112 +vn -0.7031 -0.7030 -0.1072 +vn -0.7215 -0.6859 -0.0948 +vn -0.5275 -0.5140 -0.6764 +vn -0.5998 -0.3999 -0.6931 +vn -0.4856 0.2614 -0.8342 +vn -0.6372 0.4880 -0.5965 +vn -0.2518 0.7935 -0.5541 +vn 0.0087 0.9954 -0.0952 +vn -0.6202 0.7753 -0.1193 +vn -0.3764 0.9126 -0.1597 +vn 0.3666 0.9273 -0.0755 +vn 0.6300 0.7763 -0.0225 +vn 0.6422 0.7662 -0.0225 +vn 0.7070 0.7071 -0.0112 +vn 0.6353 0.7715 -0.0341 +vn 0.6634 0.7450 -0.0699 +vn 0.4558 0.5362 -0.7105 +vn 0.6822 0.7071 -0.1861 +vn 0.5362 0.4557 -0.7105 +vn 0.1957 0.0685 -0.9783 +vn 0.0950 -0.1236 -0.9878 +vn 
0.0664 -0.1517 -0.9862 +vn 0.0279 -0.1674 -0.9855 +vn -0.0000 -0.1734 -0.9848 +vn -0.1877 -0.0889 -0.9782 +vn -0.1876 0.0889 -0.9782 +vn -0.0014 0.1840 -0.9829 +vn -0.0007 0.6902 -0.7236 +vn -0.1859 0.6695 -0.7192 +vn -0.9999 -0.0161 -0.0000 +vn -0.9946 -0.0000 -0.1039 +vn -0.9946 -0.0000 -0.1038 +vn -0.7014 -0.0000 -0.7128 +vn 0.9946 -0.0000 -0.1037 +vn 0.9946 -0.0000 -0.1039 +vn 0.9776 0.1795 -0.1095 +vn 0.3762 0.9243 -0.0645 +vn -0.0000 0.9712 -0.2382 +vn -0.0000 0.9712 -0.2381 +vn -0.1860 0.6822 -0.7071 +vn -0.9809 0.1905 -0.0381 +vn -0.9946 -0.0000 -0.1037 +vn -0.6787 -0.0000 -0.7344 +vn 0.6787 -0.0000 -0.7344 +vn 0.7014 -0.0000 -0.7127 +vn 0.2439 0.3136 -0.9177 +vn 0.9776 0.1794 -0.1098 +vn 0.3769 0.9196 -0.1108 +vn 0.1843 0.9724 -0.1433 +vn 0.1860 0.6695 -0.7191 +vn -0.0000 0.7015 -0.7126 +vn 0.0226 0.7013 -0.7125 +vn -0.0000 0.1946 -0.9809 +vn 0.0190 0.9777 -0.2090 +vn 0.0254 0.9984 -0.0508 +vn 0.2515 0.9660 -0.0604 +vn 0.0190 0.9777 -0.2089 +vn 0.0112 0.7071 -0.7070 +vn 0.0113 0.7071 -0.7070 +vn 0.1995 0.6610 -0.7234 +vn 0.2728 0.9320 -0.2387 +vn 0.0664 0.1518 -0.9862 +vn 0.3891 0.9189 -0.0649 +vn 0.5613 0.8248 -0.0687 +vn 0.1342 0.0958 -0.9863 +vn 0.1045 0.1140 -0.9880 +vn 0.0958 0.1341 -0.9863 +vn 0.4598 0.8725 -0.1652 +vn 0.5026 0.7539 -0.4232 +vn 0.5431 0.5295 -0.6517 +vn 0.4968 0.4968 -0.7116 +vn 0.6510 0.2553 -0.7148 +vn 0.5489 0.4418 -0.7096 +vn 0.6739 0.1997 -0.7113 +vn 0.6032 0.1847 -0.7759 +vn 0.3802 -0.0000 -0.9249 +vn -0.0780 -0.0260 -0.9966 +vn -0.0607 -0.0434 -0.9972 +vn -0.3183 -0.0849 -0.9442 +vn -0.5534 -0.1564 -0.8181 +vn -0.3755 -0.2785 -0.8840 +vn -0.1393 -0.2678 -0.9534 +vn -0.0923 -0.5307 -0.8425 +vn -0.0000 -0.6314 -0.7755 +vn -0.0000 -0.5572 -0.8304 +vn 0.0226 -0.6897 -0.7237 +vn 0.3197 -0.4987 -0.8056 +vn 0.3197 -0.4988 -0.8056 +vn 0.3037 -0.4807 -0.8226 +vn 0.4808 -0.3035 -0.8226 +vn 0.7126 -0.0000 -0.7015 +vn 0.7012 0.0226 -0.7126 +vn 0.6299 0.0225 -0.7763 +vn 0.5534 0.1564 -0.8181 +vn 0.3196 0.1433 -0.9367 +vn 0.3981 0.3235 
-0.8584 +vn 0.2255 0.2706 -0.9359 +vn 0.0607 0.0434 -0.9972 +vn 0.0520 0.0520 -0.9973 +vn -0.0000 -0.3802 -0.9249 +vn -0.0000 -0.5573 -0.8303 +vn -0.0113 -0.7071 -0.7071 +vn -0.1861 -0.6823 -0.7070 +vn -0.2132 -0.7275 -0.6522 +vn -0.1625 -0.8938 -0.4179 +vn -0.1977 -0.9678 -0.1561 +vn -0.1940 -0.9793 -0.0582 +vn -0.0473 -0.1704 -0.9842 +vn -0.3261 -0.6000 -0.7305 +vn -0.4531 -0.8572 -0.2449 +vn -0.8810 -0.4684 -0.0669 +vn -0.9189 -0.3892 -0.0649 +vn -0.1626 -0.0669 -0.9844 +vn -0.6140 -0.3266 -0.7185 +vn -0.9315 -0.2840 -0.2274 +vn -0.9534 -0.2973 -0.0512 +vn -0.9802 -0.1922 -0.0481 +vn -0.9702 -0.1123 -0.2146 +vn -0.9702 -0.1123 -0.2145 +vn -0.9733 0.0695 -0.2186 +vn -0.9988 -0.0000 -0.0496 +vn -0.9805 -0.1844 -0.0679 +vn -0.1861 -0.6822 -0.7071 +vn -0.9734 -0.0696 -0.2184 +vn -0.9988 -0.0000 -0.0495 +vn -0.9805 0.1843 -0.0679 +vn -0.3762 0.9243 -0.0645 +vn -0.9737 0.1927 -0.1217 +vn -0.9721 0.0802 -0.2205 +vn -0.9733 0.0903 -0.2108 +vn -0.9729 0.0492 -0.2261 +vn -0.6838 0.1200 -0.7197 +vn -0.6837 0.1199 -0.7198 +vn -0.9234 0.0104 -0.3838 +vn -0.9197 -0.0000 -0.3927 +vn -0.8987 -0.1868 -0.3968 +vn -0.9724 -0.1842 -0.1433 +vn -0.1795 -0.9776 -0.1097 +vn -0.7746 -0.4202 -0.4727 +vn -0.7740 -0.4067 -0.4854 +vn -0.1861 0.6947 -0.6948 +vn -0.6582 0.4299 -0.6180 +vn -0.7080 0.6959 -0.1200 +vn -0.1905 0.9809 -0.0381 +vn -0.6739 0.1995 -0.7114 +vn -0.6777 -0.1725 -0.7148 +vn -0.1905 -0.9809 -0.0381 +vn 0.3734 -0.9224 -0.0988 +vn 0.9630 -0.1904 -0.1905 +vn 0.9480 -0.1293 -0.2909 +vn 0.3733 -0.9224 -0.0988 +vn 0.6078 -0.1984 -0.7689 +vn 0.0104 0.9181 -0.3962 +vn 0.6911 0.6114 -0.3854 +vn 0.1724 0.6778 -0.7148 +vn 0.6839 0.1200 -0.7197 +vn 0.1611 0.9768 -0.1409 +vn -0.0000 0.9997 -0.0244 +vn 0.0188 0.9800 -0.1980 +vn -0.0700 0.6995 -0.7112 +vn 0.1583 -0.0373 -0.9867 +vn 0.1329 -0.0854 -0.9874 +vn 0.1247 -0.1054 -0.9866 +vn -0.2056 -0.3197 -0.9249 +vn -0.6866 -0.6991 -0.1997 +vn -0.7450 -0.6634 -0.0699 +vn -0.7665 -0.6406 -0.0458 +vn -0.7647 0.6332 -0.1195 +vn -0.7608 0.6258 
-0.1718 +vn -0.5614 0.7084 -0.4277 +vn -0.5322 0.4257 -0.7318 +vn -0.5489 -0.4418 -0.7096 +vn -0.5232 -0.4696 -0.7112 +vn 0.4832 0.5101 -0.7116 +vn 0.6860 0.7214 -0.0946 +vn 0.7071 0.7070 -0.0112 +vn 0.7070 0.7071 -0.0113 +vn 0.6030 0.7964 -0.0456 +vn -0.2981 0.6349 -0.7128 +vn 0.0685 0.1957 -0.9783 +vn -0.0226 0.7013 -0.7125 +vn -0.1840 0.9724 -0.1433 +vn -0.9776 0.1797 -0.1097 +vn -0.1842 0.9724 -0.1435 +vn -0.9776 0.1793 -0.1098 +vn -0.1844 0.9805 -0.0680 +vn -0.2422 0.9227 -0.2999 +vn -0.4984 0.0107 -0.8669 +vn 0.2440 0.3137 -0.9176 +vn 0.0695 0.9734 -0.2185 +vn 0.1844 0.9805 -0.0680 +vn -0.0000 0.9988 -0.0495 +vn -0.0000 0.9988 -0.0496 +vn -0.1842 0.9724 -0.1433 +vn -0.9211 0.3856 -0.0535 +vn -0.1844 0.9805 -0.0679 +vn -0.6947 0.1860 -0.6948 +vn -0.0695 0.9734 -0.2185 +vn 0.2422 0.9227 -0.2999 +vn -0.0000 0.9988 -0.0497 +vn -0.0000 0.9764 -0.2161 +vn -0.0000 0.1631 -0.9866 +vn 0.0185 0.1660 -0.9859 +vn 0.0994 0.9936 -0.0542 +vn 0.2878 0.9557 -0.0616 +vn 0.3261 0.6000 -0.7305 +vn 0.1939 0.9793 -0.0582 +vn 0.1960 0.9699 -0.1445 +vn 0.1625 0.8938 -0.4179 +vn 0.2838 0.7096 -0.6449 +vn 0.2553 0.6510 -0.7148 +vn 0.3267 0.6140 -0.7185 +vn 0.2945 0.5506 -0.7811 +vn 0.4854 0.4066 -0.7740 +vn 0.3418 0.4304 -0.8355 +vn 0.0711 0.0623 -0.9955 +vn -0.0081 -0.0162 -0.9998 +vn -0.0081 -0.0161 -0.9998 +vn -0.0000 -0.0003 -1.0000 +vn 0.0001 -0.1528 -0.9883 +vn -0.0000 -0.1838 -0.9830 +vn 0.0111 -0.6246 -0.7809 +vn 0.0112 -0.6246 -0.7809 +vn 0.0923 -0.5308 -0.8425 +vn 0.1711 -0.5990 -0.7823 +vn 0.3584 -0.4479 -0.8191 +vn 0.4987 -0.3197 -0.8056 +vn 0.2972 -0.3091 -0.9034 +vn 0.4421 -0.1512 -0.8841 +vn 0.6957 -0.0112 -0.7182 +vn 0.0780 0.0260 -0.9966 +vn -0.0622 -0.0711 -0.9955 +vn -0.0000 -0.2051 -0.9787 +vn -0.2945 -0.5506 -0.7811 +vn -0.4418 -0.5489 -0.7096 +vn -0.4734 -0.5952 -0.6493 +vn -0.3400 -0.8436 -0.4156 +vn -0.3764 -0.9126 -0.1597 +vn -0.5396 -0.8274 -0.1559 +vn -0.6995 -0.7112 -0.0699 +vn -0.1037 -0.1037 -0.9892 +vn -0.4803 -0.4803 -0.7339 +vn -0.6900 -0.6772 -0.2556 
+vn -0.6141 -0.3267 -0.7184 +vn -0.8572 -0.4531 -0.2449 +vn -0.6610 -0.1995 -0.7234 +vn -0.6140 -0.3266 -0.7186 +vn -0.9275 -0.2862 -0.2405 +vn -0.9275 -0.2863 -0.2405 +vn -0.9557 -0.2877 -0.0617 +vn -0.9793 -0.1939 -0.0582 +vn -0.1689 -0.0375 -0.9849 +vn -0.6610 -0.1996 -0.7234 +vn -0.9720 -0.0292 -0.2333 +vn -0.9973 -0.0521 -0.0520 +vn -0.9802 -0.1921 -0.0480 +vn -0.9941 -0.0985 -0.0448 +vn -0.9973 -0.0520 -0.0520 +vn -0.9988 -0.0000 -0.0497 +vn -0.9764 -0.0000 -0.2159 +vn -0.9712 -0.0000 -0.2381 +vn -0.9227 -0.2424 -0.2999 +vn -0.7746 -0.4201 -0.4727 +vn -0.6583 0.4299 -0.6179 +vn -0.9227 -0.2422 -0.2999 +vn -0.7078 0.6961 -0.1201 +vn -0.9805 0.1842 -0.0680 +vn -0.9724 0.1841 -0.1433 +vn -0.7080 0.6960 -0.1200 +vn -0.9724 0.1842 -0.1433 +vn -0.1796 -0.9776 -0.1097 +vn 0.0002 -0.7015 -0.7127 +vn -0.0000 0.6789 -0.7342 +vn -0.0834 0.9913 -0.1019 +vn -0.6695 0.1860 -0.7191 +vn -0.0889 0.1878 -0.9782 +vn -0.1376 -0.1277 -0.9822 +vn -0.7071 -0.6822 -0.1861 +vn -0.7763 -0.6300 -0.0225 +vn -0.7763 -0.6299 -0.0225 +vn -0.7716 -0.6335 -0.0576 +vn -0.7379 -0.6664 -0.1072 +vn -0.7098 -0.6980 -0.0947 +vn -0.7329 -0.6738 -0.0946 +vn -0.6260 0.0696 -0.7767 +vn 0.2670 0.7755 -0.5721 +vn 0.3504 0.9344 -0.0637 +vn 0.5673 0.8232 -0.0222 +vn -0.3408 0.9372 -0.0746 +vn 0.5675 0.8231 -0.0222 +vn -0.4950 0.8663 -0.0675 +vn 0.5801 0.8142 -0.0223 +vn -0.4951 0.8662 -0.0676 +vn -0.4880 0.8568 -0.1666 +vn -0.5825 0.7724 -0.2533 +vn -0.4107 0.5300 -0.7419 +vn -0.3403 0.5890 -0.7330 +vn -0.2026 0.2927 -0.9345 +vn -0.0164 0.0163 -0.9997 +vn -0.2408 0.6337 -0.7351 +vn -0.0473 0.1706 -0.9842 +vn -0.1199 0.6714 -0.7313 +vn -0.0226 0.7012 -0.7126 +vn -0.0190 0.9777 -0.2090 +vn 0.0001 0.9988 -0.0495 +vn 0.0002 0.9988 -0.0495 +vn -0.0000 0.9764 -0.2160 +vn -0.0000 0.9197 -0.3927 +vn -0.0000 0.9197 -0.3926 +vn -0.0000 0.1629 -0.9866 +vn 0.0190 0.9778 -0.2089 +vn 0.0279 0.1675 -0.9855 +vn 0.0275 0.9888 -0.1467 +vn 0.0112 0.7710 -0.6367 +vn 0.0112 0.7071 -0.7071 +vn 0.3266 0.6141 -0.7185 +vn 0.0112 
0.6369 -0.7709 +vn 0.0110 0.5496 -0.8354 +vn 0.0110 0.5495 -0.8354 +vn -0.0000 0.0946 -0.9955 +vn 0.0260 -0.0780 -0.9966 +vn 0.1274 -0.2655 -0.9557 +vn 0.3234 -0.3981 -0.8584 +vn 0.2151 -0.1936 -0.9572 +vn -0.1944 -0.0000 -0.9809 +vn -0.3548 -0.4181 -0.8363 +vn -0.4854 -0.4066 -0.7740 +vn -0.6510 -0.2553 -0.7149 +vn -0.5832 -0.3844 -0.7157 +vn -0.6316 -0.4301 -0.6450 +vn -0.6432 -0.6432 -0.4154 +vn -0.6981 -0.6981 -0.1592 +vn -0.7885 -0.5944 -0.1577 +vn -0.1436 -0.0862 -0.9859 +vn -0.1437 -0.0862 -0.9859 +vn -0.1597 -0.0470 -0.9860 +vn -0.6877 -0.0700 -0.7226 +vn -0.3713 -0.0103 -0.9285 +vn -0.7071 -0.0112 -0.7070 +vn -0.9196 -0.0000 -0.3927 +vn -0.9216 -0.0209 -0.3875 +vn -0.9771 -0.0288 -0.2107 +vn -0.9984 -0.0254 -0.0508 +vn -0.1630 -0.0000 -0.9866 +vn -0.3590 -0.0103 -0.9333 +vn -0.6898 -0.0226 -0.7237 +vn -0.6899 -0.0226 -0.7236 +vn -0.9720 -0.0291 -0.2333 +vn -0.9085 -0.0000 -0.4178 +vn -0.9086 -0.0000 -0.4177 +vn -0.9988 -0.0000 -0.0494 +vn -0.9988 -0.0002 -0.0495 +vn -0.9979 0.0256 -0.0597 +vn -0.9971 -0.0000 -0.0760 +vn -0.9983 -0.0000 -0.0582 +vn -0.1795 -0.9776 -0.1098 +vn -0.3417 -0.6966 -0.6309 +vn -0.9842 -0.0186 -0.1763 +vn -0.3417 -0.6967 -0.6308 +vn -0.1860 0.6823 -0.7070 +vn -0.9842 -0.0186 -0.1762 +vn -0.9227 0.2422 -0.2999 +vn -0.3856 0.9211 -0.0535 +vn -0.9805 0.1844 -0.0679 +vn -0.9724 0.1843 -0.1433 +vn -0.1904 0.9810 -0.0380 +vn -0.9712 -0.0000 -0.2382 +vn -0.6824 0.0579 -0.7287 +vn -0.1704 0.0473 -0.9842 +vn -0.6566 0.1858 -0.7310 +vn -0.1424 0.0759 -0.9869 +vn -0.6074 0.2973 -0.7367 +vn -0.5322 0.4258 -0.7318 +vn -0.1474 0.1179 -0.9820 +vn -0.6246 0.3125 -0.7157 +vn -0.8242 -0.5645 -0.0451 +vn -0.8485 0.5020 -0.1673 +vn -0.8505 0.5216 -0.0680 +vn -0.8231 -0.5675 -0.0222 +vn -0.7071 -0.7070 -0.0112 +vn -0.7292 -0.6835 -0.0342 +vn -0.8947 -0.2505 -0.3698 +vn -0.6255 0.5167 -0.5847 +vn -0.1616 0.9004 -0.4040 +vn -0.1087 0.9352 -0.3371 +vn -0.5481 0.8336 -0.0685 +vn -0.6438 0.7608 -0.0819 +vn -0.6260 0.7767 -0.0696 +vn -0.6183 0.7817 -0.0817 
+vn -0.4286 0.9011 -0.0659 +vn -0.6133 0.7868 -0.0694 +vn -0.6132 0.7869 -0.0694 +vn -0.4129 0.8743 -0.2552 +vn -0.2482 0.9363 -0.2484 +vn -0.2635 0.9627 -0.0608 +vn -0.0254 0.9984 -0.0508 +vn -0.1939 0.9793 -0.0582 +vn -0.0000 0.1630 -0.9866 +vn -0.0000 0.7128 -0.7014 +vn -0.0000 0.7125 -0.7016 +vn -0.0000 0.9897 -0.1429 +vn -0.0000 0.7655 -0.6434 +vn -0.0000 0.9898 -0.1428 +vn -0.0000 0.8218 -0.5698 +vn -0.0000 0.7655 -0.6435 +vn 0.0112 0.6368 -0.7709 +vn 0.0521 -0.0521 -0.9973 +vn 0.0780 -0.0260 -0.9966 +vn -0.0946 -0.0000 -0.9955 +vn -0.1943 -0.0000 -0.9809 +vn -0.6367 -0.0112 -0.7710 +vn -0.6368 -0.0112 -0.7710 +vn -0.5786 -0.2390 -0.7798 +vn -0.7071 -0.0112 -0.7071 +vn -0.6739 -0.1997 -0.7113 +vn -0.7314 -0.2270 -0.6430 +vn -0.8437 -0.3400 -0.4155 +vn -0.9126 -0.3764 -0.1597 +vn -0.9469 -0.2830 -0.1524 +vn -0.9519 -0.3000 -0.0621 +vn -0.1629 -0.0000 -0.9866 +vn -0.3678 -0.0000 -0.9299 +vn -0.7016 0.0001 -0.7126 +vn -0.9197 -0.0000 -0.3926 +vn -0.9764 0.0001 -0.2159 +vn -0.9771 -0.0287 -0.2107 +vn -0.3557 -0.0000 -0.9346 +vn -0.9727 0.0193 -0.2313 +vn -0.9067 0.3415 -0.2474 +vn -0.7831 0.5683 -0.2526 +vn -0.7502 0.6103 -0.2544 +vn -0.7767 0.6260 -0.0696 +vn -0.7767 0.6260 -0.0695 +vn -0.7868 0.6133 -0.0694 +vn -0.7869 0.6132 -0.0694 +vn -0.7664 0.6386 -0.0697 +vn -0.7558 0.6511 -0.0698 +vn -0.8424 0.5309 -0.0923 +vn -0.8232 -0.5673 -0.0222 +vn -0.8249 -0.5577 -0.0929 +vn -0.9403 -0.2462 -0.2351 +vn -0.9298 0.1328 -0.3432 +vn -0.6778 0.7147 -0.1726 +vn -0.6564 0.7500 -0.0820 +vn -0.7047 0.7047 -0.0822 +vn -0.6995 0.7112 -0.0700 +vn -0.3698 0.8947 -0.2505 +vn -0.1842 0.9535 -0.2384 +vn -0.1996 0.6610 -0.7233 +vn -0.1995 0.6610 -0.7234 +vn -0.1842 0.9535 -0.2386 +vn -0.0112 0.6726 -0.7399 +vn -0.0190 0.9777 -0.2089 +vn -0.0254 0.9984 -0.0515 +vn -0.0000 0.7126 -0.7016 +vn 0.0003 0.3678 -0.9299 +vn -0.0000 0.9898 -0.1427 +vn -0.0000 0.9142 -0.4052 +vn -0.0000 0.9713 -0.2380 +vn -0.0010 0.4811 -0.8767 +vn -0.0711 0.0622 -0.9955 +vn -0.7709 -0.0112 -0.6368 +vn 
-0.8269 -0.0110 -0.5623 +vn -0.9903 -0.0273 -0.1364 +vn -0.9739 -0.0000 -0.2270 +vn -0.3556 -0.0000 -0.9346 +vn -0.1632 -0.0000 -0.9866 +vn -0.1631 -0.0000 -0.9866 +vn -0.9678 0.0917 -0.2345 +vn -0.9936 0.0993 -0.0542 +vn -0.9936 0.0994 -0.0542 +vn -0.9988 0.0002 -0.0495 +vn -0.9557 0.2877 -0.0617 +vn -0.9344 0.3504 -0.0637 +vn -0.9011 0.4286 -0.0659 +vn -0.5688 0.3836 -0.7275 +vn -0.8571 0.4531 -0.2450 +vn -0.4803 0.4803 -0.7339 +vn -0.6900 0.6772 -0.2557 +vn -0.5398 0.8034 -0.2512 +vn -0.3261 0.6001 -0.7305 +vn -0.0470 0.1597 -0.9860 +vn -0.0469 0.1598 -0.9860 +vn -0.2878 0.9557 -0.0617 +vn -0.2877 0.9557 -0.0617 +vn -0.0112 0.7071 -0.7070 +vn -0.0000 0.9764 -0.2159 +vn -0.0000 0.3558 -0.9346 +vn -0.0000 0.3555 -0.9347 +vn -0.0182 0.9892 -0.1452 +vn -0.0110 0.8269 -0.5622 +vn -0.0112 0.7709 -0.6368 +vn -0.0113 0.7071 -0.7071 +vn -0.0112 0.6367 -0.7710 +vn -0.3931 0.4979 -0.7730 +vn -0.2945 0.5506 -0.7811 +vn -0.2056 0.3197 -0.9249 +vn -0.1961 0.0094 -0.9805 +vn -0.7006 0.0459 -0.7121 +vn -0.7655 -0.0000 -0.6435 +vn -0.8304 -0.0000 -0.5572 +vn -0.9898 -0.0000 -0.1427 +vn -0.9984 0.0254 -0.0508 +vn -0.6762 0.1328 -0.7246 +vn -0.9678 0.0917 -0.2343 +vn -0.9274 0.2863 -0.2406 +vn -0.6764 0.1329 -0.7245 +vn -0.6376 0.2551 -0.7269 +vn -0.1341 0.0959 -0.9863 +vn -0.1037 0.1037 -0.9892 +vn -0.0958 0.1341 -0.9863 +vn -0.4684 0.8810 -0.0669 +vn -0.2728 0.9320 -0.2387 +vn -0.1328 0.6763 -0.7246 +vn -0.0103 0.3714 -0.9284 +vn -0.0000 0.1628 -0.9867 +vn -0.1977 0.9677 -0.1561 +vn -0.2130 0.7392 -0.6389 +vn -0.1861 0.6822 -0.7071 +vn -0.3266 0.6140 -0.7185 +vn -0.3843 0.5832 -0.7157 +vn -0.4418 0.5489 -0.7096 +vn -0.5489 0.4418 -0.7096 +vn -0.6140 0.3266 -0.7185 +vn -0.5506 0.2945 -0.7811 +vn -0.6739 0.1997 -0.7113 +vn -0.7352 0.1993 -0.6479 +vn -0.7612 0.0577 -0.6459 +vn -0.7867 0.2260 -0.5745 +vn -0.8219 -0.0000 -0.5696 +vn -0.9898 -0.0000 -0.1428 +vn -0.9519 0.3000 -0.0621 +vn -0.1611 0.0569 -0.9853 +vn -0.8248 0.5613 -0.0687 +vn -0.6995 0.7112 -0.0699 +vn -0.5398 0.8035 
-0.2511 +vn -0.2691 0.6278 -0.7303 +vn -0.0279 0.1674 -0.9855 +vn -0.1940 0.9793 -0.0582 +vn -0.3686 0.7373 -0.5661 +vn -0.4154 0.6432 -0.6432 +vn -0.6432 0.6432 -0.4154 +vn -0.5952 0.4735 -0.6493 +vn -0.6737 0.3567 -0.6472 +vn -0.6246 0.3123 -0.7157 +vn -0.9126 0.3764 -0.1600 +vn -0.9847 0.0966 -0.1451 +vn -0.6763 0.1328 -0.7246 +vn -0.9275 0.2863 -0.2405 +vn -0.9557 0.2878 -0.0617 +vn -0.8034 0.5398 -0.2511 +vn -0.6900 0.6772 -0.2556 +vn -0.3836 0.5688 -0.7276 +vn -0.0765 0.1532 -0.9852 +vn -0.4685 0.8809 -0.0669 +vn -0.5396 0.8274 -0.1559 +vn -0.6981 0.6981 -0.1592 +vn -0.8181 0.5534 -0.1567 +vn -0.9189 0.3892 -0.0649 +vn -0.9519 0.3001 -0.0621 +vn -0.1599 0.0470 -0.9860 +vn -0.5661 0.3686 -0.7373 +vn -0.1045 0.1141 -0.9880 +vn -0.1341 0.0958 -0.9863 +vt 0.277700 0.613615 +vt 0.279653 0.615568 +vt 0.277700 0.616545 +vt 0.006233 0.744466 +vt 0.003304 0.745443 +vt 0.003304 0.739584 +vt 0.278676 0.611662 +vt 0.279653 0.609709 +vt 0.279653 0.613615 +vt 0.002327 0.565767 +vt 0.002327 0.562837 +vt 0.002327 0.580414 +vt 0.002327 0.558931 +vt 0.001351 0.563814 +vt 0.001351 0.552096 +vt 0.457376 0.826492 +vt 0.460305 0.827468 +vt 0.460305 0.828445 +vt 0.461282 0.828445 +vt 0.363632 0.882152 +vt 0.363632 0.883129 +vt 0.265982 0.622404 +vt 0.265982 0.626310 +vt 0.265982 0.543307 +vt 0.362655 0.798173 +vt 0.363632 0.798173 +vt 0.362655 0.882152 +vt 0.362655 0.884105 +vt 0.372420 0.882152 +vt 0.372420 0.881176 +vt 0.372420 0.841139 +vt 0.372420 0.798173 +vt 0.373397 0.798173 +vt 0.363632 0.797197 +vt 0.362655 0.796220 +vt 0.266958 0.626310 +vt 0.266958 0.583344 +vt 0.266958 0.543307 +vt 0.362655 0.883129 +vt 0.361679 0.888988 +vt 0.362655 0.888988 +vt 0.152708 0.750325 +vt 0.153685 0.750325 +vt 0.152708 0.760090 +vt 0.846022 0.163450 +vt 0.846998 0.163450 +vt 0.847975 0.164426 +vt 0.847975 0.162473 +vt 0.844069 0.161497 +vt 0.844069 0.165403 +vt 0.844069 0.164426 +vt 0.846998 0.157591 +vt 0.846022 0.157591 +vt 0.846022 0.155638 +vt 0.846022 0.158567 +vt 0.844069 0.152708 +vt 
0.846022 0.146849 +vt 0.844069 0.148802 +vt 0.844069 0.146849 +vt 0.841139 0.148802 +vt 0.841139 0.151732 +vt 0.844069 0.151732 +vt 0.841139 0.154661 +vt 0.841139 0.157591 +vt 0.844069 0.158567 +vt 0.847975 0.158567 +vt 0.846998 0.155638 +vt 0.846998 0.152708 +vt 0.846022 0.150755 +vt 0.846998 0.146849 +vt 0.844069 0.143920 +vt 0.840163 0.139037 +vt 0.842116 0.138061 +vt 0.843092 0.138061 +vt 0.841139 0.137084 +vt 0.277700 0.618498 +vt 0.277700 0.617521 +vt 0.279653 0.618498 +vt 0.279653 0.617521 +vt 0.842116 0.140990 +vt 0.844069 0.140990 +vt 0.841139 0.142943 +vt 0.844069 0.142943 +vt 0.841139 0.145873 +vt 0.833327 0.139037 +vt 0.831374 0.140014 +vt 0.824539 0.141967 +vt 0.829421 0.140014 +vt 0.826492 0.140990 +vt 0.824539 0.142943 +vt 0.840163 0.158567 +vt 0.841139 0.160520 +vt 0.842116 0.163450 +vt 0.842116 0.166379 +vt 0.843092 0.168332 +vt 0.845045 0.166379 +vt 0.846022 0.166379 +vt 0.848951 0.167356 +vt 0.848951 0.166379 +vt 0.848951 0.165403 +vt 0.848951 0.164426 +vt 0.848951 0.162473 +vt 0.848951 0.159544 +vt 0.848951 0.158567 +vt 0.848951 0.156614 +vt 0.848951 0.152708 +vt 0.848951 0.149779 +vt 0.848951 0.148802 +vt 0.848951 0.146849 +vt 0.848951 0.143920 +vt 0.848951 0.141967 +vt 0.844069 0.139037 +vt 0.847975 0.139037 +vt 0.380232 0.219110 +vt 0.380232 0.215204 +vt 0.380232 0.218133 +vt 0.380232 0.216180 +vt 0.846022 0.134155 +vt 0.845045 0.139037 +vt 0.845045 0.134155 +vt 0.844069 0.135131 +vt 0.843092 0.136108 +vt 0.840163 0.132202 +vt 0.840163 0.134155 +vt 0.838210 0.138061 +vt 0.836257 0.139037 +vt 0.355820 0.725913 +vt 0.355820 0.702477 +vt 0.355820 0.690759 +vt 0.840163 0.170285 +vt 0.840163 0.173214 +vt 0.844069 0.169309 +vt 0.845045 0.171262 +vt 0.846022 0.171262 +vt 0.846998 0.172238 +vt 0.847975 0.171262 +vt 0.848951 0.171262 +vt 0.849928 0.169309 +vt 0.849928 0.168332 +vt 0.850904 0.165403 +vt 0.851881 0.163450 +vt 0.851881 0.160520 +vt 0.851881 0.157591 +vt 0.851881 0.154661 +vt 0.851881 0.151732 +vt 0.851881 0.148802 +vt 0.851881 0.145873 
+vt 0.850904 0.142943 +vt 0.850904 0.140014 +vt 0.849928 0.138061 +vt 0.846998 0.139037 +vt 0.848951 0.136108 +vt 0.847975 0.135131 +vt 0.846998 0.134155 +vt 0.842116 0.132202 +vt 0.837233 0.109742 +vt 0.900706 0.880199 +vt 0.898753 0.884105 +vt 0.896800 0.883129 +vt 0.011116 0.552096 +vt 0.011116 0.549166 +vt 0.018928 0.553072 +vt 0.019904 0.551119 +vt 0.018928 0.556002 +vt 0.021857 0.556002 +vt 0.022834 0.554049 +vt 0.023810 0.556002 +vt 0.024787 0.600921 +vt 0.840163 0.013069 +vt 0.845045 0.013069 +vt 0.838210 0.058964 +vt 0.930977 0.015998 +vt 0.946601 0.805009 +vt 0.944648 0.805009 +vt 0.965155 0.767902 +vt 0.943672 0.804032 +vt 0.893870 0.881176 +vt 0.021857 0.550143 +vt 0.895823 0.876293 +vt 0.892894 0.881176 +vt 0.020881 0.548190 +vt 0.013069 0.547213 +vt 0.009163 0.728842 +vt 0.009163 0.731772 +vt 0.006233 0.730795 +vt 0.007210 0.733725 +vt 0.362655 0.584320 +vt 0.363632 0.581391 +vt 0.365585 0.585297 +vt 0.367538 0.578461 +vt 0.368514 0.583344 +vt 0.364608 0.578461 +vt 0.362655 0.578461 +vt 0.012092 0.554049 +vt 0.016975 0.556002 +vt 0.020881 0.609709 +vt 0.025763 0.651699 +vt 0.021857 0.649746 +vt 0.019904 0.828445 +vt 0.021857 0.754231 +vt 0.022834 0.828445 +vt 0.929024 0.899729 +vt 0.930001 0.898753 +vt 0.932930 0.899729 +vt 0.930977 0.893870 +vt 0.843092 0.288441 +vt 0.838210 0.288441 +vt 0.838210 0.286488 +vt 0.024787 0.828445 +vt 0.024787 0.826492 +vt 0.024787 0.716148 +vt 0.358749 0.750325 +vt 0.358749 0.685876 +vt 0.355820 0.687829 +vt 0.837233 0.175168 +vt 0.842116 0.173214 +vt 0.853834 0.173214 +vt 0.852857 0.133178 +vt 0.935860 0.063847 +vt 0.984685 0.015998 +vt 0.974920 0.758137 +vt 0.982732 0.745443 +vt 0.981755 0.745443 +vt 0.978826 0.743490 +vt 0.976873 0.742513 +vt 0.970037 0.754231 +vt 0.888988 0.877270 +vt 0.022834 0.543307 +vt 0.016975 0.544284 +vt 0.009163 0.722007 +vt 0.006233 0.725913 +vt 0.003304 0.729819 +vt 0.004280 0.733725 +vt 0.365585 0.588226 +vt 0.503271 0.856763 +vt 0.503271 0.855787 +vt 0.504247 0.856763 +vt 0.513036 
0.986638 +vt 0.438822 0.985661 +vt 0.836257 0.563814 +vt 0.831374 0.560884 +vt 0.837233 0.560884 +vt 0.825515 0.558931 +vt 0.826492 0.556978 +vt 0.792314 0.542331 +vt 0.804032 0.551119 +vt 0.838210 0.573579 +vt 0.829421 0.598968 +vt 0.817703 0.596038 +vt 0.818680 0.592132 +vt 0.806962 0.590179 +vt 0.806962 0.591156 +vt 0.811844 0.594085 +vt 0.828445 0.599944 +vt 0.839186 0.603850 +vt 0.861646 0.612639 +vt 0.855787 0.609709 +vt 0.862622 0.611662 +vt 0.863599 0.611662 +vt 0.869458 0.593109 +vt 0.792314 0.581391 +vt 0.777667 0.578461 +vt 0.790361 0.584320 +vt 0.777667 0.580414 +vt 0.790361 0.585297 +vt 0.797197 0.600921 +vt 0.810868 0.605803 +vt 0.341172 0.939766 +vt 0.341172 0.896800 +vt 0.342149 0.939766 +vt 0.342149 0.855787 +vt 0.341172 0.855787 +vt 0.340196 0.870434 +vt 0.340196 0.939766 +vt 0.340196 0.940742 +vt 0.350937 0.939766 +vt 0.350937 0.855787 +vt 0.350937 0.854810 +vt 0.342149 0.854810 +vt 0.341172 0.854810 +vt 0.339219 0.848951 +vt 0.340196 0.854810 +vt 0.340196 0.864575 +vt 0.998356 0.565767 +vt 0.998356 0.491553 +vt 0.961249 0.984685 +vt 0.960272 0.990544 +vt 0.960272 0.984685 +vt 0.990544 0.873364 +vt 0.991520 0.877270 +vt 0.990544 0.937813 +vt 0.704430 0.898753 +vt 0.705406 0.902659 +vt 0.704430 0.902659 +vt 0.991520 0.937813 +vt 0.991520 0.873364 +vt 0.990544 0.872387 +vt 0.989567 0.873364 +vt 0.713218 0.777667 +vt 0.714195 0.777667 +vt 0.714195 0.844069 +vt 0.770832 0.204462 +vt 0.770832 0.205439 +vt 0.766926 0.205439 +vt 0.585297 0.191768 +vt 0.641934 0.191768 +vt 0.538425 0.192744 +vt 0.319689 0.281606 +vt 0.319689 0.283559 +vt 0.271841 0.283559 +vt 0.319689 0.288441 +vt 0.247428 0.863599 +vt 0.245475 0.867505 +vt 0.244499 0.862622 +vt 0.239616 0.865552 +vt 0.240593 0.863599 +vt 0.242546 0.868481 +vt 0.321642 0.281606 +vt 0.320666 0.267935 +vt 0.271841 0.281606 +vt 0.192744 0.283559 +vt 0.762043 0.199580 +vt 0.619474 0.204462 +vt 0.539401 0.199580 +vt 0.271841 0.287465 +vt 0.491553 0.199580 +vt 0.491553 0.197627 +vt 0.248405 0.860669 +vt 
0.244499 0.853834 +vt 0.241569 0.853834 +vt 0.239616 0.852857 +vt 0.238640 0.855787 +vt 0.237663 0.864575 +vt 0.221063 0.887035 +vt 0.326525 0.119507 +vt 0.327501 0.201533 +vt 0.322619 0.201533 +vt 0.182003 0.923165 +vt 0.185909 0.926095 +vt 0.320666 0.201533 +vt 0.320666 0.134155 +vt 0.320666 0.119507 +vt 0.273794 0.120484 +vt 0.176144 0.120484 +vt 0.049199 0.280629 +vt 0.762043 0.205439 +vt 0.764973 0.205439 +vt 0.764973 0.203486 +vt 0.764973 0.197627 +vt 0.762043 0.197627 +vt 0.545260 0.197627 +vt 0.488623 0.192744 +vt 0.247428 0.854810 +vt 0.247428 0.849928 +vt 0.241569 0.849928 +vt 0.238640 0.851881 +vt 0.467141 0.179074 +vt 0.478859 0.192744 +vt 0.475929 0.195674 +vt 0.235710 0.853834 +vt 0.237663 0.854810 +vt 0.236687 0.858716 +vt 0.226922 0.868481 +vt 0.171262 0.924142 +vt 0.181027 0.922189 +vt 0.329454 0.200556 +vt 0.328478 0.133178 +vt 0.328478 0.119507 +vt 0.326525 0.116578 +vt 0.326525 0.118531 +vt 0.322619 0.119507 +vt 0.320666 0.117554 +vt 0.273794 0.118531 +vt 0.223992 0.118531 +vt 0.156614 0.118531 +vt 0.049199 0.117554 +vt 0.052129 0.119507 +vt 0.049199 0.119507 +vt 0.049199 0.133178 +vt 0.048223 0.190791 +vt 0.044317 0.280629 +vt 0.046270 0.280629 +vt 0.767902 0.199580 +vt 0.772785 0.192744 +vt 0.770832 0.191768 +vt 0.765949 0.192744 +vt 0.695641 0.187862 +vt 0.635098 0.187862 +vt 0.538425 0.187862 +vt 0.488623 0.188838 +vt 0.470070 0.179074 +vt 0.481788 0.189815 +vt 0.471047 0.177121 +vt 0.467141 0.178097 +vt 0.466164 0.175168 +vt 0.463235 0.176144 +vt 0.453470 0.165403 +vt 0.460305 0.179074 +vt 0.466164 0.182003 +vt 0.238640 0.837233 +vt 0.232781 0.855787 +vt 0.233757 0.856763 +vt 0.197627 0.896800 +vt 0.170285 0.923165 +vt 0.335313 0.192744 +vt 0.334337 0.192744 +vt 0.333360 0.129272 +vt 0.333360 0.114625 +vt 0.333360 0.111695 +vt 0.331407 0.108766 +vt 0.323595 0.114625 +vt 0.320666 0.113648 +vt 0.265982 0.113648 +vt 0.139037 0.113648 +vt 0.049199 0.113648 +vt 0.046270 0.113648 +vt 0.047246 0.116578 +vt 0.045293 0.119507 +vt 0.044317 0.190791 
+vt 0.042364 0.190791 +vt 0.040411 0.265982 +vt 0.039434 0.280629 +vt 0.037481 0.279653 +vt 0.773761 0.204462 +vt 0.770832 0.202509 +vt 0.775714 0.194697 +vt 0.775714 0.191768 +vt 0.788409 0.179074 +vt 0.777667 0.191768 +vt 0.774738 0.188838 +vt 0.768879 0.187862 +vt 0.658534 0.178097 +vt 0.591156 0.177121 +vt 0.528660 0.177121 +vt 0.472023 0.174191 +vt 0.467141 0.174191 +vt 0.463235 0.162473 +vt 0.459329 0.163450 +vt 0.455423 0.160520 +vt 0.458352 0.159544 +vt 0.450540 0.093142 +vt 0.445658 0.094118 +vt 0.442728 0.093142 +vt 0.439799 0.095095 +vt 0.449564 0.161497 +vt 0.447611 0.168332 +vt 0.226922 0.824539 +vt 0.235710 0.833327 +vt 0.224969 0.844069 +vt 0.175168 0.893870 +vt 0.178097 0.896800 +vt 0.168332 0.921212 +vt 0.169309 0.922189 +vt 0.338243 0.192744 +vt 0.336290 0.114625 +vt 0.335313 0.108766 +vt 0.331407 0.106813 +vt 0.329454 0.106813 +vt 0.324572 0.106813 +vt 0.320666 0.111695 +vt 0.273794 0.111695 +vt 0.049199 0.111695 +vt 0.041387 0.106813 +vt 0.038458 0.108766 +vt 0.043340 0.116578 +vt 0.041387 0.118531 +vt 0.041387 0.132202 +vt 0.033575 0.198603 +vt 0.034552 0.199580 +vt 0.032599 0.268911 +vt 0.032599 0.283559 +vt 0.777667 0.199580 +vt 0.776690 0.197627 +vt 0.778643 0.193721 +vt 0.525730 0.775714 +vt 0.526707 0.777667 +vt 0.524754 0.773761 +vt 0.514989 0.788409 +vt 0.515965 0.791338 +vt 0.514012 0.793291 +vt 0.516942 0.792314 +vt 0.787432 0.178097 +vt 0.739584 0.174191 +vt 0.528660 0.174191 +vt 0.473000 0.161497 +vt 0.469094 0.161497 +vt 0.464211 0.158567 +vt 0.454446 0.092165 +vt 0.450540 0.092165 +vt 0.892894 0.984685 +vt 0.973943 0.980779 +vt 0.973943 0.982732 +vt 0.974920 0.979802 +vt 0.001351 0.591156 +vt 0.002327 0.589203 +vt 0.002327 0.597015 +vt 0.442728 0.091189 +vt 0.439799 0.093142 +vt 0.435893 0.094118 +vt 0.435893 0.095095 +vt 0.444681 0.162473 +vt 0.223992 0.821609 +vt 0.213251 0.831374 +vt 0.164426 0.883129 +vt 0.162473 0.880199 +vt 0.164426 0.882152 +vt 0.182979 0.757161 +vt 0.182979 0.773761 +vt 0.180050 0.757161 +vt 0.180050 
0.773761 +vt 0.179073 0.772785 +vt 0.178097 0.772785 +vt 0.165403 0.884105 +vt 0.165403 0.772785 +vt 0.165403 0.670252 +vt 0.160520 0.748372 +vt 0.161497 0.914377 +vt 0.348984 0.191768 +vt 0.339219 0.192744 +vt 0.337266 0.114625 +vt 0.337266 0.110719 +vt 0.334337 0.106813 +vt 0.330431 0.103883 +vt 0.324572 0.102907 +vt 0.275747 0.106813 +vt 0.118531 0.103883 +vt 0.049199 0.106813 +vt 0.045293 0.106813 +vt 0.039434 0.105836 +vt 0.035528 0.108766 +vt 0.037481 0.111695 +vt 0.037481 0.114625 +vt 0.036505 0.129272 +vt 0.032599 0.190791 +vt 0.030646 0.198603 +vt 0.027716 0.283559 +vt 0.528660 0.769855 +vt 0.528660 0.772785 +vt 0.523777 0.769855 +vt 0.514012 0.762043 +vt 0.015998 0.198603 +vt 0.016975 0.227898 +vt 0.014045 0.278676 +vt 0.017951 0.274770 +vt 0.509130 0.765949 +vt 0.510106 0.787432 +vt 0.510106 0.789385 +vt 0.512059 0.792314 +vt 0.514012 0.795244 +vt 0.517918 0.797197 +vt 0.786456 0.174191 +vt 0.782550 0.160520 +vt 0.516942 0.810868 +vt 0.511083 0.808915 +vt 0.515965 0.812821 +vt 0.782550 0.158567 +vt 0.695641 0.158567 +vt 0.529636 0.160520 +vt 0.473000 0.157591 +vt 0.467141 0.157591 +vt 0.458352 0.092165 +vt 0.002327 0.581391 +vt 0.002327 0.585297 +vt 0.001351 0.581391 +vt 0.001351 0.589203 +vt 0.974920 0.971990 +vt 0.973943 0.975896 +vt 0.891917 0.978826 +vt 0.539401 0.675135 +vt 0.539401 0.680017 +vt 0.538425 0.679041 +vt 0.537448 0.680994 +vt 0.538425 0.680994 +vt 0.541354 0.685876 +vt 0.892894 0.990544 +vt 0.883129 0.532566 +vt 0.882152 0.526707 +vt 0.964178 0.530613 +vt 0.964178 0.526707 +vt 0.882152 0.522801 +vt 0.438822 0.092165 +vt 0.435893 0.093142 +vt 0.421245 0.096071 +vt 0.421245 0.097048 +vt 0.252311 0.846998 +vt 0.250358 0.846998 +vt 0.250358 0.776690 +vt 0.250358 0.757161 +vt 0.249381 0.846998 +vt 0.421245 0.098024 +vt 0.170285 0.780596 +vt 0.182003 0.669276 +vt 0.178097 0.669276 +vt 0.178097 0.654628 +vt 0.360702 0.083377 +vt 0.364608 0.087283 +vt 0.351914 0.094118 +vt 0.164426 0.654628 +vt 0.160520 0.653652 +vt 0.347031 0.095095 +vt 
0.343125 0.093142 +vt 0.342149 0.091189 +vt 0.284535 0.093142 +vt 0.267935 0.102907 +vt 0.046270 0.102907 +vt 0.042364 0.102907 +vt 0.037481 0.104860 +vt 0.035528 0.106813 +vt 0.033575 0.110719 +vt 0.032599 0.114625 +vt 0.029669 0.198603 +vt 0.019904 0.198603 +vt 0.017951 0.189815 +vt 0.018928 0.116578 +vt 0.283559 0.686853 +vt 0.280629 0.686853 +vt 0.280629 0.599944 +vt 0.000374 0.197627 +vt 0.005257 0.108766 +vt 0.002327 0.197627 +vt 0.495459 0.686853 +vt 0.496435 0.788409 +vt 0.497412 0.793291 +vt 0.500341 0.798173 +vt 0.502294 0.801103 +vt 0.507177 0.805985 +vt 0.508153 0.810868 +vt 0.511083 0.812821 +vt 0.786456 0.155638 +vt 0.782550 0.156614 +vt 0.533542 0.156614 +vt 0.526707 0.090212 +vt 0.466164 0.091189 +vt 0.974920 0.964178 +vt 0.975896 0.964178 +vt 0.973943 0.968084 +vt 0.975896 0.971990 +vt 0.972967 0.970037 +vt 0.891917 0.974920 +vt 0.539401 0.671229 +vt 0.538425 0.673182 +vt 0.536472 0.677088 +vt 0.532566 0.674158 +vt 0.534519 0.679041 +vt 0.536472 0.680994 +vt 0.535495 0.681970 +vt 0.531589 0.680994 +vt 0.532566 0.680017 +vt 0.535495 0.684900 +vt 0.536472 0.686853 +vt 0.535495 0.683923 +vt 0.536472 0.683923 +vt 0.538425 0.684900 +vt 0.540378 0.685876 +vt 0.965155 0.531589 +vt 0.965155 0.526707 +vt 0.963202 0.520848 +vt 0.882152 0.518895 +vt 0.882152 0.503271 +vt 0.963202 0.506200 +vt 0.963202 0.436869 +vt 0.881176 0.432963 +vt 0.252311 0.776690 +vt 0.250358 0.673182 +vt 0.249381 0.667323 +vt 0.182003 0.654628 +vt 0.367538 0.084353 +vt 0.362655 0.078494 +vt 0.359726 0.076541 +vt 0.356796 0.079471 +vt 0.349961 0.091189 +vt 0.346055 0.092165 +vt 0.342149 0.089236 +vt 0.276723 0.089236 +vt 0.193721 0.091189 +vt 0.118531 0.093142 +vt 0.027716 0.093142 +vt 0.024787 0.091189 +vt 0.024787 0.094118 +vt 0.023810 0.095095 +vt 0.018928 0.095095 +vt 0.007210 0.090212 +vt 0.003304 0.089236 +vt 0.284535 0.701500 +vt 0.284535 0.686853 +vt 0.283559 0.599944 +vt 0.490577 0.686853 +vt 0.493506 0.686853 +vt 0.492529 0.773761 +vt 0.492529 0.788409 +vt 0.494482 0.797197 
+vt 0.496435 0.800126 +vt 0.498388 0.804032 +vt 0.501318 0.806962 +vt 0.504247 0.808915 +vt 0.794267 0.155638 +vt 0.790361 0.155638 +vt 0.795244 0.088259 +vt 0.789385 0.089236 +vt 0.784503 0.089236 +vt 0.642910 0.089236 +vt 0.587250 0.090212 +vt 0.526707 0.088259 +vt 0.975896 0.905588 +vt 0.974920 0.905588 +vt 0.972967 0.964178 +vt 0.891917 0.968084 +vt 0.890941 0.968084 +vt 0.890941 0.972967 +vt 0.539401 0.669276 +vt 0.537448 0.671229 +vt 0.530613 0.669276 +vt 0.525730 0.591156 +vt 0.528660 0.660487 +vt 0.525730 0.660487 +vt 0.527683 0.670252 +vt 0.529636 0.663417 +vt 0.528660 0.675135 +vt 0.528660 0.681970 +vt 0.533542 0.688806 +vt 0.536472 0.689782 +vt 0.537448 0.685876 +vt 0.540378 0.688806 +vt 0.966131 0.529636 +vt 0.967108 0.527683 +vt 0.966131 0.522801 +vt 0.966131 0.520848 +vt 0.965155 0.520848 +vt 0.964178 0.506200 +vt 0.962225 0.412457 +vt 0.881176 0.389997 +vt 0.252311 0.734701 +vt 0.251334 0.666346 +vt 0.252311 0.652675 +vt 0.250358 0.652675 +vt 0.249381 0.652675 +vt 0.249381 0.648769 +vt 0.182979 0.646816 +vt 0.249381 0.644863 +vt 0.250358 0.638028 +vt 0.182979 0.642910 +vt 0.182979 0.639004 +vt 0.356796 0.074588 +vt 0.354843 0.077518 +vt 0.346055 0.088259 +vt 0.342149 0.088259 +vt 0.284535 0.088259 +vt 0.193721 0.088259 +vt 0.027716 0.089236 +vt 0.024787 0.089236 +vt 0.023810 0.090212 +vt 0.020881 0.092165 +vt 0.010139 0.083377 +vt 0.009163 0.078494 +vt 0.005257 0.079471 +vt 0.284535 0.707359 +vt 0.351914 0.708336 +vt 0.351914 0.704430 +vt 0.350937 0.689782 +vt 0.350937 0.609709 +vt 0.424175 0.677088 +vt 0.426128 0.777667 +vt 0.425151 0.792314 +vt 0.817703 0.088259 +vt 0.816727 0.155638 +vt 0.811844 0.088259 +vt 0.809891 0.155638 +vt 0.805985 0.155638 +vt 0.805985 0.088259 +vt 0.802079 0.155638 +vt 0.802079 0.088259 +vt 0.799150 0.088259 +vt 0.798173 0.155638 +vt 0.795244 0.087283 +vt 0.789385 0.087283 +vt 0.784503 0.088259 +vt 0.757161 0.088259 +vt 0.659511 0.088259 +vt 0.585297 0.087283 +vt 0.277700 0.526707 +vt 0.278676 0.526707 +vt 0.277700 
0.585297 +vt 0.002327 0.542331 +vt 0.065800 0.488623 +vt 0.061894 0.542331 +vt 0.109742 0.461282 +vt 0.001351 0.461282 +vt 0.892894 0.906565 +vt 0.891917 0.906565 +vt 0.888988 0.968084 +vt 0.539401 0.665370 +vt 0.527683 0.591156 +vt 0.524754 0.522801 +vt 0.522801 0.582367 +vt 0.523777 0.660487 +vt 0.523777 0.667323 +vt 0.525730 0.676111 +vt 0.512059 0.683923 +vt 0.514989 0.690759 +vt 0.509130 0.679041 +vt 0.510106 0.683923 +vt 0.511083 0.686853 +vt 0.512059 0.689782 +vt 0.511083 0.692712 +vt 0.514012 0.693688 +vt 0.518895 0.696618 +vt 0.530613 0.687829 +vt 0.536472 0.693688 +vt 0.539401 0.692712 +vt 0.541354 0.691735 +vt 0.967108 0.520848 +vt 0.966131 0.506200 +vt 0.965155 0.436869 +vt 0.965155 0.412457 +vt 0.964178 0.412457 +vt 0.964178 0.377303 +vt 0.964178 0.317736 +vt 0.963202 0.317736 +vt 0.881176 0.321642 +vt 0.881176 0.306018 +vt 0.908518 0.305042 +vt 0.963202 0.303089 +vt 0.882152 0.302112 +vt 0.252311 0.648769 +vt 0.250358 0.648769 +vt 0.251334 0.642910 +vt 0.251334 0.638028 +vt 0.250358 0.632169 +vt 0.182979 0.633145 +vt 0.350937 0.071659 +vt 0.348008 0.075565 +vt 0.342149 0.074588 +vt 0.284535 0.074588 +vt 0.154661 0.072635 +vt 0.099977 0.075565 +vt 0.028693 0.076541 +vt 0.025763 0.076541 +vt 0.018928 0.078494 +vt 0.015998 0.079471 +vt 0.014045 0.076541 +vt 0.006233 0.008186 +vt 0.000374 0.009163 +vt 0.350937 0.714195 +vt 0.352890 0.710289 +vt 0.352890 0.704430 +vt 0.352890 0.689782 +vt 0.350937 0.564790 +vt 0.423198 0.722007 +vt 0.424175 0.777667 +vt 0.424175 0.792314 +vt 0.817703 0.087283 +vt 0.813797 0.086306 +vt 0.811844 0.086306 +vt 0.807938 0.087283 +vt 0.802079 0.087283 +vt 0.797197 0.086306 +vt 0.791338 0.086306 +vt 0.787432 0.086306 +vt 0.784503 0.086306 +vt 0.723960 0.087283 +vt 0.201533 0.542331 +vt 0.175168 0.461282 +vt 0.138061 0.459329 +vt 0.001351 0.459329 +vt 0.001351 0.460305 +vt 0.890941 0.906565 +vt 0.889964 0.897776 +vt 0.570649 0.604827 +vt 0.528660 0.591156 +vt 0.527683 0.522801 +vt 0.520848 0.311877 +vt 0.519871 0.309924 +vt 
0.527683 0.310901 +vt 0.526707 0.307971 +vt 0.519871 0.306995 +vt 0.522801 0.522801 +vt 0.522801 0.660487 +vt 0.511083 0.673182 +vt 0.510106 0.667323 +vt 0.507177 0.667323 +vt 0.508153 0.673182 +vt 0.508153 0.679041 +vt 0.510106 0.684900 +vt 0.510106 0.686853 +vt 0.511083 0.689782 +vt 0.510106 0.689782 +vt 0.503271 0.701500 +vt 0.504247 0.704430 +vt 0.506200 0.706383 +vt 0.507177 0.706383 +vt 0.515965 0.699547 +vt 0.518895 0.699547 +vt 0.523777 0.701500 +vt 0.540378 0.696618 +vt 0.971990 0.525730 +vt 0.970037 0.527683 +vt 0.968084 0.520848 +vt 0.968084 0.491553 +vt 0.966131 0.450540 +vt 0.966131 0.412457 +vt 0.965155 0.317736 +vt 0.965155 0.303089 +vt 0.964178 0.303089 +vt 0.111695 0.905588 +vt 0.111695 0.906565 +vt 0.107789 0.906565 +vt 0.108766 0.987614 +vt 0.102907 0.906565 +vt 0.104860 0.987614 +vt 0.252311 0.644863 +vt 0.252311 0.639981 +vt 0.252311 0.638028 +vt 0.252311 0.634122 +vt 0.251334 0.632169 +vt 0.250358 0.628263 +vt 0.350937 0.003304 +vt 0.345078 0.070682 +vt 0.342149 0.070682 +vt 0.284535 0.070682 +vt 0.032599 0.071659 +vt 0.026740 0.072635 +vt 0.022834 0.073612 +vt 0.017951 0.075565 +vt 0.010139 0.007210 +vt 0.006233 0.007210 +vt 0.000374 0.008186 +vt 0.352890 0.714195 +vt 0.353867 0.714195 +vt 0.353867 0.708336 +vt 0.353867 0.704430 +vt 0.353867 0.689782 +vt 0.574555 0.604827 +vt 0.655605 0.607756 +vt 0.574555 0.685876 +vt 0.656581 0.789385 +vt 0.351914 0.508153 +vt 0.423198 0.777667 +vt 0.422222 0.792314 +vt 0.817703 0.085330 +vt 0.813797 0.085330 +vt 0.809891 0.085330 +vt 0.805985 0.085330 +vt 0.802079 0.086306 +vt 0.654628 0.981755 +vt 0.572602 0.981755 +vt 0.654628 0.977849 +vt 0.572602 0.975896 +vt 0.280629 0.460305 +vt 0.275747 0.542331 +vt 0.276723 0.460305 +vt 0.269888 0.542331 +vt 0.270864 0.460305 +vt 0.265982 0.542331 +vt 0.265005 0.461282 +vt 0.262076 0.542331 +vt 0.203486 0.461282 +vt 0.175168 0.459329 +vt 0.101930 0.458352 +vt 0.009163 0.458352 +vt 0.564790 0.385115 +vt 0.474953 0.391950 +vt 0.521824 0.312854 +vt 0.462258 0.349961 
+vt 0.531589 0.309924 +vt 0.531589 0.306995 +vt 0.531589 0.304065 +vt 0.524754 0.305042 +vt 0.519871 0.306018 +vt 0.521824 0.522801 +vt 0.513036 0.522801 +vt 0.512059 0.585297 +vt 0.511083 0.585297 +vt 0.509130 0.595062 +vt 0.505224 0.667323 +vt 0.505224 0.670252 +vt 0.506200 0.679041 +vt 0.509130 0.687829 +vt 0.501318 0.695641 +vt 0.501318 0.699547 +vt 0.285512 0.988591 +vt 0.283559 0.985661 +vt 0.283559 0.982732 +vt 0.281606 0.979802 +vt 0.281606 0.977849 +vt 0.505224 0.707359 +vt 0.508153 0.709312 +vt 0.509130 0.711265 +vt 0.510106 0.710289 +vt 0.519871 0.702477 +vt 0.523777 0.706383 +vt 0.522801 0.702477 +vt 0.529636 0.706383 +vt 0.527683 0.704430 +vt 0.398786 0.889964 +vt 0.388044 0.879223 +vt 0.388044 0.874340 +vt 0.387068 0.859693 +vt 0.970037 0.506200 +vt 0.970037 0.436869 +vt 0.967108 0.412457 +vt 0.966131 0.317736 +vt 0.966131 0.303089 +vt 0.111695 0.903635 +vt 0.109742 0.903635 +vt 0.105836 0.904612 +vt 0.100954 0.904612 +vt 0.099001 0.906565 +vt 0.099001 0.987614 +vt 0.097048 0.987614 +vt 0.095095 0.906565 +vt 0.092165 0.988591 +vt 0.089236 0.906565 +vt 0.090212 0.988591 +vt 0.252311 0.632169 +vt 0.251334 0.628263 +vt 0.350937 0.002327 +vt 0.345078 0.002327 +vt 0.345078 0.003304 +vt 0.277700 0.003304 +vt 0.219110 0.004280 +vt 0.069706 0.005257 +vt 0.025763 0.006233 +vt 0.019904 0.006233 +vt 0.014045 0.007210 +vt 0.012092 0.006233 +vt 0.996403 0.679041 +vt 0.996403 0.673182 +vt 0.997379 0.673182 +vt 0.995426 0.667323 +vt 0.996403 0.667323 +vt 0.402692 0.892894 +vt 0.403668 0.974920 +vt 0.398786 0.892894 +vt 0.397809 0.974920 +vt 0.394880 0.892894 +vt 0.390974 0.892894 +vt 0.390974 0.974920 +vt 0.387068 0.974920 +vt 0.387068 0.893870 +vt 0.655605 0.593109 +vt 0.574555 0.590179 +vt 0.573579 0.604827 +vt 0.573579 0.685876 +vt 0.573579 0.789385 +vt 0.574555 0.793291 +vt 0.656581 0.805009 +vt 0.574555 0.807938 +vt 0.655605 0.808915 +vt 0.574555 0.813797 +vt 0.572602 0.994450 +vt 0.572602 0.989567 +vt 0.654628 0.991520 +vt 0.654628 0.985661 +vt 0.572602 
0.985661 +vt 0.655605 0.981755 +vt 0.285512 0.458352 +vt 0.280629 0.459329 +vt 0.276723 0.459329 +vt 0.270864 0.459329 +vt 0.265005 0.459329 +vt 0.268911 0.458352 +vt 0.265005 0.458352 +vt 0.726889 0.385115 +vt 0.730795 0.308948 +vt 0.639981 0.308948 +vt 0.577485 0.308948 +vt 0.583344 0.306018 +vt 0.541354 0.304065 +vt 0.531589 0.295277 +vt 0.523777 0.295277 +vt 0.513036 0.517918 +vt 0.512059 0.522801 +vt 0.510106 0.522801 +vt 0.508153 0.522801 +vt 0.507177 0.522801 +vt 0.506200 0.595062 +vt 0.497412 0.597991 +vt 0.495459 0.674158 +vt 0.496435 0.674158 +vt 0.498388 0.682947 +vt 0.499365 0.689782 +vt 0.498388 0.689782 +vt 0.287465 0.994450 +vt 0.287465 0.990544 +vt 0.286488 0.987614 +vt 0.285512 0.985661 +vt 0.283559 0.979802 +vt 0.282582 0.974920 +vt 0.506200 0.710289 +vt 0.511083 0.713218 +vt 0.515965 0.716148 +vt 0.528660 0.709312 +vt 0.402692 0.887035 +vt 0.400739 0.887035 +vt 0.400739 0.870434 +vt 0.398786 0.789385 +vt 0.398786 0.764973 +vt 0.388044 0.789385 +vt 0.971014 0.436869 +vt 0.969061 0.332384 +vt 0.969061 0.303089 +vt 0.967108 0.300159 +vt 0.109742 0.901682 +vt 0.105836 0.902659 +vt 0.102907 0.903635 +vt 0.099001 0.903635 +vt 0.095095 0.903635 +vt 0.095095 0.905588 +vt 0.087283 0.905588 +vt 0.085330 0.906565 +vt 0.084353 0.988591 +vt 0.252311 0.626310 +vt 0.348984 0.000374 +vt 0.345078 0.001351 +vt 0.345078 0.000374 +vt 0.277700 0.001351 +vt 0.285512 0.002327 +vt 0.157591 0.003304 +vt 0.086306 0.003304 +vt 0.021857 0.005257 +vt 0.017951 0.005257 +vt 0.996403 0.684900 +vt 0.997379 0.679041 +vt 0.409527 0.974920 +vt 0.406598 0.892894 +vt 0.402692 0.891917 +vt 0.396833 0.891917 +vt 0.392927 0.891917 +vt 0.386091 0.892894 +vt 0.572602 0.590179 +vt 0.572602 0.619474 +vt 0.572602 0.698571 +vt 0.572602 0.793291 +vt 0.573579 0.807938 +vt 0.572602 0.811844 +vt 0.655605 0.993473 +vt 0.655605 0.989567 +vt 0.657558 0.985661 +vt 0.656581 0.981755 +vt 0.284535 0.457376 +vt 0.280629 0.458352 +vt 0.746419 0.308948 +vt 0.743490 0.386091 +vt 0.736654 0.386091 +vt 
0.274770 0.458352 +vt 0.730795 0.386091 +vt 0.735678 0.308948 +vt 0.730795 0.305042 +vt 0.685876 0.306018 +vt 0.620451 0.303089 +vt 0.542331 0.295277 +vt 0.531589 0.292347 +vt 0.558931 0.292347 +vt 0.567720 0.289418 +vt 0.531589 0.290394 +vt 0.526707 0.292347 +vt 0.510106 0.515965 +vt 0.507177 0.518895 +vt 0.499365 0.519871 +vt 0.498388 0.522801 +vt 0.496435 0.592132 +vt 0.261099 0.707359 +vt 0.260123 0.626310 +vt 0.261099 0.626310 +vt 0.494482 0.674158 +vt 0.496435 0.683923 +vt 0.496435 0.686853 +vt 0.290394 0.996403 +vt 0.289418 0.993473 +vt 0.288441 0.990544 +vt 0.285512 0.984685 +vt 0.284535 0.982732 +vt 0.283559 0.973943 +vt 0.283559 0.969061 +vt 0.282582 0.967108 +vt 0.512059 0.716148 +vt 0.514989 0.717124 +vt 0.520848 0.719077 +vt 0.225945 0.984685 +vt 0.214227 0.981755 +vt 0.227898 0.967108 +vt 0.414410 0.871411 +vt 0.402692 0.870434 +vt 0.402692 0.789385 +vt 0.400739 0.764973 +vt 0.983708 0.412457 +vt 0.981755 0.412457 +vt 0.981755 0.320666 +vt 0.971014 0.303089 +vt 0.770832 0.999332 +vt 0.768879 0.998356 +vt 0.773761 0.994450 +vt 0.768879 0.995426 +vt 0.771808 0.991520 +vt 0.769855 0.990544 +vt 0.768879 0.990544 +vt 0.767902 0.988591 +vt 0.769855 0.984685 +vt 0.767902 0.984685 +vt 0.768879 0.982732 +vt 0.767902 0.982732 +vt 0.766926 0.980779 +vt 0.765949 0.980779 +vt 0.763020 0.977849 +vt 0.091189 0.903635 +vt 0.085330 0.904612 +vt 0.082400 0.905588 +vt 0.078494 0.906565 +vt 0.080447 0.988591 +vt 0.001351 0.455423 +vt 0.002327 0.374373 +vt 0.062870 0.371444 +vt 0.285512 0.000374 +vt 0.226922 0.001351 +vt 0.169309 0.002327 +vt 0.025763 0.003304 +vt 0.996403 0.692712 +vt 0.995426 0.688806 +vt 0.997379 0.688806 +vt 0.997379 0.684900 +vt 0.416363 0.974920 +vt 0.413433 0.892894 +vt 0.409527 0.891917 +vt 0.406598 0.889964 +vt 0.402692 0.890941 +vt 0.396833 0.890941 +vt 0.390974 0.890941 +vt 0.386091 0.891917 +vt 0.386091 0.890941 +vt 0.571626 0.590179 +vt 0.571626 0.604827 +vt 0.411480 0.657558 +vt 0.462258 0.561861 +vt 0.488623 0.576508 +vt 0.490577 0.561861 
+vt 0.489600 0.576508 +vt 0.488623 0.561861 +vt 0.487647 0.561861 +vt 0.411480 0.466164 +vt 0.571626 0.793291 +vt 0.571626 0.807938 +vt 0.656581 0.994450 +vt 0.656581 0.998356 +vt 0.656581 0.989567 +vt 0.680017 0.997379 +vt 0.675135 0.997379 +vt 0.675135 0.920236 +vt 0.669276 0.920236 +vt 0.671229 0.997379 +vt 0.748372 0.387068 +vt 0.753255 0.309924 +vt 0.037481 0.969061 +vt 0.040411 0.974920 +vt 0.038458 0.975896 +vt 0.042364 0.977849 +vt 0.741537 0.308948 +vt 0.737631 0.305042 +vt 0.737631 0.302112 +vt 0.702477 0.302112 +vt 0.642910 0.294300 +vt 0.623380 0.291371 +vt 0.717124 0.823562 +vt 0.716148 0.760090 +vt 0.716148 0.726889 +vt 0.713218 0.726889 +vt 0.711265 0.730795 +vt 0.711265 0.844069 +vt 0.623380 0.288441 +vt 0.529636 0.289418 +vt 0.507177 0.514989 +vt 0.498388 0.515965 +vt 0.498388 0.516942 +vt 0.497412 0.518895 +vt 0.497412 0.522801 +vt 0.495459 0.588226 +vt 0.262076 0.711265 +vt 0.723960 0.998356 +vt 0.722983 0.998356 +vt 0.723960 0.913400 +vt 0.262076 0.626310 +vt 0.492529 0.675135 +vt 0.493506 0.678064 +vt 0.494482 0.683923 +vt 0.293324 0.998356 +vt 0.305042 0.998356 +vt 0.300159 0.990544 +vt 0.297230 0.985661 +vt 0.296253 0.983708 +vt 0.295277 0.978826 +vt 0.295277 0.973943 +vt 0.295277 0.969061 +vt 0.284535 0.961249 +vt 0.283559 0.961249 +vt 0.281606 0.964178 +vt 0.283559 0.958319 +vt 0.519871 0.720054 +vt 0.264029 0.543307 +vt 0.265005 0.543307 +vt 0.265005 0.560884 +vt 0.226922 0.984685 +vt 0.229851 0.967108 +vt 0.416363 0.871411 +vt 0.414410 0.788409 +vt 0.402692 0.764973 +vt 0.985661 0.412457 +vt 0.982732 0.306018 +vt 0.981755 0.287465 +vt 0.788409 0.991520 +vt 0.774738 0.989567 +vt 0.772785 0.986638 +vt 0.769855 0.979802 +vt 0.768879 0.976873 +vt 0.767902 0.979802 +vt 0.765949 0.975896 +vt 0.723960 0.837233 +vt 0.724936 0.830398 +vt 0.727866 0.837233 +vt 0.732748 0.831374 +vt 0.736654 0.824539 +vt 0.725913 0.824539 +vt 0.078494 0.904612 +vt 0.061894 0.455423 +vt 0.001351 0.457376 +vt 0.001351 0.456399 +vt 0.078494 0.905588 +vt 0.061894 
0.453470 +vt 0.122437 0.453470 +vt 0.121460 0.371444 +vt 0.179073 0.371444 +vt 0.321642 0.372420 +vt 0.254264 0.453470 +vt 0.297230 0.454446 +vt 0.324572 0.454446 +vt 0.325548 0.372420 +vt 0.331407 0.454446 +vt 0.329454 0.372420 +vt 0.417339 0.892894 +vt 0.415386 0.891917 +vt 0.409527 0.889964 +vt 0.882152 0.947578 +vt 0.879223 0.947578 +vt 0.875317 0.922189 +vt 0.875317 0.947578 +vt 0.868481 0.947578 +vt 0.872387 0.870434 +vt 0.865552 0.870434 +vt 0.861646 0.947578 +vt 0.859693 0.870434 +vt 0.412457 0.676111 +vt 0.412457 0.672205 +vt 0.489600 0.675135 +vt 0.488623 0.660487 +vt 0.491553 0.659511 +vt 0.491553 0.561861 +vt 0.488623 0.519871 +vt 0.488623 0.463235 +vt 0.411480 0.451517 +vt 0.488623 0.448587 +vt 0.411480 0.447611 +vt 0.686853 0.920236 +vt 0.683923 0.997379 +vt 0.680017 0.920236 +vt 0.036505 0.958319 +vt 0.036505 0.963202 +vt 0.033575 0.963202 +vt 0.035528 0.970037 +vt 0.035528 0.974920 +vt 0.037481 0.976873 +vt 0.040411 0.979802 +vt 0.043340 0.983708 +vt 0.038458 0.982732 +vt 0.743490 0.303089 +vt 0.745443 0.293324 +vt 0.717124 0.723960 +vt 0.715171 0.723960 +vt 0.714195 0.721030 +vt 0.711265 0.722983 +vt 0.703453 0.722007 +vt 0.703453 0.719077 +vt 0.701500 0.718101 +vt 0.702477 0.715171 +vt 0.701500 0.722007 +vt 0.702477 0.728842 +vt 0.702477 0.758137 +vt 0.702477 0.823562 +vt 0.643887 0.279653 +vt 0.585297 0.280629 +vt 0.531589 0.280629 +vt 0.556978 0.280629 +vt 0.542331 0.279653 +vt 0.531589 0.279653 +vt 0.497412 0.516942 +vt 0.495459 0.516942 +vt 0.496435 0.522801 +vt 0.979802 0.872387 +vt 0.979802 0.937813 +vt 0.980779 0.871411 +vt 0.723960 0.997379 +vt 0.724936 0.913400 +vt 0.724936 0.988591 +vt 0.491553 0.676111 +vt 0.485694 0.681970 +vt 0.480812 0.675135 +vt 0.734701 0.919259 +vt 0.734701 0.993473 +vt 0.734701 0.996403 +vt 0.735678 0.997379 +vt 0.735678 0.919259 +vt 0.351914 0.958319 +vt 0.351914 0.959296 +vt 0.346055 0.961249 +vt 0.351914 0.960272 +vt 0.482765 0.680994 +vt 0.306995 0.999332 +vt 0.302112 0.992497 +vt 0.301136 0.990544 +vt 
0.299183 0.987614 +vt 0.298206 0.982732 +vt 0.297230 0.980779 +vt 0.296253 0.975896 +vt 0.296253 0.973943 +vt 0.296253 0.969061 +vt 0.297230 0.964178 +vt 0.297230 0.961249 +vt 0.285512 0.959296 +vt 0.306995 0.942695 +vt 0.306018 0.942695 +vt 0.305042 0.942695 +vt 0.705406 0.982732 +vt 0.703453 0.982732 +vt 0.704430 0.981755 +vt 0.699547 0.971014 +vt 0.692712 0.977849 +vt 0.692712 0.976873 +vt 0.694665 0.972967 +vt 0.292347 0.942695 +vt 0.284535 0.958319 +vt 0.263052 0.543307 +vt 0.263052 0.566743 +vt 0.335313 0.917306 +vt 0.336290 0.840163 +vt 0.337266 0.917306 +vt 0.264029 0.643887 +vt 0.265005 0.643887 +vt 0.414410 0.744466 +vt 0.415386 0.788409 +vt 0.414410 0.764973 +vt 0.997379 0.412457 +vt 0.985661 0.306018 +vt 0.985661 0.290394 +vt 0.982732 0.290394 +vt 0.788409 0.994450 +vt 0.791338 0.987614 +vt 0.791338 0.979802 +vt 0.790361 0.982732 +vt 0.774738 0.983708 +vt 0.773761 0.983708 +vt 0.772785 0.978826 +vt 0.769855 0.976873 +vt 0.765949 0.972967 +vt 0.729819 0.836257 +vt 0.734701 0.829421 +vt 0.737631 0.824539 +vt 0.764973 0.761067 +vt 0.721030 0.756184 +vt 0.070682 0.455423 +vt 0.061894 0.454446 +vt 0.198603 0.454446 +vt 0.264029 0.455423 +vt 0.329454 0.455423 +vt 0.333360 0.455423 +vt 0.331407 0.457376 +vt 0.417339 0.890941 +vt 0.413433 0.889964 +vt 0.886058 0.947578 +vt 0.883129 0.870434 +vt 0.877270 0.870434 +vt 0.870434 0.868481 +vt 0.870434 0.867505 +vt 0.865552 0.867505 +vt 0.858716 0.867505 +vt 0.492529 0.674158 +vt 0.494482 0.659511 +vt 0.493506 0.576508 +vt 0.492529 0.561861 +vt 0.491553 0.463235 +vt 0.491553 0.448587 +vt 0.032599 0.953437 +vt 0.035528 0.951484 +vt 0.033575 0.958319 +vt 0.038458 0.952460 +vt 0.032599 0.968084 +vt 0.034552 0.977849 +vt 0.025763 0.983708 +vt 0.031622 0.989567 +vt 0.024787 0.984685 +vt 0.030646 0.991520 +vt 0.027716 0.990544 +vt 0.022834 0.986638 +vt 0.023810 0.989567 +vt 0.712242 0.717124 +vt 0.703453 0.712242 +vt 0.702477 0.712242 +vt 0.701500 0.709312 +vt 0.700524 0.715171 +vt 0.700524 0.722007 +vt 0.701500 0.728842 
+vt 0.701500 0.776690 +vt 0.701500 0.795244 +vt 0.701500 0.823562 +vt 0.643887 0.278676 +vt 0.603850 0.277700 +vt 0.571626 0.279653 +vt 0.542331 0.277700 +vt 0.531589 0.277700 +vt 0.979802 0.944648 +vt 0.980779 0.940742 +vt 0.980779 0.937813 +vt 0.980779 0.862622 +vt 0.985661 0.870434 +vt 0.984685 0.870434 +vt 0.984685 0.868481 +vt 0.986638 0.868481 +vt 0.729819 0.994450 +vt 0.733725 0.919259 +vt 0.733725 0.996403 +vt 0.659511 0.773761 +vt 0.658534 0.773761 +vt 0.659511 0.772785 +vt 0.658534 0.771808 +vt 0.657558 0.771808 +vt 0.658534 0.762043 +vt 0.658534 0.761067 +vt 0.657558 0.762043 +vt 0.746419 0.997379 +vt 0.748372 0.926095 +vt 0.350937 0.958319 +vt 0.344102 0.961249 +vt 0.306018 0.996403 +vt 0.303089 0.992497 +vt 0.312854 0.972967 +vt 0.314807 0.978826 +vt 0.312854 0.975896 +vt 0.314807 0.981755 +vt 0.297230 0.975896 +vt 0.296253 0.971014 +vt 0.297230 0.967108 +vt 0.298206 0.962225 +vt 0.315783 0.947578 +vt 0.315783 0.946601 +vt 0.317736 0.947578 +vt 0.716148 0.983708 +vt 0.717124 0.984685 +vt 0.716148 0.984685 +vt 0.715171 0.984685 +vt 0.705406 0.983708 +vt 0.700524 0.971014 +vt 0.127319 0.984685 +vt 0.130249 0.983708 +vt 0.131225 0.984685 +vt 0.133178 0.984685 +vt 0.132202 0.983708 +vt 0.126343 0.983708 +vt 0.693688 0.977849 +vt 0.693688 0.971990 +vt 0.693688 0.954413 +vt 0.694665 0.976873 +vt 0.694665 0.977849 +vt 0.334337 0.917306 +vt 0.335313 0.847975 +vt 0.335313 0.941719 +vt 0.336290 0.842116 +vt 0.336290 0.941719 +vt 0.337266 0.967108 +vt 0.338243 0.917306 +vt 0.415386 0.738607 +vt 0.998356 0.386091 +vt 0.998356 0.391950 +vt 0.998356 0.306018 +vt 0.997379 0.306018 +vt 0.998356 0.288441 +vt 0.803056 0.998356 +vt 0.790361 0.996403 +vt 0.792314 0.990544 +vt 0.793291 0.985661 +vt 0.793291 0.977849 +vt 0.791338 0.974920 +vt 0.789385 0.977849 +vt 0.773761 0.975896 +vt 0.768879 0.971990 +vt 0.731772 0.837233 +vt 0.735678 0.833327 +vt 0.739584 0.826492 +vt 0.767902 0.762043 +vt 0.792314 0.698571 +vt 0.792314 0.696618 +vt 0.715171 0.663417 +vt 0.162473 
0.456399 +vt 0.297230 0.456399 +vt 0.545260 0.562837 +vt 0.574555 0.589203 +vt 0.542331 0.589203 +vt 0.329454 0.456399 +vt 0.540378 0.589203 +vt 0.535495 0.512059 +vt 0.535495 0.589203 +vt 0.531589 0.512059 +vt 0.888011 0.870434 +vt 0.884105 0.868481 +vt 0.877270 0.867505 +vt 0.875317 0.866528 +vt 0.870434 0.865552 +vt 0.860669 0.865552 +vt 0.695641 0.805985 +vt 0.692712 0.820633 +vt 0.693688 0.803056 +vt 0.691735 0.803056 +vt 0.691735 0.904612 +vt 0.692712 0.904612 +vt 0.410504 0.620451 +vt 0.411480 0.635098 +vt 0.409527 0.620451 +vt 0.409527 0.635098 +vt 0.410504 0.733725 +vt 0.494482 0.463235 +vt 0.414410 0.749349 +vt 0.412457 0.751302 +vt 0.410504 0.749349 +vt 0.411480 0.754231 +vt 0.412457 0.759114 +vt 0.029669 0.960272 +vt 0.030646 0.966131 +vt 0.031622 0.971014 +vt 0.020881 0.974920 +vt 0.022834 0.979802 +vt 0.019904 0.980779 +vt 0.017951 0.976873 +vt 0.016975 0.973943 +vt 0.016975 0.976873 +vt 0.017951 0.981755 +vt 0.713218 0.708336 +vt 0.704430 0.706383 +vt 0.703453 0.706383 +vt 0.702477 0.706383 +vt 0.700524 0.706383 +vt 0.700524 0.707359 +vt 0.700524 0.713218 +vt 0.699547 0.719077 +vt 0.699547 0.724936 +vt 0.699547 0.728842 +vt 0.699547 0.776690 +vt 0.698571 0.823562 +vt 0.699547 0.823562 +vt 0.643887 0.276723 +vt 0.568696 0.277700 +vt 0.571626 0.276723 +vt 0.565767 0.276723 +vt 0.562837 0.276723 +vt 0.982732 0.974920 +vt 0.979802 0.975896 +vt 0.982732 0.944648 +vt 0.989567 0.937813 +vt 0.706383 0.902659 +vt 0.705406 0.896800 +vt 0.705406 0.892894 +vt 0.709312 0.884105 +vt 0.719077 0.847975 +vt 0.720054 0.847975 +vt 0.340196 0.841139 +vt 0.341172 0.840163 +vt 0.341172 0.842116 +vt 0.341172 0.841139 +vt 0.818680 0.762043 +vt 0.817703 0.761067 +vt 0.818680 0.761067 +vt 0.718101 0.846998 +vt 0.707359 0.883129 +vt 0.704430 0.892894 +vt 0.703453 0.896800 +vt 0.705406 0.968084 +vt 0.694665 0.968084 +vt 0.695641 0.903635 +vt 0.115601 0.648769 +vt 0.115601 0.713218 +vt 0.114625 0.714195 +vt 0.114625 0.713218 +vt 0.666346 0.746419 +vt 0.665370 0.757161 +vt 
0.665370 0.745443 +vt 0.665370 0.758137 +vt 0.659511 0.759114 +vt 0.985661 0.869458 +vt 0.658534 0.745443 +vt 0.658534 0.759114 +vt 0.657558 0.759114 +vt 0.985661 0.868481 +vt 0.664393 0.773761 +vt 0.663417 0.774737 +vt 0.663417 0.773761 +vt 0.659511 0.761067 +vt 0.747396 0.996403 +vt 0.746419 0.993473 +vt 0.350937 0.942695 +vt 0.350937 0.943672 +vt 0.348008 0.944648 +vt 0.340196 0.947578 +vt 0.316760 0.986638 +vt 0.317736 0.985661 +vt 0.319689 0.985661 +vt 0.316760 0.980779 +vt 0.315783 0.974920 +vt 0.320666 0.977849 +vt 0.316760 0.973943 +vt 0.314807 0.972967 +vt 0.313830 0.972967 +vt 0.310901 0.968084 +vt 0.310901 0.962225 +vt 0.311877 0.957343 +vt 0.718101 0.984685 +vt 0.717124 0.983708 +vt 0.717124 0.981755 +vt 0.716148 0.979802 +vt 0.704430 0.970037 +vt 0.707359 0.968084 +vt 0.708336 0.965155 +vt 0.708336 0.964178 +vt 0.710289 0.888988 +vt 0.711265 0.888988 +vt 0.710289 0.886058 +vt 0.709312 0.886058 +vt 0.133178 0.983708 +vt 0.130249 0.982732 +vt 0.694665 0.978826 +vt 0.695641 0.978826 +vt 0.335313 0.843092 +vt 0.334337 0.843092 +vt 0.323595 0.843092 +vt 0.322619 0.916330 +vt 0.322619 0.962225 +vt 0.977849 0.783526 +vt 0.990544 0.763020 +vt 0.977849 0.860669 +vt 0.988591 0.861646 +vt 0.264029 0.647793 +vt 0.265005 0.647793 +vt 0.265005 0.648769 +vt 0.559908 0.996403 +vt 0.560884 0.997379 +vt 0.265005 0.649746 +vt 0.265982 0.643887 +vt 0.990544 0.856763 +vt 0.991520 0.763020 +vt 0.992497 0.789385 +vt 0.991520 0.869458 +vt 0.266958 0.656581 +vt 0.265005 0.674158 +vt 0.999332 0.288441 +vt 0.804032 0.999332 +vt 0.805985 0.993473 +vt 0.805009 0.990544 +vt 0.805985 0.984685 +vt 0.805009 0.973943 +vt 0.806962 0.981755 +vt 0.805009 0.979802 +vt 0.806962 0.984685 +vt 0.792314 0.971990 +vt 0.790361 0.970037 +vt 0.791338 0.970037 +vt 0.789385 0.967108 +vt 0.788409 0.964178 +vt 0.788409 0.970037 +vt 0.771808 0.970037 +vt 0.733725 0.839186 +vt 0.737631 0.835280 +vt 0.741537 0.830398 +vt 0.741537 0.827468 +vt 0.773761 0.754231 +vt 0.797197 0.700524 +vt 0.794267 0.699547 
+vt 0.797197 0.692712 +vt 0.744466 0.510106 +vt 0.748372 0.514989 +vt 0.740560 0.513036 +vt 0.709312 0.589203 +vt 0.652675 0.512059 +vt 0.542331 0.512059 +vt 0.538425 0.509130 +vt 0.533542 0.509130 +vt 0.525730 0.510106 +vt 0.525730 0.511083 +vt 0.522801 0.510106 +vt 0.882152 0.866528 +vt 0.877270 0.863599 +vt 0.515965 0.509130 +vt 0.508153 0.512059 +vt 0.510106 0.510106 +vt 0.504247 0.512059 +vt 0.868481 0.863599 +vt 0.860669 0.864575 +vt 0.496435 0.514012 +vt 0.495459 0.505224 +vt 0.682947 0.799150 +vt 0.682947 0.904612 +vt 0.400739 0.620451 +vt 0.401715 0.740560 +vt 0.402692 0.758137 +vt 0.402692 0.763996 +vt 0.018928 0.960272 +vt 0.018928 0.966131 +vt 0.017951 0.971990 +vt 0.015998 0.970037 +vt 0.015998 0.973943 +vt 0.015998 0.976873 +vt 0.713218 0.703453 +vt 0.705406 0.700524 +vt 0.704430 0.700524 +vt 0.705406 0.697594 +vt 0.703453 0.701500 +vt 0.701500 0.701500 +vt 0.700524 0.700524 +vt 0.700524 0.701500 +vt 0.699547 0.701500 +vt 0.241569 0.922189 +vt 0.233757 0.922189 +vt 0.242546 0.922189 +vt 0.234734 0.921212 +vt 0.244499 0.920236 +vt 0.691735 0.705406 +vt 0.691735 0.714195 +vt 0.690759 0.720054 +vt 0.690759 0.725913 +vt 0.690759 0.792314 +vt 0.690759 0.729819 +vt 0.698571 0.728842 +vt 0.696618 0.800126 +vt 0.695641 0.801103 +vt 0.696618 0.862622 +vt 0.603850 0.273794 +vt 0.643887 0.275747 +vt 0.585297 0.276723 +vt 0.148802 0.709312 +vt 0.151732 0.718101 +vt 0.150755 0.722983 +vt 0.152708 0.718101 +vt 0.152708 0.720054 +vt 0.153685 0.735678 +vt 0.153685 0.720054 +vt 0.152708 0.719077 +vt 0.339219 0.847975 +vt 0.339219 0.846998 +vt 0.351914 0.846998 +vt 0.826492 0.767902 +vt 0.822586 0.773761 +vt 0.825515 0.766926 +vt 0.826492 0.766926 +vt 0.709312 0.844069 +vt 0.698571 0.881176 +vt 0.695641 0.891917 +vt 0.694665 0.895823 +vt 0.114625 0.641934 +vt 0.115601 0.641934 +vt 0.114625 0.648769 +vt 0.113648 0.714195 +vt 0.664393 0.745443 +vt 0.663417 0.744466 +vt 0.659511 0.745443 +vt 0.657558 0.745443 +vt 0.664393 0.760090 +vt 0.663417 0.760090 +vt 0.659511 
0.760090 +vt 0.748372 0.996403 +vt 0.747396 0.993473 +vt 0.749349 0.926095 +vt 0.345078 0.944648 +vt 0.340196 0.945625 +vt 0.345078 0.943672 +vt 0.340196 0.944648 +vt 0.827468 0.734701 +vt 0.826492 0.740560 +vt 0.818680 0.738607 +vt 0.820633 0.742513 +vt 0.825515 0.745443 +vt 0.822586 0.747396 +vt 0.995426 0.417339 +vt 0.996403 0.413433 +vt 0.997379 0.413433 +vt 0.321642 0.976873 +vt 0.317736 0.973943 +vt 0.315783 0.967108 +vt 0.313830 0.967108 +vt 0.312854 0.965155 +vt 0.313830 0.960272 +vt 0.314807 0.955390 +vt 0.318713 0.948554 +vt 0.718101 0.983708 +vt 0.717124 0.982732 +vt 0.718101 0.981755 +vt 0.716148 0.964178 +vt 0.716148 0.941719 +vt 0.712242 0.917306 +vt 0.706383 0.916330 +vt 0.712242 0.916330 +vt 0.713218 0.915353 +vt 0.711265 0.886058 +vt 0.133178 0.982732 +vt 0.132202 0.981755 +vt 0.710289 0.884105 +vt 0.712242 0.886058 +vt 0.712242 0.885082 +vt 0.132202 0.980779 +vt 0.130249 0.981755 +vt 0.126343 0.982732 +vt 0.694665 0.979802 +vt 0.695641 0.979802 +vt 0.706383 0.984685 +vt 0.705406 0.985661 +vt 0.706383 0.985661 +vt 0.321642 0.843092 +vt 0.322619 0.843092 +vt 0.320666 0.916330 +vt 0.321642 0.940742 +vt 0.320666 0.973943 +vt 0.976873 0.795244 +vt 0.976873 0.860669 +vt 0.976873 0.861646 +vt 0.988591 0.862622 +vt 0.264029 0.648769 +vt 0.561861 0.998356 +vt 0.565767 0.997379 +vt 0.565767 0.996403 +vt 0.568696 0.996403 +vt 0.566743 0.996403 +vt 0.560884 0.996403 +vt 0.560884 0.995426 +vt 0.559908 0.995426 +vt 0.559908 0.997379 +vt 0.265005 0.650722 +vt 0.536472 0.990544 +vt 0.534519 0.987614 +vt 0.530613 0.987614 +vt 0.529636 0.984685 +vt 0.526707 0.984685 +vt 0.806962 0.987614 +vt 0.805985 0.976873 +vt 0.804032 0.968084 +vt 0.804032 0.971014 +vt 0.804032 0.973943 +vt 0.792314 0.969061 +vt 0.790361 0.967108 +vt 0.789385 0.964178 +vt 0.786456 0.959296 +vt 0.783526 0.960272 +vt 0.745443 0.847975 +vt 0.749349 0.843092 +vt 0.742513 0.827468 +vt 0.773761 0.755208 +vt 0.799150 0.695641 +vt 0.747396 0.507177 +vt 0.740560 0.510106 +vt 0.706383 0.510106 +vt 
0.634122 0.509130 +vt 0.570649 0.509130 +vt 0.535495 0.506200 +vt 0.527683 0.507177 +vt 0.521824 0.508153 +vt 0.515965 0.497412 +vt 0.512059 0.498388 +vt 0.506200 0.499365 +vt 0.501318 0.501318 +vt 0.246452 0.990544 +vt 0.246452 0.999332 +vt 0.243522 0.996403 +vt 0.242546 0.996403 +vt 0.243522 0.999332 +vt 0.680017 0.799150 +vt 0.680017 0.813797 +vt 0.680017 0.904612 +vt 0.680994 0.919259 +vt 0.398786 0.635098 +vt 0.400739 0.677088 +vt 0.401715 0.755208 +vt 0.401715 0.761067 +vt 0.017951 0.957343 +vt 0.015998 0.963202 +vt 0.014045 0.963202 +vt 0.014045 0.970037 +vt 0.716148 0.696618 +vt 0.708336 0.693688 +vt 0.707359 0.692712 +vt 0.169309 0.948554 +vt 0.169309 0.943672 +vt 0.171262 0.939766 +vt 0.171262 0.945625 +vt 0.172238 0.943672 +vt 0.172238 0.942695 +vt 0.172238 0.941719 +vt 0.173215 0.941719 +vt 0.173215 0.940742 +vt 0.172238 0.940742 +vt 0.172238 0.939766 +vt 0.173215 0.939766 +vt 0.175168 0.938789 +vt 0.233757 0.925118 +vt 0.233757 0.927071 +vt 0.242546 0.923165 +vt 0.243522 0.922189 +vt 0.243522 0.923165 +vt 0.250358 0.919259 +vt 0.690759 0.711265 +vt 0.689782 0.720054 +vt 0.689782 0.722983 +vt 0.689782 0.729819 +vt 0.689782 0.798173 +vt 0.687829 0.798173 +vt 0.688806 0.798173 +vt 0.960272 0.992497 +vt 0.962225 0.998356 +vt 0.962225 0.997379 +vt 0.690759 0.798173 +vt 0.696618 0.798173 +vt 0.695641 0.798173 +vt 0.695641 0.799150 +vt 0.972967 0.989567 +vt 0.971014 0.983708 +vt 0.351914 0.940742 +vt 0.351914 0.939766 +vt 0.351914 0.855787 +vt 0.351914 0.854810 +vt 0.351914 0.848951 +vt 0.351914 0.847975 +vt 0.823562 0.774737 +vt 0.823562 0.773761 +vt 0.826492 0.768879 +vt 0.708336 0.844069 +vt 0.699547 0.875317 +vt 0.694665 0.890941 +vt 0.115601 0.638028 +vt 0.114625 0.638028 +vt 0.113648 0.641934 +vt 0.113648 0.648769 +vt 0.531589 0.473000 +vt 0.475929 0.437846 +vt 0.475929 0.432963 +vt 0.659511 0.744466 +vt 0.666346 0.760090 +vt 0.665370 0.760090 +vt 0.753255 0.994450 +vt 0.751302 0.994450 +vt 0.750325 0.926095 +vt 0.349961 0.941719 +vt 0.826492 0.728842 
+vt 0.816727 0.734701 +vt 0.994450 0.427104 +vt 0.996403 0.427104 +vt 0.992497 0.486670 +vt 0.340196 0.335313 +vt 0.335313 0.334337 +vt 0.341172 0.290394 +vt 0.339219 0.290394 +vt 0.997379 0.427104 +vt 0.997379 0.425151 +vt 0.996403 0.422222 +vt 0.995426 0.422222 +vt 0.996403 0.418316 +vt 0.998356 0.413433 +vt 0.322619 0.975896 +vt 0.319689 0.972967 +vt 0.317736 0.969061 +vt 0.315783 0.962225 +vt 0.314807 0.960272 +vt 0.315783 0.955390 +vt 0.319689 0.948554 +vt 0.570649 0.939766 +vt 0.573579 0.938789 +vt 0.573579 0.939766 +vt 0.591156 0.937813 +vt 0.609709 0.937813 +vt 0.614592 0.937813 +vt 0.706383 0.917306 +vt 0.712242 0.920236 +vt 0.713218 0.918283 +vt 0.713218 0.917306 +vt 0.714195 0.915353 +vt 0.714195 0.917306 +vt 0.712242 0.888988 +vt 0.713218 0.887035 +vt 0.717124 0.878246 +vt 0.136108 0.973943 +vt 0.706383 0.986638 +vt 0.707359 0.985661 +vt 0.714195 0.993473 +vt 0.310901 0.902659 +vt 0.311877 0.843092 +vt 0.310901 0.916330 +vt 0.320666 0.940742 +vt 0.362655 0.787432 +vt 0.361679 0.755208 +vt 0.362655 0.689782 +vt 0.361679 0.689782 +vt 0.361679 0.688806 +vt 0.362655 0.688806 +vt 0.354843 0.680017 +vt 0.362655 0.681970 +vt 0.357773 0.678064 +vt 0.359726 0.676111 +vt 0.567720 0.997379 +vt 0.569673 0.998356 +vt 0.570649 0.998356 +vt 0.149779 0.706383 +vt 0.149779 0.680017 +vt 0.150755 0.706383 +vt 0.150755 0.680017 +vt 0.571626 0.997379 +vt 0.570649 0.997379 +vt 0.568696 0.995426 +vt 0.565767 0.995426 +vt 0.565767 0.994450 +vt 0.537448 0.987614 +vt 0.538425 0.974920 +vt 0.532566 0.985661 +vt 0.526707 0.981755 +vt 0.523777 0.979802 +vt 0.808915 0.981755 +vt 0.807938 0.975896 +vt 0.805009 0.971014 +vt 0.805009 0.968084 +vt 0.803056 0.965155 +vt 0.802079 0.968084 +vt 0.789385 0.961249 +vt 0.787432 0.955390 +vt 0.784503 0.953437 +vt 0.781573 0.954413 +vt 0.780596 0.948554 +vt 0.782550 0.947578 +vt 0.756184 0.840163 +vt 0.754231 0.839186 +vt 0.781573 0.771808 +vt 0.752278 0.839186 +vt 0.782550 0.762043 +vt 0.805985 0.704430 +vt 0.806962 0.701500 +vt 0.808915 
0.697594 +vt 0.745443 0.498388 +vt 0.740560 0.507177 +vt 0.730795 0.507177 +vt 0.679041 0.506200 +vt 0.604827 0.506200 +vt 0.538425 0.505224 +vt 0.532566 0.506200 +vt 0.524754 0.496435 +vt 0.519871 0.496435 +vt 0.251334 0.967108 +vt 0.249381 0.972967 +vt 0.247428 0.969061 +vt 0.245475 0.973943 +vt 0.245475 0.972967 +vt 0.244499 0.973943 +vt 0.244499 0.976873 +vt 0.245475 0.976873 +vt 0.248405 0.977849 +vt 0.243522 0.980779 +vt 0.246452 0.984685 +vt 0.242546 0.989567 +vt 0.240593 0.989567 +vt 0.240593 0.996403 +vt 0.240593 0.999332 +vt 0.678064 0.799150 +vt 0.678064 0.813797 +vt 0.680017 0.919259 +vt 0.397809 0.635098 +vt 0.398786 0.740560 +vt 0.399762 0.759114 +vt 0.398786 0.759114 +vt 0.015998 0.954413 +vt 0.014045 0.957343 +vt 0.215204 0.983708 +vt 0.221063 0.984685 +vt 0.219110 0.993473 +vt 0.226922 0.986638 +vt 0.711265 0.687829 +vt 0.708336 0.689782 +vt 0.168332 0.952460 +vt 0.170285 0.957343 +vt 0.170285 0.953437 +vt 0.171262 0.950507 +vt 0.173215 0.944648 +vt 0.173215 0.942695 +vt 0.175168 0.941719 +vt 0.176144 0.938789 +vt 0.177121 0.937813 +vt 0.179073 0.936836 +vt 0.181027 0.934883 +vt 0.182003 0.934883 +vt 0.204462 0.954413 +vt 0.229851 0.930001 +vt 0.205439 0.954413 +vt 0.230828 0.930977 +vt 0.205439 0.955390 +vt 0.231804 0.931954 +vt 0.181027 0.933907 +vt 0.178097 0.935860 +vt 0.232781 0.929024 +vt 0.246452 0.932930 +vt 0.246452 0.931954 +vt 0.247428 0.931954 +vt 0.248405 0.921212 +vt 0.688806 0.708336 +vt 0.688806 0.717124 +vt 0.687829 0.725913 +vt 0.069706 0.988591 +vt 0.061894 0.995426 +vt 0.069706 0.959296 +vt 0.680017 0.762043 +vt 0.680017 0.798173 +vt 0.963202 0.998356 +vt 0.971990 0.997379 +vt 0.971990 0.998356 +vt 0.070682 0.923165 +vt 0.071659 0.923165 +vt 0.972967 0.996403 +vt 0.972967 0.991520 +vt 0.960272 0.991520 +vt 0.972967 0.990544 +vt 0.074588 0.918283 +vt 0.075565 0.916330 +vt 0.075565 0.917306 +vt 0.077518 0.911447 +vt 0.077518 0.910471 +vt 0.078494 0.910471 +vt 0.678064 0.481788 +vt 0.593109 0.481788 +vt 0.678064 0.480811 +vt 
0.593109 0.479835 +vt 0.823562 0.782550 +vt 0.823562 0.781573 +vt 0.824539 0.780596 +vt 0.823562 0.775714 +vt 0.591156 0.473976 +vt 0.590179 0.473976 +vt 0.586273 0.479835 +vt 0.590179 0.479835 +vt 0.707359 0.844069 +vt 0.706383 0.846998 +vt 0.693688 0.889964 +vt 0.113648 0.638028 +vt 0.537448 0.475929 +vt 0.540378 0.476906 +vt 0.474953 0.392927 +vt 0.475929 0.431987 +vt 0.474953 0.431987 +vt 0.415386 0.393903 +vt 0.418316 0.382185 +vt 0.990544 0.485694 +vt 0.992497 0.558931 +vt 0.993473 0.486670 +vt 0.279653 0.288441 +vt 0.275747 0.370467 +vt 0.332384 0.372420 +vt 0.655605 0.970037 +vt 0.655605 0.976873 +vt 0.654628 0.970037 +vt 0.617521 0.975896 +vt 0.572602 0.974920 +vt 0.348008 0.291371 +vt 0.998356 0.418316 +vt 0.571626 0.970037 +vt 0.615568 0.966131 +vt 0.611662 0.964178 +vt 0.571626 0.966131 +vt 0.610686 0.962225 +vt 0.570649 0.961249 +vt 0.610686 0.957343 +vt 0.570649 0.954413 +vt 0.614592 0.952460 +vt 0.610686 0.946601 +vt 0.570649 0.947578 +vt 0.316760 0.955390 +vt 0.609709 0.940742 +vt 0.618498 0.937813 +vt 0.712242 0.921212 +vt 0.713218 0.921212 +vt 0.713218 0.920236 +vt 0.715171 0.917306 +vt 0.714195 0.916330 +vt 0.713218 0.888988 +vt 0.717124 0.879223 +vt 0.135131 0.967108 +vt 0.133178 0.963202 +vt 0.125366 0.970037 +vt 0.713218 0.993473 +vt 0.715171 0.994450 +vt 0.309924 0.843092 +vt 0.308948 0.916330 +vt 0.309924 0.940742 +vt 0.308948 0.940742 +vt 0.310901 0.940742 +vt 0.372420 0.788409 +vt 0.372420 0.689782 +vt 0.372420 0.688806 +vt 0.372420 0.684900 +vt 0.375350 0.687829 +vt 0.373397 0.683923 +vt 0.370467 0.681970 +vt 0.360702 0.673182 +vt 0.360702 0.672205 +vt 0.148802 0.680017 +vt 0.148802 0.707359 +vt 0.149779 0.708336 +vt 0.150755 0.707359 +vt 0.151732 0.706383 +vt 0.567720 0.994450 +vt 0.566743 0.994450 +vt 0.566743 0.988591 +vt 0.561861 0.984685 +vt 0.561861 0.983708 +vt 0.560884 0.984685 +vt 0.543307 0.976873 +vt 0.541354 0.974920 +vt 0.537448 0.972967 +vt 0.534519 0.971990 +vt 0.531589 0.969061 +vt 0.522801 0.977849 +vt 0.808915 0.978826 
+vt 0.807938 0.972967 +vt 0.805985 0.970037 +vt 0.805009 0.965155 +vt 0.803056 0.962225 +vt 0.801103 0.960272 +vt 0.799150 0.954413 +vt 0.800126 0.960272 +vt 0.797197 0.954413 +vt 0.786456 0.949531 +vt 0.784503 0.946601 +vt 0.785479 0.946601 +vt 0.783526 0.772785 +vt 0.787432 0.763996 +vt 0.809891 0.706383 +vt 0.808915 0.705406 +vt 0.810868 0.698571 +vt 0.740560 0.495459 +vt 0.745443 0.495459 +vt 0.740560 0.498388 +vt 0.729819 0.498388 +vt 0.532566 0.496435 +vt 0.536472 0.493506 +vt 0.529636 0.493506 +vt 0.253287 0.955390 +vt 0.253287 0.962225 +vt 0.250358 0.960272 +vt 0.246452 0.965155 +vt 0.243522 0.973943 +vt 0.242546 0.977849 +vt 0.241569 0.983708 +vt 0.231804 0.984685 +vt 0.231804 0.991520 +vt 0.231804 0.998356 +vt 0.669276 0.798173 +vt 0.669276 0.812821 +vt 0.677088 0.919259 +vt 0.396833 0.635098 +vt 0.394880 0.635098 +vt 0.395856 0.740560 +vt 0.396833 0.755208 +vt 0.396833 0.762043 +vt 0.388044 0.757161 +vt 0.207392 0.988591 +vt 0.213251 0.991520 +vt 0.215204 0.993473 +vt 0.221063 0.996403 +vt 0.169309 0.961249 +vt 0.170285 0.961249 +vt 0.170285 0.958319 +vt 0.178097 0.956366 +vt 0.178097 0.948554 +vt 0.174191 0.943672 +vt 0.175168 0.942695 +vt 0.178097 0.938789 +vt 0.180050 0.937813 +vt 0.182979 0.935860 +vt 0.206415 0.906565 +vt 0.204462 0.955390 +vt 0.204462 0.957343 +vt 0.203486 0.957343 +vt 0.205439 0.959296 +vt 0.204462 0.959296 +vt 0.206415 0.963202 +vt 0.208368 0.963202 +vt 0.209345 0.964178 +vt 0.208368 0.962225 +vt 0.205439 0.958319 +vt 0.205439 0.956366 +vt 0.233757 0.943672 +vt 0.247428 0.932930 +vt 0.249381 0.932930 +vt 0.125366 0.965155 +vt 0.123413 0.958319 +vt 0.136108 0.961249 +vt 0.134155 0.954413 +vt 0.133178 0.949531 +vt 0.123413 0.950507 +vt 0.132202 0.944648 +vt 0.133178 0.944648 +vt 0.071659 0.988591 +vt 0.071659 0.943672 +vt 0.072635 0.923165 +vt 0.075565 0.918283 +vt 0.680017 0.474953 +vt 0.678064 0.474953 +vt 0.676111 0.465188 +vt 0.601897 0.474953 +vt 0.593109 0.473976 +vt 0.613615 0.402692 +vt 0.991520 0.628263 +vt 0.992497 
0.652675 +vt 0.993473 0.597991 +vt 0.167356 0.287465 +vt 0.160520 0.368514 +vt 0.142943 0.812821 +vt 0.142943 0.694665 +vt 0.153685 0.639004 +vt 0.377303 0.600921 +vt 0.378279 0.600921 +vt 0.378279 0.658534 +vt 0.154661 0.639004 +vt 0.143920 0.694665 +vt 0.879223 0.485694 +vt 0.880199 0.487647 +vt 0.379256 0.598968 +vt 0.881176 0.485694 +vt 0.880199 0.492530 +vt 0.878246 0.492530 +vt 0.879223 0.496435 +vt 0.656581 0.965155 +vt 0.654628 0.965155 +vt 0.653652 0.965155 +vt 0.652675 0.958319 +vt 0.652675 0.954413 +vt 0.652675 0.949531 +vt 0.651699 0.940742 +vt 0.619474 0.936836 +vt 0.715171 0.921212 +vt 0.716148 0.920236 +vt 0.715171 0.920236 +vt 0.716148 0.919259 +vt 0.721030 0.914377 +vt 0.721030 0.879223 +vt 0.721030 0.872387 +vt 0.721030 0.889964 +vt 0.722007 0.868481 +vt 0.721030 0.869458 +vt 0.136108 0.964178 +vt 0.715171 0.996403 +vt 0.715171 0.995426 +vt 0.308948 0.843092 +vt 0.307971 0.940742 +vt 0.375350 0.788409 +vt 0.374373 0.788409 +vt 0.375350 0.749349 +vt 0.374373 0.689782 +vt 0.375350 0.688806 +vt 0.376326 0.688806 +vt 0.426128 0.152708 +vt 0.427104 0.152708 +vt 0.428081 0.155638 +vt 0.429057 0.155638 +vt 0.432963 0.161497 +vt 0.370467 0.678064 +vt 0.444681 0.164426 +vt 0.459329 0.187862 +vt 0.452493 0.188838 +vt 0.459329 0.188838 +vt 0.149779 0.709312 +vt 0.150755 0.709312 +vt 0.151732 0.707359 +vt 0.151732 0.680017 +vt 0.569673 0.994450 +vt 0.567720 0.977849 +vt 0.544284 0.975896 +vt 0.539401 0.973943 +vt 0.535495 0.971014 +vt 0.533542 0.970037 +vt 0.530613 0.967108 +vt 0.526707 0.963202 +vt 0.815750 0.964178 +vt 0.806962 0.970037 +vt 0.803056 0.959296 +vt 0.801103 0.956366 +vt 0.797197 0.948554 +vt 0.796220 0.948554 +vt 0.763020 0.849928 +vt 0.791338 0.779620 +vt 0.810868 0.706383 +vt 0.808915 0.703453 +vt 0.749349 0.494482 +vt 0.740560 0.494482 +vt 0.729819 0.495459 +vt 0.649746 0.494482 +vt 0.583344 0.494482 +vt 0.583344 0.493506 +vt 0.536472 0.491553 +vt 0.528660 0.490577 +vt 0.251334 0.954413 +vt 0.248405 0.959296 +vt 0.238640 0.960272 +vt 
0.236687 0.966131 +vt 0.234734 0.971014 +vt 0.233757 0.978826 +vt 0.231804 0.981755 +vt 0.230828 0.988591 +vt 0.230828 0.995426 +vt 0.230828 0.998356 +vt 0.667323 0.798173 +vt 0.668299 0.812821 +vt 0.382185 0.534519 +vt 0.382185 0.511083 +vt 0.382185 0.562837 +vt 0.384138 0.519871 +vt 0.667323 0.895823 +vt 0.669276 0.904612 +vt 0.386091 0.620451 +vt 0.388044 0.742513 +vt 0.387068 0.757161 +vt 0.206415 0.989567 +vt 0.209345 0.991520 +vt 0.172238 0.969061 +vt 0.173215 0.968084 +vt 0.173215 0.965155 +vt 0.179073 0.958319 +vt 0.179073 0.955390 +vt 0.179073 0.953437 +vt 0.179073 0.948554 +vt 0.179073 0.939766 +vt 0.181027 0.937813 +vt 0.206415 0.907541 +vt 0.207392 0.906565 +vt 0.203486 0.955390 +vt 0.203486 0.958319 +vt 0.203486 0.960272 +vt 0.210321 0.906565 +vt 0.214227 0.909494 +vt 0.216180 0.910471 +vt 0.217157 0.910471 +vt 0.207392 0.965155 +vt 0.208368 0.965155 +vt 0.209345 0.966131 +vt 0.215204 0.960272 +vt 0.227898 0.949531 +vt 0.242546 0.936836 +vt 0.246452 0.933907 +vt 0.250358 0.935860 +vt 0.136108 0.958319 +vt 0.135131 0.954413 +vt 0.134155 0.949531 +vt 0.135131 0.948554 +vt 0.072635 0.993473 +vt 0.072635 0.984685 +vt 0.680017 0.480811 +vt 0.742513 0.479835 +vt 0.738607 0.403668 +vt 0.991520 0.754231 +vt 0.992497 0.729819 +vt 0.993473 0.754231 +vt 0.993473 0.694665 +vt 0.070682 0.288441 +vt 0.039434 0.370467 +vt 0.143920 0.879223 +vt 0.142943 0.934883 +vt 0.143920 0.921212 +vt 0.144896 0.878246 +vt 0.298206 0.824539 +vt 0.298206 0.768879 +vt 0.339219 0.750325 +vt 0.298206 0.740560 +vt 0.286488 0.760090 +vt 0.302112 0.719077 +vt 0.297230 0.719077 +vt 0.286488 0.727866 +vt 0.144896 0.845045 +vt 0.143920 0.834304 +vt 0.144896 0.694665 +vt 0.155638 0.639004 +vt 0.156614 0.639004 +vt 0.878246 0.485694 +vt 0.811844 0.488623 +vt 0.571626 0.914377 +vt 0.513036 0.942695 +vt 0.569673 0.910471 +vt 0.511083 0.939766 +vt 0.805009 0.495459 +vt 0.809891 0.493506 +vt 0.807938 0.497412 +vt 0.567720 0.906565 +vt 0.565767 0.902659 +vt 0.507177 0.933907 +vt 0.655605 0.958319 
+vt 0.653652 0.958319 +vt 0.653652 0.952460 +vt 0.653652 0.947578 +vt 0.653652 0.940742 +vt 0.859693 0.626310 +vt 0.874340 0.632169 +vt 0.859693 0.627286 +vt 0.652675 0.926095 +vt 0.857740 0.661464 +vt 0.855787 0.661464 +vt 0.854810 0.661464 +vt 0.717124 0.920236 +vt 0.721030 0.915353 +vt 0.722007 0.872387 +vt 0.716148 0.996403 +vt 0.307971 0.843092 +vt 0.306995 0.916330 +vt 0.767902 0.680017 +vt 0.744466 0.672205 +vt 0.744466 0.671229 +vt 0.376326 0.788409 +vt 0.375350 0.689782 +vt 0.376326 0.689782 +vt 0.451517 0.189815 +vt 0.008186 0.734701 +vt 0.009163 0.734701 +vt 0.005257 0.736654 +vt 0.009163 0.741537 +vt 0.008186 0.741537 +vt 0.151732 0.709312 +vt 0.152708 0.708336 +vt 0.158567 0.708336 +vt 0.158567 0.704430 +vt 0.158567 0.663417 +vt 0.571626 0.976873 +vt 0.569673 0.974920 +vt 0.567720 0.974920 +vt 0.566743 0.975896 +vt 0.566743 0.974920 +vt 0.565767 0.974920 +vt 0.559908 0.971014 +vt 0.553072 0.966131 +vt 0.548190 0.963202 +vt 0.547213 0.960272 +vt 0.545260 0.958319 +vt 0.532566 0.968084 +vt 0.527683 0.962225 +vt 0.525730 0.960272 +vt 0.814774 0.961249 +vt 0.812821 0.956366 +vt 0.809891 0.947578 +vt 0.802079 0.956366 +vt 0.800126 0.950507 +vt 0.799150 0.947578 +vt 0.798173 0.948554 +vt 0.763996 0.849928 +vt 0.792314 0.779620 +vt 0.795244 0.770832 +vt 0.819656 0.709312 +vt 0.747396 0.483741 +vt 0.751302 0.491553 +vt 0.745443 0.484717 +vt 0.744466 0.492530 +vt 0.729819 0.492530 +vt 0.704430 0.492530 +vt 0.600921 0.491553 +vt 0.535495 0.482764 +vt 0.527683 0.481788 +vt 0.243522 0.951484 +vt 0.241569 0.953437 +vt 0.237663 0.959296 +vt 0.235710 0.966131 +vt 0.234734 0.965155 +vt 0.233757 0.970037 +vt 0.233757 0.971014 +vt 0.232781 0.973943 +vt 0.504247 0.857740 +vt 0.506200 0.868481 +vt 0.505224 0.863599 +vt 0.502294 0.857740 +vt 0.504247 0.868481 +vt 0.505224 0.872387 +vt 0.506200 0.876293 +vt 0.503271 0.880199 +vt 0.503271 0.884105 +vt 0.501318 0.887035 +vt 0.500341 0.886058 +vt 0.490577 0.899729 +vt 0.492529 0.900706 +vt 0.023810 0.829421 +vt 0.023810 
0.912424 +vt 0.022834 0.829421 +vt 0.023810 0.935860 +vt 0.022834 0.935860 +vt 0.383162 0.640957 +vt 0.386091 0.742513 +vt 0.184932 0.984685 +vt 0.174191 0.971990 +vt 0.175168 0.971014 +vt 0.176144 0.970037 +vt 0.183956 0.967108 +vt 0.181027 0.963202 +vt 0.181027 0.960272 +vt 0.180050 0.955390 +vt 0.179073 0.949531 +vt 0.186885 0.949531 +vt 0.186885 0.948554 +vt 0.201533 0.930001 +vt 0.209345 0.906565 +vt 0.215204 0.914377 +vt 0.218133 0.910471 +vt 0.219110 0.909494 +vt 0.207392 0.967108 +vt 0.208368 0.966131 +vt 0.209345 0.967108 +vt 0.213251 0.964178 +vt 0.248405 0.934883 +vt 0.253287 0.939766 +vt 0.136108 0.953437 +vt 0.116578 0.638028 +vt 0.121460 0.639004 +vt 0.119507 0.716148 +vt 0.750325 0.479835 +vt 0.746419 0.402692 +vt 0.991520 0.762043 +vt 0.992497 0.757161 +vt 0.994450 0.759114 +vt 0.005257 0.289418 +vt 0.011116 0.289418 +vt 0.010139 0.333360 +vt 0.015998 0.370467 +vt 0.142943 0.958319 +vt 0.143920 0.958319 +vt 0.144896 0.949531 +vt 0.285512 0.829421 +vt 0.299183 0.825515 +vt 0.310901 0.826492 +vt 0.337266 0.826492 +vt 0.335313 0.826492 +vt 0.339219 0.826492 +vt 0.339219 0.794267 +vt 0.339219 0.763020 +vt 0.339219 0.729819 +vt 0.339219 0.720054 +vt 0.338243 0.718101 +vt 0.302112 0.718101 +vt 0.299183 0.718101 +vt 0.864575 0.290394 +vt 0.866528 0.288441 +vt 0.877270 0.307971 +vt 0.280629 0.704430 +vt 0.144896 0.819656 +vt 0.144896 0.695641 +vt 0.875317 0.430034 +vt 0.811844 0.484717 +vt 0.808915 0.488623 +vt 0.805009 0.491553 +vt 0.388044 0.610686 +vt 0.400739 0.611662 +vt 0.399762 0.618498 +vt 0.800126 0.497412 +vt 0.805009 0.501318 +vt 0.802079 0.504247 +vt 0.563814 0.898753 +vt 0.504247 0.928048 +vt 0.654628 0.952460 +vt 0.654628 0.947578 +vt 0.654628 0.940742 +vt 0.874340 0.630216 +vt 0.877270 0.575532 +vt 0.865552 0.583344 +vt 0.868481 0.575532 +vt 0.857740 0.571626 +vt 0.863599 0.569673 +vt 0.542331 0.863599 +vt 0.542331 0.822586 +vt 0.543307 0.851881 +vt 0.543307 0.805985 +vt 0.810868 0.549166 +vt 0.805985 0.547213 +vt 0.779620 0.536472 +vt 
0.779620 0.537448 +vt 0.800126 0.575532 +vt 0.768879 0.562837 +vt 0.763996 0.573579 +vt 0.763996 0.575532 +vt 0.761067 0.586273 +vt 0.763996 0.588226 +vt 0.797197 0.602874 +vt 0.796220 0.603850 +vt 0.852857 0.665370 +vt 0.722007 0.915353 +vt 0.837233 0.703453 +vt 0.719077 0.660487 +vt 0.836257 0.706383 +vt 0.835280 0.707359 +vt 0.376326 0.760090 +vt 0.367538 0.190791 +vt 0.454446 0.193721 +vt 0.004280 0.735678 +vt 0.004280 0.736654 +vt 0.004280 0.737631 +vt 0.003304 0.737631 +vt 0.007210 0.742513 +vt 0.158567 0.709312 +vt 0.159544 0.704430 +vt 0.158567 0.699547 +vt 0.159544 0.663417 +vt 0.571626 0.975896 +vt 0.568696 0.974920 +vt 0.566743 0.973943 +vt 0.560884 0.970037 +vt 0.556002 0.967108 +vt 0.552096 0.963202 +vt 0.550143 0.961249 +vt 0.547213 0.957343 +vt 0.546237 0.957343 +vt 0.541354 0.953437 +vt 0.525730 0.957343 +vt 0.524754 0.955390 +vt 0.523777 0.955390 +vt 0.521824 0.947578 +vt 0.522801 0.947578 +vt 0.278676 0.599944 +vt 0.279653 0.599944 +vt 0.279653 0.608733 +vt 0.808915 0.939766 +vt 0.806962 0.939766 +vt 0.800126 0.947578 +vt 0.765949 0.850904 +vt 0.764973 0.850904 +vt 0.797197 0.773761 +vt 0.797197 0.771808 +vt 0.793291 0.780596 +vt 0.819656 0.710289 +vt 0.747396 0.482764 +vt 0.744466 0.482764 +vt 0.741537 0.482764 +vt 0.741537 0.483741 +vt 0.719077 0.483741 +vt 0.609709 0.482764 +vt 0.259146 0.726889 +vt 0.259146 0.801103 +vt 0.258170 0.754231 +vt 0.258170 0.795244 +vt 0.257193 0.805009 +vt 0.259146 0.808915 +vt 0.258170 0.811844 +vt 0.257193 0.811844 +vt 0.527683 0.850904 +vt 0.519871 0.854810 +vt 0.526707 0.849928 +vt 0.522801 0.851881 +vt 0.514989 0.855787 +vt 0.511083 0.857740 +vt 0.511083 0.856763 +vt 0.231804 0.969061 +vt 0.231804 0.972967 +vt 0.503271 0.858716 +vt 0.500341 0.857740 +vt 0.501318 0.858716 +vt 0.501318 0.859693 +vt 0.502294 0.862622 +vt 0.504247 0.869458 +vt 0.503271 0.877270 +vt 0.499365 0.886058 +vt 0.019904 0.901682 +vt 0.019904 0.900706 +vt 0.021857 0.906565 +vt 0.021857 0.829421 +vt 0.863599 0.767902 +vt 0.863599 0.737631 
+vt 0.863599 0.701500 +vt 0.863599 0.631192 +vt 0.862622 0.737631 +vt 0.862622 0.631192 +vt 0.185909 0.982732 +vt 0.194697 0.977849 +vt 0.395856 0.986638 +vt 0.395856 0.987614 +vt 0.394880 0.986638 +vt 0.362655 0.973943 +vt 0.362655 0.904612 +vt 0.873364 0.701500 +vt 0.873364 0.632169 +vt 0.195674 0.976873 +vt 0.184932 0.966131 +vt 0.182003 0.960272 +vt 0.188838 0.953437 +vt 0.189815 0.952460 +vt 0.187862 0.949531 +vt 0.187862 0.948554 +vt 0.190791 0.944648 +vt 0.218133 0.911447 +vt 0.219110 0.908518 +vt 0.206415 0.968084 +vt 0.192744 0.988591 +vt 0.195674 0.992497 +vt 0.198603 0.995426 +vt 0.197627 0.996403 +vt 0.993473 0.762043 +vt 0.198603 0.999332 +vt 0.195674 0.995426 +vt 0.000374 0.289418 +vt 0.010139 0.370467 +vt 0.142943 0.964178 +vt 0.143920 0.964178 +vt 0.143920 0.962225 +vt 0.144896 0.962225 +vt 0.285512 0.841139 +vt 0.334337 0.840163 +vt 0.881176 0.770832 +vt 0.897776 0.777667 +vt 0.881176 0.776690 +vt 0.350937 0.839186 +vt 0.351914 0.836257 +vt 0.351914 0.781573 +vt 0.351914 0.715171 +vt 0.825515 0.289418 +vt 0.861646 0.290394 +vt 0.811844 0.292347 +vt 0.809891 0.429057 +vt 0.804032 0.429057 +vt 0.805985 0.484717 +vt 0.401715 0.604827 +vt 0.388044 0.604827 +vt 0.275747 0.869458 +vt 0.274770 0.873364 +vt 0.274770 0.869458 +vt 0.270864 0.869458 +vt 0.272817 0.874340 +vt 0.268911 0.873364 +vt 0.270864 0.877270 +vt 0.274770 0.876293 +vt 0.387068 0.614592 +vt 0.385115 0.618498 +vt 0.396833 0.624357 +vt 0.795244 0.502294 +vt 0.797197 0.502294 +vt 0.797197 0.508153 +vt 0.560884 0.893870 +vt 0.502294 0.924142 +vt 0.499365 0.919259 +vt 0.543307 0.895823 +vt 0.540378 0.881176 +vt 0.541354 0.880199 +vt 0.877270 0.574555 +vt 0.542331 0.878246 +vt 0.555025 0.875317 +vt 0.555025 0.859693 +vt 0.556002 0.789385 +vt 0.801103 0.531589 +vt 0.783526 0.524754 +vt 0.778643 0.536472 +vt 0.773761 0.547213 +vt 0.769855 0.554049 +vt 0.767902 0.562837 +vt 0.763996 0.569673 +vt 0.763020 0.573579 +vt 0.409527 0.276723 +vt 0.401715 0.266958 +vt 0.408551 0.263052 +vt 0.397809 
0.267935 +vt 0.763996 0.589203 +vt 0.753255 0.586273 +vt 0.407574 0.261099 +vt 0.466164 0.225945 +vt 0.001351 0.507177 +vt 0.001351 0.541354 +vt 0.001351 0.540378 +vt 0.003304 0.736654 +vt 0.158567 0.710289 +vt 0.159544 0.709312 +vt 0.159544 0.708336 +vt 0.159544 0.699547 +vt 0.571626 0.974920 +vt 0.568696 0.973943 +vt 0.567720 0.973943 +vt 0.561861 0.969061 +vt 0.556978 0.966131 +vt 0.551119 0.961249 +vt 0.547213 0.956366 +vt 0.546237 0.954413 +vt 0.541354 0.950507 +vt 0.539401 0.948554 +vt 0.523777 0.946601 +vt 0.277700 0.599944 +vt 0.278676 0.608733 +vt 0.438822 0.986638 +vt 0.515965 0.987614 +vt 0.515965 0.986638 +vt 0.805985 0.776690 +vt 0.805009 0.776690 +vt 0.776690 0.848951 +vt 0.775714 0.847975 +vt 0.801103 0.776690 +vt 0.800126 0.775714 +vt 0.801103 0.774737 +vt 0.805985 0.773761 +vt 0.494482 0.913400 +vt 0.439799 0.886058 +vt 0.436869 0.883129 +vt 0.436869 0.882152 +vt 0.435893 0.883129 +vt 0.435893 0.884105 +vt 0.027716 0.954413 +vt 0.026740 0.954413 +vt 0.026740 0.950507 +vt 0.025763 0.954413 +vt 0.495459 0.912424 +vt 0.499365 0.903635 +vt 0.821609 0.710289 +vt 0.747396 0.480811 +vt 0.741537 0.481788 +vt 0.332384 0.953437 +vt 0.333360 0.953437 +vt 0.328478 0.963202 +vt 0.730795 0.481788 +vt 0.707359 0.480811 +vt 0.710289 0.482764 +vt 0.011116 0.655605 +vt 0.011116 0.555025 +vt 0.012092 0.574555 +vt 0.258170 0.706383 +vt 0.257193 0.771808 +vt 0.256217 0.808915 +vt 0.527683 0.846022 +vt 0.524754 0.847975 +vt 0.518895 0.852857 +vt 0.512059 0.855787 +vt 0.504247 0.855787 +vt 0.502294 0.855787 +vt 0.501318 0.856763 +vt 0.498388 0.856763 +vt 0.496435 0.863599 +vt 0.497412 0.866528 +vt 0.496435 0.871411 +vt 0.494482 0.876293 +vt 0.490577 0.883129 +vt 0.481788 0.895823 +vt 0.013069 0.831374 +vt 0.013069 0.899729 +vt 0.018928 0.899729 +vt 0.122437 0.857740 +vt 0.122437 0.856763 +vt 0.122437 0.855787 +vt 0.537448 0.991520 +vt 0.538425 0.991520 +vt 0.537448 0.992497 +vt 0.541354 0.993473 +vt 0.534519 0.997379 +vt 0.873364 0.770832 +vt 0.872387 0.770832 +vt 
0.523777 0.995426 +vt 0.524754 0.994450 +vt 0.532566 0.997379 +vt 0.533542 0.997379 +vt 0.113648 0.636075 +vt 0.182003 0.637051 +vt 0.113648 0.637051 +vt 0.182003 0.638028 +vt 0.873364 0.703453 +vt 0.872387 0.703453 +vt 0.867505 0.703453 +vt 0.866528 0.703453 +vt 0.866528 0.702477 +vt 0.867505 0.701500 +vt 0.872387 0.701500 +vt 0.396833 0.988591 +vt 0.396833 0.987614 +vt 0.363632 0.973943 +vt 0.363632 0.904612 +vt 0.196650 0.976873 +vt 0.184932 0.964178 +vt 0.192744 0.958319 +vt 0.192744 0.956366 +vt 0.192744 0.953437 +vt 0.188838 0.950507 +vt 0.221063 0.909494 +vt 0.235710 0.888988 +vt 0.188838 0.984685 +vt 0.187862 0.986638 +vt 0.193721 0.991520 +vt 0.192744 0.992497 +vt 0.884105 0.777667 +vt 0.888988 0.778643 +vt 0.887035 0.859693 +vt 0.003304 0.370467 +vt 0.143920 0.971014 +vt 0.144896 0.969061 +vt 0.145873 0.968084 +vt 0.946601 0.781573 +vt 0.947578 0.774737 +vt 0.882152 0.763996 +vt 0.402692 0.277700 +vt 0.404645 0.282582 +vt 0.401715 0.279653 +vt 0.400739 0.284535 +vt 0.404645 0.287465 +vt 0.406598 0.288441 +vt 0.407574 0.291371 +vt 0.401715 0.293324 +vt 0.809891 0.171262 +vt 0.810868 0.225945 +vt 0.804032 0.172238 +vt 0.805009 0.263052 +vt 0.805009 0.346055 +vt 0.402692 0.467141 +vt 0.401715 0.550143 +vt 0.389021 0.549166 +vt 0.275747 0.813797 +vt 0.274770 0.755208 +vt 0.270864 0.727866 +vt 0.270864 0.813797 +vt 0.269888 0.870434 +vt 0.378279 0.598968 +vt 0.379256 0.597015 +vt 0.369491 0.578461 +vt 0.366561 0.443705 +vt 0.364608 0.376326 +vt 0.362655 0.531589 +vt 0.014045 0.599944 +vt 0.018928 0.609709 +vt 0.017951 0.826492 +vt 0.017951 0.828445 +vt 0.929024 0.905588 +vt 0.929024 0.903635 +vt 0.931954 0.903635 +vt 0.932930 0.901682 +vt 0.935860 0.900706 +vt 0.937813 0.895823 +vt 0.965155 0.812821 +vt 0.978826 0.759114 +vt 0.985661 0.286488 +vt 0.999332 0.286488 +vt 0.999332 0.284535 +vt 0.999332 0.060917 +vt 0.999332 0.015998 +vt 0.984685 0.632169 +vt 0.983708 0.516942 +vt 0.984685 0.471047 +vt 0.983708 0.471047 +vt 0.983708 0.470070 +vt 0.985661 0.466164 
+vt 0.984685 0.466164 +vt 0.984685 0.465188 +vt 0.977849 0.738607 +vt 0.972967 0.739584 +vt 0.967108 0.749349 +vt 0.009163 0.571626 +vt 0.004280 0.631192 +vt 0.003304 0.725913 +vt 0.139037 0.835280 +vt 0.139037 0.839186 +vt 0.129272 0.853834 +vt 0.140990 0.843092 +vt 0.376326 0.599944 +vt 0.267935 0.875317 +vt 0.267935 0.878246 +vt 0.127319 0.851881 +vt 0.125366 0.854810 +vt 0.122437 0.851881 +vt 0.271841 0.881176 +vt 0.273794 0.879223 +vt 0.669276 0.743490 +vt 0.668299 0.745443 +vt 0.656581 0.739584 +vt 0.393903 0.627286 +vt 0.792314 0.504247 +vt 0.563814 0.889964 +vt 0.556978 0.888988 +vt 0.559908 0.874340 +vt 0.559908 0.789385 +vt 0.556002 0.771808 +vt 0.556002 0.705406 +vt 0.433940 0.322619 +vt 0.427104 0.311877 +vt 0.490577 0.290394 +vt 0.422222 0.301136 +vt 0.419292 0.293324 +vt 0.467141 0.228875 +vt 0.411480 0.279653 +vt 0.466164 0.227898 +vt 0.156614 0.738607 +vt 0.156614 0.753255 +vt 0.155638 0.738607 +vt 0.155638 0.753255 +vt 0.766926 0.960272 +vt 0.751302 0.930001 +vt 0.777667 0.950507 +vt 0.752278 0.930001 +vt 0.753255 0.928048 +vt 0.753255 0.927071 +vt 0.747396 0.921212 +vt 0.723960 0.892894 +vt 0.727866 0.885082 +vt 0.562837 0.968084 +vt 0.556002 0.963202 +vt 0.552096 0.960272 +vt 0.548190 0.954413 +vt 0.543307 0.949531 +vt 0.539401 0.944648 +vt 0.537448 0.942695 +vt 0.523777 0.938789 +vt 0.536472 0.932930 +vt 0.445658 0.998356 +vt 0.515965 0.997379 +vt 0.995426 0.660487 +vt 0.995426 0.590179 +vt 0.995426 0.593109 +vt 0.996403 0.590179 +vt 0.153685 0.749349 +vt 0.152708 0.749349 +vt 0.152708 0.762043 +vt 0.442728 0.872387 +vt 0.442728 0.873364 +vt 0.442728 0.874340 +vt 0.805009 0.775714 +vt 0.801103 0.775714 +vt 0.439799 0.879223 +vt 0.427104 0.877270 +vt 0.148802 0.724936 +vt 0.149779 0.724936 +vt 0.151732 0.734701 +vt 0.152708 0.734701 +vt 0.152708 0.735678 +vt 0.431987 0.893870 +vt 0.490577 0.922189 +vt 0.494482 0.914377 +vt 0.028693 0.954413 +vt 0.027716 0.950507 +vt 0.027716 0.943672 +vt 0.025763 0.943672 +vt 0.027716 0.933907 +vt 0.025763 
0.933907 +vt 0.025763 0.894847 +vt 0.026740 0.894847 +vt 0.036505 0.932930 +vt 0.035528 0.894847 +vt 0.826492 0.749349 +vt 0.826492 0.758137 +vt 0.825515 0.749349 +vt 0.825515 0.759114 +vt 0.826492 0.759114 +vt 0.819656 0.761067 +vt 0.819656 0.760090 +vt 0.824539 0.760090 +vt 0.024787 0.894847 +vt 0.024787 0.943672 +vt 0.025763 0.948554 +vt 0.017951 0.952460 +vt 0.017951 0.954413 +vt 0.335313 0.947578 +vt 0.333360 0.949531 +vt 0.331407 0.949531 +vt 0.325548 0.960272 +vt 0.327501 0.962225 +vt 0.328478 0.985661 +vt 0.327501 0.988591 +vt 0.253287 0.634122 +vt 0.256217 0.632169 +vt 0.253287 0.643887 +vt 0.254264 0.643887 +vt 0.362655 0.890941 +vt 0.362655 0.891917 +vt 0.362655 0.889964 +vt 0.361679 0.889964 +vt 0.254264 0.645840 +vt 0.256217 0.648769 +vt 0.257193 0.629239 +vt 0.257193 0.686853 +vt 0.259146 0.626310 +vt 0.256217 0.801103 +vt 0.255240 0.805009 +vt 0.529636 0.842116 +vt 0.523777 0.832351 +vt 0.521824 0.838210 +vt 0.517918 0.843092 +vt 0.514012 0.846998 +vt 0.509130 0.849928 +vt 0.506200 0.849928 +vt 0.502294 0.854810 +vt 0.502294 0.856763 +vt 0.500341 0.854810 +vt 0.497412 0.853834 +vt 0.495459 0.853834 +vt 0.496435 0.854810 +vt 0.495459 0.863599 +vt 0.496435 0.868481 +vt 0.495459 0.871411 +vt 0.492529 0.878246 +vt 0.489600 0.882152 +vt 0.124390 0.948554 +vt 0.122437 0.932930 +vt 0.124390 0.932930 +vt 0.123413 0.863599 +vt 0.124390 0.863599 +vt 0.125366 0.863599 +vt 0.123413 0.862622 +vt 0.123413 0.857740 +vt 0.123413 0.856763 +vt 0.123413 0.855787 +vt 0.533542 0.996403 +vt 0.523777 0.993473 +vt 0.522801 0.994450 +vt 0.182003 0.627286 +vt 0.182003 0.628263 +vt 0.163450 0.627286 +vt 0.162473 0.628263 +vt 0.113648 0.627286 +vt 0.410504 0.977849 +vt 0.411480 0.977849 +vt 0.411480 0.985661 +vt 0.411480 0.987614 +vt 0.409527 0.987614 +vt 0.410504 0.988591 +vt 0.404645 0.988591 +vt 0.404645 0.989567 +vt 0.403668 0.988591 +vt 0.403668 0.989567 +vt 0.402692 0.988591 +vt 0.402692 0.989567 +vt 0.402692 0.975896 +vt 0.396833 0.976873 +vt 0.395856 0.976873 +vt 
0.395856 0.977849 +vt 0.394880 0.977849 +vt 0.372420 0.913400 +vt 0.372420 0.973943 +vt 0.372420 0.896800 +vt 0.197627 0.964178 +vt 0.195674 0.959296 +vt 0.195674 0.956366 +vt 0.244499 0.897776 +vt 0.240593 0.893870 +vt 0.236687 0.888011 +vt 0.241569 0.893870 +vt 0.240593 0.890941 +vt 0.182979 0.986638 +vt 0.187862 0.987614 +vt 0.878246 0.859693 +vt 0.877270 0.777667 +vt 0.882152 0.859693 +vt 0.884105 0.860669 +vt 0.145873 0.972967 +vt 0.147826 0.974920 +vt 0.948554 0.768879 +vt 0.948554 0.764973 +vt 0.882152 0.760090 +vt 0.400739 0.273794 +vt 0.397809 0.277700 +vt 0.398786 0.282582 +vt 0.400739 0.288441 +vt 0.388044 0.294300 +vt 0.402692 0.385115 +vt 0.389021 0.416363 +vt 0.275747 0.680017 +vt 0.270864 0.594085 +vt 0.269888 0.755208 +vt 0.379256 0.480811 +vt 0.369491 0.299183 +vt 0.365585 0.299183 +vt 0.362655 0.301136 +vt 0.013069 0.829421 +vt 0.066776 0.793291 +vt 0.064823 0.794267 +vt 0.061894 0.790361 +vt 0.057988 0.791338 +vt 0.063847 0.797197 +vt 0.056035 0.793291 +vt 0.062870 0.799150 +vt 0.055058 0.796220 +vt 0.060917 0.802079 +vt 0.055058 0.803056 +vt 0.059941 0.806962 +vt 0.056035 0.883129 +vt 0.056035 0.893870 +vt 0.060917 0.893870 +vt 0.966131 0.813797 +vt 0.983708 0.761067 +vt 0.989567 0.748372 +vt 0.987614 0.747396 +vt 0.984685 0.746419 +vt 0.983708 0.745443 +vt 0.984685 0.744466 +vt 0.983708 0.742513 +vt 0.988591 0.669276 +vt 0.988591 0.516942 +vt 0.989567 0.471047 +vt 0.988591 0.469094 +vt 0.970037 0.734701 +vt 0.976873 0.733725 +vt 0.969061 0.736654 +vt 0.970037 0.738607 +vt 0.008186 0.555025 +vt 0.009163 0.557955 +vt 0.008186 0.557955 +vt 0.004280 0.557955 +vt 0.002327 0.555025 +vt 0.003304 0.557955 +vt 0.003304 0.641934 +vt 0.139037 0.751302 +vt 0.129272 0.770832 +vt 0.127319 0.770832 +vt 0.122437 0.837233 +vt 0.707359 0.670252 +vt 0.677088 0.729819 +vt 0.658534 0.736654 +vt 0.561861 0.888011 +vt 0.561861 0.705406 +vt 0.367538 0.660487 +vt 0.374373 0.673182 +vt 0.362655 0.662440 +vt 0.369491 0.676111 +vt 0.503271 0.285512 +vt 0.481788 0.223016 
+vt 0.157591 0.738607 +vt 0.157591 0.753255 +vt 0.156614 0.731772 +vt 0.155638 0.731772 +vt 0.782550 0.945625 +vt 0.755208 0.913400 +vt 0.732748 0.881176 +vt 0.759114 0.910471 +vt 0.736654 0.878246 +vt 0.553072 0.959296 +vt 0.742513 0.873364 +vt 0.762043 0.908518 +vt 0.765949 0.905588 +vt 0.767902 0.903635 +vt 0.772785 0.906565 +vt 0.745443 0.870434 +vt 0.550143 0.948554 +vt 0.550143 0.953437 +vt 0.549166 0.953437 +vt 0.547213 0.946601 +vt 0.540378 0.944648 +vt 0.537448 0.939766 +vt 0.996403 0.666346 +vt 0.996403 0.660487 +vt 0.996403 0.593109 +vt 0.997379 0.590179 +vt 0.153685 0.747396 +vt 0.151732 0.744466 +vt 0.432963 0.866528 +vt 0.433940 0.866528 +vt 0.430034 0.870434 +vt 0.440775 0.877270 +vt 0.429057 0.870434 +vt 0.152708 0.742513 +vt 0.151732 0.743490 +vt 0.151732 0.742513 +vt 0.429057 0.871411 +vt 0.440775 0.878246 +vt 0.426128 0.876293 +vt 0.151732 0.736654 +vt 0.431010 0.894847 +vt 0.489600 0.923165 +vt 0.494482 0.924142 +vt 0.036505 0.949531 +vt 0.036505 0.943672 +vt 0.037481 0.943672 +vt 0.037481 0.932930 +vt 0.037481 0.894847 +vt 0.825515 0.748372 +vt 0.819656 0.748372 +vt 0.375350 0.889964 +vt 0.375350 0.888988 +vt 0.375350 0.888011 +vt 0.253287 0.645840 +vt 0.252311 0.729819 +vt 0.253287 0.729819 +vt 0.255240 0.736654 +vt 0.255240 0.801103 +vt 0.531589 0.836257 +vt 0.549166 0.765949 +vt 0.548190 0.764973 +vt 0.548190 0.765949 +vt 0.533542 0.804032 +vt 0.521824 0.835280 +vt 0.518895 0.840163 +vt 0.514012 0.846022 +vt 0.507177 0.849928 +vt 0.499365 0.852857 +vt 0.499365 0.853834 +vt 0.496435 0.851881 +vt 0.494482 0.850904 +vt 0.493506 0.851881 +vt 0.490577 0.849928 +vt 0.463235 0.829421 +vt 0.489600 0.849928 +vt 0.490577 0.850904 +vt 0.493506 0.853834 +vt 0.487647 0.864575 +vt 0.495459 0.864575 +vt 0.494482 0.871411 +vt 0.491553 0.878246 +vt 0.487647 0.882152 +vt 0.125366 0.948554 +vt 0.134155 0.863599 +vt 0.134155 0.927071 +vt 0.135131 0.863599 +vt 0.136108 0.857740 +vt 0.136108 0.856763 +vt 0.136108 0.855787 +vt 0.525730 0.986638 +vt 0.524754 
0.987614 +vt 0.522801 0.992497 +vt 0.182003 0.626310 +vt 0.163450 0.625333 +vt 0.113648 0.626310 +vt 0.410504 0.975896 +vt 0.409527 0.976873 +vt 0.404645 0.975896 +vt 0.403668 0.975896 +vt 0.396833 0.975896 +vt 0.395856 0.975896 +vt 0.373397 0.973943 +vt 0.373397 0.896800 +vt 0.374373 0.891917 +vt 0.374373 0.896800 +vt 0.032599 0.620451 +vt 0.025763 0.619474 +vt 0.041387 0.543307 +vt 0.250358 0.902659 +vt 0.382185 0.299183 +vt 0.382185 0.284535 +vt 0.383162 0.284535 +vt 0.250358 0.901682 +vt 0.245475 0.896800 +vt 0.244499 0.893870 +vt 0.868481 0.777667 +vt 0.872387 0.777667 +vt 0.870434 0.817703 +vt 0.871411 0.859693 +vt 0.878246 0.860669 +vt 0.882152 0.861646 +vt 0.878246 0.861646 +vt 0.874340 0.861646 +vt 0.948554 0.760090 +vt 0.883129 0.753255 +vt 0.397809 0.268911 +vt 0.395856 0.271841 +vt 0.395856 0.276723 +vt 0.385115 0.284535 +vt 0.387068 0.288441 +vt 0.275747 0.552096 +vt 0.275747 0.557955 +vt 0.272817 0.555025 +vt 0.269888 0.554049 +vt 0.379256 0.281606 +vt 0.042364 0.777667 +vt 0.059941 0.784503 +vt 0.057011 0.786456 +vt 0.368514 0.295277 +vt 0.365585 0.291371 +vt 0.365585 0.296253 +vt 0.362655 0.297230 +vt 0.361679 0.295277 +vt 0.361679 0.291371 +vt 0.052129 0.794267 +vt 0.054082 0.799150 +vt 0.054082 0.882152 +vt 0.054082 0.893870 +vt 0.055058 0.952460 +vt 0.059941 0.963202 +vt 0.055058 0.967108 +vt 0.061894 0.966131 +vt 0.988591 0.746419 +vt 0.989567 0.745443 +vt 0.990544 0.743490 +vt 0.989567 0.741537 +vt 0.989567 0.516942 +vt 0.977849 0.685876 +vt 0.975896 0.730795 +vt 0.972967 0.687829 +vt 0.971014 0.733725 +vt 0.010139 0.545260 +vt 0.009163 0.547213 +vt 0.005257 0.548190 +vt 0.008186 0.550143 +vt 0.005257 0.551119 +vt 0.003304 0.550143 +vt 0.003304 0.551119 +vt 0.141967 0.658534 +vt 0.140014 0.662440 +vt 0.129272 0.648769 +vt 0.139037 0.666346 +vt 0.128296 0.718101 +vt 0.126343 0.750325 +vt 0.124390 0.750325 +vt 0.122437 0.770832 +vt 0.718101 0.653652 +vt 0.757161 0.575532 +vt 0.745443 0.569673 +vt 0.762043 0.559908 +vt 0.748372 0.557955 +vt 
0.750325 0.554049 +vt 0.365585 0.654628 +vt 0.363632 0.657558 +vt 0.360702 0.658534 +vt 0.510106 0.282582 +vt 0.507177 0.283559 +vt 0.488623 0.220086 +vt 0.157591 0.731772 +vt 0.158567 0.724936 +vt 0.156614 0.727866 +vt 0.155638 0.726889 +vt 0.786456 0.942695 +vt 0.791338 0.938789 +vt 0.797197 0.934883 +vt 0.749349 0.868481 +vt 0.550143 0.943672 +vt 0.548190 0.946601 +vt 0.547213 0.941719 +vt 0.538425 0.938789 +vt 0.997379 0.666346 +vt 0.997379 0.660487 +vt 0.853834 0.929024 +vt 0.880199 0.993473 +vt 0.876293 0.993473 +vt 0.152708 0.744466 +vt 0.152708 0.743490 +vt 0.425151 0.897776 +vt 0.425151 0.898753 +vt 0.423198 0.904612 +vt 0.426128 0.898753 +vt 0.151732 0.741537 +vt 0.152708 0.736654 +vt 0.431010 0.895823 +vt 0.489600 0.924142 +vt 0.492529 0.927071 +vt 0.495459 0.927071 +vt 0.037481 0.947578 +vt 0.038458 0.943672 +vt 0.038458 0.894847 +vt 0.043340 0.894847 +vt 0.044317 0.894847 +vt 0.374373 0.882152 +vt 0.373397 0.881176 +vt 0.373397 0.860669 +vt 0.374373 0.797197 +vt 0.363632 0.790361 +vt 0.362655 0.790361 +vt 0.253287 0.730795 +vt 0.554049 0.765949 +vt 0.555025 0.764973 +vt 0.555025 0.765949 +vt 0.253287 0.731772 +vt 0.252311 0.731772 +vt 0.555025 0.766926 +vt 0.549166 0.764973 +vt 0.547213 0.764973 +vt 0.539401 0.761067 +vt 0.539401 0.762043 +vt 0.520848 0.835280 +vt 0.516942 0.841139 +vt 0.511083 0.846998 +vt 0.506200 0.848951 +vt 0.495459 0.850904 +vt 0.493506 0.848951 +vt 0.491553 0.848951 +vt 0.462258 0.828445 +vt 0.460305 0.826492 +vt 0.458352 0.825515 +vt 0.456399 0.826492 +vt 0.459329 0.833327 +vt 0.471047 0.846998 +vt 0.486670 0.864575 +vt 0.486670 0.865552 +vt 0.484717 0.869458 +vt 0.480812 0.874340 +vt 0.474953 0.879223 +vt 0.134155 0.941719 +vt 0.135131 0.941719 +vt 0.135131 0.927071 +vt 0.136108 0.863599 +vt 0.522801 0.980779 +vt 0.524754 0.985661 +vt 0.523777 0.985661 +vt 0.182003 0.620451 +vt 0.113648 0.625333 +vt 0.409527 0.975896 +vt 0.111695 0.619474 +vt 0.110719 0.619474 +vt 0.047246 0.621427 +vt 0.110719 0.625333 +vt 0.374373 0.973943 
+vt 0.374373 0.911447 +vt 0.056035 0.544284 +vt 0.381209 0.299183 +vt 0.382185 0.393903 +vt 0.383162 0.299183 +vt 0.384138 0.284535 +vt 0.251334 0.899729 +vt 0.249381 0.898753 +vt 0.860669 0.778643 +vt 0.866528 0.817703 +vt 0.866528 0.859693 +vt 0.871411 0.860669 +vt 0.869458 0.862622 +vt 0.949531 0.756184 +vt 0.883129 0.747396 +vt 0.394880 0.264029 +vt 0.391950 0.265005 +vt 0.390974 0.269888 +vt 0.383162 0.280629 +vt 0.272817 0.546237 +vt 0.274770 0.549166 +vt 0.272817 0.548190 +vt 0.268911 0.552096 +vt 0.040411 0.778643 +vt 0.053105 0.788409 +vt 0.051152 0.790361 +vt 0.363632 0.289418 +vt 0.049199 0.799150 +vt 0.049199 0.882152 +vt 0.051152 0.882152 +vt 0.050176 0.967108 +vt 0.404645 0.294300 +vt 0.408551 0.294300 +vt 0.408551 0.297230 +vt 0.054082 0.969061 +vt 0.054082 0.971014 +vt 0.062870 0.968084 +vt 0.407574 0.301136 +vt 0.408551 0.300159 +vt 0.408551 0.303089 +vt 0.056035 0.972967 +vt 0.064823 0.971014 +vt 0.061894 0.974920 +vt 0.066776 0.971990 +vt 0.971990 0.456399 +vt 0.976873 0.460305 +vt 0.972967 0.570649 +vt 0.410504 0.424175 +vt 0.410504 0.540378 +vt 0.405621 0.540378 +vt 0.410504 0.587250 +vt 0.405621 0.587250 +vt 0.005257 0.543307 +vt 0.003304 0.546237 +vt 0.146849 0.653652 +vt 0.142943 0.656581 +vt 0.130249 0.646816 +vt 0.128296 0.649746 +vt 0.123413 0.664393 +vt 0.123413 0.649746 +vt 0.123413 0.645840 +vt 0.763020 0.553072 +vt 0.751302 0.548190 +vt 0.363632 0.647793 +vt 0.362655 0.653652 +vt 0.359726 0.654628 +vt 0.357773 0.650722 +vt 0.514012 0.281606 +vt 0.495459 0.218133 +vt 0.517918 0.280629 +vt 0.499365 0.217157 +vt 0.158567 0.721030 +vt 0.157591 0.721030 +vt 0.156614 0.721030 +vt 0.157591 0.714195 +vt 0.156614 0.714195 +vt 0.433940 0.970037 +vt 0.434916 0.973943 +vt 0.432963 0.970037 +vt 0.156614 0.709312 +vt 0.800126 0.931954 +vt 0.753255 0.865552 +vt 0.550143 0.938789 +vt 0.548190 0.941719 +vt 0.547213 0.933907 +vt 0.850904 0.923165 +vt 0.842116 0.928048 +vt 0.875317 0.993473 +vt 0.870434 0.998356 +vt 0.836257 0.986638 +vt 0.109742 
0.900706 +vt 0.074588 0.892894 +vt 0.107789 0.816727 +vt 0.439799 0.984685 +vt 0.496435 0.930977 +vt 0.035528 0.778643 +vt 0.035528 0.777667 +vt 0.036505 0.779620 +vt 0.036505 0.777667 +vt 0.029669 0.777667 +vt 0.375350 0.882152 +vt 0.374373 0.798173 +vt 0.375350 0.792314 +vt 0.362655 0.789385 +vt 0.363632 0.788409 +vt 0.376326 0.790361 +vt 0.542331 0.755208 +vt 0.538425 0.760090 +vt 0.538425 0.761067 +vt 0.514989 0.824539 +vt 0.511083 0.831374 +vt 0.509130 0.835280 +vt 0.505224 0.841139 +vt 0.504247 0.841139 +vt 0.491553 0.847975 +vt 0.463235 0.828445 +vt 0.461282 0.827468 +vt 0.459329 0.824539 +vt 0.458352 0.823562 +vt 0.457376 0.823562 +vt 0.455423 0.825515 +vt 0.454446 0.826492 +vt 0.465188 0.841139 +vt 0.482765 0.860669 +vt 0.485694 0.864575 +vt 0.483741 0.868481 +vt 0.481788 0.872387 +vt 0.477882 0.875317 +vt 0.473976 0.878246 +vt 0.473976 0.877270 +vt 0.136108 0.941719 +vt 0.136108 0.927071 +vt 0.184932 0.626310 +vt 0.184932 0.620451 +vt 0.247428 0.625333 +vt 0.183956 0.619474 +vt 0.123413 0.549166 +vt 0.182979 0.619474 +vt 0.113648 0.619474 +vt 0.112672 0.619474 +vt 0.381209 0.368514 +vt 0.382185 0.407574 +vt 0.383162 0.393903 +vt 0.572602 0.829421 +vt 0.571626 0.814774 +vt 0.611662 0.816727 +vt 0.858716 0.778643 +vt 0.859693 0.818680 +vt 0.859693 0.860669 +vt 0.866528 0.861646 +vt 0.864575 0.862622 +vt 0.949531 0.752278 +vt 0.883129 0.743490 +vt 0.392927 0.260123 +vt 0.387068 0.262076 +vt 0.857740 0.911447 +vt 0.856763 0.918283 +vt 0.844069 0.911447 +vt 0.844069 0.916330 +vt 0.270864 0.543307 +vt 0.268911 0.544284 +vt 0.842116 0.879223 +vt 0.842116 0.911447 +vt 0.837233 0.912424 +vt 0.267935 0.549166 +vt 0.039434 0.780596 +vt 0.039434 0.893870 +vt 0.837233 0.799150 +vt 0.827468 0.809891 +vt 0.827468 0.725913 +vt 0.049199 0.967108 +vt 0.403668 0.294300 +vt 0.403668 0.297230 +vt 0.406598 0.299183 +vt 0.406598 0.302112 +vt 0.406598 0.307971 +vt 0.410504 0.309924 +vt 0.405621 0.410504 +vt 0.403668 0.480811 +vt 0.403668 0.587250 +vt 0.004280 0.543307 +vt 
0.149779 0.652675 +vt 0.131225 0.644863 +vt 0.132202 0.642910 +vt 0.129272 0.644863 +vt 0.125366 0.644863 +vt 0.124390 0.642910 +vt 0.764973 0.548190 +vt 0.764973 0.544284 +vt 0.752278 0.545260 +vt 0.362655 0.641934 +vt 0.359726 0.645840 +vt 0.356796 0.646816 +vt 0.356796 0.644863 +vt 0.519871 0.279653 +vt 0.524754 0.277700 +vt 0.506200 0.215204 +vt 0.159544 0.714195 +vt 0.435893 0.973943 +vt 0.435893 0.969061 +vt 0.432963 0.965155 +vt 0.433940 0.963202 +vt 0.432963 0.963202 +vt 0.421245 0.907541 +vt 0.431987 0.963202 +vt 0.805985 0.928048 +vt 0.757161 0.862622 +vt 0.550143 0.934883 +vt 0.549166 0.934883 +vt 0.840163 0.928048 +vt 0.108766 0.816727 +vt 0.107789 0.759114 +vt 0.045293 0.774737 +vt 0.514012 0.984685 +vt 0.518895 0.975896 +vt 0.029669 0.693688 +vt 0.035528 0.693688 +vt 0.034552 0.692712 +vt 0.035528 0.692712 +vt 0.036505 0.692712 +vt 0.376326 0.791338 +vt 0.541354 0.754231 +vt 0.541354 0.755208 +vt 0.537448 0.761067 +vt 0.515965 0.815750 +vt 0.514989 0.820633 +vt 0.512059 0.826492 +vt 0.510106 0.830398 +vt 0.508153 0.835280 +vt 0.504247 0.840163 +vt 0.499365 0.838210 +vt 0.481788 0.831374 +vt 0.466164 0.824539 +vt 0.457376 0.821609 +vt 0.456399 0.821609 +vt 0.455423 0.823562 +vt 0.454446 0.823562 +vt 0.453470 0.826492 +vt 0.450540 0.825515 +vt 0.481788 0.869458 +vt 0.478859 0.872387 +vt 0.424175 0.815750 +vt 0.262076 0.547213 +vt 0.262076 0.625333 +vt 0.257193 0.547213 +vt 0.242546 0.548190 +vt 0.381209 0.487647 +vt 0.382185 0.487647 +vt 0.383162 0.487647 +vt 0.384138 0.487647 +vt 0.384138 0.407574 +vt 0.572602 0.937813 +vt 0.654628 0.924142 +vt 0.653652 0.833327 +vt 0.653652 0.818680 +vt 0.654628 0.833327 +vt 0.951484 0.732748 +vt 0.952460 0.747396 +vt 0.950507 0.747396 +vt 0.860669 0.861646 +vt 0.860669 0.863599 +vt 0.949531 0.747396 +vt 0.884105 0.728842 +vt 0.389021 0.246452 +vt 0.383162 0.247428 +vt 0.878246 0.727866 +vt 0.879223 0.641934 +vt 0.878246 0.641934 +vt 0.857740 0.809891 +vt 0.857740 0.896800 +vt 0.844069 0.889964 +vt 0.842116 0.809891 
+vt 0.839186 0.742513 +vt 0.837233 0.707359 +vt 0.827468 0.722007 +vt 0.825515 0.718101 +vt 0.403668 0.300159 +vt 0.403668 0.304065 +vt 0.403668 0.307971 +vt 0.782550 0.241569 +vt 0.782550 0.414410 +vt 0.772785 0.389997 +vt 0.782550 0.521824 +vt 0.772785 0.540378 +vt 0.771808 0.474953 +vt 0.771808 0.539401 +vt 0.766926 0.539401 +vt 0.129272 0.638028 +vt 0.126343 0.640957 +vt 0.753255 0.541354 +vt 0.361679 0.636075 +vt 0.358749 0.638028 +vt 0.355820 0.638028 +vt 0.355820 0.636075 +vt 0.526707 0.277700 +vt 0.511083 0.214227 +vt 0.517918 0.213251 +vt 0.422222 0.907541 +vt 0.349961 0.307971 +vt 0.349961 0.306995 +vt 0.350937 0.307971 +vt 0.379256 0.258170 +vt 0.690759 0.000374 +vt 0.785479 0.004280 +vt 0.783526 0.086306 +vt 0.842116 0.009163 +vt 0.805985 0.830398 +vt 0.839186 0.927071 +vt 0.840163 0.927071 +vt 0.834304 0.985661 +vt 0.834304 0.986638 +vt 0.835280 0.986638 +vt 0.110719 0.900706 +vt 0.108766 0.810868 +vt 0.108766 0.734701 +vt 0.107789 0.698571 +vt 0.107789 0.634122 +vt 0.035528 0.691735 +vt 0.035528 0.690759 +vt 0.029669 0.690759 +vt 0.030646 0.634122 +vt 0.030646 0.625333 +vt 0.512059 0.823562 +vt 0.508153 0.832351 +vt 0.505224 0.838210 +vt 0.458352 0.820633 +vt 0.453470 0.821609 +vt 0.433940 0.806962 +vt 0.431987 0.808915 +vt 0.428081 0.811844 +vt 0.425151 0.812821 +vt 0.382185 0.510106 +vt 0.381209 0.508153 +vt 0.382185 0.503271 +vt 0.381209 0.503271 +vt 0.383162 0.503271 +vt 0.484717 0.086306 +vt 0.471047 0.087283 +vt 0.479835 0.041387 +vt 0.446634 0.007210 +vt 0.392927 0.098024 +vt 0.368514 0.018928 +vt 0.380232 0.015998 +vt 0.655605 0.936836 +vt 0.655605 0.900706 +vt 0.655605 0.833327 +vt 0.950507 0.732748 +vt 0.949531 0.732748 +vt 0.949531 0.629239 +vt 0.884105 0.666346 +vt 0.881176 0.641934 +vt 0.884105 0.556002 +vt 0.882152 0.556002 +vt 0.879223 0.570649 +vt 0.877270 0.556002 +vt 0.857740 0.722983 +vt 0.844069 0.729819 +vt 0.841139 0.729819 +vt 0.839186 0.708336 +vt 0.776690 0.218133 +vt 0.775714 0.220086 +vt 0.773761 0.222039 +vt 0.786456 
0.231804 +vt 0.783526 0.236687 +vt 0.772785 0.223992 +vt 0.771808 0.280629 +vt 0.766926 0.304065 +vt 0.766926 0.483741 +vt 0.753255 0.537448 +vt 0.360702 0.630216 +vt 0.359726 0.630216 +vt 0.354843 0.630216 +vt 0.532566 0.275747 +vt 0.583344 0.272817 +vt 0.578461 0.205439 +vt 0.423198 0.907541 +vt 0.423198 0.816727 +vt 0.421245 0.797197 +vt 0.349961 0.418316 +vt 0.350937 0.401715 +vt 0.350937 0.440775 +vt 0.650722 0.000374 +vt 0.713218 0.082400 +vt 0.111695 0.900706 +vt 0.110719 0.829421 +vt 0.110719 0.770832 +vt 0.109742 0.716148 +vt 0.109742 0.634122 +vt 0.109742 0.657558 +vt 0.108766 0.630216 +vt 0.107789 0.628263 +vt 0.439799 0.793291 +vt 0.437846 0.797197 +vt 0.435893 0.802079 +vt 0.434916 0.803056 +vt 0.432963 0.805985 +vt 0.429057 0.809891 +vt 0.424175 0.811844 +vt 0.383162 0.510106 +vt 0.486670 0.041387 +vt 0.490577 0.085330 +vt 0.483741 0.041387 +vt 0.475929 0.004280 +vt 0.996403 0.496435 +vt 0.997379 0.496435 +vt 0.996403 0.511083 +vt 0.997379 0.523777 +vt 0.996403 0.589203 +vt 0.656581 0.936836 +vt 0.950507 0.629239 +vt 0.995426 0.589203 +vt 0.994450 0.511083 +vt 0.949531 0.551119 +vt 0.883129 0.540378 +vt 0.879223 0.541354 +vt 0.877270 0.541354 +vt 0.856763 0.710289 +vt 0.844069 0.707359 +vt 0.769855 0.216180 +vt 0.771808 0.215204 +vt 0.772785 0.218133 +vt 0.776690 0.213251 +vt 0.771808 0.223992 +vt 0.766926 0.223992 +vt 0.855787 0.684900 +vt 0.843092 0.690759 +vt 0.852857 0.680994 +vt 0.753255 0.227898 +vt 0.753255 0.424175 +vt 0.360702 0.515965 +vt 0.359726 0.565767 +vt 0.354843 0.574555 +vt 0.354843 0.425151 +vt 0.731772 0.271841 +vt 0.670252 0.205439 +vt 0.763020 0.206415 +vt 0.348008 0.397809 +vt 0.348008 0.491553 +vt 0.349961 0.508153 +vt 0.350937 0.508153 +vt 0.583344 0.000374 +vt 0.653652 0.081424 +vt 0.519871 0.001351 +vt 0.519871 0.083377 +vt 0.516942 0.084353 +vt 0.109742 0.631192 +vt 0.109742 0.626310 +vt 0.437846 0.795244 +vt 0.435893 0.799150 +vt 0.432963 0.802079 +vt 0.431987 0.805009 +vt 0.498388 0.045293 +vt 0.500341 0.085330 +vt 
0.496435 0.085330 +vt 0.430034 0.806962 +vt 0.491553 0.041387 +vt 0.482765 0.004280 +vt 0.997379 0.489600 +vt 0.996403 0.491553 +vt 0.995426 0.496435 +vt 0.994450 0.496435 +vt 0.949531 0.536472 +vt 0.883129 0.536472 +vt 0.876293 0.536472 +vt 0.857740 0.705406 +vt 0.845045 0.701500 +vt 0.858716 0.700524 +vt 0.845045 0.699547 +vt 0.858716 0.695641 +vt 0.845045 0.695641 +vt 0.767902 0.220086 +vt 0.856763 0.688806 +vt 0.361679 0.309924 +vt 0.360702 0.313830 +vt 0.359726 0.309924 +vt 0.359726 0.315783 +vt 0.360702 0.318713 +vt 0.354843 0.318713 +vt 0.281606 0.461282 +vt 0.282582 0.566743 +vt 0.348008 0.571626 +vt 0.348984 0.574555 +vt 0.350937 0.571626 +vt 0.350937 0.576508 +vt 0.513036 0.001351 +vt 0.510106 0.084353 +vt 0.112672 0.625333 +vt 0.435893 0.797197 +vt 0.504247 0.084353 +vt 0.502294 0.044317 +vt 0.495459 0.003304 +vt 0.490577 0.003304 +vt 0.998356 0.482764 +vt 0.996403 0.487647 +vt 0.995426 0.491553 +vt 0.948554 0.531589 +vt 0.345078 0.604827 +vt 0.279653 0.598968 +vt 0.279653 0.595062 +vt 0.882152 0.532566 +vt 0.877270 0.532566 +vt 0.874340 0.530613 +vt 0.875317 0.525730 +vt 0.871411 0.526707 +vt 0.359726 0.300159 +vt 0.361679 0.302112 +vt 0.361679 0.306018 +vt 0.857740 0.692712 +vt 0.355820 0.306995 +vt 0.354843 0.313830 +vt 0.282582 0.571626 +vt 0.348008 0.574555 +vt 0.995426 0.461282 +vt 0.997379 0.461282 +vt 0.997379 0.465188 +vt 0.998356 0.463235 +vt 0.998356 0.467141 +vt 0.509130 0.002327 +vt 0.501318 0.002327 +vt 0.998356 0.473976 +vt 0.998356 0.478859 +vt 0.997379 0.478859 +vt 0.997379 0.482764 +vt 0.995426 0.484717 +vt 0.346055 0.597991 +vt 0.280629 0.590179 +vt 0.880199 0.528660 +vt 0.877270 0.524754 +vt 0.356796 0.299183 +vt 0.355820 0.303089 +vt 0.281606 0.582367 +vt 0.281606 0.578461 +vt 0.348008 0.581391 +vt 0.996403 0.467141 +vt 0.997379 0.470070 +vt 0.997379 0.473976 +vt 0.996403 0.478859 +vt 0.347031 0.592132 +vt 0.280629 0.586273 +vt 0.347031 0.587250 +vt 0.995426 0.473976 +s 1 +usemtl SimplygonCastMaterial.004 +f 1/1/1 2/2/1 3/3/1 +f 
4/4/1 5/5/1 6/6/1 +f 7/7/2 8/8/2 9/9/2 +f 10/10/2 11/11/2 12/12/2 +f 13/13/2 14/14/2 15/15/2 +f 16/16/3 17/17/4 18/18/5 +f 17/17/4 19/19/6 18/18/5 +f 18/18/5 19/19/6 20/19/7 +f 21/20/8 22/21/9 23/20/10 +f 26/22/11 29/23/12 31/24/13 +f 27/25/14 36/26/15 30/27/16 +f 36/26/15 25/20/15 30/27/16 +f 23/20/10 22/21/9 32/27/17 +f 32/27/17 22/21/9 37/28/18 +f 22/21/9 21/20/8 40/28/19 +f 40/28/19 21/20/8 41/29/20 +f 21/20/8 23/20/10 43/29/21 +f 43/29/21 23/20/10 45/30/22 +f 24/20/23 33/26/23 44/30/23 +f 47/30/23 34/26/23 49/31/23 +f 49/31/23 34/26/23 50/32/23 +f 54/33/24 52/32/25 56/34/26 +f 52/32/25 35/26/27 56/34/26 +f 35/26/27 28/25/28 56/34/26 +f 56/34/26 28/25/28 58/35/29 +f 58/35/29 28/25/28 61/35/30 +f 29/23/12 26/22/11 60/36/31 +f 59/36/32 26/22/11 62/37/33 +f 26/22/11 31/24/13 62/37/33 +f 62/37/33 31/24/13 64/38/34 +f 32/27/17 37/28/18 63/39/35 +f 65/40/36 63/39/35 68/41/37 +f 63/39/35 37/28/18 68/41/37 +f 69/42/38 72/43/39 75/44/40 +f 81/45/23 84/46/23 85/46/23 +f 88/47/23 86/46/23 89/48/23 +f 86/46/23 82/46/23 89/48/23 +f 89/48/23 82/46/23 97/49/23 +f 83/46/23 77/45/23 96/49/23 +f 78/45/23 102/50/23 95/49/23 +s 0 +f 98/50/41 105/51/41 94/49/41 +s 1 +f 107/52/23 108/53/23 109/54/23 +f 108/53/23 115/55/23 109/54/23 +f 111/54/23 112/55/23 118/56/23 +f 122/57/23 125/58/23 126/59/23 +f 129/59/23 123/58/23 130/60/23 +f 133/60/42 124/58/23 135/61/42 +f 124/58/23 138/62/23 135/61/42 +f 138/62/23 120/56/23 134/61/23 +f 137/61/23 116/56/23 140/63/23 +f 140/63/23 116/56/23 144/64/42 +f 144/64/42 116/56/23 146/65/23 +f 117/56/23 113/55/23 145/65/23 +f 149/65/23 114/55/23 90/49/23 +f 115/55/23 108/53/23 97/49/23 +f 108/53/23 107/52/23 97/49/23 +f 97/49/23 107/52/23 150/66/23 +f 150/66/23 107/52/23 151/67/23 +f 106/52/23 110/54/23 152/67/23 +f 152/67/23 110/54/23 153/68/23 +f 110/54/23 119/56/23 153/68/23 +f 154/68/23 120/56/23 158/69/23 +f 120/56/23 138/62/23 158/69/23 +f 138/62/23 124/58/23 158/69/23 +f 124/58/23 121/57/23 158/69/23 +f 158/69/23 121/57/23 160/70/23 +f 
160/70/23 121/57/23 164/71/23 +f 122/57/23 126/59/23 163/71/23 +f 172/72/23 176/73/23 177/74/23 +f 184/74/23 173/73/23 185/75/23 +f 188/76/23 174/77/23 190/78/23 +f 175/77/23 168/79/23 189/78/23 +f 192/80/23 169/81/23 195/82/23 +f 170/81/23 201/83/23 194/82/23 +f 198/83/23 167/71/23 193/82/23 +f 197/82/23 161/71/23 204/84/23 +f 162/71/23 127/59/23 203/84/23 +f 128/59/23 131/60/23 202/84/23 +f 205/85/23 132/86/23 206/87/23 +f 132/86/23 136/88/23 206/87/23 +f 136/88/23 139/89/23 206/87/23 +f 139/89/23 141/90/23 206/87/23 +f 208/91/23 142/64/23 211/92/23 +f 143/64/23 147/65/23 210/92/23 +f 148/65/23 91/49/23 209/92/23 +f 212/92/23 92/49/23 214/93/23 +f 93/49/23 103/51/23 213/93/23 +f 216/93/23 104/51/23 218/94/23 +f 105/51/43 98/50/23 217/94/23 +f 220/94/23 99/50/23 221/95/23 +f 223/95/23 100/50/23 224/96/23 +f 101/50/23 79/45/23 228/96/23 +f 228/96/23 79/45/23 233/97/23 +f 80/45/23 86/46/23 232/97/23 +f 86/46/23 88/47/23 232/97/23 +f 239/98/23 237/97/23 240/99/23 +f 244/99/23 229/97/23 245/100/23 +f 249/100/23 230/97/23 251/101/23 +f 231/97/23 87/47/23 250/101/23 +f 254/101/23 88/47/23 258/102/23 +f 88/47/23 89/48/23 258/102/23 +f 89/48/23 97/49/23 258/102/23 +f 258/102/23 97/49/23 261/103/23 +f 97/49/23 150/66/23 261/103/23 +f 261/103/23 150/66/23 265/104/23 +f 265/104/23 150/66/23 269/105/23 +f 269/105/23 150/66/23 271/106/23 +f 150/66/23 151/67/23 271/106/23 +f 152/67/23 153/68/23 270/106/23 +f 273/106/23 154/68/23 274/106/23 +f 154/68/23 158/69/23 274/106/23 +f 277/106/23 155/69/23 278/107/23 +f 281/107/23 156/69/23 283/108/23 +f 157/69/23 159/70/23 282/108/23 +f 286/108/23 160/70/23 290/109/23 +f 290/109/23 160/70/23 291/110/23 +f 160/70/23 164/71/23 291/110/23 +f 295/110/23 165/71/23 298/111/23 +f 166/71/23 199/83/23 297/111/23 +f 200/83/23 171/81/23 296/111/23 +f 171/81/23 178/112/23 296/111/23 +f 302/111/23 179/112/23 303/113/23 +s 0 +f 307/114/44 180/115/44 308/116/44 +f 312/116/44 181/115/44 314/117/44 +s 1 +f 321/118/23 317/119/23 322/120/23 +f 325/120/23 
313/119/23 327/121/23 +f 327/121/23 313/119/23 329/122/23 +f 313/119/23 182/74/23 329/122/23 +f 183/74/23 186/75/23 328/122/23 +f 333/123/23 330/122/23 334/124/23 +f 330/122/23 187/75/23 334/124/23 +f 187/75/23 191/125/23 334/124/23 +f 191/125/23 196/126/23 334/124/23 +f 196/126/23 205/85/23 334/124/23 +f 205/85/23 206/87/23 334/124/23 +s 0 +f 338/127/41 207/128/41 340/129/41 +s 1 +f 208/91/23 211/92/23 339/130/23 +f 211/92/23 215/93/23 339/130/23 +f 215/93/23 219/94/23 339/130/23 +f 219/94/23 222/95/23 339/130/23 +f 346/131/23 343/130/23 349/132/23 +f 339/130/23 222/95/23 348/132/23 +f 223/95/23 224/96/23 347/132/23 +f 351/132/23 225/96/23 352/133/23 +f 354/133/23 226/96/23 356/134/23 +f 227/96/23 234/97/23 355/134/23 +f 358/134/23 235/97/23 359/135/23 +f 361/135/23 236/97/23 363/136/23 +f 363/136/23 236/97/23 365/137/23 +f 365/137/23 236/97/23 367/138/23 +f 236/97/23 238/98/23 367/138/23 +f 367/138/23 238/98/23 368/139/23 +f 238/98/23 241/99/23 368/139/23 +f 370/139/23 242/99/23 372/140/23 +f 243/99/23 246/100/23 371/140/23 +f 374/140/23 247/100/23 377/141/23 +f 248/100/23 252/101/23 376/141/23 +f 253/101/23 255/102/23 375/141/23 +f 380/141/23 256/102/23 383/142/23 +f 257/102/23 259/103/23 382/142/23 +f 260/103/23 262/104/23 381/142/23 +f 386/142/23 263/104/23 388/143/23 +f 264/104/23 266/105/23 387/143/23 +f 391/143/23 267/105/23 393/144/23 +f 268/105/23 272/106/23 392/144/23 +f 396/144/23 273/106/23 397/145/23 +f 273/106/23 274/106/23 397/145/23 +f 401/145/23 275/106/23 405/146/23 +f 276/106/23 279/107/23 404/146/23 +f 280/107/23 284/108/23 403/146/23 +f 285/108/23 287/109/23 402/146/23 +f 408/146/23 288/109/23 410/147/23 +f 289/109/23 292/110/23 409/147/23 +f 412/147/23 293/110/23 414/148/23 +f 294/110/23 299/111/23 413/148/23 +f 416/148/23 300/111/23 418/149/23 +f 301/111/23 304/113/23 417/149/23 +f 420/149/23 305/113/23 421/150/23 +f 306/113/23 309/151/23 423/150/23 +f 423/150/23 309/151/23 424/152/23 +f 426/152/23 310/151/23 428/153/23 +f 311/151/23 
315/119/23 427/153/23 +f 430/153/23 316/119/23 431/154/23 +f 316/119/23 318/118/23 431/154/23 +f 432/154/23 319/118/23 435/155/23 +f 320/118/23 323/120/23 434/155/23 +f 324/120/23 326/121/23 433/155/23 +f 326/121/23 330/122/23 433/155/23 +f 330/122/23 333/123/23 433/155/23 +f 437/155/23 331/123/23 440/156/23 +f 332/123/23 335/124/23 439/156/23 +f 446/157/45 449/158/46 450/159/47 +f 453/160/48 456/161/49 457/162/50 +f 456/161/49 461/163/51 457/162/50 +f 462/164/52 458/162/53 463/165/54 +f 458/162/53 459/163/55 463/165/54 +f 459/163/55 451/166/56 463/165/54 +f 451/166/56 447/167/57 463/165/54 +f 463/165/54 447/167/57 465/168/58 +f 448/169/23 444/170/23 464/171/23 +f 467/171/23 445/170/23 468/172/23 +f 446/157/45 450/159/47 471/173/45 +f 471/173/45 450/159/47 472/174/47 +f 474/175/59 473/174/60 475/176/59 +f 472/174/47 450/159/47 475/176/59 +f 475/176/59 450/159/47 478/177/61 +f 451/166/56 459/163/55 477/178/61 +f 481/179/62 478/177/61 483/180/63 +f 476/178/64 460/163/65 482/181/66 +f 484/181/67 461/163/51 485/182/68 +f 461/163/51 456/161/49 485/182/68 +f 486/183/69 454/184/70 487/185/71 +f 487/185/71 454/184/70 488/186/72 +f 455/187/73 452/188/74 489/189/72 +f 490/190/75 493/191/23 494/192/76 +f 493/191/23 489/189/72 494/192/76 +f 489/189/72 452/188/74 494/192/76 +f 494/192/76 452/188/74 495/193/77 +f 496/194/78 453/160/48 497/195/79 +f 453/160/48 457/162/50 497/195/79 +f 458/162/53 462/164/52 498/195/80 +f 498/195/80 462/164/52 499/196/81 +f 462/164/52 463/165/54 499/196/81 +f 463/165/54 465/168/58 499/196/81 +f 465/168/58 443/197/82 499/196/81 +f 499/196/81 443/197/82 500/198/83 +f 503/199/81 504/200/83 505/201/84 +f 506/202/85 509/203/86 510/204/87 +f 510/204/87 509/203/86 511/205/88 +f 513/206/23 507/207/23 514/208/23 +f 508/209/89 505/201/84 517/210/82 +f 505/201/84 504/200/83 517/210/82 +f 517/210/82 504/200/83 521/211/82 +f 504/200/83 500/198/83 521/211/82 +f 500/198/83 443/197/82 521/211/82 +f 438/212/23 336/127/23 520/213/23 +f 337/127/23 341/129/23 
519/213/23 +f 342/129/23 344/214/23 518/213/23 +f 523/215/23 345/131/23 524/216/23 +f 345/131/23 350/132/23 524/216/23 +f 350/132/23 353/133/23 524/216/23 +f 353/133/23 357/134/23 524/216/23 +f 357/134/23 360/135/23 524/216/23 +f 360/135/23 362/136/23 524/216/23 +f 524/216/23 362/136/23 529/217/23 +f 362/136/23 364/137/23 529/217/23 +f 364/137/23 366/138/23 529/217/23 +f 366/138/23 369/139/23 529/217/23 +f 369/139/23 373/140/23 529/217/23 +f 373/140/23 378/141/23 529/217/23 +f 379/141/23 384/142/23 528/217/23 +f 385/142/23 389/143/23 527/217/23 +f 390/143/23 394/144/23 526/217/23 +f 395/144/23 398/145/23 525/217/23 +f 530/217/23 399/145/23 535/218/23 +f 400/145/23 406/146/23 534/218/23 +f 407/146/23 411/147/23 533/218/23 +f 411/147/23 415/148/23 533/218/23 +f 415/148/23 419/149/23 533/218/23 +f 419/149/23 422/150/23 533/218/23 +f 422/150/23 425/152/23 533/218/23 +f 425/152/23 429/153/23 533/218/23 +f 429/153/23 432/154/23 533/218/23 +f 432/154/23 435/155/23 533/218/23 +f 436/155/23 441/156/23 532/218/23 +f 442/156/23 466/171/23 531/218/23 +f 538/218/23 467/171/23 539/219/23 +f 467/171/23 468/172/23 539/219/23 +f 542/219/23 469/172/23 543/220/23 +f 470/173/45 473/174/60 547/221/45 +f 550/222/90 547/221/45 552/223/91 +f 547/221/45 473/174/60 552/223/91 +f 473/174/60 474/175/59 552/223/91 +f 552/223/91 474/175/59 554/224/61 +f 474/175/59 475/176/59 554/224/61 +f 475/176/59 478/177/61 554/224/61 +f 478/177/61 481/179/62 554/224/61 +f 557/225/92 554/224/61 558/226/93 +f 554/224/61 481/179/62 558/226/93 +f 559/226/94 479/179/94 561/227/95 +f 480/228/96 484/181/67 560/229/95 +f 484/181/67 485/182/68 560/229/95 +f 562/230/93 486/183/69 563/231/59 +f 486/183/69 487/185/71 563/231/59 +f 563/231/59 487/185/71 566/232/97 +f 566/232/97 487/185/71 569/233/23 +f 487/185/71 488/186/72 569/233/23 +f 489/189/72 493/191/23 568/234/23 +f 574/235/98 577/236/99 578/237/100 +f 580/238/101 582/239/101 584/239/102 +f 589/240/103 593/241/103 595/242/103 +f 590/241/103 605/243/103 
594/242/103 +f 600/242/103 601/243/103 607/244/103 +f 618/245/103 612/244/103 620/246/103 +f 606/244/103 602/243/103 619/246/103 +f 624/246/104 603/243/104 632/247/104 +f 604/243/104 591/241/104 631/247/104 +f 592/241/104 587/240/104 630/247/104 +f 588/240/103 596/242/103 629/247/103 +f 639/248/103 643/249/103 646/250/103 +f 656/251/103 650/250/105 657/252/105 +f 660/252/103 644/250/103 662/253/103 +f 645/250/103 640/249/103 661/253/103 +f 667/253/103 641/249/103 671/254/103 +f 642/249/103 636/248/103 670/254/103 +f 674/254/103 677/255/103 679/256/103 +f 677/255/103 687/257/103 679/256/103 +f 684/257/103 691/258/103 678/256/103 +f 683/256/106 688/258/103 692/259/103 +f 696/259/103 689/258/103 702/260/103 +f 690/258/103 685/257/103 701/260/103 +f 686/257/103 675/255/103 700/260/103 +f 676/255/103 668/254/103 699/260/103 +f 669/254/103 637/248/103 698/260/103 +f 638/248/103 647/250/103 697/260/103 +f 707/260/103 648/250/103 626/247/103 +f 649/250/103 651/251/103 625/247/103 +f 635/247/103 652/251/103 709/261/103 +f 717/262/103 712/261/103 719/263/103 +f 708/261/103 653/251/103 718/263/103 +f 723/263/103 654/251/103 727/264/103 +f 727/264/103 654/251/103 730/265/103 +f 655/251/105 658/252/105 729/265/105 +f 659/252/103 663/253/103 728/265/103 +f 733/265/105 664/253/105 734/266/105 +f 737/266/107 665/253/107 741/267/107 +f 666/253/103 672/254/103 740/267/103 +f 673/254/107 680/256/107 739/267/107 +f 744/268/108 745/269/109 748/270/110 +f 748/270/110 745/269/109 751/271/111 +f 751/271/111 745/269/109 753/272/109 +f 753/272/109 745/269/109 754/273/112 +f 754/273/112 745/269/109 755/274/113 +f 745/269/109 744/268/108 755/274/113 +f 757/274/114 743/268/115 761/275/116 +f 761/275/116 743/268/115 764/270/117 +f 743/268/115 746/270/118 764/270/117 +f 766/276/119 764/270/117 767/276/120 +f 764/270/117 746/270/118 767/276/120 +f 747/270/23 749/271/23 769/276/23 +f 769/276/23 749/271/23 770/277/23 +f 772/277/121 750/271/122 774/278/123 +f 774/278/123 750/271/122 776/279/124 +f 
750/271/122 752/272/125 776/279/124 +f 776/279/124 752/272/125 778/280/126 +f 3536/281/127 778/280/126 780/282/128 +f 778/280/126 752/272/125 780/282/128 +f 779/282/129 753/272/109 783/283/130 +f 753/272/109 754/273/112 783/283/130 +f 754/273/112 755/274/113 782/283/130 +f 781/284/130 756/285/131 786/285/130 +f 757/274/114 761/275/116 785/274/132 +f 758/286/116 788/287/133 784/288/134 +f 784/288/134 788/287/133 792/287/135 +f 795/289/136 796/290/137 797/291/138 +f 799/292/139 798/293/140 800/294/141 +f 797/291/138 796/290/137 801/295/142 +f 801/295/142 796/290/137 802/296/143 +f 796/290/137 793/289/144 802/296/143 +f 807/296/145 794/289/146 809/297/147 +f 809/297/147 794/289/146 811/298/148 +f 813/299/149 814/300/150 815/301/151 +f 817/302/152 819/303/153 821/304/154 +f 822/305/155 823/306/155 824/307/156 +f 827/308/157 829/309/158 830/310/159 +f 830/310/159 829/309/158 831/311/160 +f 833/312/161 828/313/158 835/314/162 +f 836/315/163 838/316/164 839/317/165 +f 838/316/164 835/314/162 839/317/165 +f 835/314/162 828/313/158 839/317/165 +f 829/309/158 827/308/157 840/318/166 +f 840/318/166 827/308/157 841/319/167 +f 844/319/168 826/308/168 845/320/168 +f 827/308/157 830/310/159 848/320/169 +f 848/320/169 830/310/159 849/321/170 +f 851/322/171 850/323/159 853/324/171 +f 849/321/170 830/310/159 852/325/172 +f 830/310/159 831/311/160 852/325/172 +f 853/324/171 832/326/161 855/327/156 +f 833/312/161 835/314/162 854/328/173 +f 857/328/174 834/314/175 859/329/176 +f 859/329/176 834/314/175 860/330/177 +f 834/314/175 837/316/178 860/330/177 +f 863/331/179 862/330/177 864/332/180 +f 860/330/177 837/316/178 866/332/181 +f 866/332/181 837/316/178 867/333/182 +f 838/316/164 836/315/163 867/333/182 +f 867/333/182 836/315/163 868/334/183 +f 836/315/163 839/317/165 868/334/183 +f 869/335/184 871/336/184 874/337/185 +f 870/338/186 868/334/183 873/339/187 +f 868/334/183 839/317/165 873/339/187 +f 840/318/166 841/319/167 872/337/188 +f 841/319/167 878/340/167 872/337/188 +f 
874/337/185 875/340/167 880/341/167 +f 883/342/168 881/341/168 888/343/168 +f 879/341/168 876/340/168 887/343/168 +f 877/340/168 842/319/168 886/343/168 +f 843/319/168 846/320/168 885/343/168 +f 895/344/168 890/343/168 898/345/168 +f 884/343/168 847/320/168 897/345/168 +f 848/320/169 849/321/170 896/345/169 +f 850/323/159 851/322/171 900/346/169 +f 902/347/189 900/346/169 903/348/159 +f 900/346/169 851/322/171 903/348/159 +f 903/348/159 851/322/171 905/349/190 +f 905/349/190 851/322/171 907/350/156 +f 907/350/156 851/322/171 908/351/156 +f 851/322/171 853/324/171 908/351/156 +f 853/324/171 855/327/156 908/351/156 +f 909/351/191 856/327/192 825/307/193 +f 825/307/193 856/327/192 911/352/194 +f 857/328/174 859/329/176 910/353/195 +f 913/353/196 858/329/197 917/354/198 +f 917/354/198 858/329/197 920/355/199 +f 858/329/197 861/330/200 920/355/199 +f 861/330/200 863/331/179 920/355/199 +f 920/355/199 863/331/179 921/356/201 +f 928/357/168 924/358/168 929/359/168 +f 932/360/202 921/356/201 933/361/203 +f 921/356/201 863/331/179 933/361/203 +f 863/331/179 864/332/180 933/361/203 +f 933/361/203 865/332/204 934/362/205 +f 866/332/181 867/333/182 935/362/206 +f 935/362/206 867/333/182 936/363/207 +f 942/364/208 938/363/208 944/365/208 +f 936/363/207 867/333/182 943/365/209 +f 867/333/182 868/334/183 943/365/209 +f 868/334/183 870/338/186 943/365/209 +f 947/366/207 871/336/184 950/367/210 +f 871/336/184 869/335/184 950/367/210 +f 950/367/210 869/335/184 952/368/211 +f 952/368/211 869/335/184 953/369/212 +f 869/335/184 956/370/213 953/369/212 +f 955/369/214 956/370/213 957/371/215 +f 956/370/213 869/335/184 957/371/215 +f 869/335/184 874/337/185 957/371/215 +f 874/337/185 880/341/167 957/371/215 +f 880/341/167 882/342/216 957/371/215 +f 957/371/215 882/342/216 958/372/217 +f 958/372/217 882/342/216 959/373/218 +f 882/342/216 889/343/219 959/373/218 +f 959/373/218 889/343/219 960/374/220 +f 889/343/219 891/344/219 960/374/220 +f 960/374/220 891/344/219 961/375/221 +f 
961/375/221 892/344/222 962/376/218 +f 892/344/222 965/377/219 962/376/218 +f 962/376/218 965/377/219 966/378/223 +f 967/378/168 963/377/168 969/379/168 +f 964/377/168 893/344/168 968/379/168 +f 971/379/168 894/344/168 972/380/168 +f 894/344/168 899/345/168 972/380/168 +f 972/380/168 899/345/168 820/381/224 +f 899/345/168 901/382/225 820/381/224 +f 902/347/189 903/348/159 821/304/154 +f 821/304/154 903/348/159 973/383/226 +f 903/348/159 905/349/190 973/383/226 +f 974/383/227 904/349/228 975/384/229 +f 975/384/229 904/349/228 976/385/230 +f 976/385/230 904/349/228 977/386/231 +f 904/349/228 906/350/232 977/386/231 +f 906/350/232 909/351/191 977/386/231 +f 909/351/191 825/307/193 977/386/231 +f 824/307/156 823/306/155 977/386/231 +f 977/386/231 823/306/155 980/387/233 +f 980/387/233 823/306/155 983/388/234 +f 823/306/155 822/305/155 983/388/234 +f 983/388/234 822/305/155 984/389/233 +f 822/305/155 824/307/156 984/389/233 +f 824/307/156 912/352/235 984/389/233 +f 912/352/235 914/390/236 984/389/233 +f 987/389/168 915/390/168 991/391/168 +f 916/390/168 918/392/168 990/391/168 +f 919/392/168 922/358/168 989/391/168 +f 923/358/168 925/357/168 988/391/168 +f 994/393/237 993/391/238 995/394/239 +f 993/391/238 926/357/240 995/394/239 +f 997/395/241 995/394/239 998/396/242 +f 1001/397/243 999/396/244 1002/398/245 +f 998/396/242 995/394/239 1005/398/246 +f 1005/398/246 995/394/239 1007/399/247 +f 995/394/239 926/357/240 1007/399/247 +f 927/357/168 930/359/168 1006/399/168 +f 1010/400/168 931/360/168 1011/401/168 +f 1014/401/167 932/360/202 1015/402/185 +f 932/360/202 933/361/203 1015/402/185 +f 933/361/203 934/362/205 1015/402/185 +f 934/362/205 937/363/248 1015/402/185 +f 1015/402/185 937/363/248 1016/403/184 +f 937/363/248 939/364/249 1016/403/184 +f 1016/403/184 939/364/249 1017/404/184 +f 1018/405/184 940/406/250 1021/407/248 +f 941/406/251 945/366/251 1020/407/251 +f 946/366/207 948/367/207 1019/407/207 +f 949/367/251 951/368/252 1022/407/251 +f 1022/407/251 951/368/252 
1024/408/253 +f 951/368/252 954/369/254 1024/408/253 +f 1024/408/253 954/369/254 1027/409/255 +f 1027/409/255 954/369/254 1030/410/256 +f 1030/410/256 954/369/254 1031/411/257 +f 955/369/214 957/371/215 1032/411/258 +f 957/371/215 958/372/217 1032/411/258 +f 1032/411/258 958/372/217 1033/412/259 +f 958/372/217 959/373/218 1033/412/259 +f 1033/412/259 959/373/218 1034/413/260 +f 959/373/218 960/374/220 1034/413/260 +f 1034/413/260 960/374/220 1035/414/260 +f 960/374/220 961/375/221 1035/414/260 +f 961/375/221 962/376/218 1035/414/260 +f 1035/414/260 962/376/218 1036/415/260 +f 1038/416/261 1036/415/260 1039/417/262 +f 1036/415/260 962/376/218 1039/417/262 +f 962/376/218 966/378/223 1039/417/262 +f 1039/417/262 966/378/223 1040/418/263 +f 966/378/223 970/379/264 1040/418/263 +f 1040/418/263 970/379/264 1041/419/265 +f 970/379/264 972/380/168 1041/419/265 +f 1041/419/265 972/380/168 1042/420/266 +f 1042/420/266 972/380/168 1043/421/266 +f 972/380/168 820/381/224 1043/421/266 +f 820/381/224 818/422/153 1043/421/266 +f 1043/421/266 818/422/153 1044/423/267 +f 1047/424/267 819/303/153 1049/425/268 +f 819/303/153 817/302/152 1049/425/268 +f 817/302/152 821/304/154 1048/425/269 +f 821/304/154 973/383/226 1048/425/269 +f 1050/425/270 974/383/227 1052/426/271 +f 974/383/227 975/384/229 1052/426/271 +f 1052/426/271 975/384/229 1053/427/272 +f 975/384/229 976/385/230 1053/427/272 +f 1057/428/168 1061/429/168 1062/430/168 +f 1058/429/273 1053/427/272 1065/430/274 +f 1053/427/272 976/385/230 1065/430/274 +f 1065/430/274 976/385/230 1066/431/169 +f 976/385/230 977/386/231 1066/431/169 +f 977/386/231 980/387/233 1066/431/169 +f 1069/431/168 978/387/168 1073/432/168 +f 979/387/168 981/388/168 1072/432/168 +f 982/388/168 985/389/168 1071/432/168 +f 986/389/168 992/391/168 1070/432/168 +f 1076/433/275 1075/432/276 1077/434/237 +f 1075/432/276 993/391/238 1077/434/237 +f 993/391/238 994/393/237 1077/434/237 +f 1077/434/237 994/393/237 1079/435/277 +f 994/393/237 995/394/239 
1079/435/277 +f 1079/435/277 995/394/239 1080/436/278 +f 995/394/239 997/395/241 1080/436/278 +f 1081/436/279 996/395/280 1083/437/281 +f 1083/437/281 996/395/280 1084/438/282 +f 996/395/280 999/396/244 1084/438/282 +f 999/396/244 1001/397/243 1084/438/282 +f 1000/397/283 1087/439/284 1085/438/285 +f 1085/438/285 1087/439/284 1088/440/286 +f 1089/440/287 1086/439/288 1091/441/287 +f 1091/441/287 1086/439/288 1092/442/289 +f 1094/443/290 1093/442/291 1095/444/292 +f 1096/444/293 1092/442/289 1097/445/294 +f 1092/442/289 1086/439/288 1097/445/294 +f 1087/439/284 1000/397/283 1098/445/295 +f 1098/445/295 1000/397/283 1100/446/296 +f 1001/397/243 1002/398/245 1099/446/297 +f 1102/447/297 1003/448/298 1103/449/297 +f 1004/448/246 1008/400/264 1104/449/299 +f 1108/450/299 1104/449/299 1110/451/264 +f 1104/449/299 1008/400/264 1110/451/264 +f 1009/400/168 1012/401/168 1109/451/168 +f 1112/451/168 1013/401/168 1113/452/168 +f 1117/452/167 1014/401/167 1118/453/185 +f 1014/401/167 1015/402/185 1118/453/185 +f 1015/402/185 1016/403/184 1118/453/185 +f 1016/403/184 1017/404/184 1118/453/185 +f 1119/454/185 1018/405/184 1120/455/185 +f 1018/405/184 1021/407/248 1120/455/185 +f 1021/407/248 1023/408/300 1120/455/185 +f 1023/408/300 1025/409/301 1120/455/185 +f 1120/455/185 1025/409/301 1121/456/302 +f 1026/409/303 1028/410/304 1121/456/302 +f 1121/456/302 1028/410/304 1122/457/305 +f 1122/457/305 1029/410/306 1123/458/307 +f 1030/410/256 1031/411/257 1124/458/308 +f 1126/459/309 1124/458/308 1128/460/310 +f 1124/458/308 1031/411/257 1128/460/310 +f 1032/411/258 1033/412/259 1127/460/310 +f 1033/412/259 1034/413/260 1129/460/311 +f 1129/460/311 1034/413/260 1133/461/311 +f 1034/413/260 1035/414/260 1133/461/311 +f 1133/461/311 1035/414/260 1134/462/311 +f 1035/414/260 1036/415/260 1134/462/311 +f 1036/415/260 1038/416/261 1134/462/311 +f 1136/462/312 1037/416/313 1138/463/314 +f 1138/463/314 1037/416/313 1140/464/315 +f 1140/464/315 1037/416/313 1141/465/316 +f 1038/416/261 
1039/417/262 1142/465/317 +f 1039/417/262 1040/418/263 1142/465/317 +f 1142/465/317 1040/418/263 1144/466/318 +f 1144/466/318 1040/418/263 1146/467/319 +f 1040/418/263 1041/419/265 1146/467/319 +f 1041/419/265 1042/420/266 1146/467/319 +f 1042/420/266 1043/421/266 1146/467/319 +f 1043/421/266 1044/423/267 1146/467/319 +f 1150/468/320 1154/469/321 1156/470/299 +f 1151/469/322 1149/467/322 1155/470/322 +f 1145/467/323 1045/423/322 1157/470/323 +f 1157/470/323 1045/423/322 1158/471/324 +f 1046/424/325 1050/425/270 1161/472/324 +f 1161/472/324 1050/425/270 1162/473/326 +f 1050/425/270 1052/426/271 1162/473/326 +f 1164/473/326 1051/426/327 1165/474/328 +f 1051/426/327 1053/427/272 1165/474/328 +f 1053/427/272 1058/429/273 1165/474/328 +f 1166/475/328 1059/476/329 1167/477/330 +f 1169/477/168 1060/476/168 1170/478/168 +f 1060/476/168 1054/479/168 1170/478/168 +f 1173/478/167 1055/479/331 1174/480/332 +f 1174/480/332 1055/479/331 1178/481/333 +f 1056/428/168 1063/430/168 1177/482/168 +f 1064/430/168 1067/431/168 1176/482/168 +f 1068/431/168 1074/432/168 1175/482/168 +f 1179/482/334 1075/432/276 1183/483/335 +f 1075/432/276 1076/433/275 1183/483/335 +f 1183/483/335 1076/433/275 1184/484/336 +f 1076/433/275 1077/434/237 1184/484/336 +f 1077/434/237 1079/435/277 1184/484/336 +f 1186/484/337 1078/435/338 1188/485/339 +f 1188/485/339 1078/435/338 1189/486/340 +f 1078/435/338 1081/436/279 1189/486/340 +f 1081/436/279 1083/437/281 1189/486/340 +f 1190/486/341 1082/437/342 1191/487/343 +f 1082/437/342 1085/438/285 1191/487/343 +f 1085/438/285 1088/440/286 1191/487/343 +f 1192/487/344 1089/440/287 1193/488/345 +f 1089/440/287 1091/441/287 1193/488/345 +f 1195/488/346 1090/441/347 1197/489/348 +f 1090/441/347 1093/442/291 1197/489/348 +f 1093/442/291 1094/443/290 1197/489/348 +f 1201/490/287 1203/491/287 1205/492/349 +f 1202/491/350 1199/493/351 1204/492/352 +f 1207/494/352 1196/495/351 1208/496/353 +f 1197/489/348 1094/443/290 1210/497/353 +f 1210/497/353 1094/443/290 1211/498/354 
+f 1094/443/290 1095/444/292 1211/498/354 +f 1211/498/354 1095/444/292 1212/499/355 +f 1212/499/355 1095/444/292 1213/500/356 +f 1214/500/357 1096/444/293 1216/501/357 +f 1096/444/293 1097/445/294 1216/501/357 +f 1098/445/295 1100/446/296 1215/501/358 +f 1218/502/359 1101/447/296 1221/503/360 +f 1221/503/360 1223/504/361 1224/505/360 +f 1223/504/361 1229/506/362 1224/505/360 +f 1232/507/363 1227/508/363 1233/509/362 +f 1227/508/363 1228/510/364 1233/509/362 +f 1228/510/364 1222/511/361 1233/509/362 +f 1233/509/362 1222/511/361 1236/512/365 +f 1223/504/361 1221/503/360 1235/513/366 +f 1221/503/360 1101/447/296 1235/513/366 +f 1102/447/297 1103/449/297 1234/513/297 +f 1103/449/297 1105/450/322 1234/513/297 +f 1238/512/297 1106/514/297 1239/515/297 +f 1242/515/299 1107/514/299 1245/516/264 +f 1108/450/299 1110/451/264 1244/517/264 +f 1111/451/168 1114/452/168 1243/517/168 +f 1247/518/168 1115/519/168 1248/520/168 +f 1116/519/167 1119/454/185 1251/520/367 +f 1119/454/185 1120/455/185 1251/520/367 +f 1251/520/367 1120/455/185 1255/521/368 +f 1120/455/185 1121/456/302 1255/521/368 +f 1255/521/368 1121/456/302 1258/522/369 +f 1121/456/302 1122/457/305 1258/522/369 +f 1258/522/369 1122/457/305 1261/523/370 +f 1122/457/305 1123/458/307 1261/523/370 +f 1261/523/370 1123/458/307 1265/524/371 +f 1123/458/307 1125/459/372 1265/524/371 +f 1265/524/371 1125/459/372 1268/525/336 +f 1126/459/309 1128/460/310 1267/525/337 +f 1128/460/310 1130/461/337 1267/525/337 +f 1269/526/275 1268/525/336 1272/527/373 +f 1266/525/337 1131/461/337 1271/527/337 +f 1132/461/312 1135/462/312 1270/527/312 +f 1273/527/374 1136/462/312 1274/528/375 +f 1136/462/312 1138/463/314 1274/528/375 +f 1275/528/376 1137/463/377 1276/529/378 +f 1137/463/377 1139/464/379 1276/529/378 +f 1276/529/378 1139/464/379 1277/530/380 +f 1277/530/380 1139/464/379 1278/531/381 +f 1140/464/315 1141/465/316 1279/531/382 +f 1141/465/316 1143/466/383 1279/531/382 +f 1279/531/382 1143/466/383 1281/532/384 +f 1281/532/384 
1143/466/383 1283/533/323 +f 1143/466/383 1147/467/323 1283/533/323 +f 1148/467/323 1152/469/323 1282/533/323 +f 1284/533/385 1153/469/321 1285/534/386 +f 1153/469/321 1150/468/320 1285/534/386 +f 1285/534/386 1150/468/320 1286/535/387 +f 1150/468/320 1156/470/299 1286/535/387 +f 1286/535/387 1156/470/299 1289/536/247 +f 1156/470/299 1159/471/388 1289/536/247 +f 1160/537/389 1163/538/390 1288/539/264 +f 1163/538/390 1166/475/328 1288/539/264 +f 1166/475/328 1167/477/330 1288/539/264 +f 1168/477/168 1171/478/168 1287/539/168 +f 1291/539/168 1172/478/168 1293/540/168 +f 1299/541/250 1300/542/391 1301/543/392 +f 1300/542/391 1296/544/167 1301/543/392 +f 1304/545/249 1292/540/167 1306/546/248 +f 1292/540/167 1173/478/167 1306/546/248 +f 1173/478/167 1174/480/332 1306/546/248 +f 1308/547/393 1306/546/248 1310/548/394 +f 1306/546/248 1174/480/332 1310/548/394 +f 1310/548/394 1174/480/332 1312/549/395 +f 1312/549/395 1174/480/332 1315/550/336 +f 1174/480/332 1178/481/333 1315/550/336 +f 1179/482/334 1183/483/335 1314/551/336 +f 1180/483/337 1321/552/337 1313/551/337 +f 1316/550/396 1317/553/337 1322/554/397 +f 1323/554/398 1318/553/399 1324/555/400 +f 1325/556/401 1319/552/399 1326/557/401 +f 1326/557/401 1319/552/399 1330/558/399 +f 1320/552/312 1181/483/312 1329/558/312 +f 1182/483/337 1185/484/337 1328/558/337 +f 1186/484/337 1188/485/339 1327/558/337 +f 1330/558/399 1187/485/399 1332/559/402 +f 1187/485/399 1190/486/341 1332/559/402 +f 1332/559/402 1190/486/341 1333/560/403 +f 1190/486/341 1191/487/343 1333/560/403 +f 1334/560/404 1192/487/344 1335/561/405 +f 1192/487/344 1193/488/345 1335/561/405 +f 1337/562/406 1194/563/346 1338/564/407 +f 1194/563/346 1198/565/408 1338/564/407 +f 1339/566/409 1199/493/351 1340/567/410 +f 1199/493/351 1202/491/350 1340/567/410 +f 1341/567/411 1203/491/287 1342/568/412 +f 1203/491/287 1201/490/287 1342/568/412 +f 1343/569/413 1200/570/347 1344/571/414 +f 1346/572/415 1344/571/414 1347/573/416 +f 1344/571/414 1200/570/347 1347/573/416 
+f 1347/573/416 1200/570/347 1349/574/417 +f 1201/490/287 1205/492/349 1348/575/289 +f 1206/576/349 1209/577/418 1351/578/289 +f 1353/579/419 1351/578/289 1354/580/293 +f 1351/578/289 1209/577/418 1354/580/293 +f 1210/497/353 1211/498/354 1355/581/420 +f 1355/581/420 1211/498/354 1357/582/421 +f 1211/498/354 1212/499/355 1357/582/421 +f 1357/582/421 1212/499/355 1358/583/422 +f 1212/499/355 1363/584/322 1358/583/422 +f 1361/585/422 1362/586/325 1364/587/325 +f 1365/588/423 1364/587/325 1371/589/424 +f 1364/587/325 1362/586/325 1371/589/424 +f 1363/584/322 1212/499/355 1370/590/424 +f 1212/499/355 1213/500/356 1370/590/424 +f 1214/500/357 1216/501/357 1369/590/425 +f 1217/502/357 1219/503/425 1368/591/425 +f 1220/503/425 1225/505/425 1367/591/425 +f 1226/508/424 1230/507/424 1366/589/424 +f 1373/589/425 1231/507/425 1374/592/425 +f 1232/507/363 1233/509/362 1377/592/360 +f 1377/592/360 1233/509/362 1379/593/426 +f 1233/509/362 1236/512/365 1379/593/426 +f 1237/512/297 1240/515/297 1378/593/297 +f 1380/593/297 1241/515/297 1381/594/297 +f 1386/595/427 1384/596/323 1388/597/428 +f 1381/594/297 1241/515/297 1387/598/429 +f 1390/598/430 1242/515/299 1393/599/431 +f 1242/515/299 1245/516/264 1393/599/431 +f 1246/518/168 1249/520/168 1392/600/168 +f 1250/520/168 1252/521/168 1391/600/168 +f 1395/600/168 1253/521/168 1400/601/168 +f 1254/521/168 1256/522/168 1399/601/168 +f 1257/522/168 1259/523/168 1398/601/168 +f 1260/523/168 1262/524/168 1397/601/168 +f 1402/602/432 1401/601/433 1404/603/169 +f 1396/601/168 1263/524/168 1403/603/168 +f 1406/603/168 1264/524/168 1407/604/168 +f 1265/524/371 1268/525/336 1410/604/434 +f 1268/525/336 1269/526/275 1410/604/434 +f 1410/604/434 1269/526/275 1414/605/219 +f 1269/526/275 1272/527/373 1414/605/219 +f 1272/527/373 1275/528/376 1414/605/219 +f 1275/528/376 1276/529/378 1414/605/219 +f 1414/605/219 1276/529/378 1417/606/435 +f 1417/606/435 1276/529/378 1420/607/436 +f 1276/529/378 1277/530/380 1420/607/436 +f 1420/607/436 
1277/530/380 1422/608/437 +f 1422/608/437 1277/530/380 1425/609/438 +f 1425/609/438 1277/530/380 1428/610/247 +f 1277/530/380 1278/531/381 1428/610/247 +f 1278/531/381 1280/532/439 1428/610/247 +f 1280/532/439 1284/533/385 1428/610/247 +f 1284/533/385 1285/534/386 1428/610/247 +f 1428/610/247 1285/534/386 1431/611/264 +f 1285/534/386 1286/535/387 1431/611/264 +f 1286/535/387 1289/536/247 1430/611/264 +f 1290/536/168 1294/544/168 1429/611/168 +f 1434/611/168 1295/544/168 1435/612/168 +f 1296/544/167 1300/542/391 1437/612/167 +f 1437/612/167 1300/542/391 1438/613/391 +f 1300/542/391 1299/541/250 1438/613/391 +f 1438/613/391 1299/541/250 1440/614/392 +f 1443/615/440 1448/616/441 1449/617/442 +f 1451/618/443 1444/619/441 1455/620/441 +f 1445/619/444 1442/614/444 1454/620/444 +f 1439/614/444 1297/541/444 1453/620/444 +f 1298/541/208 1302/543/208 1452/620/208 +f 1457/621/444 1303/545/444 1459/622/208 +f 1303/545/444 1305/546/445 1459/622/208 +f 1305/546/445 1307/547/446 1459/622/208 +f 1459/622/208 1307/547/446 1461/623/447 +f 1307/547/446 1309/548/448 1461/623/447 +f 1461/623/447 1309/548/448 1463/624/449 +f 1463/624/449 1309/548/448 1465/625/450 +f 1309/548/448 1311/549/451 1465/625/450 +f 1465/625/450 1311/549/451 1466/626/452 +f 1311/549/451 1316/550/396 1466/626/452 +f 1316/550/396 1322/554/397 1466/626/452 +f 1467/626/453 1323/554/398 1469/627/454 +f 1469/627/454 1323/554/398 1470/628/455 +f 1323/554/398 1324/555/400 1470/628/455 +f 1472/629/456 1325/556/401 1476/630/457 +f 1325/556/401 1326/557/401 1476/630/457 +f 1476/630/457 1326/557/401 1477/631/458 +f 1326/557/401 1330/558/399 1477/631/458 +f 1330/558/399 1332/559/402 1477/631/458 +f 1478/631/459 1331/559/460 1480/632/459 +f 1480/632/459 1331/559/460 1481/633/460 +f 1331/559/460 1334/560/404 1481/633/460 +f 1334/560/404 1335/561/405 1481/633/460 +f 1484/634/461 1483/635/462 1485/636/463 +f 1483/635/462 1336/637/464 1485/636/463 +f 1336/637/464 1339/566/409 1485/636/463 +f 1485/636/463 1339/566/409 1486/638/465 
+f 1339/566/409 1340/567/410 1486/638/465 +f 1341/567/411 1342/568/412 1487/638/466 +f 1487/638/466 1342/568/412 1488/639/466 +f 1490/640/467 1343/569/413 1491/641/468 +f 1343/569/413 1344/571/414 1491/641/468 +f 1491/641/468 1344/571/414 1492/642/469 +f 1344/571/414 1346/572/415 1492/642/469 +f 1495/643/470 1493/642/471 1496/644/472 +f 1493/642/471 1345/572/473 1496/644/472 +f 1345/572/473 1499/645/473 1496/644/472 +f 1497/644/474 1498/645/475 1500/646/476 +f 1501/647/477 1502/648/478 1503/649/479 +f 1502/648/478 1500/646/476 1503/649/479 +f 1504/650/480 1503/649/479 1505/651/481 +f 1503/649/479 1500/646/476 1505/651/481 +f 1505/651/481 1500/646/476 1506/652/482 +f 1500/646/476 1498/645/475 1506/652/482 +f 1506/652/482 1498/645/475 1508/653/483 +f 1499/645/473 1345/572/473 1507/653/484 +f 1346/572/415 1347/573/416 1509/653/485 +f 1509/653/485 1347/573/416 1510/654/486 +f 1347/573/416 1349/574/417 1510/654/486 +f 1511/655/487 1350/578/488 1512/656/489 +f 1350/578/488 1352/579/490 1512/656/489 +f 1512/656/489 1352/579/490 1513/657/491 +f 1353/579/419 1354/580/293 1514/657/357 +f 1354/580/293 1356/658/357 1514/657/357 +f 1356/658/357 1359/659/425 1514/657/357 +f 1514/657/357 1359/659/425 1516/660/425 +f 1522/661/425 1518/660/425 1525/662/425 +f 1515/660/425 1360/659/425 1524/662/425 +f 1361/585/422 1364/587/325 1523/663/492 +f 1528/663/422 1364/587/325 1529/588/325 +f 1364/587/325 1365/588/423 1529/588/325 +f 1530/664/325 1529/588/325 1532/665/493 +f 1529/588/325 1365/588/423 1532/665/493 +f 1365/588/423 1371/589/424 1532/665/493 +f 1372/589/424 1375/592/424 1531/665/424 +f 1533/665/424 1376/592/424 1535/666/494 +f 1377/592/360 1379/593/426 1534/666/495 +f 1379/593/426 1382/594/496 1534/666/495 +f 1383/596/496 1385/595/497 1536/667/498 +f 1536/667/498 1385/595/497 1538/668/499 +f 1540/669/500 1538/668/499 1541/670/501 +f 1538/668/499 1385/595/497 1541/670/501 +f 1542/670/502 1386/595/427 1543/671/503 +f 1386/595/427 1388/597/428 1543/671/503 +f 1544/671/504 
1389/597/505 1545/672/506 +f 1389/597/505 1394/600/264 1545/672/506 +f 1394/600/264 1401/601/433 1545/672/506 +f 1401/601/433 1402/602/432 1545/672/506 +f 1545/672/506 1402/602/432 1546/673/155 +f 1546/673/155 1402/602/432 1547/674/171 +f 1547/674/171 1402/602/432 1548/675/432 +f 1402/602/432 1404/603/169 1548/675/432 +f 1548/675/432 1404/603/169 1551/676/169 +f 1405/603/168 1408/604/168 1550/676/168 +f 1409/604/168 1411/605/168 1549/676/168 +f 1552/676/168 1412/605/168 1556/677/168 +f 1413/605/168 1415/606/168 1555/677/168 +f 1416/606/168 1418/607/168 1554/677/168 +f 1419/607/168 1421/608/168 1553/677/168 +f 1558/678/507 1557/677/234 1559/679/508 +f 1553/677/168 1421/608/168 1560/679/168 +f 1560/679/168 1421/608/168 1564/680/168 +f 1421/608/168 1423/609/168 1564/680/168 +f 1424/609/168 1426/610/168 1563/680/168 +f 1427/610/168 1432/611/168 1562/680/168 +f 1433/611/168 1436/612/168 1561/680/168 +f 1437/612/167 1438/613/391 1565/680/509 +f 1438/613/391 1440/614/392 1565/680/509 +f 1565/680/509 1440/614/392 1566/681/510 +f 1441/614/444 1446/619/207 1567/681/511 +f 1567/681/511 1446/619/207 1568/682/512 +f 1569/682/513 1447/619/514 1570/683/515 +f 1448/616/441 1443/615/440 1572/684/516 +f 1572/684/516 1443/615/440 1576/685/517 +f 1576/685/517 1443/615/440 1577/686/517 +f 1443/615/440 1449/617/442 1577/686/517 +f 1581/687/518 1450/688/519 1584/689/518 +f 1450/688/519 1456/621/441 1584/689/518 +f 1456/621/441 1458/622/441 1584/689/518 +f 1584/689/518 1458/622/441 1587/690/520 +f 1458/622/441 1460/623/521 1587/690/520 +f 1587/690/520 1460/623/521 1589/691/522 +f 1589/691/522 1460/623/521 1591/692/523 +f 1460/623/521 1462/624/524 1591/692/523 +f 1462/624/524 1464/625/525 1591/692/523 +f 1591/692/523 1464/625/525 1593/693/526 +f 1593/693/526 1464/625/525 1595/694/527 +f 1464/625/525 1467/626/453 1595/694/527 +f 1595/694/527 1467/626/453 1596/695/528 +f 1467/626/453 1469/627/454 1596/695/528 +f 1597/696/529 1468/697/530 1599/698/529 +f 1599/698/529 1468/697/530 1601/699/531 
+f 1468/697/530 1471/629/532 1601/699/531 +f 1471/629/532 1473/630/459 1601/699/531 +f 1601/699/531 1473/630/459 1602/700/459 +f 1604/700/459 1474/630/459 1605/701/459 +f 1607/701/533 1475/630/533 1608/702/534 +f 1475/630/533 1478/631/459 1608/702/534 +f 1478/631/459 1480/632/459 1608/702/534 +f 1609/702/535 1479/632/535 1610/703/536 +f 1479/632/535 1482/633/537 1610/703/536 +f 1483/635/462 1484/634/461 1612/704/538 +f 1612/704/538 1484/634/461 1617/705/539 +f 1617/705/539 1484/634/461 1618/706/540 +f 1484/634/461 1485/636/463 1618/706/540 +f 1485/636/463 1486/638/465 1618/706/540 +f 1487/638/466 1488/639/466 1619/706/460 +f 1619/706/460 1488/639/466 1620/707/460 +f 1622/708/541 1621/707/542 1623/709/543 +f 1621/707/542 1489/639/467 1623/709/543 +f 1490/640/467 1491/641/468 1624/710/544 +f 1624/710/544 1491/641/468 1625/711/545 +f 1491/641/468 1492/642/469 1625/711/545 +f 1493/642/471 1495/643/470 1626/711/546 +f 1626/711/546 1495/643/470 1627/712/547 +f 1629/713/548 1630/714/549 1631/715/550 +f 1631/715/550 1630/714/549 1632/716/551 +f 1630/714/549 1634/717/552 1632/716/551 +f 1634/717/552 1628/712/553 1632/716/551 +f 1632/716/551 1628/712/553 1635/718/554 +f 1628/712/553 1494/643/555 1635/718/554 +f 1494/643/555 1497/644/474 1635/718/554 +f 1497/644/474 1500/646/476 1635/718/554 +f 1500/646/476 1502/648/478 1635/718/554 +f 1502/648/478 1501/647/477 1635/718/554 +f 1635/718/554 1501/647/477 1640/719/556 +f 1640/719/556 1501/647/477 1641/720/557 +f 1501/647/477 1503/649/479 1641/720/557 +f 1641/720/557 1503/649/479 1642/721/558 +f 1503/649/479 1504/650/480 1642/721/558 +f 1642/721/558 1504/650/480 1643/722/559 +f 1504/650/480 1505/651/481 1643/722/559 +f 1505/651/481 1506/652/482 1643/722/559 +f 1506/652/482 1508/653/483 1643/722/559 +f 1643/722/559 1508/653/483 1646/723/560 +f 1509/653/485 1510/654/486 1645/723/561 +f 1511/655/487 1512/656/489 1644/724/561 +f 1648/725/562 1647/724/563 1649/726/564 +f 1644/724/561 1512/656/489 1650/726/565 +f 1652/727/566 
1650/726/565 1653/728/567 +f 1650/726/565 1512/656/489 1653/728/567 +f 1512/656/489 1513/657/491 1653/728/567 +f 1653/728/567 1513/657/491 1654/729/322 +f 1513/657/491 1517/660/568 1654/729/322 +f 1654/729/322 1517/660/568 1655/661/569 +f 1517/660/568 1519/661/570 1655/661/569 +f 1655/661/569 1520/661/570 1657/730/571 +f 1521/661/425 1526/662/425 1656/730/425 +f 1659/730/425 1527/662/425 1660/731/425 +f 1528/663/422 1529/588/325 1663/732/422 +f 1529/588/325 1530/664/325 1663/732/422 +f 1663/732/422 1530/664/325 1666/733/422 +f 1668/734/572 1666/733/422 1669/735/573 +f 1666/733/422 1530/664/325 1669/735/573 +f 1530/664/325 1532/665/493 1669/735/573 +f 1669/735/573 1532/665/493 1670/736/574 +f 1533/665/424 1535/666/494 1671/736/494 +f 1671/736/494 1535/666/494 1673/737/575 +f 1535/666/494 1537/738/576 1673/737/575 +f 1673/737/575 1537/738/576 1675/739/576 +f 1675/739/576 1537/738/576 1677/740/577 +f 1537/738/576 1539/741/578 1677/740/577 +f 1677/740/577 1539/741/578 1678/742/577 +f 1679/743/579 1540/669/500 1680/744/580 +f 1540/669/500 1541/670/501 1680/744/580 +f 1542/670/502 1543/671/503 1681/744/581 +f 1681/744/581 1543/671/503 1682/745/582 +f 1544/671/504 1545/672/506 1683/745/583 +f 1545/672/506 1546/673/155 1683/745/583 +f 1683/745/583 1546/673/155 1685/746/584 +f 1546/673/155 1547/674/171 1685/746/584 +f 1685/746/584 1547/674/171 1688/747/585 +f 1688/747/585 1547/674/171 1691/748/586 +f 1547/674/171 1548/675/432 1691/748/586 +f 1548/675/432 1551/676/169 1691/748/586 +f 1691/748/586 1551/676/169 1694/749/156 +f 1551/676/169 1557/677/234 1694/749/156 +f 1557/677/234 1558/678/507 1694/749/156 +f 1694/749/156 1558/678/507 1696/750/587 +f 1696/750/587 1558/678/507 1698/751/588 +f 1698/751/588 1558/678/507 1699/752/589 +f 1558/678/507 1559/679/508 1699/752/589 +f 1559/679/508 1565/680/509 1699/752/589 +f 1565/680/509 1566/681/510 1699/752/589 +f 1700/752/590 1567/681/511 1701/753/591 +f 1567/681/511 1568/682/512 1701/753/591 +f 1704/754/592 1702/753/593 1705/755/594 
+f 1702/753/593 1569/682/513 1705/755/594 +f 1569/682/513 1570/683/515 1705/755/594 +f 1707/756/595 1571/684/596 1709/757/597 +f 1709/757/597 1571/684/596 1711/758/596 +f 1711/758/596 1571/684/596 1712/759/598 +f 1571/684/596 1573/685/598 1712/759/598 +f 1714/759/599 1574/685/599 1716/760/599 +f 1575/685/599 1578/686/599 1715/760/599 +f 1719/761/598 1579/687/598 1721/762/598 +f 1580/687/598 1582/689/598 1720/762/598 +f 1583/689/598 1585/690/600 1723/762/598 +f 1723/762/598 1585/690/600 1724/763/600 +f 1727/764/600 1586/765/600 1729/766/601 +f 1586/765/600 1588/767/602 1729/766/601 +f 1588/767/602 1590/768/603 1729/766/601 +f 1729/766/601 1590/768/603 1731/769/603 +f 1590/768/603 1592/770/604 1731/769/603 +f 1731/769/603 1592/770/604 1733/771/604 +f 1733/771/604 1592/770/604 1734/772/605 +f 1592/770/604 1594/773/606 1734/772/605 +f 1594/773/606 1597/696/529 1734/772/605 +f 1597/696/529 1599/698/529 1734/772/605 +f 1735/772/607 1598/698/608 1736/774/609 +f 1598/698/608 1600/699/610 1736/774/609 +f 1736/774/609 1600/699/610 1737/775/611 +f 1737/775/611 1600/699/610 1738/776/373 +f 1600/699/610 1603/700/612 1738/776/373 +f 1738/776/373 1603/700/612 1739/777/373 +f 1739/777/373 1603/700/612 1740/778/538 +f 1603/700/612 1606/701/613 1740/778/538 +f 1606/701/613 1609/702/535 1740/778/538 +f 1609/702/535 1610/703/536 1740/778/538 +f 1740/778/538 1610/703/536 1743/779/539 +f 1611/780/538 1613/781/539 1742/782/539 +f 1614/783/533 1749/784/533 1741/785/533 +f 1745/785/533 1746/784/533 1750/786/533 +f 1754/786/533 1747/784/533 1757/787/533 +f 1748/784/533 1615/783/533 1756/787/533 +f 1616/705/533 1619/706/460 1755/788/533 +f 1619/706/460 1620/707/460 1755/788/533 +f 1621/707/542 1622/708/541 1759/788/614 +f 1759/788/614 1622/708/541 1761/789/615 +f 1761/789/615 1622/708/541 1762/708/461 +f 1622/708/541 1623/709/543 1762/708/461 +f 1762/708/461 1623/709/543 1764/790/616 +f 1624/710/544 1625/711/545 1763/791/540 +f 1626/711/546 1627/712/547 1765/791/460 +f 1627/712/547 
1633/717/617 1765/791/460 +f 1765/791/460 1633/717/617 1766/714/618 +f 1634/717/552 1630/714/549 1767/714/619 +f 1767/714/619 1630/714/549 1768/792/620 +f 1630/714/549 1629/713/548 1768/792/620 +f 1768/792/620 1629/713/548 1770/793/621 +f 1770/793/621 1629/713/548 1771/794/622 +f 1629/713/548 1631/715/550 1771/794/622 +f 1771/794/622 1631/715/550 1772/795/623 +f 1772/795/623 1631/715/550 1775/796/624 +f 1631/715/550 1632/716/551 1775/796/624 +f 1775/796/624 1632/716/551 1776/797/625 +f 1632/716/551 1635/718/554 1776/797/625 +f 1635/718/554 1640/719/556 1776/797/625 +f 1779/797/626 1636/719/626 1781/798/626 +f 1637/719/626 1786/799/626 1780/798/626 +f 1787/800/627 1783/798/628 1788/801/629 +f 1789/802/630 1788/801/629 1790/803/631 +f 1788/801/629 1783/798/628 1790/803/631 +f 1783/798/628 1784/799/632 1790/803/631 +f 1792/804/633 1790/803/631 1793/805/634 +f 1790/803/631 1784/799/632 1793/805/634 +f 1793/805/634 1784/799/632 1795/806/635 +f 1785/799/636 1638/719/636 1794/806/636 +f 1797/806/636 1639/719/636 1798/807/636 +f 1640/719/556 1641/720/557 1801/807/637 +f 1801/807/637 1641/720/557 1804/808/638 +f 1641/720/557 1642/721/558 1804/808/638 +f 1804/808/638 1642/721/558 1806/809/639 +f 1806/809/639 1642/721/558 1807/810/640 +f 1642/721/558 1643/722/559 1807/810/640 +f 1643/722/559 1646/723/560 1807/810/640 +f 1647/724/563 1648/725/562 1808/725/641 +f 1808/725/641 1648/725/562 1809/811/642 +f 1648/725/562 1649/726/564 1809/811/642 +f 1649/726/564 1651/727/643 1809/811/642 +f 1810/812/644 1809/811/642 1811/812/645 +f 1809/811/642 1651/727/643 1811/812/645 +f 1652/727/566 1653/728/567 1812/812/646 +f 1653/728/567 1654/729/322 1812/812/646 +f 1654/729/322 1813/813/647 1812/812/646 +f 1812/812/646 1813/813/647 1814/813/422 +f 1816/813/648 1813/813/647 1820/814/648 +f 1820/814/648 1813/813/647 1821/815/649 +f 1813/813/647 1654/729/322 1821/815/649 +f 1654/729/322 1655/661/569 1821/815/649 +f 1655/661/569 1657/730/571 1821/815/649 +f 1821/815/649 1657/730/571 1822/816/649 
+f 1823/817/649 1822/816/649 1826/818/568 +f 1822/816/649 1657/730/571 1826/818/568 +f 1658/730/425 1661/731/425 1825/818/425 +f 1662/731/425 1664/819/425 1824/818/425 +f 1665/819/425 1667/820/494 1827/818/425 +f 1667/820/494 1828/821/494 1827/818/425 +f 1827/818/425 1828/821/494 1832/822/494 +f 1832/822/494 1828/821/494 1833/823/575 +f 1828/821/494 1667/820/494 1833/823/575 +f 1835/824/650 1668/734/572 1836/825/651 +f 1668/734/572 1669/735/573 1836/825/651 +f 1669/735/573 1670/736/574 1836/825/651 +f 1670/736/574 1672/737/652 1836/825/651 +f 1672/737/652 1674/739/653 1836/825/651 +f 1836/825/651 1674/739/653 1837/826/654 +f 1674/739/653 1676/740/655 1837/826/654 +f 1837/826/654 1676/740/655 1838/827/656 +f 1838/827/656 1676/740/655 1839/828/657 +f 1840/828/658 1677/740/577 1841/829/658 +f 1677/740/577 1678/742/577 1841/829/658 +f 1679/743/579 1680/744/580 1843/830/659 +f 1843/830/659 1680/744/580 1844/831/660 +f 1681/744/581 1682/745/582 1845/831/661 +f 1682/745/582 1684/746/662 1845/831/661 +f 1845/831/661 1684/746/662 1847/832/663 +f 1684/746/662 1686/747/193 1847/832/663 +f 1847/832/663 1686/747/193 1850/833/193 +f 1687/747/193 1689/748/193 1849/833/193 +f 1852/834/664 1851/833/665 1853/835/665 +f 1848/833/193 1690/748/193 1854/835/666 +f 1690/748/193 1692/749/193 1854/835/666 +f 1854/835/666 1692/749/193 1855/836/193 +f 1857/836/193 1693/749/667 1859/837/668 +f 1693/749/667 1695/750/669 1859/837/668 +f 1859/837/668 1695/750/669 1861/838/670 +f 1695/750/669 1697/751/671 1861/838/670 +f 1861/838/670 1697/751/671 1862/839/672 +f 1697/751/671 1700/752/590 1862/839/672 +f 1700/752/590 1701/753/591 1862/839/672 +f 1702/753/593 1704/754/592 1863/839/673 +f 1863/839/673 1704/754/592 1864/840/674 +f 1867/841/675 1865/840/675 1869/842/676 +f 1865/840/675 1703/754/676 1869/842/676 +f 1703/754/676 1706/755/677 1869/842/676 +f 1707/756/595 1709/757/597 1868/843/678 +f 1871/843/679 1708/757/680 1872/844/681 +f 1872/844/681 1708/757/680 1873/845/682 +f 1708/757/680 
1710/758/683 1873/845/682 +f 1873/845/682 1710/758/683 1874/846/684 +f 1710/758/683 1713/759/685 1874/846/684 +f 1713/759/685 1717/760/685 1874/846/684 +f 1874/846/684 1717/760/685 1875/847/686 +f 1718/761/685 1722/762/685 1876/848/686 +f 1876/848/686 1722/762/685 1877/849/687 +f 1877/849/687 1722/762/685 1878/850/688 +f 1722/762/685 1725/763/689 1878/850/688 +f 1879/851/690 1726/764/691 1880/852/692 +f 1726/764/691 1728/766/693 1880/852/692 +f 1881/853/694 1880/852/692 1882/854/695 +f 1880/852/692 1728/766/693 1882/854/695 +f 1728/766/693 1730/769/696 1882/854/695 +f 1882/854/695 1730/769/696 1883/855/697 +f 1730/769/696 1732/771/698 1883/855/697 +f 1732/771/698 1735/772/607 1883/855/697 +f 1735/772/607 1736/774/609 1883/855/697 +f 1883/855/697 1736/774/609 1886/856/699 +f 1886/856/699 1736/774/609 1888/857/700 +f 1736/774/609 1737/775/611 1888/857/700 +f 1737/775/611 1738/776/373 1888/857/700 +f 1888/857/700 1738/776/373 1890/858/701 +f 1890/858/701 1738/776/373 1894/859/539 +f 1738/776/373 1739/777/373 1894/859/539 +f 1894/859/539 1739/777/373 1896/860/702 +f 1739/777/373 1740/778/538 1896/860/702 +f 1740/778/538 1743/779/539 1896/860/702 +f 1744/785/533 1751/786/533 1895/861/533 +f 1899/861/533 1752/786/533 1900/862/533 +f 1902/862/613 1753/786/703 1903/863/373 +f 1753/786/703 1758/787/535 1903/863/373 +f 1903/863/373 1758/787/535 1904/864/536 +f 1758/787/535 1760/865/704 1904/864/536 +f 1761/789/615 1762/708/461 1905/866/705 +f 1905/866/705 1762/708/461 1906/867/706 +f 1762/708/461 1764/790/616 1906/867/706 +f 1765/791/460 1766/714/618 1910/868/533 +f 1910/868/533 1766/714/618 1911/869/533 +f 1767/714/619 1768/792/620 1914/869/707 +f 1914/869/707 1768/792/620 1915/870/708 +f 1768/792/620 1770/793/621 1915/870/708 +f 1916/871/709 1769/872/621 1917/873/710 +f 1917/873/710 1769/872/621 1918/874/711 +f 1918/874/711 1769/872/621 1919/875/712 +f 1770/793/621 1771/794/622 1920/876/712 +f 1920/876/712 1771/794/622 1923/794/713 +f 1771/794/622 1772/795/623 1923/794/713 
+f 1923/794/713 1772/795/623 1924/877/714 +f 1772/795/623 1775/796/624 1924/877/714 +f 1927/877/636 1773/796/636 1930/878/636 +f 1774/796/636 1777/797/636 1929/878/636 +f 1778/797/715 1782/798/715 1928/878/715 +f 1783/798/628 1787/800/627 1932/878/716 +f 1933/879/717 1932/878/716 1934/880/718 +f 1934/880/718 1932/878/716 1935/881/719 +f 1932/878/716 1787/800/627 1935/881/719 +f 1935/881/719 1787/800/627 1936/882/720 +f 1936/882/720 1787/800/627 1937/883/721 +f 1787/800/627 1788/801/629 1937/883/721 +f 1788/801/629 1789/802/630 1937/883/721 +f 1937/883/721 1789/802/630 1938/884/722 +f 1938/884/722 1789/802/630 1939/885/723 +f 1789/802/630 1790/803/631 1939/885/723 +f 1790/803/631 1792/804/633 1939/885/723 +f 1939/885/723 1792/804/633 1940/886/724 +f 1941/886/725 1791/804/726 1942/887/727 +f 1945/888/728 1943/887/729 1946/889/730 +f 1946/889/730 1943/887/729 1947/890/731 +f 1948/890/732 1942/887/727 1949/891/733 +f 1942/887/727 1791/804/726 1949/891/733 +f 1792/804/633 1793/805/634 1950/891/734 +f 1950/891/734 1793/805/634 1951/892/735 +f 1793/805/634 1795/806/635 1951/892/735 +f 1951/892/735 1795/806/635 1954/893/736 +f 1796/806/636 1799/807/636 1953/893/636 +f 1800/807/636 1802/808/636 1952/893/636 +f 1955/893/636 1803/808/636 1957/894/636 +f 1804/808/638 1806/809/639 1956/894/737 +f 1960/895/738 1805/896/739 1961/897/740 +f 1805/896/739 1808/725/641 1961/897/740 +f 1808/725/641 1809/811/642 1961/897/740 +f 1809/811/642 1810/812/644 1961/897/740 +f 1961/897/740 1810/812/644 1962/898/386 +f 1962/898/386 1810/812/644 1963/899/644 +f 1810/812/644 1811/812/645 1963/899/644 +f 1811/812/645 1815/813/741 1963/899/644 +f 1963/899/644 1815/813/741 1964/900/742 +f 1815/813/741 1817/814/741 1964/900/742 +f 1966/900/743 1818/814/741 1967/901/741 +f 1819/814/744 1821/815/649 1969/901/646 +f 1821/815/649 1822/816/649 1969/901/646 +f 1822/816/649 1823/817/649 1969/901/646 +f 1973/902/745 1969/901/646 1974/903/573 +f 1969/901/646 1823/817/649 1974/903/573 +f 1823/817/649 
1826/818/568 1974/903/573 +f 1826/818/568 1829/822/746 1974/903/573 +f 1975/904/573 1830/905/747 1976/906/652 +f 1831/905/494 1834/907/575 1977/906/575 +f 1979/908/576 1977/906/575 1980/909/576 +f 1977/906/575 1834/907/575 1980/909/576 +f 1835/824/650 1836/825/651 1981/910/748 +f 1836/825/651 1837/826/654 1981/910/748 +f 1981/910/748 1837/826/654 1983/911/749 +f 1837/826/654 1838/827/656 1983/911/749 +f 1983/911/749 1838/827/656 1985/912/750 +f 1985/912/750 1838/827/656 1987/913/751 +f 1987/913/751 1838/827/656 1988/914/752 +f 1838/827/656 1839/828/657 1988/914/752 +f 1988/914/752 1839/828/657 1990/915/753 +f 1840/828/658 1841/829/658 1989/915/754 +f 1992/916/755 1842/830/756 1993/917/757 +f 1843/830/659 1844/831/660 1994/917/758 +f 1844/831/660 1846/832/665 1994/917/758 +f 1994/917/758 1846/832/665 1996/918/759 +f 1996/918/759 1846/832/665 2000/919/760 +f 1846/832/665 1851/833/665 2000/919/760 +f 1851/833/665 1852/834/664 2000/919/760 +f 2000/919/760 1852/834/664 2005/920/760 +f 1852/834/664 1853/835/665 2005/920/760 +f 1853/835/665 1856/836/149 2005/920/760 +f 2005/920/760 1856/836/149 2007/921/761 +f 1856/836/149 1858/837/762 2007/921/761 +f 2007/921/761 1858/837/762 2009/922/763 +f 1858/837/762 1860/838/764 2009/922/763 +f 2009/922/763 1860/838/764 2010/923/765 +f 1860/838/764 1863/839/673 2010/923/765 +f 1863/839/673 1864/840/674 2010/923/765 +f 2011/923/766 1865/840/675 2012/924/767 +f 1865/840/675 1867/841/675 2012/924/767 +f 2013/924/768 1866/841/769 2015/925/770 +f 1866/841/769 1870/842/679 2015/925/770 +f 2015/925/770 1870/842/679 2016/926/771 +f 1871/843/679 1872/844/681 2018/927/772 +f 2018/927/772 1872/844/681 2021/928/773 +f 2021/928/773 1872/844/681 2023/929/774 +f 1872/844/681 1873/845/682 2023/929/774 +f 2023/929/774 1873/845/682 2026/930/775 +f 2026/930/775 1873/845/682 2028/931/776 +f 1873/845/682 1874/846/684 2028/931/776 +f 1874/846/684 1875/847/686 2028/931/776 +f 2033/932/598 2030/933/598 2034/934/598 +f 2037/934/599 2027/933/599 2039/935/599 
+f 2028/931/776 1875/847/686 2038/936/777 +f 1876/848/686 1877/849/687 2042/937/777 +f 2042/937/777 1877/849/687 2043/938/185 +f 1877/849/687 1878/850/688 2043/938/185 +f 1879/851/690 1880/852/692 2045/939/185 +f 1880/852/692 1881/853/694 2045/939/185 +f 2045/939/185 1881/853/694 2048/940/778 +f 2048/940/778 1881/853/694 2050/941/779 +f 2050/941/779 1881/853/694 2052/942/780 +f 1881/853/694 1882/854/695 2052/942/780 +f 1882/854/695 1883/855/697 2052/942/780 +f 2052/942/780 1883/855/697 2053/943/781 +f 1883/855/697 1886/856/699 2053/943/781 +f 2056/944/604 2054/945/604 2057/946/605 +f 2054/945/604 1884/947/782 2057/946/605 +f 2060/948/783 1885/949/784 2062/950/529 +f 1885/949/784 1887/951/530 2062/950/529 +f 2062/950/529 1887/951/530 2064/952/531 +f 1887/951/530 1889/953/785 2064/952/531 +f 2064/952/531 1889/953/785 2065/954/533 +f 1889/953/785 1891/955/533 2065/954/533 +f 2067/954/533 1892/955/533 2070/956/533 +f 1893/955/533 1897/861/533 2069/956/533 +f 1898/861/533 1901/862/533 2068/956/533 +f 2071/956/786 1902/862/613 2072/957/373 +f 1902/862/613 1903/863/373 2072/957/373 +f 2072/957/373 1903/863/373 2073/958/702 +f 1903/863/373 1904/864/536 2073/958/702 +f 1904/864/536 1907/959/706 2073/958/702 +f 2077/960/533 1908/961/533 2080/962/533 +f 1909/961/533 1912/963/533 2079/962/533 +f 1913/963/707 1916/871/709 2078/962/707 +f 1916/871/709 1917/873/710 2078/962/707 +f 2083/962/533 1917/873/710 2085/964/533 +f 1917/873/710 1918/874/711 2085/964/533 +f 2085/964/533 1918/874/711 2086/965/373 +f 2086/965/373 1918/874/711 2088/966/787 +f 2088/966/787 1918/874/711 2090/967/788 +f 1918/874/711 1919/875/712 2090/967/788 +f 2090/967/788 1919/875/712 2092/968/789 +f 1920/876/712 1923/794/713 2091/969/790 +f 2094/969/791 1921/794/636 2096/970/792 +f 2096/970/792 1921/794/636 2099/971/636 +f 1922/794/636 1925/877/636 2098/971/636 +f 1926/877/636 1931/878/636 2097/971/636 +f 1932/878/716 1933/879/717 2100/971/793 +f 2100/971/793 1933/879/717 2101/972/794 +f 2101/972/794 
1933/879/717 2102/973/795 +f 1933/879/717 1934/880/718 2102/973/795 +f 2102/973/795 1934/880/718 2103/974/796 +f 1934/880/718 1935/881/719 2103/974/796 +f 2105/974/797 2103/974/796 2107/975/798 +f 2103/974/796 1935/881/719 2107/975/798 +f 2107/975/798 1935/881/719 2109/976/799 +f 1935/881/719 1936/882/720 2109/976/799 +f 1936/882/720 1937/883/721 2109/976/799 +f 2109/976/799 1937/883/721 2110/977/800 +f 1937/883/721 1938/884/722 2110/977/800 +f 1938/884/722 1939/885/723 2110/977/800 +f 1939/885/723 1940/886/724 2110/977/800 +f 2111/977/801 1941/886/725 2112/978/802 +f 1941/886/725 1942/887/727 2112/978/802 +f 2113/978/803 1943/887/729 2114/979/804 +f 1943/887/729 1945/888/728 2114/979/804 +f 2116/980/805 2115/981/804 2117/982/806 +f 2115/981/804 1944/983/807 2117/982/806 +f 2117/982/806 1944/983/807 2118/984/808 +f 2119/985/809 1945/888/728 2120/986/810 +f 1945/888/728 1946/889/730 2120/986/810 +f 1946/889/730 1947/890/731 2120/986/810 +f 2121/987/811 2120/986/810 2122/988/812 +f 2120/986/810 1947/890/731 2122/988/812 +f 1948/890/732 1949/891/733 2123/988/813 +f 2123/988/813 1949/891/733 2124/989/814 +f 1950/891/734 1951/892/735 2125/989/815 +f 2127/990/816 2125/989/815 2128/991/817 +f 2125/989/815 1951/892/735 2128/991/817 +f 1951/892/735 1954/893/736 2128/991/817 +f 2128/991/817 1954/893/736 2130/992/818 +f 2130/992/818 1954/893/736 2132/993/819 +f 1955/893/636 1957/894/636 2131/993/636 +f 2136/994/626 1958/995/626 2137/996/626 +f 2139/996/391 1959/995/820 2140/997/821 +f 1960/895/738 1961/897/740 2141/998/188 +f 1961/897/740 1962/898/386 2141/998/188 +f 2141/998/188 1962/898/386 2142/999/822 +f 2142/999/822 1962/898/386 2143/1000/386 +f 1962/898/386 1963/899/644 2143/1000/386 +f 1963/899/644 1964/900/742 2143/1000/386 +f 2143/1000/386 1965/900/823 2144/1001/824 +f 1965/900/823 1968/901/825 2144/1001/824 +f 2144/1001/824 1968/901/825 2145/1002/826 +f 1968/901/825 1970/902/827 2145/1002/826 +f 2146/1003/826 1971/1003/828 2147/1004/829 +f 1972/1003/566 1975/904/573 
2148/1004/830 +f 2148/1004/830 1975/904/573 2149/1005/831 +f 1975/904/573 1976/906/652 2149/1005/831 +f 1976/906/652 1978/908/832 2149/1005/831 +f 2149/1005/831 1978/908/832 2150/1006/654 +f 2150/1006/654 1978/908/832 2151/1007/833 +f 1979/908/576 1980/909/576 2152/1007/578 +f 1980/909/576 1982/1008/834 2152/1007/578 +f 1982/1008/834 1984/1009/577 2152/1007/578 +f 2152/1007/578 1984/1009/577 2154/1010/577 +f 1984/1009/577 1986/1011/835 2154/1010/577 +f 2154/1010/577 1986/1011/835 2156/1012/658 +f 2156/1012/658 1986/1011/835 2157/1013/658 +f 2158/1014/836 1987/913/751 2159/1015/837 +f 1987/913/751 1988/914/752 2159/1015/837 +f 1988/914/752 1990/915/753 2159/1015/837 +f 2160/1016/838 1991/916/839 2161/1017/840 +f 2161/1017/840 1991/916/839 2162/1018/841 +f 1992/916/755 1993/917/757 2163/1018/842 +f 1993/917/757 1995/918/842 2163/1018/842 +f 1995/918/842 1997/919/843 2163/1018/842 +f 2163/1018/842 1997/919/843 2164/1019/844 +f 2167/1019/844 1998/919/844 2169/1020/844 +f 1999/919/843 2001/920/843 2168/1020/843 +f 2171/1020/843 2002/920/843 2172/1021/843 +f 2174/1021/843 2003/920/843 2175/1022/843 +f 2004/920/845 2006/921/846 2177/1022/845 +f 2177/1022/845 2006/921/846 2179/1023/846 +f 2006/921/846 2008/922/847 2179/1023/846 +f 2179/1023/846 2008/922/847 2180/1024/848 +f 2008/922/847 2011/923/766 2180/1024/848 +f 2011/923/766 2012/924/767 2180/1024/848 +f 2181/1024/849 2013/924/768 2182/1025/850 +f 2013/924/768 2015/925/770 2182/1025/850 +f 2183/1026/850 2014/1027/851 2184/1028/852 +f 2014/1027/851 2017/1029/771 2184/1028/852 +f 2017/1029/771 2019/1030/853 2184/1028/852 +f 2187/1031/675 2185/1032/675 2189/1033/854 +f 2185/1032/675 2020/1034/676 2189/1033/854 +f 2189/1033/854 2020/1034/676 2191/1035/855 +f 2191/1035/855 2020/1034/676 2193/1036/597 +f 2020/1034/676 2022/1037/597 2193/1036/597 +f 2022/1037/597 2024/1038/596 2193/1036/597 +f 2193/1036/597 2024/1038/596 2195/1039/596 +f 2025/1040/596 2029/933/599 2194/1041/596 +f 2029/933/599 2031/932/599 2194/1041/596 +f 
2197/1041/856 2032/932/857 2198/1042/208 +f 2032/932/857 2035/934/685 2198/1042/208 +f 2198/1042/208 2035/934/685 2199/1043/858 +f 2199/1043/858 2035/934/685 2200/1044/208 +f 2200/1044/208 2035/934/685 2202/1045/857 +f 2036/934/599 2040/935/599 2201/1045/599 +f 2041/935/599 2044/1046/600 2203/1045/599 +f 2203/1045/599 2044/1046/600 2205/1047/600 +f 2044/1046/600 2046/1048/859 2205/1047/600 +f 2205/1047/600 2046/1048/859 2206/1049/860 +f 2047/1050/859 2049/1051/861 2209/1052/601 +f 2209/1052/601 2049/1051/861 2210/1053/603 +f 2049/1051/861 2051/1054/603 2210/1053/603 +f 2051/1054/603 2054/945/604 2210/1053/603 +f 2054/945/604 2056/944/604 2210/1053/603 +f 2211/1053/862 2055/944/698 2212/1055/863 +f 2055/944/698 2058/946/864 2212/1055/863 +f 2213/1056/697 2059/948/607 2214/1057/865 +f 2214/1057/865 2059/948/607 2215/1058/866 +f 2059/948/607 2061/950/867 2215/1058/866 +f 2215/1058/866 2061/950/867 2216/1059/868 +f 2061/950/867 2063/952/869 2216/1059/868 +f 2063/952/869 2066/954/870 2216/1059/868 +f 2216/1059/868 2066/954/870 2217/1060/373 +f 2066/954/870 2071/956/786 2217/1060/373 +f 2071/956/786 2072/957/373 2217/1060/373 +f 2072/957/373 2073/958/702 2217/1060/373 +f 2219/1061/702 2217/1060/373 2220/1062/702 +f 2217/1060/373 2073/958/702 2220/1062/702 +f 2222/1063/533 2074/960/533 2223/1064/533 +f 2226/1064/533 2075/960/533 2228/1065/533 +f 2228/1065/533 2075/960/533 2231/1066/533 +f 2076/960/533 2081/962/533 2230/1066/533 +f 2082/962/533 2084/964/533 2229/1066/533 +f 2085/964/533 2086/965/373 2232/1066/871 +f 2232/1066/871 2086/965/373 2233/1067/373 +f 2233/1067/373 2086/965/373 2234/1068/434 +f 2086/965/373 2088/966/787 2234/1068/434 +f 2236/1068/168 2087/966/872 2238/1069/872 +f 2087/966/872 2089/967/873 2238/1069/872 +f 2238/1069/872 2089/967/873 2240/1070/873 +f 2089/967/873 2093/968/791 2240/1070/873 +f 2094/969/791 2096/970/792 2239/1071/874 +f 2242/1071/875 2095/970/876 2243/1072/877 +f 2095/970/876 2100/971/793 2243/1072/877 +f 2100/971/793 2101/972/794 
2243/1072/877 +f 2243/1072/877 2101/972/794 2244/1073/878 +f 2101/972/794 2102/973/795 2244/1073/878 +f 2244/1073/878 2102/973/795 2245/1074/879 +f 2102/973/795 2103/974/796 2245/1074/879 +f 2247/1075/880 2245/1074/879 2248/1076/881 +f 2245/1074/879 2103/974/796 2248/1076/881 +f 2103/974/796 2105/974/797 2248/1076/881 +f 2249/1076/882 2104/974/883 2250/1077/884 +f 2253/1078/885 2251/1077/886 2254/1079/887 +f 2250/1077/884 2104/974/883 2255/1079/888 +f 2104/974/883 2106/975/889 2255/1079/888 +f 2255/1079/888 2106/975/889 2257/1080/890 +f 2106/975/889 2108/976/891 2257/1080/890 +f 2257/1080/890 2108/976/891 2258/1081/892 +f 2108/976/891 2111/977/801 2258/1081/892 +f 2111/977/801 2112/978/802 2258/1081/892 +f 2259/1081/893 2113/978/803 2260/1082/894 +f 2113/978/803 2114/979/804 2260/1082/894 +f 2115/981/804 2116/980/805 2261/1083/895 +f 2261/1083/895 2116/980/805 2262/1084/896 +f 2262/1084/896 2116/980/805 2263/1085/897 +f 2263/1085/897 2116/980/805 2264/1086/898 +f 2116/980/805 2117/982/806 2264/1086/898 +f 2264/1086/898 2117/982/806 2265/1087/899 +f 2117/982/806 2118/984/808 2265/1087/899 +f 2265/1087/899 2118/984/808 2266/1088/900 +f 2267/1089/900 2119/985/809 2268/987/901 +f 2119/985/809 2120/986/810 2268/987/901 +f 2120/986/810 2121/987/811 2268/987/901 +f 2268/987/901 2121/987/811 2269/1090/902 +f 2121/987/811 2122/988/812 2269/1090/902 +f 2269/1090/902 2122/988/812 2270/1091/903 +f 2123/988/813 2124/989/814 2271/1091/904 +f 2124/989/814 2126/990/904 2271/1091/904 +f 2271/1091/904 2126/990/904 2273/1092/905 +f 2127/990/816 2128/991/817 2272/1092/906 +f 2128/991/817 2130/992/818 2272/1092/906 +f 2276/1093/906 2129/1094/907 2277/1095/908 +f 2129/1094/907 2133/994/819 2277/1095/908 +f 2277/1095/908 2133/994/819 2279/1096/909 +f 2285/1097/636 2280/1096/636 2290/1098/636 +f 2278/1096/636 2134/994/636 2289/1098/636 +f 2135/994/636 2138/996/636 2288/1098/636 +f 2139/996/391 2140/997/821 2287/1098/391 +f 2141/998/188 2142/999/822 2286/1099/391 +f 2142/999/822 
2143/1000/386 2292/1099/391 +f 2292/1099/391 2143/1000/386 2293/1100/910 +f 2143/1000/386 2144/1001/824 2293/1100/910 +f 2293/1100/910 2144/1001/824 2295/1101/911 +f 2144/1001/824 2145/1002/826 2295/1101/911 +f 2295/1101/911 2145/1002/826 2296/1102/912 +f 2298/1103/913 2146/1003/826 2300/1104/914 +f 2146/1003/826 2147/1004/829 2300/1104/914 +f 2300/1104/914 2147/1004/829 2301/1105/915 +f 2148/1004/830 2149/1005/831 2303/1105/916 +f 2149/1005/831 2150/1006/654 2303/1105/916 +f 2303/1105/916 2150/1006/654 2306/1106/917 +f 2308/1107/918 2306/1106/917 2309/1108/919 +f 2306/1106/917 2150/1006/654 2309/1108/919 +f 2150/1006/654 2151/1007/833 2309/1108/919 +f 2151/1007/833 2153/1010/920 2309/1108/919 +f 2153/1010/920 2155/1012/921 2309/1108/919 +f 2309/1108/919 2155/1012/921 2310/1109/922 +f 2310/1109/922 2155/1012/921 2311/1110/753 +f 2312/1110/755 2156/1012/658 2314/1111/757 +f 2156/1012/658 2157/1013/658 2314/1111/757 +f 2158/1014/836 2159/1015/837 2313/1112/923 +f 2160/1016/838 2161/1017/840 2315/1113/923 +f 2161/1017/840 2316/1114/924 2315/1113/923 +f 2315/1113/923 2316/1114/924 2319/1115/925 +f 2319/1115/925 2316/1114/924 2320/1116/926 +f 2320/1116/926 2316/1114/924 2322/1117/927 +f 2316/1114/924 2161/1017/840 2322/1117/927 +f 2161/1017/840 2162/1018/841 2321/1117/927 +f 2162/1018/841 2165/1019/928 2321/1117/927 +f 2166/1019/928 2170/1020/928 2322/1117/927 +f 2322/1117/927 2170/1020/928 2323/1118/929 +f 2170/1020/928 2173/1021/930 2323/1118/929 +f 2324/1119/931 2323/1118/929 2326/1120/929 +f 2323/1118/929 2173/1021/930 2326/1120/929 +f 2173/1021/930 2176/1022/932 2326/1120/929 +f 2176/1022/932 2178/1023/933 2326/1120/929 +f 2326/1120/929 2178/1023/933 2327/1121/934 +f 2178/1023/933 2181/1024/849 2327/1121/934 +f 2181/1024/849 2182/1025/850 2327/1121/934 +f 2328/1122/934 2183/1026/850 2329/1123/935 +f 2183/1026/850 2184/1028/852 2329/1123/935 +f 2185/1032/675 2187/1031/675 2330/1124/936 +f 2330/1124/936 2187/1031/675 2331/1125/766 +f 2332/1125/937 2186/1031/938 
2333/1126/939 +f 2186/1031/938 2188/1033/940 2333/1126/939 +f 2333/1126/939 2188/1033/940 2334/1127/771 +f 2188/1033/940 2190/1035/941 2334/1127/771 +f 2334/1127/771 2190/1035/941 2335/1128/942 +f 2190/1035/941 2192/1036/680 2335/1128/942 +f 2335/1128/942 2192/1036/680 2336/1129/943 +f 2192/1036/680 2196/1039/944 2336/1129/943 +f 2197/1041/856 2198/1042/208 2337/1130/943 +f 2337/1130/943 2198/1042/208 2339/1130/945 +f 2339/1130/945 2198/1042/208 2340/1131/946 +f 2198/1042/208 2199/1043/858 2340/1131/946 +f 2340/1131/946 2199/1043/858 2341/1132/183 +f 2341/1132/183 2199/1043/858 2342/1133/183 +f 2199/1043/858 2200/1044/208 2342/1133/183 +f 2342/1133/183 2200/1044/208 2343/1134/690 +f 2200/1044/208 2202/1045/857 2343/1134/690 +f 2202/1045/857 2204/1047/947 2343/1134/690 +f 2343/1134/690 2204/1047/947 2344/1135/948 +f 2204/1047/947 2207/1049/949 2344/1135/948 +f 2345/1136/692 2208/1052/949 2346/1137/950 +f 2208/1052/949 2211/1053/862 2346/1137/950 +f 2211/1053/862 2212/1055/863 2346/1137/950 +f 2346/1137/950 2212/1055/863 2348/1138/951 +f 2348/1138/951 2212/1055/863 2349/1139/952 +f 2213/1056/697 2214/1057/865 2352/1140/953 +f 2352/1140/953 2214/1057/865 2353/1141/954 +f 2214/1057/865 2215/1058/866 2353/1141/954 +f 2356/1142/782 2354/1143/606 2359/1144/955 +f 2353/1141/954 2215/1058/866 2358/1145/956 +f 2215/1058/866 2216/1059/868 2357/1145/957 +f 2216/1059/868 2217/1060/373 2357/1145/957 +f 2217/1060/373 2219/1061/702 2357/1145/957 +f 2359/1144/955 2218/1146/532 2360/1147/532 +f 2218/1146/532 2221/1063/533 2360/1147/532 +f 2221/1063/533 2224/1064/533 2360/1147/532 +f 2361/1147/958 2225/1064/871 2362/1148/373 +f 2362/1148/373 2225/1064/871 2363/1149/373 +f 2225/1064/871 2227/1065/871 2363/1149/373 +f 2227/1065/871 2232/1066/871 2363/1149/373 +f 2232/1066/871 2233/1067/373 2363/1149/373 +f 2363/1149/373 2233/1067/373 2364/1150/959 +f 2233/1067/373 2234/1068/434 2364/1150/959 +f 2367/1150/168 2235/1068/168 2369/1151/168 +f 2236/1068/168 2238/1069/872 2368/1151/168 +f 
2237/1069/960 2372/1152/961 2371/1151/962 +f 2371/1151/962 2372/1152/961 2373/1153/231 +f 2374/1154/963 2373/1153/231 2375/1155/964 +f 2373/1153/231 2372/1152/961 2375/1155/964 +f 2375/1155/964 2372/1152/961 2376/1156/965 +f 2372/1152/961 2237/1069/960 2376/1156/965 +f 2237/1069/960 2241/1070/966 2376/1156/965 +f 2242/1071/875 2243/1072/877 2377/1157/967 +f 2243/1072/877 2244/1073/878 2377/1157/967 +f 2244/1073/878 2245/1074/879 2377/1157/967 +f 2377/1157/967 2245/1074/879 2378/1158/968 +f 2245/1074/879 2247/1075/880 2378/1158/968 +f 2381/1159/969 2379/1158/969 2382/1160/884 +f 2379/1158/969 2246/1075/884 2382/1160/884 +f 2246/1075/884 2249/1076/882 2382/1160/884 +f 2249/1076/882 2250/1077/884 2382/1160/884 +f 2383/1160/970 2251/1077/886 2384/1161/971 +f 2251/1077/886 2253/1078/885 2384/1161/971 +f 2385/1162/972 2252/1163/973 2386/1164/974 +f 2253/1078/885 2254/1079/887 2387/1165/975 +f 2387/1165/975 2254/1079/887 2388/1166/976 +f 2254/1079/887 2256/1080/977 2388/1166/976 +f 2256/1080/977 2259/1081/893 2388/1166/976 +f 2259/1081/893 2260/1082/894 2388/1166/976 +f 2388/1166/976 2260/1082/894 2389/1167/978 +f 2261/1083/895 2262/1084/896 2390/1168/978 +f 2390/1168/978 2262/1084/896 2391/1169/979 +f 2262/1084/896 2263/1085/897 2391/1169/979 +f 2391/1169/979 2263/1085/897 2393/1170/980 +f 2393/1170/980 2263/1085/897 2395/1171/981 +f 2263/1085/897 2264/1086/898 2395/1171/981 +f 2395/1171/981 2264/1086/898 2397/1172/982 +f 2264/1086/898 2265/1087/899 2397/1172/982 +f 2397/1172/982 2265/1087/899 2399/1173/983 +f 2265/1087/899 2266/1088/900 2399/1173/983 +f 2399/1173/983 2266/1088/900 2401/1174/984 +f 2401/1174/984 2266/1088/900 2402/1175/985 +f 2267/1089/900 2268/987/901 2403/1176/986 +f 2268/987/901 2269/1090/902 2403/1176/986 +f 2403/1176/986 2269/1090/902 2405/1177/987 +f 2269/1090/902 2270/1091/903 2405/1177/987 +f 2405/1177/987 2270/1091/903 2407/1178/988 +f 2271/1091/904 2273/1092/905 2406/1178/989 +f 2409/1179/989 2274/1180/905 2410/1181/990 +f 2414/1182/991 
2275/1093/905 2415/1183/991 +f 2276/1093/906 2277/1095/908 2418/1183/992 +f 2418/1183/992 2277/1095/908 2421/1184/992 +f 2421/1184/992 2277/1095/908 2422/1185/993 +f 2277/1095/908 2279/1096/909 2422/1185/993 +f 2279/1096/909 2281/1097/994 2422/1185/993 +f 2423/1186/995 2282/1187/994 2424/1188/909 +f 2426/1188/626 2283/1187/626 2429/1189/626 +f 2284/1187/626 2291/1099/626 2428/1189/626 +f 2292/1099/391 2293/1100/910 2427/1189/996 +f 2293/1100/910 2295/1101/911 2427/1189/996 +f 2432/1190/391 2294/1191/911 2434/1192/997 +f 2294/1191/911 2297/1193/998 2434/1192/997 +f 2434/1192/997 2297/1193/998 2435/1194/999 +f 2435/1194/999 2297/1193/998 2436/1195/1000 +f 2297/1193/998 2299/1196/1001 2436/1195/1000 +f 2299/1196/1001 2302/1197/1002 2436/1195/1000 +f 2436/1195/1000 2302/1197/1002 2437/1198/1003 +f 2302/1197/1002 2304/1199/1004 2437/1198/1003 +f 2438/1198/1005 2437/1198/1003 2439/1200/1006 +f 2439/1200/1006 2437/1198/1003 2440/1201/1007 +f 2437/1198/1003 2304/1199/1004 2440/1201/1007 +f 2440/1201/1007 2304/1199/1004 2441/1202/1008 +f 2305/1199/1009 2307/1203/1010 2442/1202/1010 +f 2442/1202/1010 2307/1203/1010 2443/1204/1011 +f 2308/1107/918 2309/1108/919 2445/1205/1012 +f 2309/1108/919 2310/1109/922 2445/1205/1012 +f 2445/1205/1012 2310/1109/922 2447/1206/1013 +f 2447/1206/1013 2310/1109/922 2448/1207/1014 +f 2310/1109/922 2311/1110/753 2448/1207/1014 +f 2448/1207/1014 2311/1110/753 2450/1208/1015 +f 2312/1110/755 2314/1111/757 2449/1208/842 +f 2314/1111/757 2317/1209/842 2449/1208/842 +f 2452/1210/842 2318/1211/842 2453/1212/845 +f 2319/1115/925 2320/1116/926 2456/1213/1016 +f 2456/1213/1016 2320/1116/926 2459/1214/1017 +f 2320/1116/926 2322/1117/927 2459/1214/1017 +f 2322/1117/927 2323/1118/929 2459/1214/1017 +f 2459/1214/1017 2323/1118/929 2461/1215/1016 +f 2323/1118/929 2324/1119/931 2461/1215/1016 +f 2461/1215/1016 2324/1119/931 2462/1216/1016 +f 2324/1119/931 2326/1120/929 2462/1216/1016 +f 2468/1217/1017 2325/1218/929 2470/1219/1017 +f 2470/1219/1017 
2325/1218/929 2471/1220/1018 +f 2325/1218/929 2328/1122/934 2471/1220/1018 +f 2328/1122/934 2329/1123/935 2471/1220/1018 +f 2473/1221/1019 2330/1124/936 2474/1222/847 +f 2330/1124/936 2331/1125/766 2474/1222/847 +f 2475/1222/1020 2332/1125/937 2476/1223/1021 +f 2332/1125/937 2333/1126/939 2476/1223/1021 +f 2476/1223/1021 2333/1126/939 2478/1224/1022 +f 2478/1224/1022 2333/1126/939 2480/1225/1023 +f 2480/1225/1023 2333/1126/939 2482/1226/1024 +f 2333/1126/939 2334/1127/771 2482/1226/1024 +f 2334/1127/771 2335/1128/942 2482/1226/1024 +f 2482/1226/1024 2335/1128/942 2485/1227/1025 +f 2335/1128/942 2336/1129/943 2485/1227/1025 +f 2336/1129/943 2338/1228/945 2485/1227/1025 +f 2485/1227/1025 2338/1228/945 2486/1229/821 +f 2488/1230/821 2339/1130/945 2490/1231/1026 +f 2339/1130/945 2340/1131/946 2490/1231/1026 +f 2340/1131/946 2341/1132/183 2490/1231/1026 +f 2493/1232/599 2497/1233/599 2499/1234/599 +f 2502/1235/1027 2503/1236/1028 2504/1237/1029 +f 2503/1236/1028 2501/1234/1030 2504/1237/1029 +f 2504/1237/1029 2501/1234/1030 2506/1238/1030 +f 2498/1234/599 2494/1233/599 2505/1238/599 +f 2508/1238/599 2495/1233/599 2510/1239/1031 +f 2496/1233/1031 2489/1232/1031 2509/1239/1031 +f 2490/1231/1026 2341/1132/183 2511/1240/1026 +f 2341/1132/183 2342/1133/183 2511/1240/1026 +f 2511/1240/1026 2342/1133/183 2512/1241/821 +f 2342/1133/183 2343/1134/690 2512/1241/821 +f 2343/1134/690 2344/1135/948 2512/1241/821 +f 2517/1242/1032 2514/1243/821 2518/1244/1033 +f 2514/1243/821 2345/1136/692 2518/1244/1033 +f 2345/1136/692 2346/1137/950 2518/1244/1033 +f 2346/1137/950 2348/1138/951 2518/1244/1033 +f 2519/1245/861 2347/1246/603 2521/1247/1034 +f 2521/1247/1034 2347/1246/603 2523/1248/1035 +f 2347/1246/603 2350/1249/604 2523/1248/1035 +f 2351/1250/1036 2354/1143/606 2522/1251/604 +f 2354/1143/606 2356/1142/782 2522/1251/604 +f 2524/1252/526 2355/1253/1037 2525/1254/1038 +f 2525/1254/1038 2355/1253/1037 2527/1255/1039 +f 2356/1142/782 2359/1144/955 2526/1256/955 +f 2359/1144/955 
2360/1147/532 2526/1256/955 +f 2528/1256/1039 2361/1147/958 2530/1257/1040 +f 2361/1147/958 2362/1148/373 2530/1257/1040 +f 2530/1257/1040 2362/1148/373 2532/1258/1041 +f 2532/1258/1041 2362/1148/373 2533/1259/276 +f 2362/1148/373 2363/1149/373 2533/1259/276 +f 2363/1149/373 2364/1150/959 2533/1259/276 +f 2535/1259/168 2365/1150/168 2538/1260/168 +f 2366/1150/168 2370/1151/168 2537/1260/168 +f 2371/1151/962 2373/1153/231 2536/1260/433 +f 2373/1153/231 816/1261/1042 2536/1260/433 +f 2540/1262/1043 815/301/151 2541/1263/1044 +f 815/301/151 814/300/150 2541/1263/1044 +f 2541/1263/1044 814/300/150 2542/1264/1044 +f 2542/1264/1044 814/300/150 2543/1265/1045 +f 814/300/150 813/299/149 2543/1265/1045 +f 2543/1265/1045 813/299/149 2547/1266/1046 +f 2547/1266/1046 813/299/149 2548/1267/1046 +f 813/299/149 815/301/151 2548/1267/1046 +f 816/1261/1042 2373/1153/231 2551/1268/1047 +f 2373/1153/231 2374/1154/963 2551/1268/1047 +f 2551/1268/1047 2374/1154/963 2553/1269/1048 +f 2374/1154/963 2375/1155/964 2553/1269/1048 +f 2375/1155/964 2376/1156/965 2553/1269/1048 +f 2377/1157/967 2378/1158/968 2552/1270/1047 +f 2555/1270/1049 2379/1158/969 2556/1271/1049 +f 2379/1158/969 2381/1159/969 2556/1271/1049 +f 2559/1271/1050 2380/1159/1050 2561/1272/1051 +f 2380/1159/1050 2383/1160/970 2561/1272/1051 +f 2561/1272/1051 2383/1160/970 2562/1273/1052 +f 2562/1273/1052 2383/1160/970 2563/1274/1053 +f 2383/1160/970 2384/1161/971 2563/1274/1053 +f 2563/1274/1053 2384/1161/971 2565/1275/1054 +f 2385/1162/972 2386/1164/974 2564/1276/1055 +f 2569/1277/1056 2567/1278/1057 2571/1279/1058 +f 2564/1276/1055 2386/1164/974 2570/1280/1059 +f 2574/1281/1060 2572/1281/1061 2575/1282/1062 +f 2572/1281/1061 2387/1165/975 2575/1282/1062 +f 2387/1165/975 2388/1166/976 2575/1282/1062 +f 2388/1166/976 2389/1167/978 2575/1282/1062 +f 2575/1282/1062 2389/1167/978 2577/1283/1063 +f 2390/1168/978 2391/1169/979 2576/1284/1063 +f 2391/1169/979 2393/1170/980 2576/1284/1063 +f 2583/1285/1064 2579/1284/1065 
2585/1286/1066 +f 2579/1284/1065 2392/1170/1067 2585/1286/1066 +f 2585/1286/1066 2392/1170/1067 2587/1287/1068 +f 2392/1170/1067 2394/1171/1069 2587/1287/1068 +f 2587/1287/1068 2394/1171/1069 2589/1288/1070 +f 2394/1171/1069 2396/1172/1071 2589/1288/1070 +f 2396/1172/1071 2398/1173/1072 2589/1288/1070 +f 2589/1288/1070 2398/1173/1072 2591/1289/1073 +f 2591/1289/1073 2398/1173/1072 2593/1290/1074 +f 2398/1173/1072 2400/1174/1075 2593/1290/1074 +f 2593/1290/1074 2400/1174/1075 2595/1291/1076 +f 2595/1291/1076 2400/1174/1075 2596/1292/1077 +f 2401/1174/984 2402/1175/985 2597/1292/1078 +f 2597/1292/1078 2402/1175/985 2598/1293/1079 +f 2402/1175/985 2404/1294/1080 2598/1293/1079 +f 2598/1293/1079 2404/1294/1080 2601/1295/1081 +f 2405/1177/987 2407/1178/988 2600/1296/1082 +f 2603/1297/1083 2602/1298/1082 2605/1299/1084 +f 2599/1300/1082 2408/1179/988 2604/1301/1085 +f 2408/1179/988 2411/1181/1086 2604/1301/1085 +f 2606/1302/1084 2412/1182/1086 2609/1303/1086 +f 2413/1182/991 2416/1183/991 2608/1303/991 +f 2417/1183/991 2419/1184/991 2607/1303/991 +f 2613/1303/990 2420/1184/990 2615/1304/990 +f 2421/1184/992 2422/1185/993 2614/1304/1087 +f 2618/1305/1087 2423/1186/995 2619/1306/908 +f 2423/1186/995 2424/1188/909 2619/1306/908 +f 2619/1306/908 2424/1188/909 2621/1307/1088 +f 2425/1188/636 2430/1189/636 2620/1307/636 +f 2431/1190/636 2433/1192/636 2623/1308/636 +f 2623/1308/636 2433/1192/636 2624/1309/636 +f 2434/1192/997 2435/1194/999 2625/1309/1089 +f 2625/1309/1089 2435/1194/999 2626/1310/1090 +f 2435/1194/999 2436/1195/1000 2626/1310/1090 +f 2436/1195/1000 2437/1198/1003 2626/1310/1090 +f 2437/1198/1003 2438/1198/1005 2626/1310/1090 +f 2626/1310/1090 2438/1198/1005 2627/1311/1091 +f 2438/1198/1005 2439/1200/1006 2627/1311/1091 +f 2628/1312/1092 2627/1311/1091 2629/1313/1093 +f 2627/1311/1091 2439/1200/1006 2629/1313/1093 +f 2439/1200/1006 2440/1201/1007 2629/1313/1093 +f 2440/1201/1007 2441/1202/1008 2629/1313/1093 +f 2629/1313/1093 2441/1202/1008 2631/1314/1094 +f 
2442/1202/1010 2443/1204/1011 2630/1314/1095 +f 2444/1315/1096 2446/1316/1097 2633/1317/1098 +f 2633/1317/1098 2446/1316/1097 2635/1318/1099 +f 2637/1319/1100 2635/1318/1099 2639/1320/842 +f 2635/1318/1099 2446/1316/1097 2639/1320/842 +f 2447/1206/1013 2448/1207/1014 2638/1321/1101 +f 2641/1322/926 2640/1323/1102 2643/1324/840 +f 2638/1321/1101 2448/1207/1014 2642/1325/840 +f 2448/1207/1014 2450/1208/1015 2642/1325/840 +f 2643/1324/840 2451/1210/1103 2645/1326/928 +f 2452/1210/842 2453/1212/845 2644/1326/845 +f 2646/1326/845 2454/1212/845 2649/1327/845 +f 2455/1212/845 2457/1328/845 2648/1327/845 +f 2458/1328/845 2460/1329/845 2647/1327/845 +f 2460/1329/845 2463/1330/845 2647/1327/845 +f 2651/1327/845 2464/1330/845 2652/1331/845 +f 2654/1331/845 2465/1330/845 2655/1332/845 +f 2657/1332/845 2466/1330/845 2658/1333/845 +f 2467/1330/845 2469/1334/1104 2660/1333/845 +f 2660/1333/845 2469/1334/1104 2662/1335/1105 +f 2469/1334/1104 2472/1336/1019 2662/1335/1105 +f 2473/1221/1019 2474/1222/847 2661/1337/1106 +f 2664/1337/1107 2475/1222/1020 2666/1338/934 +f 2475/1222/1020 2476/1223/1021 2666/1338/934 +f 2666/1338/934 2476/1223/1021 2667/1339/1108 +f 2476/1223/1021 2478/1224/1022 2667/1339/1108 +f 2668/1340/936 2477/1341/766 2669/1342/675 +f 2477/1341/766 2479/1343/1109 2669/1342/675 +f 2479/1343/1109 2481/1344/676 2669/1342/675 +f 2669/1342/675 2481/1344/676 2671/1345/854 +f 2671/1345/854 2481/1344/676 2673/1346/595 +f 2481/1344/676 2483/1347/597 2673/1346/595 +f 2673/1346/595 2483/1347/597 2674/1348/596 +f 2484/1349/1110 2487/1350/596 2677/1351/596 +f 2677/1351/596 2487/1350/596 2679/1352/599 +f 2487/1350/596 2491/1232/599 2679/1352/599 +f 2492/1232/599 2500/1234/599 2678/1352/599 +f 2501/1234/1030 2503/1236/1028 2680/1352/1111 +f 2680/1352/1111 2503/1236/1028 2681/1353/858 +f 2681/1353/858 2503/1236/1028 2682/1354/209 +f 2503/1236/1028 2502/1235/1027 2682/1354/209 +f 2683/1354/251 2502/1235/1027 2684/1355/1112 +f 2502/1235/1027 2504/1237/1029 2684/1355/1112 +f 
2504/1237/1029 2507/1238/1030 2684/1355/1112 +f 2684/1355/1112 2507/1238/1030 2685/1356/1111 +f 2508/1238/599 2510/1239/1031 2686/1356/599 +f 2510/1239/1031 2513/1357/600 2686/1356/599 +f 2686/1356/599 2513/1357/600 2687/1358/600 +f 2513/1357/600 2515/1359/859 2687/1358/600 +f 2690/1360/1113 2516/1361/1114 2691/1362/860 +f 2516/1361/1114 2519/1245/861 2691/1362/860 +f 2519/1245/861 2521/1247/1034 2691/1362/860 +f 2692/1363/1115 2520/1364/1116 2693/1365/1117 +f 2693/1365/1117 2520/1364/1116 2694/1366/1118 +f 2520/1364/1116 2524/1252/526 2694/1366/1118 +f 2524/1252/526 2525/1254/1038 2694/1366/1118 +f 2694/1366/1118 2525/1254/1038 2695/1367/1119 +f 2695/1367/1119 2525/1254/1038 2696/1368/1120 +f 2696/1368/1120 2525/1254/1038 2697/1369/1121 +f 2525/1254/1038 2527/1255/1039 2697/1369/1121 +f 2527/1255/1039 2529/1370/1122 2697/1369/1121 +f 2697/1369/1121 2529/1370/1122 2699/1371/1123 +f 2530/1257/1040 2532/1258/1041 2698/1372/1123 +f 2701/1372/168 2531/1258/168 2702/1373/168 +f 2531/1258/168 2534/1259/168 2702/1373/168 +f 2534/1259/168 2539/1260/168 2702/1373/168 +f 2540/1262/1043 2541/1263/1044 2705/1374/1043 +f 2541/1263/1044 2542/1264/1044 2705/1374/1043 +f 2705/1374/1043 2542/1264/1044 2707/1375/1124 +f 2542/1264/1044 2543/1265/1045 2707/1375/1124 +f 2707/1375/1124 2543/1265/1045 2709/1376/1125 +f 2709/1376/1125 2543/1265/1045 2710/1377/1126 +f 2543/1265/1045 2547/1266/1046 2710/1377/1126 +f 2711/1377/1127 2713/1378/1128 2714/1379/1129 +f 2716/1380/1130 2717/1381/1131 2718/1382/1132 +f 2717/1381/1131 2715/1379/1133 2718/1382/1132 +f 2715/1379/1133 2712/1378/1134 2718/1382/1132 +f 2718/1382/1132 2712/1378/1134 2720/1383/1135 +f 2713/1378/1128 2711/1377/1127 2719/1383/1049 +f 2711/1377/1127 2544/1266/1049 2719/1383/1049 +f 2721/1383/1049 2545/1266/1049 2722/1384/1049 +f 2546/1266/1049 2549/1267/1049 2724/1384/1049 +f 2724/1384/1049 2549/1267/1049 2725/1385/1049 +f 2728/1386/1049 2550/1268/1049 2729/1387/1049 +f 2550/1268/1049 2554/1269/1049 2729/1387/1049 +f 
2554/1269/1049 2557/1388/1049 2729/1387/1049 +f 2730/1387/1136 2558/1388/1050 2731/1389/1051 +f 2558/1388/1050 2560/1388/1137 2731/1389/1051 +f 2731/1389/1051 2560/1388/1137 2732/1390/16 +f 2732/1390/16 2560/1388/1137 2733/1391/1138 +f 2561/1272/1051 2562/1273/1052 2734/1392/1138 +f 2734/1392/1138 2562/1273/1052 2736/1393/1139 +f 2736/1393/1139 2562/1273/1052 2737/1394/1140 +f 2562/1273/1052 2563/1274/1053 2737/1394/1140 +f 2563/1274/1053 2565/1275/1054 2737/1394/1140 +f 2566/1395/1141 2568/1395/1142 2738/1396/1143 +f 2738/1396/1143 2568/1395/1142 2739/1397/1144 +f 2569/1277/1056 2571/1279/1058 2740/1398/1145 +f 2571/1279/1058 2573/1399/1146 2740/1398/1145 +f 2740/1398/1145 2573/1399/1146 2746/1400/1147 +f 2746/1400/1147 2573/1399/1146 2748/1399/1148 +f 2574/1281/1060 2575/1282/1062 2747/1401/1149 +f 2575/1282/1062 2577/1283/1063 2747/1401/1149 +f 2578/1283/1150 2580/1402/1064 2750/1401/1151 +f 2750/1401/1151 2580/1402/1064 2752/1403/1152 +f 2757/1404/136 2758/1405/137 2759/1406/1153 +f 2761/1406/1154 2758/1405/137 2765/1407/1155 +f 2765/1407/1155 2758/1405/137 2766/1408/137 +f 2758/1405/137 2757/1404/136 2766/1408/137 +f 2767/1409/137 2756/1410/1156 2769/1411/1157 +f 2756/1410/1156 2755/1412/1158 2769/1411/1157 +f 2751/1403/1159 2581/1402/1160 2768/1413/1161 +f 2770/1414/1161 2582/1285/1162 2771/1415/1163 +f 2582/1285/1162 2584/1286/1164 2771/1415/1163 +f 2771/1415/1163 2584/1286/1164 2772/1416/1165 +f 2584/1286/1164 2586/1287/1166 2772/1416/1165 +f 2772/1416/1165 2586/1287/1166 2773/1417/1167 +f 2586/1287/1166 2588/1288/1168 2773/1417/1167 +f 2775/1418/1169 2773/1417/1167 2776/1419/1170 +f 2773/1417/1167 2588/1288/1168 2776/1419/1170 +f 2588/1288/1168 2590/1289/1171 2776/1419/1170 +f 2776/1419/1170 2590/1289/1171 2777/1420/1172 +f 2590/1289/1171 2592/1290/1173 2777/1420/1172 +f 2777/1420/1172 2592/1290/1173 2778/1421/1174 +f 2592/1290/1173 2594/1291/1175 2778/1421/1174 +f 2778/1421/1174 2594/1291/1175 2779/1422/1176 +f 2779/1422/1176 2594/1291/1175 2780/1423/1177 
+f 2780/1423/1177 2594/1291/1175 2781/1424/1178 +f 2595/1291/1076 2596/1292/1077 2782/1424/1179 +f 2596/1292/1077 2784/1425/1180 2782/1424/1179 +f 2789/1426/1181 2791/1427/1182 2792/1427/1183 +f 2791/1427/1182 2798/1428/1184 2792/1427/1183 +f 2793/1429/1185 2794/1430/1186 2799/1431/1187 +f 2799/1431/1187 2794/1430/1186 2800/1432/1188 +f 2803/1433/1189 2801/1434/1188 2805/1434/1190 +f 2800/1432/1188 2794/1430/1186 2804/1432/1191 +f 2806/1432/1192 2795/1430/1192 2808/1435/1192 +f 2796/1428/1192 2782/1424/1179 2807/1436/1192 +f 2782/1424/1179 2784/1425/1180 2807/1436/1192 +f 2810/1436/1192 2783/1425/1193 2811/1437/1194 +f 2783/1425/1193 2597/1292/1078 2811/1437/1194 +f 2597/1292/1078 2598/1293/1079 2811/1437/1194 +f 2598/1293/1079 2601/1295/1081 2811/1437/1194 +f 2602/1298/1082 2603/1297/1083 2813/1438/1194 +f 2813/1438/1194 2603/1297/1083 2814/1439/1195 +f 2817/1440/1195 2816/1441/1196 2818/1442/1084 +f 2814/1439/1195 2603/1297/1083 2819/1443/1197 +f 2603/1297/1083 2605/1299/1084 2819/1443/1197 +f 2605/1299/1084 2610/1444/1086 2819/1443/1197 +f 2819/1443/1197 2610/1444/1086 2820/1443/1198 +f 2824/1445/1199 2822/1446/1200 2826/1447/1086 +f 2822/1446/1200 2611/1303/1086 2826/1447/1086 +f 2612/1303/990 2616/1304/990 2825/1447/990 +f 2829/1448/991 2617/1305/991 2830/1449/991 +f 2618/1305/1087 2619/1306/908 2833/1449/1201 +f 2837/1450/1202 2833/1449/1201 2838/1451/1203 +f 2833/1449/1201 2619/1306/908 2838/1451/1203 +f 2619/1306/908 2621/1307/1088 2838/1451/1203 +f 2839/1452/1204 2622/1308/1088 2840/1453/1205 +f 2841/1454/1206 2840/1453/1205 2842/1455/1207 +f 2840/1453/1205 2622/1308/1088 2842/1455/1207 +f 2844/1455/636 2623/1308/636 2845/1456/636 +f 2623/1308/636 2624/1309/636 2845/1456/636 +f 2846/1456/1208 2625/1309/1089 2847/1457/1209 +f 2625/1309/1089 2626/1310/1090 2847/1457/1209 +f 2626/1310/1090 2627/1311/1091 2847/1457/1209 +f 2847/1457/1209 2627/1311/1091 2848/1458/1210 +f 2848/1458/1210 2627/1311/1091 2849/1459/1211 +f 2627/1311/1091 2628/1312/1092 
2849/1459/1211 +f 2849/1459/1211 2628/1312/1092 2850/1460/1212 +f 2628/1312/1092 2629/1313/1093 2850/1460/1212 +f 2629/1313/1093 2631/1314/1094 2850/1460/1212 +f 2632/1317/1213 2634/1318/1214 2851/1461/1215 +f 2851/1461/1215 2634/1318/1214 2852/1462/1216 +f 2634/1318/1214 2636/1319/1217 2852/1462/1216 +f 2852/1462/1216 2636/1319/1217 2853/1463/1218 +f 2853/1463/1218 2636/1319/1217 2854/1464/1219 +f 2854/1464/1219 2636/1319/1217 2855/1464/1220 +f 2858/1464/845 2637/1319/1100 2859/1465/845 +f 2637/1319/1100 2639/1320/842 2859/1465/845 +f 2640/1323/1102 2641/1322/926 2861/1466/1017 +f 2861/1466/1017 2641/1322/926 2862/1467/927 +f 2641/1322/926 2643/1324/840 2862/1467/927 +f 2643/1324/840 2645/1326/928 2862/1467/927 +f 2645/1326/928 2650/1327/928 2862/1467/927 +f 2862/1467/927 2650/1327/928 2863/1468/929 +f 2650/1327/928 2653/1331/1221 2863/1468/929 +f 2863/1468/929 2653/1331/1221 2864/1469/929 +f 2653/1331/1221 2656/1332/1221 2864/1469/929 +f 2864/1469/929 2656/1332/1221 2865/1470/929 +f 2656/1332/1221 2659/1333/1222 2865/1470/929 +f 2659/1333/1222 2663/1335/933 2865/1470/929 +f 2663/1335/933 2665/1471/1223 2865/1470/929 +f 2865/1470/929 2665/1471/1223 2866/1472/1224 +f 2666/1338/934 2667/1339/1108 2868/1473/1224 +f 2868/1473/1224 2667/1339/1108 2869/1474/1225 +f 2871/1475/847 2668/1340/936 2873/1476/936 +f 2668/1340/936 2669/1342/675 2873/1476/936 +f 2873/1476/936 2669/1342/675 2874/1477/675 +f 2669/1342/675 2671/1345/854 2874/1477/675 +f 2875/1477/1226 2670/1345/1227 2876/1478/1228 +f 2670/1345/1227 2672/1346/1229 2876/1478/1228 +f 2877/1479/771 2876/1478/1228 2878/1480/1230 +f 2876/1478/1228 2672/1346/1229 2878/1480/1230 +f 2878/1480/1230 2672/1346/1229 2879/1481/513 +f 2672/1346/1229 2675/1348/1231 2879/1481/513 +f 2676/1351/1232 2680/1352/1111 2881/1482/513 +f 2680/1352/1111 2681/1353/858 2881/1482/513 +f 2881/1482/513 2681/1353/858 2883/1483/1233 +f 2681/1353/858 2682/1354/209 2883/1483/1233 +f 2883/1483/1233 2682/1354/209 2886/1484/185 +f 2682/1354/209 
2887/1354/184 2886/1484/185 +f 2886/1484/185 2887/1354/184 2890/1485/183 +f 2890/1485/183 2887/1354/184 2891/1486/209 +f 2887/1354/184 2683/1354/251 2891/1486/209 +f 2683/1354/251 2684/1355/1112 2891/1486/209 +f 2684/1355/1112 2685/1356/1111 2891/1486/209 +f 2891/1486/209 2685/1356/1111 2892/1487/1234 +f 2685/1356/1111 2688/1358/1235 2892/1487/1234 +f 2896/1488/1236 2894/1489/688 2897/1490/1237 +f 2894/1489/688 2689/1491/1238 2897/1490/1237 +f 2689/1491/1238 2692/1363/1115 2897/1490/1237 +f 2692/1363/1115 2693/1365/1117 2897/1490/1237 +f 2897/1490/1237 2693/1365/1117 2898/1365/695 +f 2898/1365/695 2693/1365/1117 2899/1492/1239 +f 2693/1365/1117 2694/1366/1118 2899/1492/1239 +f 2694/1366/1118 2695/1367/1119 2899/1492/1239 +f 2899/1492/1239 2695/1367/1119 2900/1493/1240 +f 2695/1367/1119 2696/1368/1120 2900/1493/1240 +f 2696/1368/1120 2697/1369/1121 2900/1493/1240 +f 2697/1369/1121 2699/1371/1123 2900/1493/1240 +f 2902/1493/168 2700/1371/168 2903/1494/168 +f 2700/1371/168 2703/1495/168 2903/1494/168 +f 2904/1494/1241 2704/1495/433 2905/1496/1242 +f 2704/1495/433 2706/1497/1243 2905/1496/1242 +f 2706/1497/1243 2708/1498/1244 2905/1496/1242 +f 2905/1496/1242 2708/1498/1244 2906/1499/1245 +f 2906/1499/1245 2708/1498/1244 2908/1500/1246 +f 2709/1376/1125 2710/1377/1126 2907/1501/1247 +f 2711/1377/1127 2714/1379/1129 2909/1501/1248 +f 2909/1501/1248 2714/1379/1129 2910/1502/1249 +f 2715/1379/1133 2717/1381/1131 2911/1502/1250 +f 2911/1502/1250 2717/1381/1131 2912/1503/1251 +f 2717/1381/1131 2716/1380/1130 2912/1503/1251 +f 2913/1504/1252 2912/1503/1251 2914/1505/1253 +f 2912/1503/1251 2716/1380/1130 2914/1505/1253 +f 2914/1505/1253 2716/1380/1130 2915/1506/1254 +f 2716/1380/1130 2718/1382/1132 2915/1506/1254 +f 2915/1506/1254 2718/1382/1132 2916/1507/1255 +f 2718/1382/1132 2720/1383/1135 2916/1507/1255 +f 2720/1383/1135 2723/1384/1135 2916/1507/1255 +f 2916/1507/1255 2723/1384/1135 2917/1508/1256 +f 2917/1508/1256 2723/1384/1135 2918/1509/1255 +f 2723/1384/1135 
2726/1385/1136 2918/1509/1255 +f 2918/1509/1255 2726/1385/1136 2919/1510/1256 +f 2727/1386/1135 2730/1387/1136 2920/1511/1255 +f 2921/1512/42 2920/1511/1255 2922/1513/1255 +f 2920/1511/1255 2730/1387/1136 2922/1513/1255 +f 2730/1387/1136 2731/1389/1051 2922/1513/1255 +f 2731/1389/1051 2732/1390/16 2922/1513/1255 +f 2922/1513/1255 2732/1390/16 2923/1514/109 +f 2732/1390/16 2733/1391/1138 2923/1514/109 +f 2733/1391/1138 2735/1515/1257 2923/1514/109 +f 2923/1514/109 2735/1515/1257 2925/1515/1258 +f 2736/1393/1139 2737/1394/1140 2924/1393/1258 +f 2927/1516/1258 2738/1396/1143 2929/1517/1259 +f 2929/1517/1259 2738/1396/1143 2930/1518/1260 +f 2738/1396/1143 2739/1397/1144 2930/1518/1260 +f 2739/1397/1144 2741/1519/1147 2930/1518/1260 +f 812/298/1261 2931/1518/1261 2933/1520/1262 +f 2933/1520/1262 2931/1518/1261 2934/1521/1262 +f 2931/1518/1261 2742/1519/1262 2934/1521/1262 +f 2936/1521/1262 2743/1519/1262 2937/1522/1262 +f 2939/1522/1262 2744/1519/1262 2940/1523/1262 +f 2745/1400/1261 2749/1399/1151 2943/1524/1263 +f 2749/1399/1151 2753/1525/1264 2943/1524/1263 +f 2943/1524/1263 2753/1525/1264 2945/1526/1261 +f 2754/1525/1158 2757/1404/136 2944/1526/1265 +f 2757/1404/136 2759/1406/1153 2944/1526/1265 +f 2946/1527/1266 2760/1528/1267 2948/1529/1268 +f 2948/1529/1268 2760/1528/1267 2949/1530/1269 +f 2760/1528/1267 2762/1531/1270 2949/1530/1269 +f 2762/1531/1270 2953/1532/1271 2949/1530/1269 +f 2951/1530/1269 2952/1532/1272 2955/1532/1273 +f 2955/1532/1273 2952/1532/1272 2956/1533/1274 +f 2956/1533/1274 2953/1532/1271 2957/1534/1275 +f 2953/1532/1271 2762/1531/1270 2957/1534/1275 +f 2959/1535/1276 2763/1407/1276 2960/1536/1276 +f 2964/1536/1276 2764/1407/1276 2966/1408/1276 +f 2765/1407/1155 2766/1408/137 2965/1408/1277 +f 2767/1409/137 2769/1411/1157 2969/1537/1155 +f 2969/1537/1155 2769/1411/1157 2970/1538/1278 +f 2974/1539/1278 2770/1414/1161 2975/1540/1279 +f 2770/1414/1161 2771/1415/1163 2975/1540/1279 +f 2771/1415/1163 2772/1416/1165 2975/1540/1279 +f 2772/1416/1165 
2773/1417/1167 2975/1540/1279 +f 2773/1417/1167 2775/1418/1169 2975/1540/1279 +f 2977/1541/1280 2978/1542/1281 2979/1543/1282 +f 2978/1542/1281 2982/1544/1283 2979/1543/1282 +f 2981/1544/1276 2976/1540/1276 2980/1543/1276 +f 2976/1540/1276 2774/1418/1276 2980/1543/1276 +f 2980/1543/1276 2774/1418/1276 2983/1545/1284 +f 2775/1418/1169 2776/1419/1170 2984/1545/1285 +f 2776/1419/1170 2777/1420/1172 2984/1545/1285 +f 2777/1420/1172 2778/1421/1174 2984/1545/1285 +f 2984/1545/1285 2778/1421/1174 2987/1546/1286 +f 2778/1421/1174 2779/1422/1176 2987/1546/1286 +f 2987/1546/1286 2779/1422/1176 2991/1547/1287 +f 2779/1422/1176 2780/1423/1177 2991/1547/1287 +f 2991/1547/1287 2780/1423/1177 2992/1548/1288 +f 2780/1423/1177 2781/1424/1178 2992/1548/1288 +f 2781/1424/1178 2797/1428/1289 2992/1548/1288 +f 2797/1428/1289 2790/1427/1290 2992/1548/1288 +f 2790/1427/1290 2785/1426/1291 2992/1548/1288 +f 2993/1548/1284 2786/1426/1276 2994/1549/1292 +f 2787/1426/1293 2998/1550/1294 2996/1549/1295 +f 2996/1549/1295 2998/1550/1294 2999/1551/1296 +f 3001/1552/1297 3000/1553/1298 3003/1554/1050 +f 3000/1553/1298 2997/1555/1299 3003/1554/1050 +f 3003/1554/1050 2997/1555/1299 3006/1429/1300 +f 2997/1555/1299 2788/1556/1293 3006/1429/1300 +f 2788/1556/1293 2793/1429/1185 3006/1429/1300 +f 3006/1429/1300 2793/1429/1185 3009/1431/1301 +f 2793/1429/1185 2799/1431/1187 3009/1431/1301 +f 3009/1431/1301 2799/1431/1187 3011/1557/1302 +f 2799/1431/1187 2800/1432/1188 3011/1557/1302 +f 2801/1434/1188 2803/1433/1189 3010/1433/1303 +f 2802/1558/1304 3015/1559/1305 3014/1558/1306 +f 3014/1558/1306 3015/1559/1305 3018/1560/1307 +f 3022/1561/1308 3018/1560/1307 3023/1562/1309 +f 3018/1560/1307 3015/1559/1305 3023/1562/1309 +f 3023/1562/1309 3015/1559/1305 3024/1559/1310 +f 3015/1559/1305 2802/1558/1304 3024/1559/1310 +f 3024/1559/1310 2802/1558/1304 3025/1563/1311 +f 3026/1564/1312 2803/1433/1189 3027/1564/1313 +f 2803/1433/1189 2805/1434/1190 3027/1564/1313 +f 2805/1434/1190 2809/1565/1192 3027/1564/1313 
+f 2809/1565/1192 2812/1566/1288 3027/1564/1313 +f 2812/1566/1288 2815/1567/1314 3027/1564/1313 +f 3027/1564/1313 2815/1567/1314 3028/1568/1315 +f 3031/1569/1192 3033/1570/1192 3034/1571/1192 +f 3032/1570/1316 3029/1572/1317 3037/1571/1317 +f 3029/1572/1317 2816/1441/1196 3037/1571/1317 +f 2816/1441/1196 2817/1440/1195 3037/1571/1317 +f 3037/1571/1317 2817/1440/1195 3039/1573/23 +f 2817/1440/1195 2818/1442/1084 3039/1573/23 +f 3039/1573/23 2818/1442/1084 3041/1574/1084 +f 2818/1442/1084 2821/1575/1199 3041/1574/1084 +f 2822/1446/1200 2824/1445/1199 3040/1576/1318 +f 3043/1577/1318 2823/1578/1200 3046/1579/1197 +f 3046/1579/1197 2823/1578/1200 3048/1580/1319 +f 2823/1578/1200 2827/1448/1086 3048/1580/1319 +f 2828/1448/990 2831/1449/990 3047/1580/990 +f 2832/1449/990 2834/1450/1320 3049/1580/990 +f 3049/1580/990 2834/1450/1320 3050/1581/1321 +f 3053/1582/1321 2835/1583/1320 3054/1584/1322 +f 2836/1583/1202 2839/1452/1204 3055/1584/1323 +f 2839/1452/1204 2840/1453/1205 3055/1584/1323 +f 3055/1584/1323 2840/1453/1205 3057/1585/1324 +f 2840/1453/1205 2841/1454/1206 3057/1585/1324 +f 3057/1585/1324 2841/1454/1206 3059/1586/1325 +f 3059/1586/1325 2841/1454/1206 3060/1587/1326 +f 3060/1587/1326 2841/1454/1206 3061/1588/1327 +f 2841/1454/1206 2842/1455/1207 3061/1588/1327 +f 3063/1588/636 2843/1455/636 3065/1589/636 +f 2844/1455/636 2845/1456/636 3064/1589/636 +f 2846/1456/1208 2847/1457/1209 3068/1589/1328 +f 2847/1457/1209 2848/1458/1210 3068/1589/1328 +f 3068/1589/1328 2848/1458/1210 3069/1590/1329 +f 2848/1458/1210 2849/1459/1211 3069/1590/1329 +f 2849/1459/1211 2850/1460/1212 3069/1590/1329 +f 3070/1591/1330 2851/1461/1215 3071/1592/1331 +f 2851/1461/1215 2852/1462/1216 3071/1592/1331 +f 3071/1592/1331 2852/1462/1216 3072/1593/1332 +f 2852/1462/1216 2853/1463/1218 3072/1593/1332 +f 2853/1463/1218 2854/1464/1219 3072/1593/1332 +f 3072/1593/1332 2854/1464/1219 3073/1594/1333 +f 3073/1594/1333 2854/1464/1219 3074/1595/1334 +f 2854/1464/1219 2855/1464/1220 3074/1595/1334 
+f 3074/1595/1334 2855/1464/1220 3075/1596/1335 +f 3078/1596/1336 2856/1464/1336 3080/1597/1336 +f 2857/1464/845 2860/1465/845 3079/1597/845 +f 2861/1466/1017 2862/1467/927 3084/1598/1016 +f 2862/1467/927 2863/1468/929 3084/1598/1016 +f 2863/1468/929 2864/1469/929 3084/1598/1016 +f 3084/1598/1016 2864/1469/929 3085/1599/1337 +f 2864/1469/929 2865/1470/929 3085/1599/1337 +f 3090/1600/844 3088/1601/844 3092/1602/844 +f 3085/1599/1337 2865/1470/929 3091/1603/1017 +f 2865/1470/929 2866/1472/1224 3091/1603/1017 +f 3093/1602/1104 2867/1604/1338 3095/1605/1339 +f 2867/1604/1338 2870/1606/1340 3095/1605/1339 +f 3095/1605/1339 2870/1606/1340 3097/1607/1341 +f 2871/1475/847 2873/1476/936 3096/1608/1342 +f 3099/1608/1343 2872/1476/1344 3101/1609/1345 +f 3101/1609/1345 2872/1476/1344 3102/1610/1346 +f 2872/1476/1344 2875/1477/1226 3102/1610/1346 +f 2875/1477/1226 2876/1478/1228 3102/1610/1346 +f 3102/1610/1346 2876/1478/1228 3103/1611/1347 +f 2876/1478/1228 2877/1479/771 3103/1611/1347 +f 3103/1611/1347 2877/1479/771 3105/1612/1348 +f 2877/1479/771 2878/1480/1230 3105/1612/1348 +f 3105/1612/1348 2878/1480/1230 3106/1613/1349 +f 2878/1480/1230 2879/1481/513 3106/1613/1349 +f 2880/1614/513 2882/1615/1350 3107/1616/1349 +f 3107/1616/1349 2882/1615/1350 3111/1617/1351 +f 3111/1617/1351 2882/1615/1350 3112/1618/167 +f 2882/1615/1350 2884/1619/166 3112/1618/167 +f 2885/1620/1352 2888/1621/1353 3115/1622/167 +f 3115/1622/167 2888/1621/1353 3116/1623/167 +f 3119/1623/167 2889/1621/1353 3121/1624/167 +f 2890/1485/183 2891/1486/209 3120/1625/1354 +f 2891/1486/209 2892/1487/1234 3120/1625/1354 +f 2893/1626/690 2895/1627/1355 3124/1624/367 +f 3124/1624/367 2895/1627/1355 3127/1628/1351 +f 3127/1628/1351 2895/1627/1355 3132/1629/1356 +f 3132/1629/1356 2895/1627/1355 3133/1630/1357 +f 2896/1488/1236 2897/1490/1237 3137/1631/1358 +f 2897/1490/1237 2898/1365/695 3137/1631/1358 +f 2898/1365/695 2899/1492/1239 3137/1631/1358 +f 3137/1631/1358 2899/1492/1239 3140/1632/1359 +f 3140/1632/1359 
2899/1492/1239 3142/1633/1360 +f 2899/1492/1239 2900/1493/1240 3142/1633/1360 +f 3148/1634/168 3145/1633/168 3151/1635/168 +f 3141/1633/168 2901/1493/168 3150/1635/168 +f 2902/1493/168 2903/1494/168 3149/1635/168 +f 2904/1494/1241 2905/1496/1242 3152/1635/1361 +f 2905/1496/1242 2906/1499/1245 3152/1635/1361 +f 2906/1499/1245 3153/1636/1362 3152/1635/1361 +f 3152/1635/1361 3153/1636/1362 3154/1637/1363 +f 3155/1638/1364 3154/1637/1363 3156/1639/1365 +f 3156/1639/1365 3154/1637/1363 3157/1639/1366 +f 3154/1637/1363 3153/1636/1362 3157/1639/1366 +f 3157/1639/1366 3153/1636/1362 3158/1640/1367 +f 3153/1636/1362 2906/1499/1245 3158/1640/1367 +f 2906/1499/1245 2908/1500/1246 3158/1640/1367 +f 2909/1501/1248 2910/1502/1249 3159/1641/1368 +f 3159/1641/1368 2910/1502/1249 3160/1642/1368 +f 2911/1502/1250 2912/1503/1251 3161/1642/1369 +f 3161/1642/1369 2912/1503/1251 3162/1643/1370 +f 3162/1643/1370 2912/1503/1251 3163/1644/1371 +f 2912/1503/1251 2913/1504/1252 3163/1644/1371 +f 3163/1644/1371 2913/1504/1252 3164/1645/1372 +f 3164/1645/1372 2913/1504/1252 3166/1646/1373 +f 3166/1646/1373 2913/1504/1252 3168/1647/1374 +f 2913/1504/1252 2914/1505/1253 3168/1647/1374 +f 3168/1647/1374 2914/1505/1253 3170/1648/1375 +f 2914/1505/1253 2915/1506/1254 3170/1648/1375 +f 3170/1648/1375 2915/1506/1254 3172/1649/1376 +f 3172/1649/1376 2915/1506/1254 3173/1650/1377 +f 2915/1506/1254 2916/1507/1255 3173/1650/1377 +f 2916/1507/1255 2917/1508/1256 3173/1650/1377 +f 3173/1650/1377 2917/1508/1256 3174/1651/109 +f 2917/1508/1256 2918/1509/1255 3174/1651/109 +f 3180/1652/1378 3174/1651/109 3181/1653/109 +f 3174/1651/109 2918/1509/1255 3181/1653/109 +f 2918/1509/1255 2919/1510/1256 3181/1653/109 +f 2920/1511/1255 2921/1512/42 3182/1654/1377 +f 3182/1654/1377 2921/1512/42 3183/1655/109 +f 2921/1512/42 2922/1513/1255 3183/1655/109 +f 2922/1513/1255 2923/1514/109 3183/1655/109 +f 3184/1656/1378 3183/1655/109 3186/1657/1379 +f 3186/1657/1379 3183/1655/109 3189/1658/1378 +f 3183/1655/109 
2923/1514/109 3189/1658/1378 +f 2923/1514/109 2925/1515/1258 3189/1658/1378 +f 3190/1659/1 3187/1660/1 3192/1661/1 +f 3188/1660/1 2926/1516/1379 3191/1661/1379 +f 2926/1516/1379 2928/1517/1380 3191/1661/1379 +f 3191/1661/1379 2928/1517/1380 3195/1662/1262 +f 2928/1517/1380 2931/1518/1261 3195/1662/1262 +f 2931/1518/1261 812/298/1261 3195/1662/1262 +f 810/298/1381 795/289/136 3194/1662/1382 +f 795/289/136 797/291/138 3194/1662/1382 +f 3193/1663/1383 798/293/140 3197/1664/1384 +f 798/293/140 799/292/139 3197/1664/1384 +f 799/292/139 3198/1665/1385 3197/1664/1384 +f 3196/1664/1386 3198/1665/1385 3199/1666/1387 +f 3198/1665/1385 3202/1667/109 3199/1666/1387 +f 3199/1666/1387 3202/1667/109 3204/1668/130 +f 3203/1669/1388 3200/1670/1389 3206/1671/1390 +f 3206/1671/1390 3200/1670/1389 3207/1672/1391 +f 3210/1673/1392 3201/1674/1389 3211/1675/1393 +f 3213/1676/1394 3202/1667/109 3217/1677/75 +f 3202/1667/109 3198/1665/1385 3217/1677/75 +f 3217/1677/75 3198/1665/1385 3219/1678/1395 +f 3219/1678/1395 3198/1665/1385 3221/1679/1396 +f 3198/1665/1385 799/292/139 3221/1679/1396 +f 3221/1679/1396 799/292/139 3222/294/1397 +f 799/292/139 800/294/141 3222/294/1397 +f 800/294/141 803/1680/1398 3222/294/1397 +f 804/1680/1399 3227/1681/1284 3223/294/1400 +f 3223/294/1400 3227/1681/1284 3228/1682/1400 +f 3230/1683/1401 3224/1684/1402 3231/1684/1403 +f 3234/1685/1404 3236/1686/1405 3238/1685/1406 +f 3236/1686/1405 3232/1684/1407 3238/1685/1406 +f 3232/1684/1407 3225/1684/1408 3238/1685/1406 +f 3226/1687/1409 805/1688/1410 3237/1689/1411 +f 806/1688/20 808/1690/20 3240/1689/19 +f 3240/1689/19 808/1690/20 3241/1691/1412 +f 809/297/147 811/298/148 3242/1520/1413 +f 811/298/148 2932/1520/1414 3242/1520/1413 +f 3242/1520/1413 2932/1520/1414 3243/1692/1415 +f 2932/1520/1414 2935/1521/1416 3243/1692/1415 +f 2935/1521/1416 2938/1522/1417 3243/1692/1415 +f 3246/1693/1418 3244/1694/1419 3248/1695/1420 +f 3243/1692/1415 2938/1522/1417 3247/1696/1421 +f 2938/1522/1417 2941/1523/1422 3247/1696/1421 
+f 3250/1697/1421 2942/1698/1423 3251/1699/1424 +f 2942/1698/1423 2946/1527/1266 3251/1699/1424 +f 2946/1527/1266 2948/1529/1268 3251/1699/1424 +f 2947/1529/1425 2950/1530/1426 3252/1699/1427 +f 3252/1699/1427 2950/1530/1426 3253/1700/1426 +f 2950/1530/1426 2954/1532/1426 3253/1700/1426 +f 2955/1532/1273 2956/1533/1274 3254/1700/1428 +f 3254/1700/1428 2956/1533/1274 3255/1533/1429 +f 2956/1533/1274 2957/1534/1275 3255/1533/1429 +f 3256/1701/1430 2958/1535/1431 3257/1702/1432 +f 3257/1702/1432 2958/1535/1431 3258/1536/1433 +f 2958/1535/1431 2961/1536/1434 3258/1536/1433 +f 3259/1703/1433 2962/1704/1434 3262/1705/1435 +f 2963/1704/1276 2967/1537/1276 3261/1705/1276 +f 2968/1537/1276 2971/1538/1276 3260/1705/1276 +f 3263/1705/1276 2972/1538/1276 3265/1706/1276 +f 2973/1539/1284 2976/1540/1276 3264/1707/1276 +f 2976/1540/1276 2981/1544/1276 3264/1707/1276 +f 3267/1707/1436 2982/1544/1283 3269/1708/1437 +f 2982/1544/1283 2978/1542/1281 3269/1708/1437 +f 3272/1709/1438 3269/1708/1437 3273/1710/1439 +f 3269/1708/1437 2978/1542/1281 3273/1710/1439 +f 2978/1542/1281 3277/1711/1440 3273/1710/1439 +f 3281/1712/1441 3275/1710/1442 3283/1713/1443 +f 3275/1710/1442 3276/1711/1444 3283/1713/1443 +f 3283/1713/1443 3276/1711/1444 3284/1714/1445 +f 3285/1714/1446 3277/1711/1440 3286/1715/1447 +f 3277/1711/1440 2978/1542/1281 3286/1715/1447 +f 2978/1542/1281 2977/1541/1280 3286/1715/1447 +f 3286/1715/1447 2977/1541/1280 3289/1716/1448 +f 2977/1541/1280 2979/1543/1282 3289/1716/1448 +f 2980/1543/1276 2983/1545/1284 3288/1716/1276 +f 2983/1545/1284 2985/1546/1276 3288/1716/1276 +f 2986/1546/1276 2988/1547/1276 3287/1716/1276 +f 3290/1716/1276 2989/1547/1276 3291/1717/1276 +f 2990/1547/1276 2993/1548/1284 3293/1717/1276 +f 3293/1717/1276 2993/1548/1284 3294/1718/1276 +f 2993/1548/1284 2994/1549/1292 3294/1718/1276 +f 3295/1718/1449 2995/1549/1450 3297/1551/1451 +f 2996/1549/1295 2999/1551/1296 3296/1551/1452 +f 3298/1719/1453 3000/1553/1298 3299/1720/1454 +f 3000/1553/1298 
3001/1552/1297 3299/1720/1454 +f 3001/1552/1297 3003/1554/1050 3300/1720/1050 +f 3302/1721/1455 3300/1720/1050 3309/1722/1455 +f 3300/1720/1050 3003/1554/1050 3309/1722/1455 +f 3002/1554/1050 3004/1429/1050 3308/1722/1050 +f 3005/1429/1050 3007/1431/1050 3307/1722/1050 +f 3008/1431/1050 3012/1557/1050 3306/1722/1050 +f 3013/1557/1050 3016/1723/1050 3305/1722/1050 +f 3017/1723/1050 3019/1724/1050 3304/1722/1050 +f 3020/1724/1050 3314/1725/1050 3303/1722/1050 +f 3311/1722/1050 3312/1725/1050 3315/1726/1050 +f 3318/1727/1456 3313/1727/1457 3319/1728/1458 +f 3319/1728/1458 3313/1727/1457 3320/1729/1459 +f 3313/1727/1457 3021/1730/1308 3320/1729/1459 +f 3022/1561/1308 3023/1562/1309 3321/1731/1460 +f 3321/1731/1460 3023/1562/1309 3323/1731/1461 +f 3023/1562/1309 3024/1559/1310 3323/1731/1461 +f 3323/1731/1461 3024/1559/1310 3324/1732/1462 +f 3024/1559/1310 3025/1563/1311 3324/1732/1462 +f 3324/1732/1462 3025/1563/1311 3325/1563/1463 +f 3326/1733/1464 3026/1564/1312 3327/1734/1465 +f 3026/1564/1312 3027/1564/1313 3327/1734/1465 +f 3027/1564/1313 3028/1568/1315 3327/1734/1465 +f 3327/1734/1465 3028/1568/1315 3329/1734/1466 +f 3029/1572/1317 3032/1570/1316 3328/1735/1467 +f 3330/1735/1192 3033/1570/1192 3332/1736/1192 +f 3332/1736/1192 3033/1570/1192 3335/1737/1191 +f 3335/1737/1191 3033/1570/1192 3336/1738/1192 +f 3033/1570/1192 3031/1569/1192 3336/1738/1192 +f 3338/1738/1192 3030/1569/1192 3340/1739/1192 +f 3031/1569/1192 3034/1571/1192 3339/1739/1192 +f 3343/1740/1192 3035/1741/1192 3344/1742/1192 +f 3346/1742/1186 3350/1743/1468 3351/1743/1187 +f 3347/1744/1468 3355/1745/1469 3352/1744/1470 +f 3352/1744/1470 3355/1745/1469 3356/1746/1471 +f 3358/1747/1472 3357/1748/1473 3360/1748/1474 +f 3356/1746/1471 3355/1745/1469 3359/1749/1475 +f 3361/1749/1476 3353/1745/1477 3365/1750/1195 +f 3354/1745/1192 3348/1744/1192 3364/1750/1192 +f 3349/1743/1192 3344/1742/1192 3363/1751/1192 +f 3344/1742/1192 3035/1741/1192 3363/1751/1192 +f 3036/1741/1317 3038/1752/23 3362/1751/1478 +f 
3038/1752/23 3042/1753/1084 3362/1751/1478 +f 3042/1753/1084 3044/1754/1197 3362/1751/1478 +f 3365/1750/1195 3045/1755/1197 3367/1756/1479 +f 3046/1579/1197 3048/1580/1319 3366/1757/1480 +f 3048/1580/1319 3051/1581/1481 3366/1757/1480 +f 3370/1758/1480 3052/1582/1482 3372/1759/1483 +f 3372/1759/1483 3052/1582/1482 3373/1760/1484 +f 3053/1582/1321 3054/1584/1322 3374/1760/1485 +f 3054/1584/1322 3056/1585/1486 3374/1760/1485 +f 3374/1760/1485 3056/1585/1486 3375/1761/1487 +f 3377/1762/1488 3378/1763/1489 3379/1764/1490 +f 3378/1763/1489 3381/1765/1491 3379/1764/1490 +f 3381/1765/1491 3376/1761/1492 3379/1764/1490 +f 3375/1761/1487 3056/1585/1486 3380/1764/1493 +f 3056/1585/1486 3058/1586/1494 3380/1764/1493 +f 3380/1764/1493 3058/1586/1494 3382/1766/1495 +f 3059/1586/1325 3060/1587/1326 3383/1766/1496 +f 3060/1587/1326 3384/1767/1497 3383/1766/1496 +f 3383/1766/1496 3384/1767/1497 3385/1768/1498 +f 3385/1768/1498 3384/1767/1497 3386/1769/1499 +f 3386/1769/1499 3384/1767/1497 3387/1770/1500 +f 3387/1770/1500 3384/1767/1497 3389/1771/1501 +f 3384/1767/1497 3060/1587/1326 3389/1771/1501 +f 3060/1587/1326 3061/1588/1327 3389/1771/1501 +f 3062/1588/636 3066/1589/636 3388/1771/636 +f 3391/1771/636 3067/1589/636 3393/1772/636 +f 3068/1589/1328 3069/1590/1329 3392/1772/1502 +f 3396/1773/1503 3070/1591/1330 3400/1774/876 +f 3070/1591/1330 3071/1592/1331 3400/1774/876 +f 3071/1592/1331 3072/1593/1332 3400/1774/876 +f 3400/1774/876 3072/1593/1332 3403/1775/1504 +f 3403/1775/1504 3072/1593/1332 3404/1776/1505 +f 3072/1593/1332 3073/1594/1333 3404/1776/1505 +f 3404/1776/1505 3073/1594/1333 3405/1777/1506 +f 3406/1778/877 3405/1777/1506 3407/1779/1507 +f 3405/1777/1506 3073/1594/1333 3407/1779/1507 +f 3073/1594/1333 3074/1595/1334 3407/1779/1507 +f 3407/1779/1507 3074/1595/1334 3408/1780/1508 +f 3074/1595/1334 3075/1596/1335 3408/1780/1508 +f 3409/1781/1508 3076/1782/1335 3411/1783/1509 +f 3077/1782/845 3081/1784/845 3410/1783/845 +f 3412/1783/845 3082/1784/845 3414/1785/845 +f 
3083/1784/845 3086/1601/845 3413/1785/845 +f 3087/1601/845 3089/1600/845 3416/1785/845 +f 3416/1785/845 3089/1600/845 3417/1786/845 +f 3089/1600/845 3093/1602/1104 3417/1786/845 +f 3093/1602/1104 3095/1605/1339 3417/1786/845 +f 3418/1786/1510 3094/1605/1511 3419/1787/1512 +f 3419/1787/1512 3094/1605/1511 3420/1788/1513 +f 3094/1605/1511 3098/1607/1514 3420/1788/1513 +f 3420/1788/1513 3098/1607/1514 3421/1789/850 +f 3098/1607/1514 3100/1790/1515 3421/1789/850 +f 3421/1789/850 3100/1790/1515 3422/1791/1516 +f 3101/1609/1345 3102/1610/1346 3423/1792/1516 +f 3102/1610/1346 3103/1611/1347 3423/1792/1516 +f 3423/1792/1516 3103/1611/1347 3424/1793/1517 +f 3103/1611/1347 3105/1612/1348 3424/1793/1517 +f 3429/1794/508 3104/1795/1518 3433/1796/1519 +f 3433/1796/1519 3104/1795/1518 3435/1797/1520 +f 3105/1612/1348 3106/1613/1349 3434/1798/1520 +f 3106/1613/1349 3108/1799/1351 3434/1798/1520 +f 3439/1797/168 3109/1800/168 3440/1801/168 +f 3110/1617/168 3113/1618/168 3444/1802/168 +f 3444/1802/168 3113/1618/168 3448/1803/168 +f 3114/1622/168 3117/1623/168 3447/1804/168 +f 3447/1804/168 3117/1623/168 3452/1805/168 +f 3118/1623/168 3122/1624/168 3451/1805/168 +f 3123/1624/168 3125/1628/168 3450/1805/168 +f 3126/1628/168 3128/1629/168 3449/1805/168 +f 3454/1805/168 3129/1629/168 3455/1806/168 +f 3457/1806/168 3130/1629/168 3459/1807/168 +f 3131/1629/168 3134/1630/168 3458/1807/168 +f 3462/1808/168 3135/1631/168 3466/1809/168 +f 3136/1631/168 3138/1632/168 3465/1809/168 +f 3139/1632/168 3143/1633/168 3464/1809/168 +f 3144/1633/168 3146/1634/168 3463/1809/168 +f 3468/1809/168 3147/1634/1521 3469/1810/1522 +f 3147/1634/1521 3152/1635/1361 3469/1810/1522 +f 3152/1635/1361 3154/1637/1363 3469/1810/1522 +f 3154/1637/1363 3155/1638/1364 3469/1810/1522 +f 3469/1810/1522 3155/1638/1364 3470/1811/1523 +f 3470/1811/1523 3155/1638/1364 3471/1812/1524 +f 3155/1638/1364 3156/1639/1365 3471/1812/1524 +f 3471/1812/1524 3156/1639/1365 3472/1813/1525 +f 3156/1639/1365 3157/1639/1366 3472/1813/1525 
+f 3157/1639/1366 3158/1640/1367 3472/1813/1525 +f 3473/1814/1526 3159/1641/1368 3474/1815/1527 +f 3159/1641/1368 3160/1642/1368 3474/1815/1527 +f 3475/1815/1528 3476/1816/1529 3478/1817/1530 +f 3478/1817/1530 3476/1816/1529 3480/1818/1531 +f 3476/1816/1529 3475/1815/1528 3480/1818/1531 +f 3475/1815/1528 3161/1642/1369 3480/1818/1531 +f 3161/1642/1369 3162/1643/1370 3480/1818/1531 +f 3162/1643/1370 3163/1644/1371 3480/1818/1531 +f 3480/1818/1531 3163/1644/1371 3481/1819/1532 +f 3163/1644/1371 3164/1645/1372 3481/1819/1532 +f 3164/1645/1372 3166/1646/1373 3481/1819/1532 +f 3483/1820/1533 3481/1819/1532 3484/1821/1534 +f 3481/1819/1532 3166/1646/1373 3484/1821/1534 +f 3486/1821/1535 3165/1646/1536 3488/1822/1537 +f 3492/1823/1538 3496/1824/1539 3497/1825/1540 +f 3496/1824/1539 3490/1826/1541 3497/1825/1540 +f 3499/1825/1542 3487/1826/1542 3500/1827/1543 +f 3488/1822/1537 3165/1646/1536 3503/1828/1543 +f 3503/1828/1543 3165/1646/1536 3505/1829/1544 +f 3165/1646/1536 3167/1647/1545 3505/1829/1544 +f 3167/1647/1545 3169/1648/1546 3505/1829/1544 +f 3505/1829/1544 3169/1648/1546 3507/1830/1547 +f 3169/1648/1546 3171/1649/1548 3507/1830/1547 +f 3507/1830/1547 3171/1649/1548 3509/1831/1549 +f 3512/1832/1379 3515/1833/1379 3518/1834/1379 +f 3513/1833/1379 3509/1831/1549 3517/1834/1379 +f 3509/1831/1549 3171/1649/1548 3517/1834/1379 +f 3172/1649/1376 3173/1650/1377 3516/1834/1550 +f 3173/1650/1377 3174/1651/109 3516/1834/1550 +f 3174/1651/109 3180/1652/1378 3516/1834/1550 +f 3520/1834/1379 3175/1652/1379 3521/1835/1379 +f 3524/1835/1379 3176/1652/1379 790/1836/1379 +f 789/1836/1 3177/1652/1 3526/1837/1 +f 3525/1838/1 3178/1839/1 3529/1840/1 +f 3179/1839/1551 3182/1654/1377 3528/1840/1550 +f 3182/1654/1377 3183/1655/109 3528/1840/1550 +f 3183/1655/109 3184/1656/1378 3528/1840/1550 +f 3185/1841/1 3530/1842/1379 3527/1843/1379 +f 3530/1842/1379 3533/1844/1552 3527/1843/1379 +f 3533/1844/1552 3534/1845/1553 3527/1843/1379 +f 3535/1846/1379 3527/1843/1379 3539/1847/1379 +f 
3527/1843/1379 3534/1845/1553 3539/1847/1379 +f 3538/1847/127 3534/1845/1553 3540/1848/1554 +f 3534/1845/1553 3533/1844/1552 3540/1848/1554 +f 3542/281/1555 3532/1849/1556 3544/1849/1557 +f 3544/1849/1557 3532/1849/1556 3545/1850/1558 +f 3532/1849/1556 3531/1849/1 3545/1850/1558 +f 3531/1849/1 3547/1850/135 3545/1850/1558 +f 3547/1850/135 3203/1669/1388 3545/1850/1558 +f 3203/1669/1388 3206/1671/1390 3545/1850/1558 +f 3546/1850/1427 3205/1671/1426 3548/1851/1427 +f 3205/1671/1426 3208/1672/1426 3548/1851/1427 +f 3552/1852/1426 3550/1853/1559 3553/1854/1426 +f 3550/1853/1559 3209/1673/1425 3553/1854/1426 +f 3210/1673/1392 3211/1675/1393 3554/1854/1560 +f 3554/1854/1560 3211/1675/1393 3556/1855/27 +f 3212/1676/23 3214/1677/23 3555/1856/23 +f 3558/1856/23 3215/1677/23 3559/1857/23 +f 3216/1677/23 3218/1678/1561 3561/1857/23 +f 3561/1857/23 3218/1678/1561 3564/1858/1561 +f 3218/1678/1561 3220/1679/47 3564/1858/1561 +f 3564/1858/1561 3220/1679/47 3565/1859/60 +f 3220/1679/47 3223/294/1400 3565/1859/60 +f 3223/294/1400 3228/1682/1400 3565/1859/60 +f 3567/1860/1562 3566/1861/1563 3568/1862/1564 +f 3566/1861/1563 3229/1683/1565 3568/1862/1564 +f 3230/1683/1401 3231/1684/1403 3569/1862/1566 +f 3231/1684/1403 3235/1686/1567 3569/1862/1566 +f 3569/1862/1566 3235/1686/1567 3571/1863/1568 +f 3236/1686/1405 3234/1685/1404 3570/1863/1569 +f 3233/1864/1570 3239/1689/1571 3573/1865/1569 +f 3573/1865/1569 3239/1689/1571 3575/1866/1572 +f 3240/1689/19 3241/1691/1412 3574/1866/1573 +f 3241/1691/1412 3244/1694/1419 3574/1866/1573 +f 3244/1694/1419 3246/1693/1418 3574/1866/1573 +f 3575/1866/1572 3245/1693/1574 3577/1693/1575 +f 3577/1693/1575 3245/1693/1574 3579/1867/1576 +f 3246/1693/1418 3248/1695/1420 3578/1867/1577 +f 3249/1697/1578 3252/1699/1427 3581/1868/1579 +f 3581/1868/1579 3252/1699/1427 3582/1869/1580 +f 3252/1699/1427 3253/1700/1426 3582/1869/1580 +f 3586/1869/1581 3583/1869/1582 3587/1870/1583 +f 3583/1869/1582 3254/1700/1428 3587/1870/1583 +f 3254/1700/1428 3255/1533/1429 
3587/1870/1583 +f 3589/1871/1584 3256/1701/1430 3590/1872/1585 +f 3256/1701/1430 3257/1702/1432 3590/1872/1585 +f 3590/1872/1585 3257/1702/1432 3591/1873/1586 +f 3257/1702/1432 3258/1536/1433 3591/1873/1586 +f 3259/1703/1433 3262/1705/1435 3592/1703/1587 +f 3592/1703/1587 3262/1705/1435 3593/1874/1588 +f 3262/1705/1435 3266/1706/1589 3593/1874/1588 +f 3266/1706/1589 3268/1875/1590 3593/1874/1588 +f 3593/1874/1588 3268/1875/1590 3594/1876/1591 +f 3268/1875/1590 3270/1877/1592 3594/1876/1591 +f 3595/1878/1593 3271/1879/1594 3597/1880/1595 +f 3597/1880/1595 3271/1879/1594 3598/1881/1596 +f 3271/1879/1594 3274/1882/1597 3598/1881/1596 +f 3274/1882/1597 3278/1883/1598 3598/1881/1596 +f 3599/1884/1599 3279/1885/1600 3600/1886/1601 +f 3601/1887/1601 3280/1712/1600 3602/1888/1602 +f 3280/1712/1600 3282/1713/1603 3602/1888/1602 +f 3603/1889/1604 3602/1888/1602 3605/1890/1605 +f 3602/1888/1602 3282/1713/1603 3605/1890/1605 +f 3283/1713/1443 3284/1714/1445 3604/1890/1606 +f 3285/1714/1446 3286/1715/1447 3605/1890/1605 +f 3605/1890/1605 3286/1715/1447 3606/1891/1607 +f 3286/1715/1447 3289/1716/1448 3606/1891/1607 +f 3289/1716/1448 3292/1717/1608 3606/1891/1607 +f 3606/1891/1607 3292/1717/1608 3607/1892/1609 +f 3607/1892/1609 3292/1717/1608 3608/1893/1610 +f 3292/1717/1608 3295/1718/1449 3608/1893/1610 +f 3295/1718/1449 3297/1551/1451 3608/1893/1610 +f 3608/1893/1610 3297/1551/1451 3609/1894/1611 +f 3298/1719/1453 3299/1720/1454 3611/1895/1612 +f 3611/1895/1612 3299/1720/1454 3612/1896/1613 +f 3299/1720/1454 3301/1721/1614 3612/1896/1613 +f 3612/1896/1613 3301/1721/1614 3613/1897/1615 +f 3613/1897/1615 3301/1721/1614 3615/1722/1616 +f 3301/1721/1614 3310/1722/1617 3615/1722/1616 +f 3615/1722/1616 3310/1722/1617 3617/1898/1444 +f 3617/1898/1444 3310/1722/1617 3620/1899/1618 +f 3311/1722/1050 3315/1726/1050 3619/1899/1050 +f 3626/1900/1135 3623/1901/1135 3628/1902/1135 +f 3618/1901/1050 3316/1727/1050 3627/1902/1050 +f 3629/1902/1619 3317/1727/1620 3630/1902/1621 +f 
3318/1727/1456 3319/1728/1458 3632/1902/1622 +f 3632/1902/1622 3319/1728/1458 3634/1903/1623 +f 3634/1903/1623 3319/1728/1458 3635/1728/1624 +f 3319/1728/1458 3320/1729/1459 3635/1728/1624 +f 3320/1729/1459 3322/1729/1625 3635/1728/1624 +f 3635/1728/1624 3322/1729/1625 3637/1904/1626 +f 3323/1731/1461 3324/1732/1462 3636/1905/1627 +f 3324/1732/1462 3639/1906/1628 3636/1905/1627 +f 3637/1904/1626 3638/1907/1628 3640/1908/1629 +f 3640/1908/1629 3638/1907/1628 3643/1909/1630 +f 3643/1909/1630 3638/1907/1628 3644/1907/1631 +f 3646/1910/1632 3639/1906/1628 3647/1911/1633 +f 3639/1906/1628 3324/1732/1462 3647/1911/1633 +f 3647/1911/1633 3324/1732/1462 3648/1912/1634 +f 3324/1732/1462 3325/1563/1463 3648/1912/1634 +f 3326/1733/1464 3327/1734/1465 3649/1913/1635 +f 3327/1734/1465 3329/1734/1466 3649/1913/1635 +f 3649/1913/1635 3329/1734/1466 3650/1914/1636 +f 3329/1734/1466 3331/1734/1637 3650/1914/1636 +f 3331/1734/1637 3333/1915/1638 3650/1914/1636 +f 3650/1914/1636 3333/1915/1638 3651/1916/1636 +f 3651/1916/1636 3333/1915/1638 3652/1917/1639 +f 3656/1918/1640 3653/1919/1641 3659/1920/1642 +f 3659/1920/1642 3653/1919/1641 3660/1921/1641 +f 3653/1919/1641 3334/1737/1643 3660/1921/1641 +f 3334/1737/1643 3337/1738/1644 3660/1921/1641 +f 3337/1738/1644 3341/1739/1289 3660/1921/1641 +f 3660/1921/1641 3341/1739/1289 3661/1922/1641 +f 3663/1923/1645 3342/1740/1643 3664/1924/1646 +f 3342/1740/1643 3345/1742/1643 3664/1924/1646 +f 3667/1924/1647 3346/1742/1186 3669/1925/1648 +f 3346/1742/1186 3351/1743/1187 3669/1925/1648 +f 3669/1925/1648 3351/1743/1187 3672/1926/1303 +f 3352/1744/1470 3356/1746/1471 3671/1927/1303 +f 3357/1748/1473 3678/1748/1649 3670/1928/1303 +f 3681/1929/1650 3676/1928/1651 3682/1930/1305 +f 3683/1931/1652 3684/1932/1653 3685/1930/1310 +f 3684/1932/1653 3682/1930/1305 3685/1930/1310 +f 3682/1930/1305 3676/1928/1651 3685/1930/1310 +f 3676/1928/1651 3677/1748/1654 3685/1930/1310 +f 3685/1930/1310 3677/1748/1654 3686/1933/1655 +f 3678/1748/1649 3357/1748/1473 
3687/1933/1656 +f 3357/1748/1473 3358/1747/1472 3687/1933/1656 +f 3690/1934/1657 3687/1933/1656 3691/1747/1658 +f 3687/1933/1656 3358/1747/1472 3691/1747/1658 +f 3691/1747/1658 3358/1747/1472 3692/1935/1659 +f 3694/1935/1660 3358/1747/1472 3696/1936/1661 +f 3358/1747/1472 3360/1748/1474 3696/1936/1661 +f 3361/1749/1476 3365/1750/1195 3695/1937/1662 +f 3365/1750/1195 3367/1756/1479 3695/1937/1662 +f 3696/1936/1661 3368/1938/1663 3697/1939/1664 +f 3369/1938/1480 3371/1940/1665 3698/1939/1666 +f 3698/1939/1666 3371/1940/1665 3699/1941/1667 +f 3699/1941/1667 3371/1940/1665 3700/1942/1668 +f 3372/1759/1483 3373/1760/1484 3701/1943/1669 +f 3373/1760/1484 3376/1761/1492 3701/1943/1669 +f 3376/1761/1492 3381/1765/1491 3701/1943/1669 +f 3701/1943/1669 3381/1765/1491 3702/1765/1670 +f 3381/1765/1491 3378/1763/1489 3702/1765/1670 +f 3702/1765/1670 3378/1763/1489 3703/1763/1671 +f 3703/1763/1671 3378/1763/1489 3704/1944/1672 +f 3378/1763/1489 3377/1762/1488 3704/1944/1672 +f 3704/1944/1672 3377/1762/1488 3705/1945/1673 +f 3705/1945/1673 3377/1762/1488 3706/1946/1674 +f 3706/1946/1674 3377/1762/1488 3707/1947/1675 +f 3377/1762/1488 3379/1764/1490 3707/1947/1675 +f 3380/1764/1493 3382/1766/1495 3708/1947/1676 +f 3708/1947/1676 3382/1766/1495 3709/1948/1677 +f 3383/1766/1496 3385/1768/1498 3710/1948/1678 +f 3710/1948/1678 3385/1768/1498 3711/1949/1679 +f 3385/1768/1498 3386/1769/1499 3711/1949/1679 +f 3711/1949/1679 3386/1769/1499 3712/1950/1680 +f 3386/1769/1499 3387/1770/1500 3712/1950/1680 +f 3713/1951/1681 3712/1950/1680 3714/1951/1682 +f 3712/1950/1680 3387/1770/1500 3714/1951/1682 +f 3714/1951/1682 3387/1770/1500 3717/1952/1683 +f 3387/1770/1500 3389/1771/1501 3717/1952/1683 +f 3390/1771/636 3394/1772/636 3716/1952/636 +f 3395/1773/636 3397/1774/636 3715/1953/636 +f 3718/1953/636 3398/1774/636 3720/1954/636 +f 3399/1774/636 3401/1775/636 3719/1954/636 +f 3724/1954/626 3402/1775/626 3725/1955/626 +f 3403/1775/1504 3404/1776/1505 3728/1955/1684 +f 3728/1955/1684 
3404/1776/1505 3731/1956/793 +f 3404/1776/1505 3405/1777/1506 3731/1956/793 +f 3405/1777/1506 3406/1778/877 3731/1956/793 +f 3731/1956/793 3406/1778/877 3733/1778/876 +f 3733/1778/876 3406/1778/877 3734/1957/875 +f 3406/1778/877 3407/1779/1507 3734/1957/875 +f 3407/1779/1507 3408/1780/1508 3734/1957/875 +f 3737/1958/966 3409/1781/1508 3738/1959/929 +f 3409/1781/1508 3411/1783/1509 3738/1959/929 +f 3738/1959/929 3411/1783/1509 3739/1960/929 +f 3411/1783/1509 3415/1785/1685 3739/1960/929 +f 3739/1960/929 3415/1785/1685 3740/1961/929 +f 3740/1961/929 3415/1785/1685 3741/1962/929 +f 3415/1785/1685 3418/1786/1510 3741/1962/929 +f 3418/1786/1510 3419/1787/1512 3741/1962/929 +f 3741/1962/929 3419/1787/1512 3742/1963/170 +f 3419/1787/1512 3420/1788/1513 3742/1963/170 +f 3742/1963/170 3420/1788/1513 3745/1964/1686 +f 3420/1788/1513 3421/1789/850 3745/1964/1686 +f 3421/1789/850 3422/1791/1516 3745/1964/1686 +f 3745/1964/1686 3422/1791/1516 3746/1965/1687 +f 3422/1791/1516 3425/1794/508 3746/1965/1687 +f 3750/1965/168 3426/1794/168 3751/1966/168 +f 3754/1966/168 3427/1794/168 3756/1967/168 +f 3428/1794/168 3430/1796/168 3755/1967/168 +f 3758/1967/168 3431/1796/168 3760/1968/168 +f 3432/1796/168 3436/1797/168 3759/1968/168 +f 3762/1968/168 3437/1797/168 3764/1969/168 +f 3438/1797/168 3441/1801/168 3763/1969/168 +f 3765/1970/1688 3442/1971/431 3766/1972/1689 +f 3767/1973/1690 3766/1972/1689 3768/1974/1691 +f 3766/1972/1689 3442/1971/431 3768/1974/1691 +f 3769/1975/1691 3443/1802/431 3770/1976/1692 +f 3443/1802/431 3445/1803/1693 3770/1976/1692 +f 3771/1977/1694 3770/1976/1692 3772/1978/1695 +f 3770/1976/1692 3445/1803/1693 3772/1978/1695 +f 3773/1979/1696 3446/1804/1693 3774/1980/566 +f 3446/1804/1693 3453/1805/1697 3774/1980/566 +f 3774/1980/566 3453/1805/1697 3775/1981/566 +f 3453/1805/1697 3456/1806/830 3775/1981/566 +f 3775/1981/566 3456/1806/830 3776/1982/1698 +f 3456/1806/830 3460/1807/330 3776/1982/1698 +f 3777/1983/1699 3461/1808/330 3778/1984/1700 +f 3461/1808/330 
3467/1809/1701 3778/1984/1700 +f 3467/1809/1701 3469/1810/1522 3778/1984/1700 +f 3469/1810/1522 3470/1811/1523 3778/1984/1700 +f 3780/1985/1702 3778/1984/1700 3781/1986/1703 +f 3778/1984/1700 3470/1811/1523 3781/1986/1703 +f 3470/1811/1523 3471/1812/1524 3781/1986/1703 +f 3471/1812/1524 3472/1813/1525 3781/1986/1703 +f 3783/1987/1704 3473/1814/1526 3784/1988/1705 +f 3473/1814/1526 3474/1815/1527 3784/1988/1705 +f 3475/1815/1528 3478/1817/1530 3785/1988/1706 +f 3785/1988/1706 3478/1817/1530 3786/1989/1707 +f 3787/1990/1707 3477/1991/1708 3788/1990/1709 +f 3477/1991/1708 3479/1992/1531 3788/1990/1709 +f 3788/1990/1709 3479/1992/1531 3789/1993/1710 +f 3792/1994/1711 3793/1993/1712 3795/1994/1713 +f 3795/1994/1713 3793/1993/1712 3797/1995/1714 +f 3793/1993/1712 3789/1993/1710 3797/1995/1714 +f 3797/1995/1714 3789/1993/1710 3799/1996/1715 +f 3789/1993/1710 3479/1992/1531 3799/1996/1715 +f 3480/1818/1531 3481/1819/1532 3798/1820/1715 +f 3481/1819/1532 3483/1820/1533 3798/1820/1715 +f 3801/1997/1716 3800/1996/1717 3802/1998/1718 +f 3800/1996/1717 3482/1999/1719 3802/1998/1718 +f 3482/1999/1719 3485/1999/1720 3802/1998/1718 +f 3485/1999/1720 3489/2000/1721 3802/1998/1718 +f 3489/2000/1721 3493/2001/1722 3802/1998/1718 +f 3803/1998/1723 3494/2001/1724 3804/2002/1725 +f 3495/1824/1726 3491/1823/1727 3805/2003/1728 +f 3807/2004/1729 3805/2003/1728 3808/2005/1727 +f 3805/2003/1728 3491/1823/1727 3808/2005/1727 +f 3811/2006/1730 3814/2007/1731 3815/1825/1732 +f 3814/2007/1731 3809/2005/1733 3815/1825/1732 +f 3809/2005/1733 3492/1823/1538 3815/1825/1732 +f 3492/1823/1538 3497/1825/1540 3815/1825/1732 +f 3498/1825/1734 3501/1827/1735 3816/1825/1736 +f 3816/1825/1736 3501/1827/1735 3817/2008/1737 +f 3502/1828/1738 3504/1829/1739 3818/2009/1740 +f 3818/2009/1740 3504/1829/1739 3819/2009/1741 +f 3819/2009/1741 3504/1829/1739 3820/2010/1742 +f 3504/1829/1739 3506/1830/1743 3820/2010/1742 +f 3820/2010/1742 3506/1830/1743 3821/2011/1744 +f 3506/1830/1743 3508/1831/1745 3821/2011/1744 
+f 3821/2011/1744 3508/1831/1745 3822/2012/81 +f 3508/1831/1745 3514/1833/1746 3822/2012/81 +f 3514/1833/1746 3510/1832/1747 3822/2012/81 +f 3510/1832/1747 3824/2013/1748 3822/2012/81 +f 3822/2012/81 3824/2013/1748 3825/2014/111 +f 3827/2014/122 3823/2013/1749 3831/2015/21 +f 3831/2015/21 3823/2013/1749 3832/2013/1750 +f 3842/2016/1751 3835/2017/1752 3839/2018/1753 +f 3832/2013/1750 3823/2013/1749 3838/2019/1754 +f 3824/2013/1748 3510/1832/1747 3837/2019/1747 +f 3836/2019/1755 3511/1832/1379 3841/2020/1379 +f 3511/1832/1379 3519/1834/1379 3841/2020/1379 +f 3519/1834/1379 3522/1835/1379 3841/2020/1379 +f 3844/2021/1756 3840/2020/1757 3846/2022/1415 +f 3846/2022/1415 3840/2020/1757 3848/1835/1758 +f 3840/2020/1757 3523/1835/1759 3848/1835/1758 +f 3523/1835/1759 791/1836/1760 3848/1835/1758 +f 792/287/135 788/287/133 3847/287/1761 +f 3849/287/1762 787/287/1427 3851/2023/1763 +f 3851/2023/1763 787/287/1427 3853/2023/1764 +f 3853/2023/1764 787/287/1427 3856/2024/1426 +f 787/287/1427 759/286/1426 3856/2024/1426 +f 760/275/1426 762/270/1426 3855/2025/1426 +f 763/270/1426 765/276/1426 3854/2025/1426 +f 3858/2025/1765 766/276/119 3859/2026/1766 +f 766/276/119 767/276/120 3859/2026/1766 +f 768/276/1767 771/277/1768 3862/2026/1769 +f 3862/2026/1769 771/277/1768 3863/2027/1770 +f 3866/2027/1771 772/277/121 3867/2028/1772 +f 772/277/121 774/278/123 3867/2028/1772 +f 3869/2028/19 773/278/19 3871/2029/1773 +f 773/278/19 775/279/19 3871/2029/1773 +f 775/279/19 777/280/19 3871/2029/1773 +f 777/280/19 3537/281/1774 3871/2029/1773 +f 3537/281/1774 3541/281/1662 3871/2029/1773 +f 3871/2029/1773 3541/281/1662 3872/2030/1775 +f 3541/281/1662 3543/1849/1776 3872/2030/1775 +f 3874/2031/1777 3873/2031/1574 3876/1853/1778 +f 3872/2030/1775 3543/1849/1776 3875/1851/1762 +f 3543/1849/1776 3546/1850/1427 3875/1851/1762 +f 3546/1850/1427 3548/1851/1427 3875/1851/1762 +f 3876/1853/1778 3549/1853/1779 3879/2032/1780 +f 3879/2032/1780 3549/1853/1779 3882/2033/1781 +f 3549/1853/1779 3551/1852/1782 
3882/2033/1781 +f 3882/2033/1781 3551/1852/1782 3883/1852/1766 +f 3551/1852/1782 3554/1854/1560 3883/1852/1766 +f 3554/1854/1560 3556/1855/27 3883/1852/1766 +f 3557/1856/1783 3560/1857/1767 3884/2034/1770 +f 3884/2034/1770 3560/1857/1767 3885/2035/14 +f 3885/2035/14 3560/1857/1767 3886/2036/1784 +f 3560/1857/1767 3562/1858/1785 3886/2036/1784 +f 3563/2037/1786 3566/1861/1563 3887/2038/1784 +f 3566/1861/1563 3567/1860/1562 3887/2038/1784 +f 3887/2038/1784 3567/1860/1562 3889/2039/1787 +f 3567/1860/1562 3568/1862/1564 3889/2039/1787 +f 3889/2039/1787 3568/1862/1564 3891/2040/1788 +f 3569/1862/1566 3571/1863/1568 3890/2040/1584 +f 3895/2041/1789 3572/2042/1789 3897/2043/1789 +f 3573/1865/1569 3575/1866/1572 3896/2044/1790 +f 3575/1866/1572 3577/1693/1575 3896/2044/1790 +f 3900/2045/1791 3576/2046/1792 3902/2046/1793 +f 3576/2046/1792 3580/1868/1794 3902/2046/1793 +f 3902/2046/1793 3580/1868/1794 3903/1868/1795 +f 3580/1868/1794 3583/1869/1582 3903/1868/1795 +f 3583/1869/1582 3586/1869/1581 3903/1868/1795 +f 3906/2047/1789 3584/2048/1789 3910/2049/1789 +f 3585/2048/1796 3588/1871/1796 3909/2049/1796 +f 3589/1871/1584 3590/1872/1585 3908/2049/1797 +f 3590/1872/1585 3591/1873/1586 3908/2049/1797 +f 3592/1703/1587 3593/1874/1588 3907/2050/1797 +f 3593/1874/1588 3594/1876/1591 3907/2050/1797 +f 3913/2051/1798 3595/1878/1593 3914/2052/1799 +f 3595/1878/1593 3597/1880/1595 3914/2052/1799 +f 3916/2053/1800 3917/2054/1801 3918/2055/1770 +f 3918/2055/1770 3917/2054/1801 3919/2055/1802 +f 3919/2055/1802 3917/2054/1801 3920/2054/1803 +f 3922/2056/1804 3923/2057/1805 3924/2058/1799 +f 3923/2057/1805 3927/2059/1805 3924/2058/1799 +f 3926/2060/1806 3920/2054/1803 3925/2061/1807 +f 3925/2061/1807 3920/2054/1803 3928/2062/1808 +f 3920/2054/1803 3917/2054/1801 3928/2062/1808 +f 3917/2054/1801 3916/2053/1800 3928/2062/1808 +f 3916/2053/1800 3596/2063/1809 3928/2062/1808 +f 3928/2062/1808 3596/2063/1809 3929/2064/1810 +f 3596/2063/1809 3599/1884/1599 3929/2064/1810 +f 3599/1884/1599 
3600/1886/1601 3929/2064/1810 +f 3929/2064/1810 3600/1886/1601 3930/2065/1811 +f 3932/2066/1812 3601/1887/1601 3934/2067/1813 +f 3601/1887/1601 3602/1888/1602 3934/2067/1813 +f 3934/2067/1813 3602/1888/1602 3936/2068/1814 +f 3602/1888/1602 3603/1889/1604 3936/2068/1814 +f 3936/2068/1814 3603/1889/1604 3938/2069/1815 +f 3938/2069/1815 3603/1889/1604 3939/2070/1816 +f 3603/1889/1604 3605/1890/1605 3939/2070/1816 +f 3605/1890/1605 3606/1891/1607 3939/2070/1816 +f 3606/1891/1607 3607/1892/1609 3939/2070/1816 +f 3939/2070/1816 3607/1892/1609 3940/2071/1817 +f 3607/1892/1609 3608/1893/1610 3940/2071/1817 +f 3608/1893/1610 3609/1894/1611 3940/2071/1817 +f 3940/2071/1817 3610/1894/1084 3943/2072/1818 +f 3611/1895/1612 3612/1896/1613 3942/1897/1819 +f 3612/1896/1613 3613/1897/1615 3941/1897/1820 +f 3613/1897/1615 3615/1722/1616 3941/1897/1820 +f 3945/2073/1821 3614/2074/1822 3947/2075/1823 +f 3614/2074/1822 3616/2076/1822 3947/2075/1823 +f 3947/2075/1823 3616/2076/1822 3950/2077/1824 +f 3950/2077/1824 3616/2076/1822 3952/2078/1822 +f 3617/1898/1444 3620/1899/1618 3951/1899/1825 +f 3953/2079/1822 3621/1901/1618 3955/2080/1826 +f 3622/1901/1050 3624/1900/1050 3954/2080/1050 +f 3956/2080/1827 3625/1900/1828 3957/2081/1829 +f 3958/2081/1830 3957/2081/1829 3959/2082/1831 +f 3957/2081/1829 3625/1900/1828 3959/2082/1831 +f 3625/1900/1828 3629/1902/1619 3959/2082/1831 +f 3629/1902/1619 3630/1902/1621 3959/2082/1831 +f 3631/1902/1832 3633/1903/1833 3960/2082/1834 +f 3961/2083/1835 3962/2084/1836 3963/1903/1837 +f 3962/2084/1836 3960/2082/1834 3963/1903/1837 +f 3960/2082/1834 3633/1903/1833 3963/1903/1837 +f 3963/1903/1837 3634/1903/1623 3964/2085/1838 +f 3634/1903/1623 3635/1728/1624 3964/2085/1838 +f 3635/1728/1624 3637/1904/1626 3964/2085/1838 +f 3964/2085/1838 3637/1904/1626 3965/1908/1839 +f 3637/1904/1626 3640/1908/1629 3965/1908/1839 +f 3640/1908/1629 3643/1909/1630 3965/1908/1839 +f 3965/1908/1839 3643/1909/1630 3966/2086/1840 +f 3969/2086/1841 3641/1909/1841 3971/2087/1841 
+f 3642/1909/1842 3645/1907/1631 3970/2087/1842 +f 3975/2088/1841 3646/1910/1632 3976/1912/1843 +f 3646/1910/1632 3647/1911/1633 3976/1912/1843 +f 3647/1911/1633 3648/1912/1634 3976/1912/1843 +f 3649/1913/1635 3650/1914/1636 3980/1914/1844 +f 3650/1914/1636 3651/1916/1636 3980/1914/1844 +f 3980/1914/1844 3651/1916/1636 3981/2089/1845 +f 3983/2089/1846 3651/1916/1636 3984/1917/1847 +f 3651/1916/1636 3652/1917/1639 3984/1917/1847 +f 3652/1917/1639 3654/2090/1848 3984/1917/1847 +f 3984/1917/1847 3654/2090/1848 3986/2091/1849 +f 3988/2092/1850 3987/2093/1851 3990/2094/1852 +f 3985/2093/1276 3655/1918/1276 3989/2094/1276 +f 3655/1918/1276 3657/1920/1276 3989/2094/1276 +f 3992/2094/1276 3658/1920/1276 3994/2095/1276 +f 3659/1920/1642 3660/1921/1641 3993/2095/1291 +f 3660/1921/1641 3661/1922/1641 3993/2095/1291 +f 3997/2096/1642 3662/2097/1290 3998/2098/1291 +f 3662/2097/1290 3665/2099/1853 3998/2098/1291 +f 4001/2098/1854 3666/2099/1855 4002/2098/1856 +f 3666/2099/1855 3668/2100/1187 4002/2098/1856 +f 4002/2098/1856 3668/2100/1187 4003/2101/1857 +f 3668/2100/1187 3673/2102/1303 4003/2101/1857 +f 4005/2101/1050 3674/2102/1050 4007/2103/1050 +f 3675/2102/1135 3679/2104/1135 4006/2103/1135 +f 4010/2103/1050 3680/2104/1050 4011/2105/1050 +f 3681/1929/1650 3682/1930/1305 4015/2106/1858 +f 3682/1930/1305 3684/1932/1653 4015/2106/1858 +f 4018/2107/1859 4015/2106/1858 4019/2106/1860 +f 4015/2106/1858 3684/1932/1653 4019/2106/1860 +f 3684/1932/1653 3683/1931/1652 4019/2106/1860 +f 4019/2106/1860 3683/1931/1652 4020/2108/1861 +f 4022/2109/1862 4021/2110/1624 4023/2111/1837 +f 4023/2111/1837 4021/2110/1624 4024/2112/1863 +f 4025/2113/1864 4020/2108/1861 4026/2114/1865 +f 4020/2108/1861 3683/1931/1652 4026/2114/1865 +f 4026/2114/1865 3683/1931/1652 4027/2115/1626 +f 4027/2115/1626 3683/1931/1652 4028/2116/1866 +f 3683/1931/1652 3685/1930/1310 4028/2116/1866 +f 3685/1930/1310 3686/1933/1655 4028/2116/1866 +f 3686/1933/1655 3688/1934/1867 4028/2116/1866 +f 4028/2116/1866 
3688/1934/1867 4029/2117/1633 +f 4029/2117/1633 3688/1934/1867 4030/1934/1842 +f 3689/1934/1635 3691/1747/1658 4032/1934/1868 +f 4032/1934/1868 3691/1747/1658 4033/1935/1869 +f 3691/1747/1658 3692/1935/1659 4033/1935/1869 +f 4033/1935/1869 3692/1935/1659 4034/1935/1870 +f 4035/1935/1192 3693/1935/1192 4038/2118/1871 +f 3694/1935/1660 3696/1936/1661 4037/2118/1872 +f 3696/1936/1661 3697/1939/1664 4037/2118/1872 +f 4040/2119/1873 4038/2118/1871 4041/2120/1873 +f 4036/2118/1660 3698/1939/1666 4042/2120/1874 +f 3698/1939/1666 3699/1941/1667 4042/2120/1874 +f 4042/2120/1874 3699/1941/1667 4044/2121/1875 +f 4044/2121/1875 3699/1941/1667 4045/2122/1876 +f 3699/1941/1667 3700/1942/1668 4045/2122/1876 +f 3701/1943/1669 3702/1765/1670 4046/2123/1876 +f 3702/1765/1670 3703/1763/1671 4046/2123/1876 +f 3703/1763/1671 3704/1944/1672 4046/2123/1876 +f 4046/2123/1876 3704/1944/1672 4047/2124/1877 +f 4047/2124/1877 3704/1944/1672 4048/2125/1878 +f 3704/1944/1672 3705/1945/1673 4048/2125/1878 +f 4048/2125/1878 3705/1945/1673 4049/2126/1879 +f 4049/2126/1879 3705/1945/1673 4050/2127/1880 +f 3705/1945/1673 3706/1946/1674 4050/2127/1880 +f 4050/2127/1880 3706/1946/1674 4051/2128/1881 +f 3706/1946/1674 3707/1947/1675 4051/2128/1881 +f 3708/1947/1676 3709/1948/1677 4052/2128/1882 +f 4052/2128/1882 3709/1948/1677 4053/2129/1883 +f 3710/1948/1678 3711/1949/1679 4054/2129/1884 +f 3711/1949/1679 3712/1950/1680 4054/2129/1884 +f 3712/1950/1680 3713/1951/1681 4054/2129/1884 +f 4054/2129/1884 3713/1951/1681 4056/2130/1885 +f 4056/2130/1885 3713/1951/1681 4057/2131/1886 +f 3713/1951/1681 3714/1951/1682 4057/2131/1886 +f 3714/1951/1682 3717/1952/1683 4057/2131/1886 +f 3717/1952/1683 3721/2132/1887 4057/2131/1886 +f 3721/2132/1887 4059/2133/1888 4057/2131/1886 +f 4057/2131/1886 4059/2133/1888 4060/2134/1889 +f 4063/2135/1890 4061/2136/1891 4064/2137/1892 +f 4061/2136/1891 4058/2138/1888 4064/2137/1892 +f 4064/2137/1892 4058/2138/1888 4065/2139/1893 +f 4065/2139/1893 4058/2138/1888 4068/2139/1894 
+f 4058/2138/1888 3722/1954/1895 4068/2139/1894 +f 3723/1954/636 3726/1955/636 4067/2139/636 +f 3727/1955/636 3729/1956/636 4066/2139/636 +f 4069/2139/636 3730/1956/636 4071/2140/792 +f 3730/1956/636 3732/1778/791 4071/2140/792 +f 4071/2140/792 3732/1778/791 4073/2141/183 +f 3732/1778/791 3735/1957/874 4073/2141/183 +f 4073/2141/183 3735/1957/874 4074/2142/185 +f 4077/2143/166 3736/1958/1896 4078/2144/872 +f 3737/1958/966 3738/1959/929 4079/2144/1897 +f 4079/2144/1897 3738/1959/929 4082/2145/962 +f 3738/1959/929 3739/1960/929 4082/2145/962 +f 4082/2145/962 3739/1960/929 4085/2146/234 +f 3739/1960/929 3740/1961/929 4085/2146/234 +f 4085/2146/234 3740/1961/929 4089/2147/233 +f 3740/1961/929 3741/1962/929 4089/2147/233 +f 4089/2147/233 3741/1962/929 4092/2148/169 +f 3741/1962/929 3742/1963/170 4092/2148/169 +f 4092/2148/169 3742/1963/170 4093/2149/1898 +f 3742/1963/170 3745/1964/1686 4093/2149/1898 +f 4096/2149/168 3743/1964/168 4098/2150/168 +f 3744/1964/168 3747/1965/168 4097/2150/168 +f 4102/2150/168 3748/1965/168 4104/2151/168 +f 3749/1965/168 3752/1966/168 4103/2151/168 +f 4105/2152/1899 3753/2153/1900 4106/2154/1901 +f 3753/2153/1900 4107/2155/1902 4106/2154/1901 +f 4106/2154/1901 4107/2155/1902 4108/2156/1903 +f 4108/2156/1903 4107/2155/1902 4109/2157/1904 +f 4110/2158/1905 4109/2157/1904 4111/2159/1906 +f 4109/2157/1904 4107/2155/1902 4111/2159/1906 +f 4107/2155/1902 3753/2153/1900 4111/2159/1906 +f 3753/2153/1900 3757/2160/1907 4111/2159/1906 +f 4111/2159/1906 3757/2160/1907 4112/2161/1908 +f 3757/2160/1907 3761/2162/1909 4112/2161/1908 +f 4112/2161/1908 3761/2162/1909 4113/2163/1910 +f 3761/2162/1909 3765/1970/1688 4113/2163/1910 +f 3765/1970/1688 3766/1972/1689 4113/2163/1910 +f 3766/1972/1689 3767/1973/1690 4113/2163/1910 +f 4115/2164/1911 4113/2163/1910 4117/2165/1912 +f 4113/2163/1910 3767/1973/1690 4117/2165/1912 +f 3767/1973/1690 3768/1974/1691 4117/2165/1912 +f 4117/2165/1912 3768/1974/1691 4118/2166/1913 +f 4121/2167/1914 3769/1975/1691 
4125/2168/1915 +f 3769/1975/1691 3770/1976/1692 4125/2168/1915 +f 4125/2168/1915 3770/1976/1692 4126/1977/1916 +f 3770/1976/1692 3771/1977/1694 4126/1977/1916 +f 4127/1977/1917 3771/1977/1694 4129/2169/1917 +f 4129/2169/1917 3771/1977/1694 4130/2169/1694 +f 3771/1977/1694 3772/1978/1695 4130/2169/1694 +f 3773/1979/1696 3774/1980/566 4131/2170/1694 +f 4131/2170/1694 3774/1980/566 4132/2171/1694 +f 3774/1980/566 3775/1981/566 4132/2171/1694 +f 4132/2171/1694 3775/1981/566 4133/2172/1918 +f 3775/1981/566 3776/1982/1698 4133/2172/1918 +f 4133/2172/1918 3776/1982/1698 4134/2173/1919 +f 3777/1983/1699 3778/1984/1700 4135/2174/1920 +f 3778/1984/1700 3780/1985/1702 4135/2174/1920 +f 4135/2174/1920 3780/1985/1702 4136/2175/1921 +f 4139/2176/1922 3779/2177/1923 4141/2178/1924 +f 3779/2177/1923 3782/2179/1705 4141/2178/1924 +f 3783/1987/1704 3784/1988/1705 4140/2180/1925 +f 4143/2180/1926 3785/1988/1706 4144/2181/1927 +f 3785/1988/1706 3786/1989/1707 4144/2181/1927 +f 3787/1990/1707 3788/1990/1709 4146/2182/1927 +f 4146/2182/1927 3788/1990/1709 4147/2183/1928 +f 4147/2183/1928 3788/1990/1709 4148/2184/1929 +f 3788/1990/1709 3789/1993/1710 4148/2184/1929 +f 3789/1993/1710 3793/1993/1712 4148/2184/1929 +f 4148/2184/1929 3793/1993/1712 4149/2185/1930 +f 3793/1993/1712 3792/1994/1711 4149/2185/1930 +f 4150/2185/1931 3790/1994/1932 4151/2186/1933 +f 4152/2186/1934 3791/1994/1935 4153/2187/1936 +f 3791/1994/1935 3794/1994/1937 4153/2187/1936 +f 3794/1994/1937 3796/1995/1938 4153/2187/1936 +f 4153/2187/1936 3796/1995/1938 4154/2187/1939 +f 3796/1995/1938 3800/1996/1717 4154/2187/1939 +f 3800/1996/1717 3801/1997/1716 4154/2187/1939 +f 4154/2187/1939 3801/1997/1716 4155/2188/1940 +f 4155/2188/1940 3801/1997/1716 4156/2189/1941 +f 3801/1997/1716 3803/1998/1723 4156/2189/1941 +f 3803/1998/1723 3804/2002/1725 4156/2189/1941 +f 3804/2002/1725 3806/2190/1942 4156/2189/1941 +f 4156/2189/1941 3806/2190/1942 4157/2191/1943 +f 4157/2191/1943 4159/2192/1944 4160/2193/1945 +f 4163/2194/1946 
4161/2195/1947 4164/2196/1948 +f 4161/2195/1947 4158/2197/1944 4164/2196/1948 +f 4165/2198/1949 4164/2196/1948 4166/2199/1949 +f 4164/2196/1948 4158/2197/1944 4166/2199/1949 +f 4159/2192/1944 4157/2191/1943 4167/2200/1950 +f 4167/2200/1950 4157/2191/1943 4168/2201/1951 +f 4157/2191/1943 3806/2190/1942 4168/2201/1951 +f 4169/2202/1952 3807/2004/1729 4170/2203/1953 +f 3807/2004/1729 3808/2005/1727 4170/2203/1953 +f 4171/2203/1954 3809/2005/1733 4172/2204/1955 +f 3809/2005/1733 3814/2007/1731 4172/2204/1955 +f 4172/2204/1955 3814/2007/1731 4173/2205/1956 +f 4176/2205/23 3812/2007/23 4178/2206/23 +f 3813/2007/1957 3810/2006/1958 4177/2206/1959 +f 3810/2006/1958 3816/1825/1736 4177/2206/1959 +f 3816/1825/1736 3817/2008/1737 4177/2206/1959 +f 3818/2009/1740 3819/2009/1741 4181/2207/1960 +f 4181/2207/1960 3819/2009/1741 4185/2208/1961 +f 3819/2009/1741 3820/2010/1742 4185/2208/1961 +f 4185/2208/1961 3820/2010/1742 4187/2209/1394 +f 3820/2010/1742 3821/2011/1744 4187/2209/1394 +f 3821/2011/1744 3822/2012/81 4187/2209/1394 +f 3822/2012/81 3825/2014/111 4187/2209/1394 +f 4193/2210/23 4189/2211/23 4194/2212/23 +f 4186/2209/23 3826/2014/23 4196/2213/23 +f 4196/2213/23 3826/2014/23 4199/2214/23 +f 3827/2014/122 3831/2015/21 4198/2214/1962 +f 3828/2215/21 4204/2216/1963 4197/2217/1962 +f 4201/2218/1964 4202/2218/1965 4206/2219/1966 +f 4206/2219/1966 4202/2218/1965 4209/2219/1967 +f 4203/2216/19 3829/2215/19 4208/2220/19 +f 3830/2215/19 3833/2017/19 4207/2220/19 +f 3834/2017/19 3843/2016/1773 4210/2220/19 +f 4210/2220/19 3843/2016/1773 4212/2221/1773 +f 3843/2016/1773 3845/2222/1968 4212/2221/1773 +f 4212/2221/1773 3845/2222/1968 4213/2223/1968 +f 3845/2222/1968 3849/287/1762 4213/2223/1968 +f 3849/287/1762 3851/2023/1763 4213/2223/1968 +f 4214/2224/1969 3850/2225/1970 4215/2226/1971 +f 4215/2226/1971 3850/2225/1970 4218/2226/1972 +f 3850/2225/1970 3852/2225/1973 4218/2226/1972 +f 4218/2226/1972 3852/2225/1973 4220/2225/1780 +f 4220/2225/1780 3852/2225/1973 4221/2227/1974 +f 
3852/2225/1973 3857/2228/1765 4221/2227/1974 +f 3857/2228/1765 3860/2229/1766 4221/2227/1974 +f 3861/2230/1770 3864/2231/1975 4222/2232/1976 +f 4222/2232/1976 3864/2231/1975 4223/2233/1977 +f 3865/2234/1771 3868/2235/1772 4224/2236/1978 +f 3868/2235/1772 3870/2237/1979 4224/2236/1978 +f 4224/2236/1978 3870/2237/1979 4226/2237/1980 +f 4226/2237/1980 3870/2237/1979 4229/2031/1981 +f 3870/2237/1979 3873/2031/1574 4229/2031/1981 +f 3873/2031/1574 3874/2031/1777 4229/2031/1981 +s 0 +f 4229/2031/1982 3874/2031/1983 4230/2032/1984 +s 1 +f 3874/2031/1777 3876/1853/1778 4230/2032/1985 +f 3876/1853/1778 3879/2032/1780 4230/2032/1985 +f 4233/2238/1789 3877/2239/1789 4235/2240/1789 +f 3878/2239/1789 3880/2241/1789 4234/2240/1789 +f 3881/2242/1986 3884/2034/1770 4238/2243/1987 +f 3884/2034/1770 3885/2035/14 4238/2243/1987 +f 4238/2243/1987 3885/2035/14 4241/2244/1988 +f 3885/2035/14 3886/2036/1784 4241/2244/1988 +f 3887/2038/1784 3889/2039/1787 4240/2245/1989 +f 3888/2246/1789 3892/2041/1789 4239/2247/1789 +f 4244/2247/1789 3893/2041/1789 4249/2248/1789 +f 3894/2041/1796 3898/2043/1796 4248/2248/1796 +f 3899/2043/1789 3901/2249/1789 4247/2248/1789 +f 3901/2249/1789 3904/2250/1789 4247/2248/1789 +f 3905/2250/1789 3911/2251/1789 4246/2248/1789 +f 3912/2251/1796 3915/2252/1796 4245/2248/1796 +f 3916/2053/1800 3918/2055/1770 4251/2253/1990 +f 4251/2253/1990 3918/2055/1770 4252/2254/1770 +f 3918/2055/1770 3919/2055/1802 4252/2254/1770 +f 4252/2254/1770 3919/2055/1802 4253/2255/1991 +f 3919/2055/1802 3920/2054/1803 4253/2255/1991 +f 3920/2054/1803 3926/2060/1806 4253/2255/1991 +f 4255/2256/1789 3927/2059/1805 4259/2257/1789 +f 3927/2059/1805 3923/2057/1805 4259/2257/1789 +f 4259/2257/1789 3923/2057/1805 4261/2258/1805 +f 3923/2057/1805 3922/2056/1804 4261/2258/1805 +f 4267/2259/1992 4265/2260/1993 4268/2261/1994 +f 4260/2260/1805 3921/2262/1995 4269/2261/1996 +f 4269/2261/1996 3921/2262/1995 4272/2263/1997 +f 3922/2056/1804 3924/2058/1799 4271/2264/1998 +f 3925/2061/1807 
3928/2062/1808 4270/2265/1999 +f 3928/2062/1808 3929/2064/1810 4270/2265/1999 +f 3929/2064/1810 3930/2065/1811 4270/2265/1999 +f 4272/2263/1997 3931/2266/2000 4273/2267/2000 +f 4273/2267/2000 3931/2266/2000 4274/2268/2001 +f 3931/2266/2000 3933/2269/2002 4274/2268/2001 +f 4274/2268/2001 3933/2269/2002 4276/2270/2002 +f 3933/2269/2002 3935/2271/2003 4276/2270/2002 +f 4276/2270/2002 3935/2271/2003 4277/2272/2003 +f 3935/2271/2003 3937/2273/2004 4277/2272/2003 +f 4277/2272/2003 3937/2273/2004 4278/2274/2005 +f 4278/2274/2005 3937/2273/2004 4279/2275/2006 +f 4279/2275/2006 3937/2273/2004 4281/2276/2007 +f 3938/2069/1815 3939/2070/1816 4280/2277/2008 +f 3939/2070/1816 3940/2071/1817 4280/2277/2008 +f 3940/2071/1817 3943/2072/1818 4280/2277/2008 +f 3944/2073/103 3946/2075/105 4281/2276/2007 +f 3946/2075/105 3948/2077/105 4281/2276/2007 +f 4281/2276/2007 3948/2077/105 4282/2278/103 +f 4283/2278/103 3949/2077/103 4285/2279/103 +f 3950/2077/1824 3952/2078/1822 4284/2279/2009 +f 4287/2280/2010 3953/2079/1822 4288/2280/2011 +f 3953/2079/1822 3955/2080/1826 4288/2280/2011 +f 4289/2281/2012 4288/2280/2011 4290/2282/2013 +f 4288/2280/2011 3955/2080/1826 4290/2282/2013 +f 3956/2080/1827 3957/2081/1829 4291/2282/2014 +f 3957/2081/1829 3958/2081/1830 4291/2282/2014 +f 4291/2282/2014 3958/2081/1830 4292/2084/2015 +f 3958/2081/1830 3960/2082/1834 4292/2084/2015 +f 3960/2082/1834 3962/2084/1836 4292/2084/2015 +f 4292/2084/2015 3962/2084/1836 4295/2283/2016 +f 4295/2283/2016 3962/2084/1836 4296/2284/2017 +f 3962/2084/1836 3961/2083/1835 4300/2284/2018 +f 4300/2284/2018 3961/2083/1835 4302/2285/2019 +f 3961/2083/1835 3963/1903/1837 4301/2285/2020 +f 3963/1903/1837 3964/2085/1838 4301/2285/2020 +f 3964/2085/1838 3965/1908/1839 4301/2285/2020 +f 3965/1908/1839 3967/2086/2021 4301/2285/2020 +f 3968/2086/2022 3972/2087/1841 4303/2285/1841 +f 4303/2285/1841 3972/2087/1841 4304/2286/1841 +f 4308/2088/1841 3973/2088/1841 4310/2287/1841 +f 3974/2088/1841 3977/1912/1841 4309/2287/1841 +f 
4312/2287/1841 3978/1912/1841 4314/2288/1841 +f 3979/1912/1841 3982/2289/1841 4313/2288/1841 +f 3983/2089/1846 3984/1917/1847 4316/2290/2023 +f 3984/1917/1847 3986/2091/1849 4316/2290/2023 +f 4316/2290/2023 3986/2091/1849 4317/2291/2024 +f 4317/2291/2024 3986/2091/1849 4318/2291/2025 +f 3987/2093/1851 3988/2092/1850 4319/2292/2026 +f 4319/2292/2026 3988/2092/1850 4320/2293/1451 +f 4320/2293/1451 4321/2294/2027 4322/2295/2028 +f 4324/2295/2029 4321/2294/2027 4327/2296/1851 +f 4321/2294/2027 4320/2293/1451 4327/2296/1851 +f 4320/2293/1451 3988/2092/1850 4327/2296/1851 +f 3988/2092/1850 3990/2094/1852 4327/2296/1851 +f 3991/2094/1276 3995/2095/1276 4326/2296/1276 +f 3996/2096/1276 3999/2098/1276 4325/2297/1276 +f 4329/2297/1276 4000/2098/1276 4330/2298/1276 +f 4001/2098/1854 4002/2098/1856 4332/2298/1574 +f 4332/2298/1574 4002/2098/1856 4333/2298/2030 +f 4002/2098/1856 4003/2101/1857 4333/2298/2030 +f 4333/2298/2030 4003/2101/1857 4337/2299/2031 +f 4004/2101/1050 4008/2103/1050 4336/2299/1050 +f 4009/2103/1050 4012/2105/1050 4335/2299/1050 +f 4338/2299/1050 4339/2300/1455 4340/2301/2032 +f 4346/2302/1050 4343/2301/1050 4351/2303/1050 +f 4340/2301/2032 4339/2300/1455 4350/2303/1135 +f 4339/2300/1455 4338/2299/1050 4349/2303/1050 +f 4334/2299/1050 4013/2105/1050 4348/2303/1050 +f 4014/2105/1050 4016/2304/1050 4347/2303/1050 +f 4353/2303/1050 4017/2304/1050 4355/2305/1050 +f 4018/2107/1859 4019/2106/1860 4354/2108/2033 +f 4019/2106/1860 4020/2108/1861 4354/2108/2033 +f 4021/2110/1624 4022/2109/1862 4357/2306/2034 +f 4022/2109/1862 4358/2109/1622 4357/2306/2034 +f 4357/2306/2034 4358/2109/1622 4361/2307/1619 +f 4361/2307/1619 4358/2109/1622 4362/2308/2035 +f 4362/2308/2035 4358/2109/1622 4363/2309/2036 +f 4358/2109/1622 4022/2109/1862 4363/2309/2036 +f 4022/2109/1862 4023/2111/1837 4363/2309/2036 +f 4363/2309/2036 4023/2111/1837 4364/2310/1835 +f 4023/2111/1837 4024/2112/1863 4364/2310/1835 +f 4364/2310/1835 4024/2112/1863 4365/2112/2020 +f 4025/2113/1864 4026/2114/1865 
4367/2113/2037 +f 4367/2113/2037 4026/2114/1865 4368/2115/2038 +f 4026/2114/1865 4027/2115/1626 4368/2115/2038 +f 4368/2115/2038 4027/2115/1626 4369/2311/1628 +f 4027/2115/1626 4028/2116/1866 4369/2311/1628 +f 4028/2116/1866 4029/2117/1633 4369/2311/1628 +f 4369/2311/1628 4029/2117/1633 4370/2312/2039 +f 4029/2117/1633 4030/1934/1842 4370/2312/2039 +f 4370/2312/2039 4030/1934/1842 4371/2313/1841 +f 4372/2313/1841 4031/1934/1841 4373/2314/1841 +f 4374/2314/2040 4032/1934/1868 4375/2314/2041 +f 4032/1934/1868 4033/1935/1869 4375/2314/2041 +f 4376/2315/2042 4375/2314/2041 4378/2315/2043 +f 4378/2315/2043 4375/2314/2041 4379/2316/2044 +f 4375/2314/2041 4033/1935/1869 4379/2316/2044 +f 4033/1935/1869 4034/1935/1870 4379/2316/2044 +f 4035/1935/1192 4038/2118/1871 4381/2316/1191 +f 4381/2316/1191 4038/2118/1871 4382/2317/2045 +f 4038/2118/1871 4040/2119/1873 4382/2317/2045 +f 4383/2317/2046 4039/2119/2047 4384/2318/2048 +f 4384/2318/2048 4039/2119/2047 4385/2319/2049 +f 4385/2319/2049 4039/2119/2047 4386/2320/2050 +f 4040/2119/1873 4041/2120/1873 4387/2320/2051 +f 4041/2120/1873 4043/2121/2052 4387/2320/2051 +f 4389/2321/2053 4387/2320/2051 4391/2322/2054 +f 4387/2320/2051 4043/2121/2052 4391/2322/2054 +f 4044/2121/1875 4045/2122/1876 4390/2322/2055 +f 4046/2123/1876 4047/2124/1877 4393/2323/2055 +f 4393/2323/2055 4047/2124/1877 4395/2324/2056 +f 4395/2324/2056 4047/2124/1877 4396/2325/2057 +f 4047/2124/1877 4048/2125/1878 4396/2325/2057 +f 4048/2125/1878 4049/2126/1879 4396/2325/2057 +f 4396/2325/2057 4049/2126/1879 4397/2326/2058 +f 4397/2326/2058 4049/2126/1879 4398/2327/2059 +f 4049/2126/1879 4050/2127/1880 4398/2327/2059 +f 4398/2327/2059 4050/2127/1880 4399/2328/2060 +f 4400/2329/2061 4399/2328/2060 4401/2330/2062 +f 4399/2328/2060 4050/2127/1880 4401/2330/2062 +f 4050/2127/1880 4051/2128/1881 4401/2330/2062 +f 4052/2128/1882 4053/2129/1883 4402/2330/2063 +f 4053/2129/1883 4055/2130/2064 4402/2330/2063 +f 4402/2330/2063 4055/2130/2064 4404/2331/2065 +f 
4404/2331/2065 4055/2130/2064 4405/2332/2066 +f 4056/2130/1885 4057/2131/1886 4406/2332/2067 +f 4057/2131/1886 4060/2134/1889 4406/2332/2067 +f 4060/2134/1889 4062/2333/1890 4406/2332/2067 +f 4406/2332/2067 4062/2333/1890 4407/2334/2068 +f 4410/2135/2069 4063/2135/1890 4412/2335/2070 +f 4412/2335/2070 4063/2135/1890 4413/2336/2071 +f 4063/2135/1890 4064/2137/1892 4413/2336/2071 +f 4414/2337/2072 4413/2336/2071 4415/2338/2073 +f 4413/2336/2071 4064/2137/1892 4415/2338/2073 +f 4064/2137/1892 4065/2139/1893 4415/2338/2073 +f 4415/2338/2073 4065/2139/1893 4416/2140/712 +f 4065/2139/1893 4068/2139/1894 4416/2140/712 +f 4068/2139/1894 4070/2140/790 4416/2140/712 +f 4070/2140/790 4072/2141/2074 4416/2140/712 +f 4416/2140/712 4072/2141/2074 4417/2339/2075 +f 4072/2141/2074 4075/2142/2076 4417/2339/2075 +f 4419/2340/2077 4418/2341/2078 4420/2342/2079 +f 4418/2341/2078 4076/2143/2080 4420/2342/2079 +f 4077/2143/166 4078/2144/872 4421/2342/872 +f 4078/2144/872 4080/2145/168 4421/2342/872 +f 4421/2342/872 4080/2145/168 4424/2343/168 +f 4081/2145/168 4083/2146/168 4423/2343/168 +f 4084/2146/168 4086/2147/168 4422/2343/168 +f 4426/2343/168 4087/2147/168 4430/2344/168 +f 4088/2147/168 4090/2148/168 4429/2344/168 +f 4091/2148/168 4094/2149/168 4428/2344/168 +f 4095/2149/168 4099/2150/168 4427/2344/168 +f 4432/2345/2081 4431/2344/2082 4433/2346/2083 +f 4431/2344/2082 4100/2150/2084 4433/2346/2083 +f 4434/2347/2085 4101/2348/2086 4435/2349/2087 +f 4101/2348/2086 4105/2152/1899 4435/2349/2087 +f 4105/2152/1899 4106/2154/1901 4435/2349/2087 +f 4435/2349/2087 4106/2154/1901 4437/2350/2088 +f 4437/2350/2088 4106/2154/1901 4439/2351/2089 +f 4106/2154/1901 4108/2156/1903 4439/2351/2089 +f 4108/2156/1903 4109/2157/1904 4439/2351/2089 +f 4109/2157/1904 4110/2158/1905 4439/2351/2089 +f 4439/2351/2089 4110/2158/1905 4441/2352/2090 +f 4110/2158/1905 4111/2159/1906 4441/2352/2090 +f 4111/2159/1906 4112/2161/1908 4441/2352/2090 +f 4441/2352/2090 4112/2161/1908 4442/2353/2091 +f 4112/2161/1908 
4113/2163/1910 4442/2353/2091 +f 4113/2163/1910 4115/2164/1911 4442/2353/2091 +f 4443/2353/2092 4114/2164/2093 4445/2354/2094 +f 4445/2354/2094 4114/2164/2093 4447/2355/2095 +f 4114/2164/2093 4116/2165/2096 4447/2355/2095 +f 4447/2355/2095 4116/2165/2096 4448/2356/2097 +f 4116/2165/2096 4119/2166/2098 4448/2356/2097 +f 4451/2357/2097 4120/2167/2098 4452/2358/2099 +f 4120/2167/2098 4122/2168/2099 4452/2358/2099 +f 4455/2358/2099 4123/2168/2099 4457/2359/2099 +f 4124/2168/2100 4127/1977/1917 4456/2359/2100 +f 4127/1977/1917 4129/2169/1917 4456/2359/2100 +f 4128/2360/2101 4131/2170/1694 4461/2361/2100 +f 4461/2361/2100 4131/2170/1694 4464/2362/2102 +f 4131/2170/1694 4132/2171/1694 4464/2362/2102 +f 4132/2171/1694 4133/2172/1918 4464/2362/2102 +f 4464/2362/2102 4133/2172/1918 4465/2363/2103 +f 4133/2172/1918 4134/2173/1919 4465/2363/2103 +f 4134/2173/1919 4137/2364/2104 4465/2363/2103 +f 4466/2363/2105 4138/2364/2106 4467/2365/2107 +f 4470/2366/2107 4139/2176/1922 4471/2367/2108 +f 4139/2176/1922 4141/2178/1924 4471/2367/2108 +f 4472/2367/2109 4142/2178/2110 4473/2368/2111 +f 4142/2178/2110 4145/2369/2112 4473/2368/2111 +f 4146/2182/1927 4147/2183/1928 4474/2370/2113 +f 4474/2370/2113 4147/2183/1928 4475/2371/2114 +f 4475/2371/2114 4147/2183/1928 4476/2372/2115 +f 4147/2183/1928 4148/2184/1929 4476/2372/2115 +f 4148/2184/1929 4149/2185/1930 4476/2372/2115 +f 4477/2372/2116 4150/2185/1931 4479/2373/2117 +f 4150/2185/1931 4151/2186/1933 4479/2373/2117 +f 4479/2373/2117 4151/2186/1933 4480/2374/2118 +f 4482/2374/2119 4152/2186/1934 4483/2375/2120 +f 4152/2186/1934 4153/2187/1936 4483/2375/2120 +f 4153/2187/1936 4154/2187/1939 4483/2375/2120 +f 4485/2376/2121 4484/2375/2122 4486/2188/2123 +f 4484/2375/2122 4154/2187/1939 4486/2188/2123 +f 4154/2187/1939 4155/2188/1940 4486/2188/2123 +f 4486/2188/2123 4155/2188/1940 4487/2377/2124 +f 4155/2188/1940 4156/2189/1941 4487/2377/2124 +f 4487/2377/2124 4156/2189/1941 4488/2378/2125 +f 4156/2189/1941 4157/2191/1943 4488/2378/2125 
+f 4157/2191/1943 4160/2193/1945 4488/2378/2125 +f 4488/2378/2125 4160/2193/1945 4489/2379/2126 +f 4160/2193/1945 4162/2380/1946 4489/2379/2126 +f 4489/2379/2126 4162/2380/1946 4490/2380/2127 +f 4492/2194/2127 4163/2194/1946 4493/2381/2128 +f 4163/2194/1946 4164/2196/1948 4493/2381/2128 +f 4164/2196/1948 4165/2198/1949 4493/2381/2128 +f 4493/2381/2128 4494/2382/2129 4495/2383/2130 +f 4494/2382/2129 4496/2384/1955 4495/2383/2130 +f 4495/2383/2130 4496/2384/1955 4497/2385/2131 +f 4497/2385/2131 4496/2384/1955 4499/2386/2132 +f 4499/2386/2132 4496/2384/1955 4500/2387/2133 +f 4501/2388/2134 4500/2387/2133 4502/2389/2135 +f 4500/2387/2133 4496/2384/1955 4502/2389/2135 +f 4502/2389/2135 4496/2384/1955 4503/2390/2136 +f 4496/2384/1955 4494/2382/2129 4503/2390/2136 +f 4503/2390/2136 4494/2382/2129 4504/2391/2137 +f 4494/2382/2129 4493/2381/2128 4504/2391/2137 +f 4493/2381/2128 4165/2198/1949 4504/2391/2137 +f 4504/2391/2137 4165/2198/1949 4506/2392/1953 +f 4165/2198/1949 4166/2199/1949 4506/2392/1953 +f 4506/2392/1953 4166/2199/1949 4507/2203/1727 +f 4166/2199/1949 4169/2202/1952 4507/2203/1727 +f 4169/2202/1952 4170/2203/1953 4507/2203/1727 +f 4508/2203/2138 4171/2203/1954 4509/2393/2139 +f 4171/2203/1954 4172/2204/1955 4509/2393/2139 +f 4172/2204/1955 4173/2205/1956 4509/2393/2139 +f 4510/2393/2140 4174/2205/2141 4511/2394/2142 +f 4175/2205/23 4179/2206/23 4514/2394/23 +f 4180/2395/23 4182/2396/23 4513/2397/23 +f 4513/2397/23 4182/2396/23 4515/2398/23 +f 4517/2398/23 4183/2396/23 4519/2399/23 +f 4184/2396/23 4188/2400/23 4518/2399/23 +f 4188/2400/23 4190/2401/23 4518/2399/23 +f 4520/2399/2143 4191/2401/2144 4521/2402/2145 +f 4192/2210/1767 4195/2212/1767 4522/2403/2146 +f 4522/2403/2146 4195/2212/1767 4523/2404/1770 +f 4195/2212/1767 4200/2218/1767 4523/2404/1770 +f 4200/2218/1767 4205/2219/1770 4523/2404/1770 +f 4523/2404/1770 4205/2219/1770 4524/2405/1990 +f 4206/2219/1966 4209/2219/1967 4526/2405/1978 +f 4209/2219/1967 4211/2224/2147 4526/2405/1978 +f 4526/2405/1978 
4211/2224/2147 4529/2406/1980 +f 4529/2406/1980 4211/2224/2147 4530/2406/1981 +f 4211/2224/2147 4214/2224/1969 4530/2406/1981 +f 4214/2224/1969 4215/2226/1971 4530/2406/1981 +s 0 +f 4215/2226/2148 4218/2226/1984 4530/2406/1982 +s 1 +f 4533/2407/1789 4216/2408/1789 4534/2409/1789 +f 4217/2408/2149 4219/2408/1789 4537/2409/1789 +f 4219/2408/1789 4222/2232/1976 4537/2409/1789 +f 4537/2409/1789 4222/2232/1976 4538/2410/1789 +f 4222/2232/1976 4223/2233/1977 4538/2410/1789 +f 4223/2233/1977 4225/2411/2150 4538/2410/1789 +f 4225/2411/2150 4227/2411/1789 4538/2410/1789 +f 4538/2410/1789 4227/2411/1789 4543/2412/1789 +f 4228/2411/1796 4231/2238/1796 4542/2412/1796 +f 4232/2238/1789 4236/2240/1789 4541/2412/1789 +f 4237/2240/1789 4242/2247/1789 4540/2412/1789 +f 4243/2247/1789 4250/2248/1789 4539/2412/1789 +f 4251/2253/1990 4252/2254/1770 4544/2413/1990 +f 4544/2413/1990 4252/2254/1770 4545/2414/1770 +f 4545/2414/1770 4252/2254/1770 4547/2415/1806 +f 4252/2254/1770 4253/2255/1991 4547/2415/1806 +f 4254/2256/1789 4256/2257/1789 4546/2416/1789 +f 4549/2416/1789 4257/2257/1789 4550/2417/1789 +f 4553/2418/2151 4258/2419/2152 4555/2419/2153 +f 4258/2419/2152 4262/2420/2154 4555/2419/2153 +f 4263/2421/2154 4557/2422/2155 4554/2423/2153 +f 4555/2419/2153 4556/2424/2156 4558/2425/2157 +f 4558/2425/2157 4556/2424/2156 4560/2424/2158 +f 4557/2422/2155 4263/2421/2154 4559/2422/2158 +f 4563/2426/2159 4561/2426/2160 4565/2427/2161 +f 4559/2422/2158 4263/2421/2154 4564/2428/2162 +f 4264/2429/2163 4266/2430/2164 4565/2427/2161 +f 4565/2427/2161 4266/2430/2164 4568/2431/2165 +f 4568/2431/2165 4266/2430/2164 4569/2432/2166 +f 4571/2433/2167 4267/2259/1992 4572/2434/2168 +f 4267/2259/1992 4268/2261/1994 4572/2434/2168 +f 4572/2434/2168 4268/2261/1994 4573/2435/2169 +f 4269/2261/1996 4272/2263/1997 4574/2435/2170 +f 4272/2263/1997 4273/2267/2000 4574/2435/2170 +f 4574/2435/2170 4273/2267/2000 4576/2436/2002 +f 4273/2267/2000 4274/2268/2001 4576/2436/2002 +f 4274/2268/2001 4275/2270/2171 
4576/2436/2002 +f 4275/2270/2171 4277/2272/2003 4576/2436/2002 +f 4576/2436/2002 4277/2272/2003 4578/2437/2003 +f 4277/2272/2003 4278/2274/2005 4578/2437/2003 +f 4578/2437/2003 4278/2274/2005 4580/2438/2005 +f 4278/2274/2005 4279/2275/2006 4580/2438/2005 +f 4580/2438/2005 4279/2275/2006 4582/2439/2007 +f 4279/2275/2006 4281/2276/2007 4582/2439/2007 +f 4281/2276/2007 4282/2278/103 4582/2439/2007 +f 4282/2278/103 4286/2279/103 4582/2439/2007 +f 4582/2439/2007 4286/2279/103 4583/2440/103 +f 4287/2280/2010 4288/2280/2011 4585/2281/2172 +f 4288/2280/2011 4289/2281/2012 4585/2281/2172 +f 4585/2281/2172 4289/2281/2012 4588/2441/2173 +f 4588/2441/2173 4289/2281/2012 4589/2442/2174 +f 4289/2281/2012 4290/2282/2013 4589/2442/2174 +f 4589/2442/2174 4290/2282/2013 4590/2443/2175 +f 4291/2282/2014 4292/2084/2015 4591/2443/79 +f 4591/2443/79 4292/2084/2015 4592/2444/2176 +f 4292/2084/2015 4295/2283/2016 4592/2444/2176 +f 4594/2444/1841 4293/2283/1841 4596/2445/1841 +f 4294/2283/1842 4297/2284/1842 4595/2445/1842 +f 4598/2445/1841 4298/2284/1841 4601/2446/1841 +f 4299/2284/1841 4303/2285/1841 4600/2446/1841 +f 4303/2285/1841 4304/2286/1841 4600/2446/1841 +f 4305/2286/1841 4605/2447/1841 4599/2446/1841 +f 4606/2448/2177 4602/2446/2178 4607/2447/2179 +f 4602/2446/2178 4603/2447/2180 4607/2447/2179 +f 4609/2449/2181 4607/2447/2179 4612/2450/2182 +f 4607/2447/2179 4603/2447/2180 4612/2450/2182 +f 4604/2447/1841 4306/2286/1841 4611/2450/1841 +f 4307/2088/1841 4311/2287/1841 4610/2451/1841 +f 4311/2287/1841 4315/2288/1841 4610/2451/1841 +f 4316/2290/2023 4317/2291/2024 4613/2452/2183 +f 4613/2452/2183 4317/2291/2024 4614/2453/2184 +f 4614/2453/2184 4317/2291/2024 4615/2453/2185 +f 4317/2291/2024 4318/2291/2025 4615/2453/2185 +f 4319/2292/2026 4320/2293/1451 4616/2454/2186 +f 4616/2454/2186 4320/2293/1451 4617/2455/2186 +f 4320/2293/1451 4322/2295/2028 4617/2455/2186 +f 4619/2456/2186 4323/2457/2187 4620/2458/1611 +f 4620/2458/1611 4323/2457/2187 4621/2459/2188 +f 4323/2457/2187 
4328/2297/2189 4621/2459/2188 +f 4328/2297/2189 4331/2298/2190 4621/2459/2188 +f 4332/2298/1574 4333/2298/2030 4622/2459/2191 +f 4622/2459/2191 4333/2298/2030 4623/2459/1298 +f 4333/2298/2030 4337/2299/2031 4623/2459/1298 +f 4623/2459/1298 4337/2299/2031 4624/2460/2192 +f 4337/2299/2031 4341/2301/2193 4624/2460/2192 +f 4624/2460/2192 4341/2301/2193 4625/2461/2194 +f 4627/2462/2195 4342/2463/2196 4628/2464/1822 +f 4342/2463/2196 4344/2465/1618 4628/2464/1822 +f 4628/2464/1822 4344/2465/1618 4633/2466/1618 +f 4345/2302/1050 4352/2303/1050 4632/2467/1050 +f 4353/2303/1050 4355/2305/1050 4631/2467/1050 +f 4356/2468/1050 4359/2469/1050 4630/2466/1050 +f 4639/2470/1050 4635/2466/1050 4641/2471/1050 +f 4629/2466/1050 4360/2469/1050 4640/2471/1050 +f 4361/2307/1619 4362/2308/2035 4644/2472/1828 +f 4644/2472/1828 4362/2308/2035 4645/2473/2197 +f 4362/2308/2035 4363/2309/2036 4645/2473/2197 +f 4645/2473/2197 4363/2309/2036 4646/2474/1836 +f 4363/2309/2036 4364/2310/1835 4646/2474/1836 +f 4646/2474/1836 4364/2310/1835 4649/2474/2198 +f 4649/2474/2198 4364/2310/1835 4650/2475/2199 +f 4364/2310/1835 4366/2112/2200 4650/2475/2199 +f 4654/2113/2201 4367/2113/2037 4655/2476/2202 +f 4367/2113/2037 4368/2115/2038 4655/2476/2202 +f 4368/2115/2038 4369/2311/1628 4655/2476/2202 +f 4369/2311/1628 4370/2312/2039 4655/2476/2202 +f 4658/2476/1841 4370/2312/2039 4662/2477/1841 +f 4370/2312/2039 4371/2313/1841 4662/2477/1841 +f 4662/2477/1841 4371/2313/1841 4663/2315/2203 +f 4371/2313/1841 4374/2314/2040 4663/2315/2203 +f 4374/2314/2040 4375/2314/2041 4663/2315/2203 +f 4375/2314/2041 4376/2315/2042 4663/2315/2203 +f 4665/2315/2204 4376/2315/2042 4666/2315/2205 +f 4376/2315/2042 4378/2315/2043 4666/2315/2205 +f 4668/2315/1291 4377/2315/2206 4673/2478/2207 +f 4377/2315/2206 4380/2316/1289 4673/2478/2207 +f 4380/2316/1289 4383/2317/2046 4673/2478/2207 +f 4383/2317/2046 4384/2318/2048 4673/2478/2207 +f 4673/2478/2207 4384/2318/2048 4676/2479/2208 +f 4384/2318/2048 4385/2319/2049 4676/2479/2208 
+f 4676/2479/2208 4385/2319/2049 4680/2480/2209 +f 4680/2480/2209 4385/2319/2049 4681/2481/2210 +f 4385/2319/2049 4386/2320/2050 4681/2481/2210 +f 4386/2320/2050 4388/2321/2211 4681/2481/2210 +f 4681/2481/2210 4388/2321/2211 4682/2482/2212 +f 4682/2482/2212 4388/2321/2211 4684/2483/2213 +f 4389/2321/2053 4391/2322/2054 4683/2483/2214 +f 4392/2323/2054 4394/2324/2215 4685/2484/2214 +f 4685/2484/2214 4394/2324/2215 4686/2485/2216 +f 4395/2324/2056 4396/2325/2057 4687/2485/2217 +f 4687/2485/2217 4396/2325/2057 4689/2326/2218 +f 4396/2325/2057 4397/2326/2058 4689/2326/2218 +f 4689/2326/2218 4397/2326/2058 4690/2486/2219 +f 4397/2326/2058 4398/2327/2059 4690/2486/2219 +f 4691/2487/2220 4690/2486/2219 4692/2487/2221 +f 4690/2486/2219 4398/2327/2059 4692/2487/2221 +f 4398/2327/2059 4399/2328/2060 4692/2487/2221 +f 4399/2328/2060 4400/2329/2061 4692/2487/2221 +f 4692/2487/2221 4400/2329/2061 4694/2488/2222 +f 4694/2488/2222 4400/2329/2061 4696/2489/2223 +f 4400/2329/2061 4401/2330/2062 4696/2489/2223 +f 4401/2330/2062 4403/2331/2224 4696/2489/2223 +f 4404/2331/2065 4405/2332/2066 4695/2489/2225 +f 4405/2332/2066 4408/2334/2226 4695/2489/2225 +f 4698/2490/2225 4409/2135/2226 4700/2491/2227 +f 4409/2135/2226 4411/2335/2228 4700/2491/2227 +f 4700/2491/2227 4411/2335/2228 4701/2492/2228 +f 4412/2335/2070 4413/2336/2071 4704/2492/2229 +f 4413/2336/2071 4414/2337/2072 4704/2492/2229 +f 4704/2492/2229 4414/2337/2072 4705/2493/2230 +f 4414/2337/2072 4415/2338/2073 4705/2493/2230 +f 4415/2338/2073 4416/2140/712 4705/2493/2230 +f 4416/2140/712 4417/2339/2075 4705/2493/2230 +f 4706/2494/2231 4418/2341/2078 4707/2495/2232 +f 4418/2341/2078 4419/2340/2077 4707/2495/2232 +f 4707/2495/2232 4419/2340/2077 4708/2496/2233 +f 4419/2340/2077 4420/2342/2079 4708/2496/2233 +f 4420/2342/2079 4425/2343/434 4708/2496/2233 +f 4708/2496/2233 4425/2343/434 4709/2497/2234 +f 4425/2343/434 4431/2344/2082 4709/2497/2234 +f 4709/2497/2234 4431/2344/2082 4710/2498/2235 +f 4431/2344/2082 4432/2345/2081 
4710/2498/2235 +f 4710/2498/2235 4432/2345/2081 4711/2499/399 +f 4711/2499/399 4432/2345/2081 4713/2500/2236 +f 4713/2500/2236 4432/2345/2081 4714/2501/2237 +f 4432/2345/2081 4433/2346/2083 4714/2501/2237 +f 4434/2347/2085 4435/2349/2087 4717/2502/2237 +f 4717/2502/2237 4435/2349/2087 4718/2503/2238 +f 4435/2349/2087 4437/2350/2088 4718/2503/2238 +f 4719/2503/2239 4436/2350/2240 4721/2504/2241 +f 4721/2504/2241 4436/2350/2240 4723/2505/2242 +f 4436/2350/2240 4438/2351/2243 4723/2505/2242 +f 4723/2505/2242 4438/2351/2243 4725/2506/2244 +f 4438/2351/2243 4440/2352/2245 4725/2506/2244 +f 4725/2506/2244 4440/2352/2245 4726/2507/2246 +f 4440/2352/2245 4443/2353/2092 4726/2507/2246 +f 4443/2353/2092 4445/2354/2094 4726/2507/2246 +f 4727/2507/2247 4444/2354/2248 4729/2508/2249 +f 4729/2508/2249 4444/2354/2248 4731/2509/2250 +f 4444/2354/2248 4446/2355/2251 4731/2509/2250 +f 4731/2509/2250 4446/2355/2251 4733/2510/2252 +f 4446/2355/2251 4449/2356/2253 4733/2510/2252 +f 4733/2510/2252 4449/2356/2253 4734/2511/2254 +f 4736/2512/2254 4450/2357/2253 4739/2513/2255 +f 4450/2357/2253 4453/2358/20 4739/2513/2255 +f 4741/2514/2256 4744/2515/2255 4745/2516/2257 +f 4745/2516/2257 4744/2515/2255 4748/2517/2258 +f 4742/2518/2255 4739/2513/2255 4747/2519/2258 +f 4739/2513/2255 4453/2358/20 4747/2519/2258 +f 4454/2358/2099 4458/2359/2099 4746/2519/2099 +f 4750/2520/2099 4459/2361/2099 4753/2521/2099 +f 4460/2361/2099 4462/2362/2099 4752/2521/2099 +f 4463/2362/2099 4466/2363/2105 4751/2521/2099 +f 4466/2363/2105 4467/2365/2107 4751/2521/2099 +f 4754/2521/20 4468/2365/2259 4755/2522/2260 +f 4757/2523/1666 4469/2366/2261 4758/2524/2262 +f 4469/2366/2261 4472/2367/2109 4758/2524/2262 +f 4472/2367/2109 4473/2368/2111 4758/2524/2262 +f 4474/2370/2113 4475/2371/2114 4759/2525/2263 +f 4759/2525/2263 4475/2371/2114 4760/2526/2264 +f 4760/2526/2264 4475/2371/2114 4761/2527/2265 +f 4475/2371/2114 4476/2372/2115 4761/2527/2265 +f 4762/2527/2266 4477/2372/2116 4763/2528/2267 +f 4477/2372/2116 
4479/2373/2117 4763/2528/2267 +f 4764/2528/2268 4478/2373/2269 4765/2373/2270 +f 4478/2373/2269 4481/2374/2271 4765/2373/2270 +f 4766/2529/2272 4765/2373/2270 4767/2530/2273 +f 4767/2530/2273 4765/2373/2270 4768/2374/2274 +f 4765/2373/2270 4481/2374/2271 4768/2374/2274 +f 4770/2531/2275 4769/2374/2276 4771/2531/2277 +f 4769/2374/2276 4482/2374/2119 4771/2531/2277 +f 4482/2374/2119 4483/2375/2120 4771/2531/2277 +f 4484/2375/2122 4485/2376/2121 4772/2531/2278 +f 4772/2531/2278 4485/2376/2121 4773/2532/2279 +f 4485/2376/2121 4486/2188/2123 4773/2532/2279 +f 4486/2188/2123 4487/2377/2124 4773/2532/2279 +f 4487/2377/2124 4488/2378/2125 4773/2532/2279 +f 4773/2532/2279 4488/2378/2125 4774/2533/2280 +f 4488/2378/2125 4489/2379/2126 4774/2533/2280 +f 4774/2533/2280 4489/2379/2126 4776/2379/2281 +f 4489/2379/2126 4490/2380/2127 4775/2379/2282 +f 4776/2379/2281 4491/2380/2127 4777/2534/2283 +f 4777/2534/2283 4491/2380/2127 4778/2535/2284 +f 4492/2194/2127 4493/2381/2128 4779/2536/2285 +f 4493/2381/2128 4495/2383/2130 4779/2536/2285 +f 4779/2536/2285 4495/2383/2130 4781/2537/2286 +f 4495/2383/2130 4497/2385/2131 4781/2537/2286 +f 4781/2537/2286 4497/2385/2131 4782/2538/2122 +f 4497/2385/2131 4499/2386/2132 4782/2538/2122 +f 4783/2539/2122 4498/2540/2287 4784/2540/2121 +f 4784/2540/2121 4498/2540/2287 4785/2541/2288 +f 4785/2541/2288 4498/2540/2287 4786/2542/2289 +f 4787/2543/2290 4499/2386/2132 4788/2544/2291 +f 4499/2386/2132 4500/2387/2133 4788/2544/2291 +f 4500/2387/2133 4501/2388/2134 4788/2544/2291 +f 4788/2544/2291 4501/2388/2134 4790/2545/2292 +f 4790/2545/2292 4501/2388/2134 4792/2546/1953 +f 4501/2388/2134 4502/2389/2135 4792/2546/1953 +f 4502/2389/2135 4503/2390/2136 4792/2546/1953 +f 4792/2546/1953 4503/2390/2136 4793/2547/1727 +f 4503/2390/2136 4504/2391/2137 4793/2547/1727 +f 4504/2391/2137 4506/2392/1953 4793/2547/1727 +f 4794/2547/2293 4505/2392/2294 4796/2548/2295 +f 4505/2392/2294 4508/2203/2138 4796/2548/2295 +f 4796/2548/2295 4508/2203/2138 4799/2549/2296 
+f 4799/2549/2296 4508/2203/2138 4800/2393/2297 +f 4508/2203/2138 4509/2393/2139 4800/2393/2297 +f 4801/2393/2298 4510/2393/2140 4802/2550/2299 +f 4510/2393/2140 4511/2394/2142 4802/2550/2299 +f 4512/2397/2300 4516/2398/2301 4803/2551/2302 +f 4803/2551/2302 4516/2398/2301 4804/2552/2303 +f 4804/2552/2303 4516/2398/2301 4805/2553/2304 +f 4516/2398/2301 4520/2399/2143 4805/2553/2304 +f 4520/2399/2143 4521/2402/2145 4805/2553/2304 +f 4805/2553/2304 4521/2402/2145 4806/2554/2305 +f 4809/2555/2306 4522/2403/2146 4813/2556/1986 +f 4522/2403/2146 4523/2404/1770 4813/2556/1986 +f 4523/2404/1770 4524/2405/1990 4813/2556/1986 +f 4525/2557/1789 4527/2407/1789 4812/2558/1789 +f 4528/2407/1789 4531/2407/1789 4811/2558/1789 +f 4532/2407/1789 4535/2409/1789 4810/2558/1789 +f 4815/2558/1789 4536/2409/1789 4817/2559/1789 +f 4537/2409/1789 4538/2410/1789 4816/2559/1789 +f 4538/2410/1789 4543/2412/1789 4816/2559/1789 +f 4544/2413/1990 4545/2414/1770 4819/2560/1986 +f 4819/2560/1986 4545/2414/1770 4820/2561/1770 +f 4823/2562/1806 4820/2561/1770 4825/2563/1806 +f 4820/2561/1770 4545/2414/1770 4825/2563/1806 +f 4545/2414/1770 4547/2415/1806 4825/2563/1806 +f 4548/2416/1789 4551/2417/1789 4824/2564/1789 +f 4828/2564/1789 4552/2417/1789 4829/2565/1789 +f 4553/2418/2151 4832/2566/1770 4831/2567/2151 +f 4831/2567/2151 4832/2566/1770 4833/2568/1770 +f 4833/2568/1770 4832/2566/1770 4835/2569/1806 +f 4844/2570/2307 4847/2571/2307 4848/2572/2307 +f 4854/2572/2308 4845/2571/2308 4859/2573/2308 +f 4846/2571/1789 4838/2570/1789 4858/2573/1789 +f 4839/2570/1796 4837/2574/1796 4857/2573/1796 +f 4867/2575/1789 4862/2573/1789 4868/2576/1789 +f 4872/2576/2309 4855/2573/2309 4874/2577/2309 +f 4856/2573/2309 4834/2574/2309 4873/2577/2309 +f 4876/2578/1806 4835/2569/1806 4877/2579/14 +f 4835/2569/1806 4832/2566/1770 4877/2579/14 +f 4832/2566/1770 4553/2418/2151 4877/2579/14 +f 4553/2418/2151 4555/2419/2153 4877/2579/14 +f 4555/2419/2153 4558/2425/2157 4877/2579/14 +f 4877/2579/14 4558/2425/2157 
4878/2580/2310 +f 4558/2425/2157 4560/2424/2158 4878/2580/2310 +f 4560/2424/2158 4562/2581/2311 4878/2580/2310 +f 4878/2580/2310 4562/2581/2311 4880/2582/2312 +f 4563/2426/2159 4565/2427/2161 4879/2583/2312 +f 4565/2427/2161 4568/2431/2165 4879/2583/2312 +f 4881/2583/1805 4566/2431/2313 4882/2584/1804 +f 4884/2585/1995 4567/2586/2314 4885/2587/1997 +f 4567/2586/2314 4570/2588/2170 4885/2587/1997 +f 4887/2589/1459 4886/2590/2315 4888/2591/2316 +f 4885/2587/1997 4570/2588/2170 4889/2592/2000 +f 4891/2593/2317 4889/2592/2000 4892/2594/2002 +f 4889/2592/2000 4570/2588/2170 4892/2594/2002 +f 4893/2595/2318 4571/2433/2167 4894/2596/2319 +f 4571/2433/2167 4572/2434/2168 4894/2596/2319 +f 4572/2434/2168 4573/2435/2169 4894/2596/2319 +f 4573/2435/2169 4575/2436/2320 4894/2596/2319 +f 4575/2436/2320 4577/2437/2321 4894/2596/2319 +f 4894/2596/2319 4577/2437/2321 4895/2597/2322 +f 4577/2437/2321 4579/2438/2323 4895/2597/2322 +f 4895/2597/2322 4579/2438/2323 4896/2598/2324 +f 4579/2438/2323 4581/2439/2325 4896/2598/2324 +f 4896/2598/2324 4581/2439/2325 4897/2599/2326 +f 4899/2600/1579 4898/2601/2327 4903/2602/2328 +f 4897/2599/2326 4581/2439/2325 4902/2603/2328 +f 4582/2439/2007 4583/2440/103 4901/2603/107 +f 4584/2604/103 4586/2605/103 4900/2602/103 +f 4906/2602/103 4587/2605/103 4907/2606/103 +f 4588/2441/2173 4589/2442/2174 4909/2607/2329 +f 4909/2607/2329 4589/2442/2174 4910/2607/2330 +f 4589/2442/2174 4590/2443/2175 4910/2607/2330 +f 4590/2443/2175 4593/2444/2331 4910/2607/2330 +f 4910/2607/2330 4593/2444/2331 4911/2608/2179 +f 4593/2444/2331 4597/2445/2332 4911/2608/2179 +f 4597/2445/2332 4602/2446/2178 4911/2608/2179 +f 4602/2446/2178 4606/2448/2177 4911/2608/2179 +f 4911/2608/2179 4606/2448/2177 4912/2609/103 +f 4606/2448/2177 4607/2447/2179 4912/2609/103 +f 4607/2447/2179 4609/2449/2181 4914/2609/2333 +f 4914/2609/2333 4609/2449/2181 4915/2449/2334 +f 4918/2610/2335 4608/2452/2336 4919/2610/2337 +f 4608/2452/2336 4613/2452/2183 4919/2610/2337 +f 4613/2452/2183 
4614/2453/2184 4919/2610/2337 +f 4614/2453/2184 4615/2453/2185 4919/2610/2337 +f 4921/2611/2338 4616/2454/2186 4922/2612/2339 +f 4616/2454/2186 4617/2455/2186 4922/2612/2339 +f 4923/2613/2340 4618/2614/2341 4924/2615/2339 +f 4925/2616/2339 4619/2456/2186 4928/2617/2186 +f 4619/2456/2186 4620/2458/1611 4928/2617/2186 +f 4620/2458/1611 4621/2459/2188 4927/2617/1611 +f 4622/2459/2191 4623/2459/1298 4926/2617/2342 +f 4623/2459/1298 4624/2460/2192 4926/2617/2342 +f 4624/2460/2192 4625/2461/2194 4926/2617/2342 +f 4929/2617/2343 4626/2461/2344 4930/2618/2345 +f 4627/2462/2195 4628/2464/1822 4933/2462/2345 +f 4933/2462/2345 4628/2464/1822 4936/2464/2346 +f 4936/2464/2346 4628/2464/1822 4937/2619/2347 +f 4628/2464/1822 4634/2466/1050 4937/2619/2347 +f 4634/2466/1050 4636/2470/2348 4937/2619/2347 +f 4938/2620/1822 4637/2621/1618 4940/2622/2349 +f 4638/2621/1050 4642/2623/1050 4939/2622/1050 +f 4941/2622/2350 4643/2623/2351 4942/2624/2352 +f 4644/2472/1828 4645/2473/2197 4943/2625/2352 +f 4943/2625/2352 4645/2473/2197 4945/2626/2015 +f 4645/2473/2197 4646/2474/1836 4945/2626/2015 +f 4945/2626/2015 4646/2474/1836 4947/2626/2353 +f 4646/2474/1836 4649/2474/2198 4947/2626/2353 +f 4951/2627/1841 4948/2626/1841 4954/2628/1841 +f 4946/2626/1842 4647/2474/1842 4953/2628/1842 +f 4648/2474/1841 4651/2475/1841 4952/2628/1841 +f 4957/2628/1841 4652/2475/1841 4960/2629/1841 +f 4653/2113/1841 4656/2476/1841 4959/2630/1841 +f 4657/2476/1841 4659/2477/1841 4958/2630/1841 +f 4963/2630/1841 4660/2477/1841 4965/2631/2203 +f 4965/2631/2203 4660/2477/1841 4966/2632/2039 +f 4966/2632/2039 4660/2477/1841 4968/2633/2354 +f 4661/2477/1841 4664/2315/2203 4967/2633/1841 +f 4969/2633/2355 4665/2315/2204 4970/2634/2356 +f 4665/2315/2204 4666/2315/2205 4970/2634/2356 +f 4970/2634/2356 4666/2315/2205 4972/2635/2357 +f 4667/2315/1276 4669/2478/1276 4971/2635/1276 +f 4974/2635/1276 4670/2478/1276 4975/2636/1276 +f 4977/2636/1276 4671/2478/1276 4980/2637/1276 +f 4672/2478/1292 4674/2479/1292 4979/2637/1292 
+f 4675/2479/1276 4677/2480/1276 4978/2637/1276 +f 4982/2637/1276 4678/2480/1276 4985/2638/1276 +f 4987/2639/2358 4986/2638/2359 4988/2640/2360 +f 4990/2640/1276 4983/2638/1276 4992/2641/1276 +f 4984/2638/1276 4679/2480/1276 4991/2641/1276 +f 4680/2480/2209 4681/2481/2210 4995/2641/2361 +f 4681/2481/2210 4682/2482/2212 4995/2641/2361 +f 4998/2642/2362 4995/2641/2361 4999/2642/2363 +f 4995/2641/2361 4682/2482/2212 4999/2642/2363 +f 4682/2482/2212 4684/2483/2213 4999/2642/2363 +f 4999/2642/2363 4684/2483/2213 5000/2643/2364 +f 4685/2484/2214 4686/2485/2216 5001/2644/2365 +f 5001/2644/2365 4686/2485/2216 5003/2645/2366 +f 4686/2485/2216 4688/2326/2367 5003/2645/2366 +f 5003/2645/2366 4688/2326/2367 5007/2646/2368 +f 5007/2646/2368 4688/2326/2367 5008/2647/2369 +f 4689/2326/2218 4690/2486/2219 5009/2647/2370 +f 4690/2486/2219 4691/2487/2220 5009/2647/2370 +f 5009/2647/2370 4691/2487/2220 5010/2648/2371 +f 4691/2487/2220 4692/2487/2221 5010/2648/2371 +f 4692/2487/2221 4694/2488/2222 5010/2648/2371 +f 5012/2649/2372 5010/2648/2371 5014/2649/2373 +f 5014/2649/2373 5010/2648/2371 5015/2650/2374 +f 5010/2648/2371 4694/2488/2222 5015/2650/2374 +f 5016/2651/2374 4693/2490/2375 5017/2652/2376 +f 5017/2652/2376 4693/2490/2375 5018/2653/2375 +f 4693/2490/2375 4697/2490/2377 5018/2653/2375 +f 4697/2490/2377 4699/2491/2378 5018/2653/2375 +f 5018/2653/2375 4699/2491/2378 5020/2654/2379 +f 4700/2491/2227 4701/2492/2228 5019/2654/2228 +f 5022/2655/2228 4702/2656/2227 5024/2657/2380 +f 5024/2657/2380 4702/2656/2227 5025/2658/2381 +f 4703/2656/2382 4706/2494/2231 5026/2658/2383 +f 4706/2494/2231 4707/2495/2232 5026/2658/2383 +f 5026/2658/2383 4707/2495/2232 5029/2659/2384 +f 5029/2659/2384 4707/2495/2232 5032/2660/2236 +f 4707/2495/2232 4708/2496/2233 5032/2660/2236 +f 4708/2496/2233 4709/2497/2234 5032/2660/2236 +f 4709/2497/2234 4710/2498/2235 5032/2660/2236 +f 5032/2660/2236 4710/2498/2235 5033/2661/2384 +f 4710/2498/2235 4711/2499/399 5033/2661/2384 +f 4711/2499/399 4713/2500/2236 
5033/2661/2384 +f 5037/2661/2385 4712/2500/2385 5040/2662/2385 +f 4712/2500/2385 4715/2501/2386 5040/2662/2385 +f 5040/2662/2385 4715/2501/2386 5041/2663/2387 +f 5043/2664/2387 4716/2502/2386 5044/2665/2388 +f 4716/2502/2386 4719/2503/2239 5044/2665/2388 +f 4719/2503/2239 4721/2504/2241 5044/2665/2388 +f 5047/2665/2389 4720/2504/2390 5049/2666/2391 +f 4720/2504/2390 4722/2505/471 5049/2666/2391 +f 4722/2505/471 5050/2667/2392 5049/2666/2391 +f 5049/2666/2391 5050/2667/2392 5052/2668/2393 +f 5052/2668/2393 5050/2667/2392 5053/2669/2394 +f 5050/2667/2392 4722/2505/471 5053/2669/2394 +f 4722/2505/471 4724/2506/2395 5053/2669/2394 +f 4724/2506/2395 5054/2670/2396 5053/2669/2394 +f 5053/2669/2394 5054/2670/2396 5055/2671/2397 +f 5054/2670/2396 4724/2506/2395 5055/2671/2397 +f 4724/2506/2395 4727/2507/2247 5055/2671/2397 +f 4727/2507/2247 4729/2508/2249 5055/2671/2397 +f 5056/2672/2397 4728/2673/2249 5057/2674/2393 +f 5059/2675/2398 5057/2674/2393 5060/2676/2399 +f 5060/2676/2399 5057/2674/2393 5061/2677/2400 +f 5057/2674/2393 4728/2673/2249 5061/2677/2400 +f 4728/2673/2249 4730/2678/2401 5061/2677/2400 +f 5061/2677/2400 4730/2678/2401 5062/2679/2402 +f 4730/2678/2401 4732/2680/2403 5062/2679/2402 +f 4732/2680/2403 4735/2681/2404 5062/2679/2402 +f 5062/2679/2402 4735/2681/2404 5063/2682/2405 +f 5063/2682/2405 4735/2681/2404 5064/2683/2406 +f 4735/2681/2404 4737/2684/2407 5064/2683/2406 +f 4738/2685/2407 4743/2686/2408 5065/2687/2409 +f 4743/2686/2408 4740/2688/2256 5065/2687/2409 +f 5065/2687/2409 4740/2688/2256 5067/2689/2410 +f 5067/2689/2410 4740/2688/2256 5068/2689/23 +f 4741/2514/2256 4745/2516/2257 5070/2514/2411 +f 5070/2514/2411 4745/2516/2257 5072/2690/2257 +f 4745/2516/2257 4748/2517/2258 5072/2690/2257 +f 4749/2520/2258 4754/2521/20 5071/2691/2255 +f 4754/2521/20 4755/2522/2260 5071/2691/2255 +f 5074/2692/2257 4756/2693/2260 5075/2694/2412 +f 4756/2693/2260 4759/2525/2263 5075/2694/2412 +f 4759/2525/2263 4760/2526/2264 5075/2694/2412 +f 5075/2694/2412 
4760/2526/2264 5076/2695/2413 +f 4760/2526/2264 4761/2527/2265 5076/2695/2413 +f 5077/2695/2414 4762/2527/2266 5079/2696/2415 +f 5079/2696/2415 4762/2527/2266 5080/2697/1268 +f 4762/2527/2266 4763/2528/2267 5080/2697/1268 +f 5081/2697/2416 4764/2528/2268 5082/2698/2417 +f 4764/2528/2268 4765/2373/2270 5082/2698/2417 +f 4765/2373/2270 4766/2529/2272 5082/2698/2417 +f 4766/2529/2272 4767/2530/2273 5082/2698/2417 +f 5082/2698/2417 4767/2530/2273 5085/2699/2418 +f 5085/2699/2418 4767/2530/2273 5086/2700/2419 +f 4767/2530/2273 4768/2374/2274 5086/2700/2419 +f 4769/2374/2276 4770/2531/2275 5089/2700/2420 +f 5089/2700/2420 4770/2531/2275 5092/2701/2421 +f 5094/2701/2422 5092/2701/2421 5095/2701/2423 +f 5092/2701/2421 4770/2531/2275 5095/2701/2423 +f 5095/2701/2423 4770/2531/2275 5096/2702/2424 +f 4770/2531/2275 4771/2531/2277 5096/2702/2424 +f 4772/2531/2278 4773/2532/2279 5097/2702/2425 +f 4773/2532/2279 4774/2533/2280 5097/2702/2425 +f 5097/2702/2425 4774/2533/2280 5099/2702/2425 +f 4774/2533/2280 4776/2379/2281 5099/2702/2425 +f 5099/2702/2425 4776/2379/2281 5101/2703/2278 +f 4776/2379/2281 4777/2534/2283 5101/2703/2278 +f 4777/2534/2283 4778/2535/2284 5101/2703/2278 +f 4778/2535/2284 4780/2704/2426 5101/2703/2278 +f 5101/2703/2278 4780/2704/2426 5103/2705/2425 +f 4780/2704/2426 4783/2539/2122 5103/2705/2425 +f 4783/2539/2122 4784/2540/2121 5103/2705/2425 +f 4784/2540/2121 4785/2541/2288 5103/2705/2425 +f 5103/2705/2425 4785/2541/2288 5105/2706/2427 +f 4785/2541/2288 4786/2542/2289 5105/2706/2427 +f 5105/2706/2427 4786/2542/2289 5106/2707/2428 +f 5108/2708/2429 4787/2543/2290 5109/2709/2430 +f 4787/2543/2290 4788/2544/2291 5109/2709/2430 +f 5109/2709/2430 4788/2544/2291 5110/2710/2431 +f 4788/2544/2291 4790/2545/2292 5110/2710/2431 +f 5110/2710/2431 4789/2545/2432 5112/2710/2433 +f 5112/2710/2433 4789/2545/2432 5113/2711/2434 +f 4789/2545/2432 4791/2546/2294 5113/2711/2434 +f 4791/2546/2294 4794/2547/2293 5113/2711/2434 +f 4794/2547/2293 4796/2548/2295 5113/2711/2434 
+f 5114/2711/2435 4795/2548/2436 5115/2712/2437 +f 4795/2548/2436 4797/2549/2436 5115/2712/2437 +f 4798/2549/2438 4801/2393/2298 5116/2712/2439 +f 4801/2393/2298 4802/2550/2299 5116/2712/2439 +f 5116/2712/2439 4802/2550/2299 5119/2713/2440 +f 4803/2551/2302 4804/2552/2303 5118/2714/2440 +f 4804/2552/2303 4805/2553/2304 5117/2714/2441 +f 4805/2553/2304 4806/2554/2305 5117/2714/2441 +f 5121/2715/2442 4807/2716/1301 5123/2717/2443 +f 4808/2718/2444 4814/2558/1789 5122/2719/2443 +f 4814/2558/1789 4818/2559/1789 5122/2719/2443 +f 5126/2720/2445 4819/2560/1986 5127/2721/1770 +f 4819/2560/1986 4820/2561/1770 5127/2721/1770 +f 4820/2561/1770 4823/2562/1806 5127/2721/1770 +f 5127/2721/1770 4823/2562/1806 5128/2722/2446 +f 5130/2723/2447 4821/2724/1789 5131/2725/2448 +f 5131/2725/2448 4821/2724/1789 5134/2726/1789 +f 4822/2724/1789 4826/2564/1789 5133/2726/1789 +f 4827/2564/1789 4830/2565/1789 5132/2726/1789 +f 5135/2727/1986 4831/2567/2151 5136/2728/1770 +f 4831/2567/2151 4833/2568/1770 5136/2728/1770 +f 5136/2728/1770 4833/2568/1770 5138/2729/1806 +f 4833/2568/1770 4835/2569/1806 5138/2729/1806 +f 4836/2574/2309 4840/2570/2309 5137/2730/2309 +f 5140/2730/1789 4841/2570/1789 5141/2731/1789 +f 5145/2731/1789 4842/2570/1789 5150/2732/1789 +f 4843/2570/1789 4849/2572/1789 5149/2732/1789 +f 5155/2733/2308 5159/2734/2449 5161/2735/2449 +f 5156/2734/1789 5153/2732/1789 5160/2735/1789 +f 5164/2735/1796 5146/2732/1796 5165/2736/1796 +f 5169/2736/1796 5147/2732/1796 5172/2737/1796 +f 5148/2732/1789 4850/2572/1789 5171/2737/1789 +f 4851/2572/1789 5179/2738/1789 5170/2737/1789 +f 5175/2737/1789 5176/2738/1789 5181/2738/1789 +f 5188/2739/1789 5185/2738/1789 5193/2740/1789 +f 5180/2738/1789 5177/2738/1789 5192/2740/1789 +f 5178/2738/1789 4852/2572/1789 5191/2740/1789 +f 4853/2572/1789 4860/2573/1789 5190/2740/1789 +f 4861/2573/1789 4863/2575/1789 5189/2740/1789 +f 5196/2740/1789 4864/2575/1789 5197/2741/1789 +f 5200/2741/1789 4865/2575/1789 5202/2742/1789 +f 4866/2575/1789 
4869/2576/1789 5201/2742/1789 +f 5205/2743/1789 4870/2744/1789 5207/2745/1789 +f 4871/2576/2309 4875/2577/2309 5206/2746/2309 +f 4876/2578/1806 4877/2579/14 5210/2747/1806 +f 4877/2579/14 4878/2580/2310 5210/2747/1806 +f 5210/2747/1806 4878/2580/2310 5211/2748/2312 +f 4878/2580/2310 4880/2582/2312 5211/2748/2312 +f 5214/2749/1789 4881/2583/1805 5215/2750/1805 +f 4881/2583/1805 4882/2584/1804 5215/2750/1805 +f 5216/2750/2450 4883/2584/2451 5217/2751/2452 +f 4883/2584/2451 4886/2590/2315 5217/2751/2452 +f 4886/2590/2315 4887/2589/1459 5217/2751/2452 +f 5217/2751/2452 4887/2589/1459 5218/2752/2453 +f 5221/2753/2454 5219/2754/2455 5222/2755/2456 +f 5218/2752/2453 4887/2589/1459 5223/2756/2457 +f 4887/2589/1459 4888/2591/2316 5223/2756/2457 +f 4888/2591/2316 4890/2757/2458 5223/2756/2457 +f 5223/2756/2457 4890/2757/2458 5224/2758/2459 +f 4891/2593/2317 4892/2594/2002 5225/2759/2003 +f 5225/2759/2003 4892/2594/2002 5226/2760/2460 +f 4893/2595/2318 4894/2596/2319 5227/2761/2461 +f 4894/2596/2319 4895/2597/2322 5227/2761/2461 +f 5227/2761/2461 4895/2597/2322 5229/2762/1815 +f 4895/2597/2322 4896/2598/2324 5229/2762/1815 +f 4896/2598/2324 4897/2599/2326 5229/2762/1815 +f 5229/2762/1815 4897/2599/2326 5234/2763/2462 +f 4898/2601/2327 4899/2600/1579 5233/2764/2462 +f 4899/2600/1579 742/267/1818 5232/2764/1818 +f 738/267/105 681/256/105 5231/2764/105 +f 682/256/103 693/259/103 5230/2764/103 +f 5237/2764/103 694/259/103 5240/2765/103 +f 695/259/103 703/260/103 5239/2765/103 +f 704/260/103 5247/2766/103 5238/2765/103 +f 5243/2765/103 5244/2766/103 5248/2767/103 +f 5251/2767/104 5245/2766/104 5255/2768/104 +f 5246/2766/104 705/260/104 5254/2768/104 +f 706/260/104 627/247/104 5253/2768/104 +f 628/247/103 597/242/103 5252/2768/103 +f 5258/2768/103 598/242/103 5260/2769/103 +f 599/242/107 608/244/105 5259/2769/105 +f 5264/2770/103 609/2771/103 5265/2772/103 +f 5267/2772/103 610/2771/103 5269/2773/103 +f 611/244/103 613/245/103 5268/2774/103 +f 5272/2774/103 614/245/103 5273/2775/103 
+f 5277/2775/103 615/245/103 5278/2776/103 +f 5282/2776/105 616/245/105 5284/2777/105 +f 617/245/103 621/246/103 5283/2777/103 +f 5288/2777/103 622/246/103 5292/2778/103 +f 623/246/103 633/247/103 5291/2778/103 +f 634/247/103 710/261/103 5290/2778/103 +f 711/261/103 713/262/103 5289/2778/103 +f 5296/2778/103 714/262/103 5297/2779/103 +f 5302/2779/103 715/262/103 5304/2780/103 +f 716/262/105 720/263/105 5303/2780/105 +f 5308/2780/105 721/263/105 5311/2781/105 +f 722/263/103 724/264/103 5310/2781/103 +f 5319/2782/107 5314/2781/107 5323/2783/107 +f 5309/2781/107 725/264/107 5322/2783/107 +f 726/264/107 731/265/107 5321/2783/107 +f 732/265/103 735/266/103 5320/2783/103 +f 5324/2783/2462 736/266/1818 5325/2784/1579 +f 736/266/1818 742/267/1818 5325/2784/1579 +f 742/267/1818 4899/2600/1579 5325/2784/1579 +f 4899/2600/1579 4903/2602/2328 5325/2784/1579 +f 5325/2784/1579 4903/2602/2328 5326/2785/2328 +f 5328/2785/103 4904/2602/103 5331/2786/103 +f 4905/2602/103 4908/2606/103 5330/2786/103 +f 4909/2607/2329 4910/2607/2330 5329/2787/2463 +f 4910/2607/2330 4911/2608/2179 5329/2787/2463 +f 4911/2608/2179 4912/2609/103 5329/2787/2463 +f 5334/2786/103 4913/2788/103 5338/2789/103 +f 4913/2788/103 4916/2790/103 5338/2789/103 +f 4917/2790/2464 4920/2791/2338 5337/2789/2464 +f 4920/2791/2338 4923/2613/2340 5337/2789/2464 +f 4923/2613/2340 4924/2615/2339 5337/2789/2464 +f 4925/2616/2339 4928/2617/2186 5336/2792/2464 +f 4928/2617/2186 4931/2618/2464 5336/2792/2464 +f 4932/2462/103 4934/2464/103 5335/2793/103 +f 5341/2793/103 4935/2464/103 5343/2794/103 +f 4936/2464/2346 4937/2619/2347 5342/2794/105 +f 5348/2795/105 4938/2620/1822 5349/2622/2465 +f 4938/2620/1822 4940/2622/2349 5349/2622/2465 +f 5349/2622/2465 4940/2622/2349 5350/2796/2466 +f 5350/2796/2466 4940/2622/2349 5351/2797/2467 +f 4941/2622/2350 4942/2624/2352 5352/2797/2468 +f 5352/2797/2468 4942/2624/2352 5354/2798/79 +f 4942/2624/2352 4944/2799/2469 5354/2798/79 +f 4945/2626/2015 4947/2626/2353 5353/2800/79 +f 
4947/2626/2353 4949/2627/2470 5353/2800/79 +f 5357/2800/2471 5356/2800/2472 5358/2627/2473 +f 5356/2800/2472 4950/2627/2474 5358/2627/2473 +f 5358/2627/2473 4950/2627/2474 5359/2801/2475 +f 4950/2627/2474 4955/2628/2178 5359/2801/2475 +f 5359/2801/2475 4955/2628/2178 5360/2802/2476 +f 5361/2802/2179 4956/2628/2477 5362/2803/2179 +f 4956/2628/2477 4961/2629/2178 5362/2803/2179 +f 4962/2630/2178 4964/2631/2478 5363/2804/2179 +f 5363/2804/2179 4964/2631/2478 5364/2805/2479 +f 5364/2805/2479 4964/2631/2478 5365/2632/2480 +f 4964/2631/2478 4966/2632/2039 5365/2632/2480 +f 4966/2632/2039 4968/2633/2354 5365/2632/2480 +f 5365/2632/2480 4969/2633/2355 5366/2632/2024 +f 4969/2633/2355 4970/2634/2356 5366/2632/2024 +f 5366/2632/2024 4970/2634/2356 5367/2806/2481 +f 4970/2634/2356 4972/2635/2357 5367/2806/2481 +f 4973/2635/2190 4976/2636/2190 5368/2806/1451 +f 5368/2806/1451 4976/2636/2190 5369/2807/2482 +f 5369/2807/2482 4976/2636/2190 5370/2808/2483 +f 4976/2636/2190 4981/2637/2484 5370/2808/2483 +f 5370/2808/2483 4981/2637/2484 5371/2809/2485 +f 4981/2637/2484 4986/2638/2359 5371/2809/2485 +f 4986/2638/2359 4987/2639/2358 5371/2809/2485 +f 5371/2809/2485 4987/2639/2358 5372/2810/2174 +f 5373/2811/2486 5372/2810/2174 5374/2812/2487 +f 5372/2810/2174 4987/2639/2358 5374/2812/2487 +f 4987/2639/2358 4988/2640/2360 5374/2812/2487 +f 5374/2812/2487 4988/2640/2360 5377/2813/2488 +f 4989/2640/1276 4993/2641/1276 5376/2813/1276 +f 4994/2641/1276 4996/2642/1276 5375/2813/1276 +f 5380/2813/1276 4997/2642/1276 5381/2814/1276 +f 4998/2642/2362 4999/2642/2363 5383/2814/2489 +f 5383/2814/2489 4999/2642/2363 5384/2815/2490 +f 4999/2642/2363 5000/2643/2364 5384/2815/2490 +f 5000/2643/2364 5002/2816/2491 5384/2815/2490 +f 5002/2816/2491 5004/2817/2492 5384/2815/2490 +f 5384/2815/2490 5004/2817/2492 5385/2818/2493 +f 5386/2819/2494 5005/2820/2495 583/2821/102 +f 586/2822/2496 5006/2646/2497 5387/2823/2498 +f 5390/2823/2499 5007/2646/2368 5392/2824/2500 +f 5007/2646/2368 5008/2647/2369 
5392/2824/2500 +f 5009/2647/2370 5010/2648/2371 5391/2824/2501 +f 5010/2648/2371 5012/2649/2372 5391/2824/2501 +f 5394/2825/2502 5011/2826/2503 5398/2827/2504 +f 5398/2827/2504 5011/2826/2503 5399/2828/2505 +f 5011/2826/2503 5013/2826/2506 5399/2828/2505 +f 5399/2828/2505 5013/2826/2506 5400/2829/2507 +f 5013/2826/2506 5016/2651/2374 5400/2829/2507 +f 5016/2651/2374 5017/2652/2376 5400/2829/2507 +f 5017/2652/2376 5018/2653/2375 5400/2829/2507 +f 5400/2829/2507 5018/2653/2375 5401/2830/1143 +f 5018/2653/2375 5020/2654/2379 5401/2830/1143 +f 5402/2831/2508 5021/2655/2509 5403/2832/2510 +f 5404/2833/109 5403/2832/2510 5405/2834/2511 +f 5403/2832/2510 5021/2655/2509 5405/2834/2511 +f 5021/2655/2509 5023/2657/1842 5405/2834/2511 +f 5024/2657/2380 5025/2658/2381 5406/2834/2385 +f 5025/2658/2381 5027/2659/2385 5406/2834/2385 +f 5406/2834/2385 5027/2659/2385 5409/2835/2512 +f 5028/2659/2385 5030/2660/2385 5408/2835/2385 +f 5031/2660/2385 5034/2661/2385 5407/2835/2385 +f 5411/2835/2385 5035/2661/2385 5413/2836/2385 +f 5036/2661/2385 5038/2662/2385 5412/2836/2385 +f 5416/2837/2513 5039/2838/1842 5417/2839/2514 +f 5417/2839/2514 5039/2838/1842 5418/2840/109 +f 5418/2840/109 5039/2838/1842 5419/2841/1257 +f 5039/2838/1842 5042/2842/2515 5419/2841/1257 +f 5042/2842/2515 5045/2843/2389 5419/2841/1257 +f 5419/2841/1257 5045/2843/2389 5420/2844/2516 +f 5046/2845/2517 5048/2846/2518 5421/2847/2519 +f 5421/2847/2519 5048/2846/2518 5422/2848/2520 +f 5422/2848/2520 5048/2846/2518 5423/2849/2521 +f 5048/2846/2518 5051/2850/2522 5423/2849/2521 +f 5423/2849/2521 5051/2850/2522 5424/2851/2523 +f 5424/2851/2523 5051/2850/2522 578/237/100 +f 5052/2668/2393 5053/2669/2394 579/2852/100 +f 579/2852/100 5053/2669/2394 5425/2853/2393 +f 5053/2669/2394 5055/2671/2397 5425/2853/2393 +f 5056/2672/2397 5057/2674/2393 5426/2854/2524 +f 5057/2674/2393 5059/2675/2398 5426/2854/2524 +f 5426/2854/2524 5059/2675/2398 5427/2675/2525 +f 5428/2675/2526 5058/2675/2527 5430/2855/2528 +f 5058/2675/2527 
5433/2856/2529 5430/2855/2528 +f 5430/2855/2528 5433/2856/2529 5434/2857/2530 +f 5435/2857/2531 5431/2856/2532 5436/2858/2533 +f 5432/2856/2534 5059/2675/2398 5437/2858/2535 +f 5059/2675/2398 5060/2676/2399 5437/2858/2535 +f 5437/2858/2535 5060/2676/2399 5439/2859/2536 +f 5060/2676/2399 5061/2677/2400 5439/2859/2536 +f 5439/2859/2536 5061/2677/2400 5441/2860/2537 +f 5061/2677/2400 5062/2679/2402 5441/2860/2537 +f 5441/2860/2537 5062/2679/2402 5443/2861/2538 +f 5062/2679/2402 5063/2682/2405 5443/2861/2538 +f 5443/2861/2538 5063/2682/2405 5445/2683/2539 +f 5063/2682/2405 5064/2683/2406 5445/2683/2539 +f 5450/2862/2540 5452/2863/2541 5454/2864/2541 +f 5452/2863/2541 5447/2865/2541 5454/2864/2541 +f 5444/2865/2539 5065/2687/2409 5453/2864/2542 +f 5065/2687/2409 5067/2689/2410 5453/2864/2542 +f 5460/2866/2542 5066/2867/2543 5462/2868/2542 +f 5462/2868/2542 5066/2867/2543 5464/2869/2544 +f 5066/2867/2543 5069/2870/2545 5464/2869/2544 +f 5069/2870/2545 5073/2871/2255 5464/2869/2544 +f 5074/2692/2257 5075/2694/2412 5463/2872/2546 +f 5075/2694/2412 5076/2695/2413 5463/2872/2546 +f 5077/2695/2414 5079/2696/2415 5466/2872/2541 +f 5466/2872/2541 5079/2696/2415 5468/2873/2541 +f 5472/2874/2547 5475/2875/2548 5476/2876/2549 +f 5477/2877/2550 5473/2877/2406 5479/2878/2551 +f 5474/2879/2552 5471/2880/2553 5478/2601/2554 +f 5480/2881/2406 5467/2873/2553 5481/2882/2555 +f 5467/2873/2553 5078/2696/2556 5481/2882/2555 +f 5078/2696/2556 5081/2697/2416 5481/2882/2555 +f 5081/2697/2416 5082/2698/2417 5481/2882/2555 +f 5481/2882/2555 5082/2698/2417 5482/2883/2557 +f 5082/2698/2417 5085/2699/2418 5482/2883/2557 +f 5485/2883/23 5083/2699/23 5488/2884/23 +f 5084/2699/23 5087/2700/23 5487/2884/23 +f 5088/2700/23 5090/2701/23 5486/2884/23 +f 5091/2701/2558 5093/2701/2559 5490/2884/2560 +f 5490/2884/2560 5093/2701/2559 5491/2885/2561 +f 5491/2885/2561 5093/2701/2559 5492/2886/2562 +f 5094/2701/2422 5095/2701/2423 5493/2886/2563 +f 5095/2701/2423 5096/2702/2424 5493/2886/2563 +f 5096/2702/2424 
5098/2702/2564 5493/2886/2563 +f 5493/2886/2563 5098/2702/2564 5496/2887/2565 +f 5496/2887/2565 5098/2702/2564 5498/2888/2566 +f 5098/2702/2564 5100/2703/2567 5498/2888/2566 +f 5498/2888/2566 5100/2703/2567 5501/2889/2568 +f 5100/2703/2567 5102/2705/2569 5501/2889/2568 +f 5102/2705/2569 5104/2706/2570 5501/2889/2568 +f 5104/2706/2570 5107/2707/2571 5501/2889/2568 +f 5501/2889/2568 5107/2707/2571 5503/2890/2572 +f 5108/2708/2429 5109/2709/2430 5502/2891/2573 +f 5109/2709/2430 5110/2710/2431 5502/2891/2573 +f 5110/2710/2431 5112/2710/2433 5502/2891/2573 +f 5505/2891/2574 5111/2710/2575 5507/2892/2576 +f 5111/2710/2575 5114/2711/2435 5507/2892/2576 +f 5114/2711/2435 5115/2712/2437 5507/2892/2576 +f 5507/2892/2576 5115/2712/2437 5508/2893/2577 +f 5115/2712/2437 5120/2713/2578 5508/2893/2577 +f 5120/2713/2578 5124/2894/2579 5508/2893/2577 +f 5509/2893/2580 5125/2894/2581 5511/2895/2582 +f 5126/2720/2445 5127/2721/1770 5510/2896/2583 +f 5127/2721/1770 5128/2722/2446 5510/2896/2583 +f 5511/2895/2582 5129/2897/2584 5512/2898/2585 +f 5514/2899/2586 5130/2723/2447 5515/2900/2587 +f 5130/2723/2447 5131/2725/2448 5515/2900/2587 +f 5131/2725/2448 5134/2726/1789 5515/2900/2587 +f 5516/2901/2588 5135/2727/1986 5517/2902/2589 +f 5135/2727/1986 5136/2728/1770 5517/2902/2589 +f 5517/2902/2589 5136/2728/1770 5518/2903/2590 +f 5136/2728/1770 5138/2729/1806 5518/2903/2590 +f 5518/2903/2590 5138/2729/1806 5520/2904/2312 +f 5139/2730/1789 5142/2731/1789 5519/2905/1789 +f 5522/2905/1789 5143/2731/1789 5526/2906/1789 +f 5144/2731/1789 5151/2732/1789 5525/2906/1789 +f 5152/2732/2591 5157/2734/2591 5524/2906/2591 +f 5158/2734/1789 5154/2733/1789 5523/2906/1789 +f 5154/2733/1789 5162/2735/1789 5523/2906/1789 +f 5529/2907/2586 5527/2908/2308 5530/2909/2592 +f 5523/2906/1789 5162/2735/1789 5532/2910/2593 +f 5532/2910/2593 5162/2735/1789 5534/2911/1789 +f 5163/2735/1789 5166/2736/1789 5533/2911/1789 +f 5537/2911/1789 5167/2736/1789 5540/2912/1789 +f 5168/2736/1789 5173/2737/1789 5539/2912/1789 
+f 5174/2737/1789 5182/2738/1789 5538/2912/1789 +f 5542/2912/1789 5183/2738/1789 5549/2913/1789 +f 5184/2738/1789 5186/2739/1789 5548/2913/1789 +f 5187/2739/1789 5194/2740/1789 5547/2913/1789 +f 5195/2914/1789 5198/2915/1789 5546/2916/1789 +f 5199/2915/2594 5203/2743/2594 5545/2916/2594 +f 5204/2743/1789 5208/2745/1789 5544/2916/1789 +f 5209/2745/1789 5212/2749/1789 5543/2916/1789 +f 5551/2916/1789 5213/2749/1789 5553/2917/1789 +f 5214/2749/1789 5215/2750/1805 5552/2917/1789 +f 5554/2917/2595 5216/2750/2450 5556/2918/2596 +f 5556/2918/2596 5216/2750/2450 5557/2919/2597 +f 5216/2750/2450 5217/2751/2452 5557/2919/2597 +f 5217/2751/2452 5218/2752/2453 5557/2919/2597 +f 5558/2920/2598 5219/2754/2455 5559/2921/2598 +f 5219/2754/2455 5221/2753/2454 5559/2921/2598 +f 5560/2922/2599 5220/2923/2600 5561/2924/2601 +f 5562/2925/2602 5561/2924/2601 5563/2926/2603 +f 5561/2924/2601 5220/2923/2600 5563/2926/2603 +f 5566/2927/2604 5563/2926/2603 5568/2928/2605 +f 5568/2928/2605 5563/2926/2603 5569/2929/2606 +f 5563/2926/2603 5220/2923/2600 5569/2929/2606 +f 5221/2753/2454 5222/2755/2456 5570/2930/2607 +f 5573/2931/2608 5570/2930/2607 5574/2932/2609 +f 5570/2930/2607 5222/2755/2456 5574/2932/2609 +f 5575/2933/2610 5223/2756/2457 5576/2934/2611 +f 5223/2756/2457 5224/2758/2459 5576/2934/2611 +f 5576/2934/2611 5224/2758/2459 5577/2935/2612 +f 5225/2759/2003 5226/2760/2460 5579/2936/2004 +f 5226/2760/2460 5228/2937/2613 5579/2936/2004 +f 5228/2937/2613 5235/2938/2614 5579/2936/2004 +f 5579/2936/2004 5235/2938/2614 5580/2939/2615 +f 5236/2938/103 5241/2940/105 5580/2939/2615 +f 5580/2939/2615 5241/2940/105 5585/2941/105 +f 5242/2765/107 5249/2767/107 5584/2942/2616 +f 5250/2767/103 5256/2768/103 5583/2942/103 +f 5257/2768/103 5261/2769/103 5582/2942/103 +f 5262/2770/103 5590/2943/103 5581/2941/103 +f 5587/2941/103 5588/2943/103 5591/2944/103 +f 5593/2944/105 5589/2943/105 5594/2945/2617 +f 5589/2943/105 5263/2770/103 5594/2945/2617 +f 5263/2770/103 5266/2772/2618 5594/2945/2617 +f 
5266/2772/2618 5270/2773/2619 5594/2945/2617 +f 5594/2945/2617 5270/2773/2619 5596/2946/105 +f 5271/2774/105 5274/2775/105 5595/2947/105 +f 5598/2947/2619 5275/2775/2619 5600/2948/2619 +f 5276/2775/2619 5279/2776/2619 5599/2948/2619 +f 5603/2948/103 5280/2776/103 5605/2949/103 +f 5281/2776/103 5285/2777/105 5604/2949/105 +f 5608/2949/103 5286/2777/103 5610/2950/103 +f 5287/2777/103 5293/2778/103 5609/2950/103 +f 5613/2950/103 5294/2778/103 5616/2951/103 +f 5295/2778/103 5298/2779/103 5615/2951/103 +f 5299/2779/103 5623/2952/103 5614/2951/103 +f 5620/2951/103 5621/2952/103 5626/2953/105 +f 5622/2952/103 5300/2779/103 5625/2953/103 +f 5301/2779/103 5305/2780/103 5624/2953/103 +f 5629/2953/103 5306/2780/103 5632/2954/103 +f 5307/2780/103 5312/2781/103 5631/2954/105 +f 5313/2781/103 5315/2782/103 5630/2954/103 +f 5635/2955/103 5316/2956/103 5636/2957/103 +f 5638/2957/1818 5317/2956/1818 5639/2958/1579 +f 5318/2782/2462 5324/2783/2462 5640/2959/1579 +f 5324/2783/2462 5325/2784/1579 5640/2959/1579 +f 5640/2959/1579 5325/2784/1579 5644/2960/2328 +f 5325/2784/1579 5326/2785/2328 5644/2960/2328 +f 5327/2785/2620 5332/2786/2620 5643/2960/2620 +f 5333/2786/103 5339/2789/103 5642/2960/103 +f 5340/2793/103 5344/2794/103 5641/2961/103 +f 5646/2961/105 5345/2794/105 5647/2962/105 +f 5651/2963/103 5346/2964/103 5652/2965/103 +f 5347/2795/2621 5349/2622/2465 5654/2796/2172 +f 5349/2622/2465 5350/2796/2466 5654/2796/2172 +f 5654/2796/2172 5350/2796/2466 5655/2966/2622 +f 5350/2796/2466 5351/2797/2467 5655/2966/2622 +f 5351/2797/2467 5355/2798/2623 5655/2966/2622 +f 5356/2800/2472 5357/2800/2471 5657/2967/2622 +f 5657/2967/2622 5357/2800/2471 5659/2968/2624 +f 5357/2800/2471 5358/2627/2473 5659/2968/2624 +f 5659/2968/2624 5358/2627/2473 5660/2969/2329 +f 5358/2627/2473 5359/2801/2475 5660/2969/2329 +f 5662/2969/103 5359/2801/2475 5663/2970/2625 +f 5359/2801/2475 5360/2802/2476 5663/2970/2625 +f 5361/2802/2179 5362/2803/2179 5665/2970/2625 +f 5665/2970/2625 5362/2803/2179 
5666/2803/2626 +f 5363/2804/2179 5364/2805/2479 5669/2971/2333 +f 5669/2971/2333 5364/2805/2479 5672/2972/2627 +f 5672/2972/2627 5364/2805/2479 5673/2973/2628 +f 5364/2805/2479 5365/2632/2480 5673/2973/2628 +f 5365/2632/2480 5366/2632/2024 5673/2973/2628 +f 5366/2632/2024 5367/2806/2481 5673/2973/2628 +f 5368/2806/1451 5369/2807/2482 5675/2973/2629 +f 5675/2973/2629 5369/2807/2482 5676/2974/2630 +f 5369/2807/2482 5370/2808/2483 5676/2974/2630 +f 5676/2974/2630 5370/2808/2483 5677/2975/2631 +f 5370/2808/2483 5371/2809/2485 5677/2975/2631 +f 5677/2975/2631 5371/2809/2485 5679/2976/2632 +f 5371/2809/2485 5372/2810/2174 5679/2976/2632 +f 5372/2810/2174 5373/2811/2486 5679/2976/2632 +f 5679/2976/2632 5373/2811/2486 5681/2977/2633 +f 5681/2977/2633 5373/2811/2486 5683/2978/2634 +f 5683/2978/2634 5373/2811/2486 5684/2979/2635 +f 5373/2811/2486 5374/2812/2487 5684/2979/2635 +f 5374/2812/2487 5377/2813/2488 5684/2979/2635 +f 5684/2979/2635 5377/2813/2488 5685/2980/2636 +f 5687/2980/1284 5378/2813/1284 5690/2981/1284 +f 5379/2813/1292 5382/2814/1292 5689/2981/1292 +f 5383/2814/2489 5384/2815/2490 5688/2981/2637 +f 5384/2815/2490 5385/2818/2493 5688/2981/2637 +f 5386/2819/2494 583/2821/102 5693/2982/2638 +f 5693/2982/2638 583/2821/102 5694/2983/2639 +f 583/2821/102 581/2821/2640 5694/2983/2639 +f 5698/2984/2641 582/239/101 76/2985/2641 +f 582/239/101 580/238/101 76/2985/2641 +f 76/2985/2641 580/238/101 5701/2986/2642 +f 580/238/101 584/239/102 5700/2986/2643 +f 5706/2987/2644 5703/2987/2645 5708/2988/2646 +f 5699/2987/971 585/2989/2496 5707/2988/2647 +f 585/2989/2496 5388/2990/2648 5707/2988/2647 +f 5709/2988/2649 5389/2990/2650 5710/2991/2651 +f 5389/2990/2650 5393/2825/2500 5710/2991/2651 +f 5393/2825/2500 5395/2827/2651 5710/2991/2651 +f 5710/2991/2651 5395/2827/2651 5711/2991/2652 +f 5395/2827/2651 5712/2992/2653 5711/2991/2652 +f 5711/2991/2652 5712/2992/2653 5713/2992/2654 +f 5713/2992/2654 5712/2992/2653 5715/2993/2655 +f 5712/2992/2653 5395/2827/2651 5714/2993/2649 +f 
5716/2993/2656 5396/2827/2657 5720/2994/2656 +f 5724/2995/2658 5725/2996/101 5727/2997/2496 +f 5722/2998/2659 5728/2997/2660 5730/2999/20 +f 5730/2999/20 5728/2997/2660 5731/3000/2661 +f 5726/2997/2662 5725/2996/101 5734/3000/2663 +f 5734/3000/2663 5725/2996/101 5735/2995/1433 +f 5725/2996/101 5724/2995/2658 5735/2995/1433 +f 5736/3001/1433 5723/3002/2658 5737/3003/2664 +f 5737/3003/2664 5723/3002/2658 5739/3004/2665 +f 5724/2995/2658 5727/2997/2496 5738/3005/2666 +f 5727/2997/2496 5717/2998/2648 5738/3005/2666 +f 5742/3005/2649 5718/2998/2649 5744/3006/2649 +f 5719/2994/2649 5397/2827/2649 5743/3007/2649 +f 5398/2827/2504 5399/2828/2505 5747/3007/2667 +f 5747/3007/2667 5399/2828/2505 5748/3007/2668 +f 5399/2828/2505 5400/2829/2507 5748/3007/2668 +f 5400/2829/2507 5401/2830/1143 5748/3007/2668 +f 5402/2831/2508 5403/2832/2510 5750/3008/2668 +f 5750/3008/2668 5403/2832/2510 5751/3009/2669 +f 5403/2832/2510 5404/2833/109 5751/3009/2669 +f 5754/3010/2670 5752/3011/2671 5755/3012/13 +f 5751/3009/2669 5404/2833/109 5756/3013/2672 +f 5759/3014/2673 5756/3013/2672 5760/3015/109 +f 5756/3013/2672 5404/2833/109 5760/3015/109 +f 5404/2833/109 5405/2834/2511 5760/3015/109 +f 5405/2834/2511 5410/2835/1842 5760/3015/109 +f 5410/2835/1842 5414/2836/1842 5760/3015/109 +f 5762/3016/2674 5415/3017/1842 5763/3018/1748 +f 5416/2837/2513 5417/2839/2514 5764/3019/2514 +f 5764/3019/2514 5417/2839/2514 5765/3020/2675 +f 5417/2839/2514 5418/2840/109 5765/3020/2675 +f 5765/3020/2675 5418/2840/109 5766/3021/2676 +f 5418/2840/109 5419/2841/1257 5766/3021/2676 +f 5419/2841/1257 5420/2844/2516 5766/3021/2676 +f 5766/3021/2676 5420/2844/2516 5767/3021/2677 +f 5421/2847/2519 5422/2848/2520 5768/3022/2678 +f 5768/3022/2678 5422/2848/2520 5770/3023/2679 +f 5770/3023/2679 5422/2848/2520 5772/3024/2680 +f 5422/2848/2520 5423/2849/2521 5772/3024/2680 +f 5772/3024/2680 5423/2849/2521 5773/3025/2681 +f 5423/2849/2521 5424/2851/2523 5773/3025/2681 +f 5424/2851/2523 578/237/100 5773/3025/2681 +f 
578/237/100 577/236/99 5773/3025/2681 +f 5774/3025/2682 575/236/2683 5775/3026/2684 +f 5776/3026/2685 576/236/2686 5777/3027/2687 +f 576/236/2686 573/235/2688 5777/3027/2687 +f 5777/3027/2687 573/235/2688 5779/235/1834 +f 574/235/98 578/237/100 5778/235/2689 +f 578/237/100 5426/2854/2524 5778/235/2689 +f 5426/2854/2524 5427/2675/2525 5778/235/2689 +f 5779/235/1834 5428/2675/2526 5780/3028/2690 +f 5428/2675/2526 5430/2855/2528 5780/3028/2690 +f 5781/3028/2691 5429/2855/2692 5782/3029/2693 +f 5782/3029/2693 5429/2855/2692 5783/2855/5 +f 5430/2855/2528 5434/2857/2530 5784/2855/2694 +f 5784/2855/2694 5434/2857/2530 5785/3030/2695 +f 5787/3030/2696 5435/2857/2531 5789/3031/2533 +f 5435/2857/2531 5436/2858/2533 5789/3031/2533 +f 5436/2858/2533 5438/2859/2697 5789/3031/2533 +f 5789/3031/2533 5438/2859/2697 5791/3032/2698 +f 5438/2859/2697 5440/2860/2699 5791/3032/2698 +f 5791/3032/2698 5440/2860/2699 5793/3033/2699 +f 5440/2860/2699 5442/2861/2700 5793/3033/2699 +f 5793/3033/2699 5442/2861/2700 5795/3034/2701 +f 5442/2861/2700 5446/2683/2541 5795/3034/2701 +f 5795/3034/2701 5446/2683/2541 5796/3035/2541 +f 5799/3036/2541 5447/2865/2541 5802/3037/2702 +f 5802/3037/2702 5447/2865/2541 5803/3038/2541 +f 5447/2865/2541 5452/2863/2541 5803/3038/2541 +f 5804/3039/2703 5451/3040/2704 5805/3040/2705 +f 5805/3040/2705 5451/3040/2704 5806/3041/2706 +f 5451/3040/2704 5448/3041/2707 5806/3041/2706 +f 5807/3042/2708 5449/3043/2709 5808/3044/2710 +f 5449/3043/2709 5455/3045/2702 5808/3044/2710 +f 5808/3044/2710 5455/3045/2702 5810/3046/2541 +f 5814/3047/2711 5812/3048/2712 5816/3047/2713 +f 5819/3049/2714 5821/3050/2715 5822/3051/2716 +f 5821/3050/2715 5824/3051/2717 5822/3051/2716 +f 5824/3051/2717 5825/3052/2718 5822/3051/2716 +f 5825/3052/2718 5817/3052/2548 5822/3051/2716 +f 5827/3053/2719 5823/3054/2720 5828/3055/2721 +f 5823/3054/2720 5815/3056/2722 5828/3055/2721 +f 5816/3047/2713 5812/3048/2712 5830/3057/2554 +f 5830/3057/2554 5812/3048/2712 5832/3058/2723 +f 5809/3048/2541 
5456/2866/2541 5831/3058/2541 +f 5834/3058/2541 5457/2866/2541 5835/3059/2541 +f 5837/3059/2541 5458/2866/2541 5838/3060/2541 +f 5459/2866/2541 5461/2868/2541 5840/3060/2541 +f 5840/3060/2541 5461/2868/2541 5842/3061/2541 +f 5842/3061/2541 5461/2868/2541 5843/3062/2541 +f 5461/2868/2541 5465/2869/2541 5843/3062/2541 +f 5465/2869/2541 5469/2880/2541 5843/3062/2541 +f 5845/3062/2702 5470/2880/2702 5847/3063/2702 +f 5471/2880/2553 5474/2879/2552 5846/3063/2539 +f 5848/3064/2724 5475/2875/2548 5849/3065/2725 +f 5475/2875/2548 5472/2874/2547 5849/3065/2725 +f 5849/3065/2725 5472/2874/2547 5850/2874/1135 +f 5851/2874/1829 5472/2874/2547 5852/2876/2726 +f 5472/2874/2547 5476/2876/2549 5852/2876/2726 +f 5855/3066/2727 5477/2877/2550 5856/3067/2727 +f 5477/2877/2550 5479/2878/2551 5856/3067/2727 +f 5480/2881/2406 5481/2882/2555 5859/3068/2728 +f 5859/3068/2728 5481/2882/2555 5860/3069/2727 +f 5481/2882/2555 5482/2883/2557 5860/3069/2727 +f 5862/3069/23 5483/2883/23 5864/3070/23 +f 5484/2883/23 5489/2884/23 5863/3070/23 +f 5866/3070/2729 5490/2884/2560 5867/3071/2730 +f 5490/2884/2560 5491/2885/2561 5867/3071/2730 +f 5867/3071/2730 5491/2885/2561 5869/3072/2731 +f 5869/3072/2731 5491/2885/2561 5870/3073/2732 +f 5491/2885/2561 5492/2886/2562 5870/3073/2732 +f 5492/2886/2562 5494/2887/2733 5870/3073/2732 +f 5495/2887/2734 5497/2888/2735 5871/3073/2736 +f 5497/2888/2735 5499/2889/2737 5871/3073/2736 +f 5871/3073/2736 5499/2889/2737 5872/3074/2738 +f 5500/2889/2739 5504/2890/2740 5872/3074/2738 +f 5872/3074/2738 5504/2890/2740 5874/3075/2741 +f 5505/2891/2574 5507/2892/2576 5873/3076/2742 +f 5876/3076/2743 5506/2892/2744 5878/3077/2745 +f 5878/3077/2745 5506/2892/2744 5879/3078/2746 +f 5506/2892/2744 5509/2893/2580 5879/3078/2746 +f 5509/2893/2580 5511/2895/2582 5879/3078/2746 +f 5511/2895/2582 5512/2898/2585 5879/3078/2746 +f 5879/3078/2746 5512/2898/2585 5880/3079/2747 +f 5881/3080/2748 5513/3081/2749 5883/3082/2577 +f 5514/2899/2586 5515/2900/2587 5882/3083/2750 +f 
5885/3084/2580 5516/2901/2588 5886/3085/2751 +f 5516/2901/2588 5517/2902/2589 5886/3085/2751 +f 5886/3085/2751 5517/2902/2589 5887/3086/2752 +f 5517/2902/2589 5518/2903/2590 5887/3086/2752 +f 5518/2903/2590 5520/2904/2312 5887/3086/2752 +f 5521/3087/2753 5527/2908/2308 5888/3088/2749 +f 5527/2908/2308 5529/2907/2586 5888/3088/2749 +f 5888/3088/2749 5529/2907/2586 5889/3089/2576 +f 5890/3090/2754 5528/3091/2755 5891/3092/2756 +f 5891/3092/2756 5528/3091/2755 5892/3093/2757 +f 5892/3093/2757 5528/3091/2755 5893/3094/2758 +f 5528/3091/2755 5531/3095/2759 5893/3094/2758 +f 5531/3095/2759 5535/3096/2760 5893/3094/2758 +f 5893/3094/2758 5535/3096/2760 5894/3097/2596 +f 5536/3098/2595 5541/3099/2595 5897/3100/2596 +f 5897/3100/2596 5541/3099/2595 5901/3101/2596 +f 5541/3099/2595 5550/2916/2595 5901/3101/2596 +f 5901/3101/2596 5550/2916/2595 5902/3102/2596 +f 5550/2916/2595 5554/2917/2595 5902/3102/2596 +f 5554/2917/2595 5556/2918/2596 5902/3102/2596 +f 5905/3103/12 5555/3104/12 5906/3105/2157 +f 5555/3104/12 5558/2920/2598 5906/3105/2157 +f 5558/2920/2598 5559/2921/2598 5906/3105/2157 +f 5560/2922/2599 5561/2924/2601 5908/3106/2761 +f 5908/3106/2761 5561/2924/2601 5909/3107/2601 +f 5910/3108/2762 5909/3107/2601 5911/3109/2762 +f 5909/3107/2601 5561/2924/2601 5911/3109/2762 +f 5561/2924/2601 5562/2925/2602 5911/3109/2762 +f 5911/3109/2762 5562/2925/2602 5914/3110/2763 +f 5562/2925/2602 5563/2926/2603 5914/3110/2763 +f 5563/2926/2603 5566/2927/2604 5914/3110/2763 +f 5564/3111/23 572/234/23 5913/3112/23 +f 567/234/23 491/191/23 5912/3112/23 +f 5916/3112/23 492/191/23 5917/3113/23 +f 493/191/23 490/190/75 5920/3113/82 +f 5920/3113/82 490/190/75 5921/3114/2764 +f 5922/3115/2765 5921/3114/2764 5923/3116/2766 +f 5921/3114/2764 490/190/75 5923/3116/2766 +f 490/190/75 494/192/76 5923/3116/2766 +f 494/192/76 495/193/77 5923/3116/2766 +f 496/194/78 497/195/79 5925/3117/2767 +f 5925/3117/2767 497/195/79 5926/3118/2767 +f 498/195/80 499/196/81 5929/3118/80 +f 499/196/81 500/198/83 
5929/3118/80 +f 500/198/83 504/200/83 5929/3118/80 +f 5929/3118/80 504/200/83 5932/3119/80 +f 504/200/83 503/199/81 5932/3119/80 +f 5932/3119/80 503/199/81 5933/3120/2768 +f 5935/3121/2768 501/3122/81 5936/3123/2769 +f 502/3122/81 506/202/85 5938/3123/2770 +f 506/202/85 510/204/87 5938/3123/2770 +f 5938/3123/2770 510/204/87 5940/3124/2771 +f 5940/3124/2771 510/204/87 5942/3125/2772 +f 5942/3125/2772 510/204/87 5946/3126/2773 +f 5946/3126/2773 510/204/87 5947/3127/2187 +f 510/204/87 511/205/88 5947/3127/2187 +f 5947/3127/2187 511/205/88 5953/3128/1776 +f 512/206/23 515/208/23 5952/3129/23 +f 516/208/23 522/215/23 5951/3129/23 +f 523/215/23 524/216/23 5950/3129/23 +f 524/216/23 529/217/23 5950/3129/23 +f 530/217/23 535/218/23 5949/3129/23 +f 5956/3130/23 5954/3129/23 5960/3131/23 +f 5948/3129/23 536/218/23 5959/3131/23 +f 537/218/23 540/219/23 5958/3131/23 +f 541/219/23 544/220/23 5957/3131/23 +f 5962/3131/23 545/220/23 5964/3132/23 +f 546/220/23 548/3133/23 5963/3132/23 +f 5966/3134/2774 5965/3135/2763 5967/3136/2775 +f 5965/3135/2763 549/3137/2776 5967/3136/2775 +f 549/3137/2776 551/3138/2777 5967/3136/2775 +f 5967/3136/2775 551/3138/2777 5968/3139/2778 +f 551/3138/2777 553/3140/61 5968/3139/2778 +f 553/3140/61 555/3141/2779 5968/3139/2778 +f 5969/3142/2780 556/225/2781 5971/3143/2782 +f 5971/3143/2782 556/225/2781 5972/3144/96 +f 556/225/2781 559/226/94 5972/3144/96 +f 559/226/94 561/227/95 5972/3144/96 +f 5973/3145/93 562/230/93 5974/3146/60 +f 562/230/93 563/231/59 5974/3146/60 +f 5974/3146/60 563/231/59 5975/3147/2783 +f 563/231/59 566/232/97 5975/3147/2783 +f 5977/3148/23 564/3149/23 5980/3150/23 +f 565/3149/23 570/3151/23 5979/3150/23 +f 571/234/23 5565/3111/23 5978/3152/23 +f 5566/2927/2604 5568/2928/2605 5983/3153/88 +f 5983/3153/88 5568/2928/2605 5984/3154/2784 +f 5985/3155/2785 5567/3156/2786 5987/3157/2405 +f 5568/2928/2605 5569/2929/2606 5986/3158/2787 +f 5569/2929/2606 5571/3159/2788 5986/3158/2787 +f 5988/3160/2326 5572/3161/2789 5991/3162/2790 +f 
5573/2931/2608 5574/2932/2609 5990/3163/2790 +f 5575/2933/2610 5576/2934/2611 5989/3164/2791 +f 5576/2934/2611 5577/2935/2612 5989/3164/2791 +f 5992/3165/2791 5578/2936/2792 5995/3166/2793 +f 5579/2936/2004 5580/2939/2615 5994/3166/2794 +f 5580/2939/2615 5585/2941/105 5994/3166/2794 +f 5586/2941/103 5592/2944/103 5993/3166/103 +f 5995/3166/2793 5593/2944/105 5996/3167/2795 +f 5593/2944/105 5594/2945/2617 5996/3167/2795 +f 5996/3167/2795 5594/2945/2617 5997/3168/2795 +f 5594/2945/2617 5597/2946/2796 5997/3168/2795 +f 5597/2946/2796 5601/3169/2796 5997/3168/2795 +f 5997/3168/2795 5601/3169/2796 6001/3170/2797 +f 5602/3171/103 5606/3172/103 6000/3173/103 +f 5607/3172/107 5611/3174/107 5999/3173/107 +f 5612/3174/2798 5617/3175/2798 5998/3173/2798 +f 6004/3173/103 5618/3175/103 6008/3176/103 +f 5619/3175/103 5627/3177/103 6007/3176/103 +f 5628/3177/103 5633/2955/103 6006/3176/103 +f 5634/2955/103 5637/2957/103 6005/3176/103 +f 6011/3176/2462 5638/2957/1818 6012/3178/1579 +f 5638/2957/1818 5639/2958/1579 6012/3178/1579 +f 5639/2958/1579 5645/2961/2328 6012/3178/1579 +f 5645/2961/2328 5648/2962/2799 6012/3178/1579 +f 6015/3179/2800 6014/3180/1577 6016/3181/2801 +f 6014/3180/1577 5649/3182/2328 6016/3181/2801 +f 5650/3183/107 5653/3184/103 6017/3185/2802 +f 5653/3184/103 5656/3186/103 6017/3185/2802 +f 5656/3186/103 5658/3187/103 6017/3185/2802 +f 5658/3187/103 5661/3188/103 6017/3185/2802 +f 5661/3188/103 5664/3189/103 6017/3185/2802 +f 5664/3189/103 5667/3190/103 6017/3185/2802 +f 6017/3185/2802 5667/3190/103 6019/3191/2802 +f 5668/2971/103 5670/2972/103 6018/3192/2802 +f 5671/2972/1818 5674/2973/2803 6020/3192/1818 +f 5674/2973/2803 5676/2974/2630 6020/3192/1818 +f 5676/2974/2630 5677/2975/2631 6020/3192/1818 +f 6020/3192/1818 5677/2975/2631 6022/3193/2804 +f 6022/3193/2804 5677/2975/2631 6023/3194/2805 +f 5677/2975/2631 5679/2976/2632 6023/3194/2805 +f 6023/3194/2805 5679/2976/2632 6024/3195/2806 +f 5678/2976/2807 5680/2977/2808 6025/3195/2809 +f 5680/2977/2808 
5682/2978/2476 6025/3195/2809 +f 6025/3195/2809 5682/2978/2476 6026/3196/2810 +f 5683/2978/2634 5684/2979/2635 6027/3196/2811 +f 6027/3196/2811 5684/2979/2635 6028/3197/2812 +f 6028/3197/2812 5684/2979/2635 6031/3198/2813 +f 5684/2979/2635 5685/2980/2636 6031/3198/2813 +f 5686/2980/1284 5691/2981/1284 6030/3198/1284 +f 5692/2981/1276 5695/3199/1276 6029/3198/1276 +f 6032/3198/1276 5696/3199/1276 6035/3200/1276 +f 5697/2984/1284 73/2985/1284 6034/3201/1284 +f 74/2985/1276 70/3202/1276 6033/3201/1276 +f 6037/3203/1398 71/3204/1398 6038/3203/2814 +f 6038/3203/2814 71/3204/1398 6039/3205/2815 +f 6039/3205/2815 71/3204/1398 6040/3206/2816 +f 6041/3207/2817 72/43/39 6042/3208/2818 +f 72/43/39 69/42/38 6042/3208/2818 +f 6044/3208/2819 6042/3208/2818 6046/42/2820 +f 6042/3208/2818 69/42/38 6046/42/2820 +f 6046/42/2820 69/42/38 6047/44/2821 +f 69/42/38 75/44/40 6047/44/2821 +f 75/44/40 5702/3209/2645 6047/44/2821 +f 5702/3209/2645 5704/3209/2822 6047/44/2821 +f 6049/3210/2821 5705/3211/2823 6050/3212/2824 +f 5706/2987/2644 5708/2988/2646 6052/3213/2824 +f 6052/3213/2824 5708/2988/2646 6054/2991/2825 +f 5708/2988/2646 5710/2991/2651 6054/2991/2825 +f 5710/2991/2651 5711/2991/2652 6054/2991/2825 +f 6054/2991/2825 5711/2991/2652 6056/2991/2826 +f 5711/2991/2652 5713/2992/2654 6056/2991/2826 +f 6056/2991/2826 5713/2992/2654 6058/3214/2827 +f 6058/3214/2827 5713/2992/2654 6059/3214/2828 +f 5713/2992/2654 5715/2993/2655 6059/3214/2828 +f 5715/2993/2655 5721/2994/2829 6059/3214/2828 +f 6061/3215/2830 5722/2998/2659 6062/2998/2831 +f 5722/2998/2659 5730/2999/20 6062/2998/2831 +f 6062/2998/2831 5730/2999/20 6063/3216/19 +f 5729/3217/2832 5732/3218/2661 6064/3219/2833 +f 5732/3218/2661 6067/3220/2834 6064/3219/2833 +f 6064/3219/2833 6067/3220/2834 6068/3221/2835 +f 6069/3222/2836 6065/3222/2837 6070/3223/2838 +f 6066/3222/1276 5733/3000/1276 6072/3223/1400 +f 6072/3223/1400 5733/3000/1276 6074/3224/1400 +f 5734/3000/2663 5735/2995/1433 6073/3224/2839 +f 6076/3225/2840 5736/3001/1433 
6078/3226/2841 +f 5736/3001/1433 5737/3003/2664 6078/3226/2841 +f 6078/3226/2841 5737/3003/2664 6080/3227/1786 +f 5737/3003/2664 6081/3228/1784 6080/3227/1786 +f 6080/3227/1786 6081/3228/1784 6082/3229/2144 +f 6081/3228/1784 6084/3230/1770 6082/3229/2144 +f 6084/3230/1770 6086/3231/2842 6082/3229/2144 +f 6086/3231/2842 6090/3232/2843 6082/3229/2144 +f 6083/3229/23 6087/3232/23 6092/3233/23 +f 6092/3233/23 6087/3232/23 6093/3234/23 +f 6096/3235/2844 6088/3236/2845 6097/3237/2846 +f 6097/3237/2846 6088/3236/2845 6098/3238/2847 +f 6089/3236/2848 6085/3239/2849 6099/3238/2850 +f 6100/3240/2851 6126/3241/2852 6102/3242/2853 +f 6126/3241/2852 6099/3238/2850 6102/3242/2853 +f 6099/3238/2850 6085/3239/2849 6102/3242/2853 +f 6086/3231/2842 6084/3230/1770 6101/3243/2854 +f 6101/3243/2854 6084/3230/1770 6103/3244/2855 +f 6084/3230/1770 6081/3228/1784 6103/3244/2855 +f 6103/3244/2855 6081/3228/1784 6105/3245/2856 +f 6081/3228/1784 5737/3003/2664 6105/3245/2856 +f 5737/3003/2664 5740/3004/2857 6105/3245/2856 +f 6104/3245/2858 5741/3004/2649 6108/3246/2859 +f 5741/3004/2649 5745/3247/2649 6108/3246/2859 +f 5746/3248/2667 5749/3248/2668 6107/3249/2860 +f 5749/3248/2668 5752/3011/2671 6107/3249/2860 +f 5752/3011/2671 5754/3010/2670 6107/3249/2860 +f 6106/3249/2861 5753/3010/2 6109/3250/2 +f 6109/3250/2 5753/3010/2 6110/3251/2 +f 6110/3251/2 5753/3010/2 6112/3252/2 +f 5754/3010/2670 5755/3012/13 6111/3252/2862 +f 5755/3012/13 5757/3253/13 6111/3252/2862 +f 6111/3252/2862 5757/3253/13 6117/3254/2863 +f 6118/3255/2 6113/3256/2 6121/3257/2 +f 6114/3256/2864 6125/3258/2864 6120/3257/2864 +f 6129/3259/2865 6119/3260/2866 6131/3259/2867 +f 6119/3260/2866 6122/3259/2868 6131/3259/2867 +f 6133/3261/2869 6131/3259/2867 6134/3262/2870 +f 6131/3259/2867 6122/3259/2868 6134/3262/2870 +f 6136/3263/2 6123/3258/2 6138/3264/2 +f 6124/3258/2 6115/3256/2 6137/3264/2 +f 6116/3256/2 5758/3265/13 6144/3264/2862 +f 6144/3264/2862 5758/3265/13 6145/3266/1255 +f 5758/3265/13 5761/3267/2871 6145/3266/1255 
+f 5761/3267/2871 5764/3019/2514 6145/3266/1255 +f 5764/3019/2514 5765/3020/2675 6145/3266/1255 +f 6145/3266/1255 5765/3020/2675 6146/3268/1256 +f 5765/3020/2675 5766/3021/2676 6146/3268/1256 +f 5766/3021/2676 5767/3021/2677 6146/3268/1256 +f 6146/3268/1256 5767/3021/2677 6148/3269/2872 +f 5768/3022/2678 5770/3023/2679 6147/3270/2873 +f 6152/3271/2 6149/3270/2874 6154/3272/2875 +f 6149/3270/2874 5769/3023/2876 6154/3272/2875 +f 6154/3272/2875 5769/3023/2876 6156/3273/2877 +f 5769/3023/2876 5771/3024/2878 6156/3273/2877 +f 6156/3273/2877 5771/3024/2878 6158/3274/2879 +f 5771/3024/2878 5774/3025/2682 6158/3274/2879 +f 6158/3274/2879 5774/3025/2682 6160/3275/2880 +f 5774/3025/2682 5775/3026/2684 6160/3275/2880 +f 6160/3275/2880 5775/3026/2684 6161/3276/2881 +f 6163/3276/2882 5776/3026/2685 6165/3277/2883 +f 5776/3026/2685 5777/3027/2687 6165/3277/2883 +f 6165/3277/2883 5777/3027/2687 6167/3027/2884 +f 6167/3027/2884 5777/3027/2687 6169/3278/2885 +f 5777/3027/2687 5779/235/1834 6168/3278/2886 +f 5779/235/1834 5780/3028/2690 6168/3278/2886 +f 6171/3279/2887 6170/3278/2888 6172/3280/2889 +f 6170/3278/2888 5781/3028/2691 6172/3280/2889 +f 5781/3028/2691 5782/3029/2693 6172/3280/2889 +f 6172/3280/2889 5782/3029/2693 6173/3281/2890 +f 6173/3281/2890 5782/3029/2693 6174/3282/2891 +f 5782/3029/2693 5783/2855/5 6174/3282/2891 +f 6174/3282/2891 5783/2855/5 6175/3283/2892 +f 5784/2855/2694 5785/3030/2695 6176/3283/2893 +f 6176/3283/2893 5785/3030/2695 6177/3030/2894 +f 5786/3030/2895 5788/3031/2896 6178/3030/2897 +f 6178/3030/2897 5788/3031/2896 6179/3284/2898 +f 5788/3031/2896 5790/3032/2899 6179/3284/2898 +f 5790/3032/2899 6180/3032/2900 6179/3284/2898 +f 6179/3284/2898 6180/3032/2900 6181/3285/2901 +f 6180/3032/2900 5790/3032/2899 6181/3285/2901 +f 5790/3032/2899 5792/3033/2902 6181/3285/2901 +f 6181/3285/2901 5792/3033/2902 6182/3286/2903 +f 5792/3033/2902 5794/3034/2904 6182/3286/2903 +f 6182/3286/2903 5794/3034/2904 6183/3287/2905 +f 5794/3034/2904 5797/3035/2553 
6183/3287/2905 +f 6184/3288/2906 5798/3289/2553 6185/3290/2410 +f 5798/3289/2553 5800/3291/2553 6185/3290/2410 +f 6185/3290/2410 5800/3291/2553 6186/3292/2907 +f 6189/3293/2015 6187/3292/2908 6190/3292/2909 +f 6187/3292/2908 5801/3291/2910 6190/3292/2909 +f 6190/3292/2909 5801/3291/2910 6191/3294/2909 +f 6191/3294/2909 5801/3291/2910 6193/3295/2911 +f 5801/3291/2910 5804/3039/2703 6193/3295/2911 +f 5804/3039/2703 5805/3040/2705 6193/3295/2911 +f 6193/3295/2911 5805/3040/2705 6195/3296/2912 +f 6195/3296/2912 5805/3040/2705 6197/3297/2913 +f 5805/3040/2705 5806/3041/2706 6197/3297/2913 +f 6197/3297/2913 5806/3041/2706 6198/3297/2914 +f 5807/3042/2708 5808/3044/2710 6201/3044/2915 +f 5808/3044/2710 5811/3046/2916 6201/3044/2915 +f 6201/3044/2915 5811/3046/2916 6202/3298/2915 +f 5811/3046/2916 5813/3046/2917 6202/3298/2915 +f 5813/3046/2917 5817/3052/2548 6202/3298/2915 +f 5817/3052/2548 5825/3052/2718 6202/3298/2915 +f 5825/3052/2718 5824/3051/2717 6203/3298/1050 +f 5824/3051/2717 5820/3050/1135 6203/3298/1050 +f 6203/3298/1050 5820/3050/1135 6204/3299/1135 +f 6205/3299/2918 5821/3050/2715 6206/3300/2919 +f 5821/3050/2715 5819/3049/2714 6206/3300/2919 +f 6207/3301/1419 5818/3302/45 6208/3303/1419 +f 6208/3303/1419 5818/3302/45 6209/3304/2783 +f 5818/3302/45 5823/3054/2720 6209/3304/2783 +f 6209/3304/2783 5823/3054/2720 6210/3305/2783 +f 5823/3054/2720 5827/3053/2719 6210/3305/2783 +f 6213/3306/2920 6211/3307/2921 6215/3308/2922 +f 6211/3307/2921 5826/3308/2923 6215/3308/2922 +f 5826/3308/2923 5829/3309/2924 6215/3308/2922 +f 6215/3308/2922 5829/3309/2924 6217/3310/2925 +f 5829/3309/2924 5833/3311/2926 6217/3310/2925 +f 6217/3310/2925 5833/3311/2926 6219/3312/2927 +f 5833/3311/2926 5836/3313/2928 6219/3312/2927 +f 6219/3312/2927 5836/3313/2928 6221/3314/2929 +f 5836/3313/2928 5839/3315/2930 6221/3314/2929 +f 6221/3314/2929 5839/3315/2930 6223/3316/2931 +f 5839/3315/2930 5841/3317/2932 6223/3316/2931 +f 5841/3317/2932 5844/3317/2933 6223/3316/2931 +f 6223/3316/2931 
5844/3317/2933 6224/3316/2934 +f 5844/3317/2933 5848/3064/2724 6224/3316/2934 +f 5848/3064/2724 5849/3065/2725 6224/3316/2934 +f 6225/3316/2935 5849/3065/2725 6227/3318/2935 +f 6227/3318/2935 5849/3065/2725 6228/3319/1050 +f 5849/3065/2725 5850/2874/1135 6228/3319/1050 +f 6230/3320/2936 6229/3319/2937 6232/3321/2715 +f 6228/3319/1050 5850/2874/1135 6231/3321/1135 +f 6232/3321/2715 5851/2874/1829 6234/3322/2938 +f 5851/2874/1829 5852/2876/2726 6234/3322/2938 +f 6236/3323/59 6235/3324/47 6240/3325/47 +f 6233/3324/23 5853/3066/23 6239/3325/23 +f 5854/3066/23 5857/3067/23 6238/3325/23 +f 5858/3068/23 5861/3069/23 6237/3326/23 +f 5861/3069/23 5865/3070/23 6237/3326/23 +f 6242/3326/2939 5866/3070/2729 6244/3327/2940 +f 5866/3070/2729 5867/3071/2730 6244/3327/2940 +f 6244/3327/2940 5867/3071/2730 6245/3328/2941 +f 5867/3071/2730 5869/3072/2731 6245/3328/2941 +f 6248/3328/2942 5868/3072/2943 6250/3329/2944 +f 6250/3329/2944 5868/3072/2943 6251/3330/2945 +f 5868/3072/2943 5871/3073/2736 6251/3330/2945 +f 5871/3073/2736 5872/3074/2738 6251/3330/2945 +f 5872/3074/2738 5874/3075/2741 6251/3330/2945 +f 5875/3075/2946 5877/3331/2947 6252/3330/2948 +f 6252/3330/2948 5877/3331/2947 6253/3332/2949 +f 6253/3332/2949 5877/3331/2947 6254/3333/2950 +f 6256/3334/2951 5878/3077/2745 6257/3335/2952 +f 5878/3077/2745 5879/3078/2746 6257/3335/2952 +f 5879/3078/2746 5880/3079/2747 6257/3335/2952 +f 6260/3336/2742 6258/3337/2742 6261/3338/2576 +f 6258/3337/2742 5881/3080/2748 6261/3338/2576 +f 5881/3080/2748 5883/3082/2577 6261/3338/2576 +f 6262/3338/2744 5884/3082/2580 6263/3339/2953 +f 5885/3084/2580 5886/3085/2751 6264/3340/2954 +f 5886/3085/2751 5887/3086/2752 6264/3340/2954 +f 6264/3340/2954 5887/3086/2752 6265/3341/2955 +f 5888/3088/2749 5889/3089/2576 6267/3342/2576 +f 6269/3343/2741 6267/3342/2576 6270/3344/2742 +f 6267/3342/2576 5889/3089/2576 6270/3344/2742 +f 6271/3345/2956 5890/3090/2754 6272/3346/2957 +f 5890/3090/2754 5891/3092/2756 6272/3346/2957 +f 6272/3346/2957 
5891/3092/2756 6274/3347/2958 +f 5891/3092/2756 5892/3093/2757 6274/3347/2958 +f 6274/3347/2958 5892/3093/2757 6275/3348/2959 +f 5892/3093/2757 5893/3094/2758 6275/3348/2959 +f 5893/3094/2758 5894/3097/2596 6275/3348/2959 +f 6276/3348/2960 5895/3097/2598 6278/3349/2961 +f 5896/3097/2157 5898/3350/2157 6277/3349/2157 +f 6280/3349/2157 5899/3350/2157 6283/3351/2157 +f 5900/3350/2157 5903/3103/2157 6282/3351/2157 +f 5904/3103/2157 5907/3105/2157 6281/3351/2157 +f 5908/3106/2761 5909/3107/2601 6284/3352/2962 +f 5909/3107/2601 5910/3108/2762 6284/3352/2962 +f 6284/3352/2962 5910/3108/2762 6285/3353/2762 +f 6285/3353/2762 5910/3108/2762 6287/3354/2763 +f 5910/3108/2762 5911/3109/2762 6287/3354/2763 +f 5911/3109/2762 5914/3110/2763 6287/3354/2763 +f 5915/3112/23 5918/3113/23 6286/3355/23 +f 6289/3355/23 5919/3113/23 6290/3356/23 +f 5920/3113/82 5921/3114/2764 6293/3356/58 +f 6293/3356/58 5921/3114/2764 6294/3357/83 +f 5921/3114/2764 5922/3115/2765 6294/3357/83 +f 6294/3357/83 5922/3115/2765 6297/3358/2766 +f 5922/3115/2765 5923/3116/2766 6297/3358/2766 +f 5924/3117/2767 5927/3118/2767 6296/3359/2767 +f 5928/3118/2963 5930/3119/2963 6295/3359/2963 +f 5931/3360/2964 5934/3361/2965 6298/3362/2966 +f 6298/3362/2966 5934/3361/2965 6300/3363/2967 +f 5934/3361/2965 5937/3364/2968 6300/3363/2967 +f 6300/3363/2967 5937/3364/2968 6303/3365/2969 +f 5937/3364/2968 5939/3366/2970 6303/3365/2969 +f 6303/3365/2969 5939/3366/2970 6305/3367/2025 +f 5939/3366/2970 5941/3368/2800 6305/3367/2025 +f 6305/3367/2025 5941/3368/2800 6306/3369/2971 +f 5941/3368/2800 5943/3370/2971 6306/3369/2971 +f 6308/3369/2971 5944/3370/2971 6310/3371/2971 +f 6316/3372/2971 6313/3371/2971 6319/3373/2971 +f 6309/3371/2971 5945/3370/2971 6318/3373/2971 +f 5946/3126/2773 5947/3127/2187 6317/3374/2773 +f 5947/3127/2187 6323/3375/2187 6322/3374/2410 +f 6322/3374/2410 6323/3375/2187 6326/3376/2972 +f 6326/3376/2972 6323/3375/2187 6327/3377/2973 +f 6327/3377/2973 6323/3375/2187 6328/3378/2785 +f 6323/3375/2187 
5947/3127/2187 6328/3378/2785 +f 5947/3127/2187 5953/3128/1776 6328/3378/2785 +f 5953/3128/1776 5955/3379/2974 6328/3378/2785 +f 6328/3378/2785 5955/3379/2974 6329/3380/2774 +f 5955/3379/2974 5961/3381/2763 6329/3380/2774 +f 5961/3381/2763 5965/3135/2763 6329/3380/2774 +f 5965/3135/2763 5966/3134/2774 6329/3380/2774 +f 6329/3380/2774 5966/3134/2774 6330/3382/2975 +f 6330/3382/2975 5966/3134/2774 6331/3383/2975 +f 5966/3134/2774 5967/3136/2775 6331/3383/2975 +f 6331/3383/2975 5967/3136/2775 6333/3384/2976 +f 6333/3384/2976 5967/3136/2775 6334/3385/2977 +f 5967/3136/2775 5968/3139/2778 6334/3385/2977 +f 6337/3386/2978 6335/3387/2979 6339/3388/2980 +f 6335/3387/2979 5969/3142/2780 6339/3388/2980 +f 6339/3388/2980 5969/3142/2780 6340/3389/2981 +f 5969/3142/2780 5971/3143/2782 6340/3389/2981 +f 6341/3390/2982 5970/3391/2983 6342/3392/61 +f 5970/3391/2983 5973/3145/93 6342/3392/61 +f 5973/3145/93 5974/3146/60 6342/3392/61 +f 6342/3392/61 5974/3146/60 6343/3393/60 +f 6345/3394/2984 6343/3393/60 6348/3395/45 +f 6343/3393/60 5974/3146/60 6348/3395/45 +f 6348/3395/45 5974/3146/60 6349/3396/45 +f 5974/3146/60 5975/3147/2783 6349/3396/45 +f 6352/3397/23 5976/3148/23 6353/3398/23 +f 5976/3148/23 5981/3150/23 6353/3398/23 +f 5982/3150/88 5985/3155/2785 6355/3398/88 +f 6355/3398/88 5985/3155/2785 6356/3399/2784 +f 6356/3399/2784 5985/3155/2785 6357/3400/2985 +f 5985/3155/2785 5987/3157/2405 6357/3400/2985 +f 6360/3401/2986 6358/3402/1577 6361/3403/2971 +f 6358/3402/1577 5988/3160/2326 6361/3403/2971 +f 5988/3160/2326 5991/3162/2790 6361/3403/2971 +f 5992/3165/2791 5995/3166/2793 6362/3404/2987 +f 5995/3166/2793 5996/3167/2795 6362/3404/2987 +f 5996/3167/2795 5997/3168/2795 6362/3404/2987 +f 6362/3404/2987 5997/3168/2795 6363/3405/2988 +f 5997/3168/2795 6001/3170/2797 6363/3405/2988 +f 6367/3406/2989 6365/3407/2990 6368/3408/2991 +f 6365/3407/2990 6002/3409/2797 6368/3408/2991 +f 6369/3410/2802 6003/3173/103 6370/3411/2802 +f 6003/3173/103 6009/3176/103 6370/3411/2802 +f 
6372/3412/1819 6010/3413/2462 6373/3179/2992 +f 6010/3413/2462 6013/3180/1579 6373/3179/2992 +f 6014/3180/1577 6015/3179/2800 6374/3179/2993 +f 6374/3179/2993 6015/3179/2800 6375/3414/2994 +f 6375/3414/2994 6015/3179/2800 6376/3415/2995 +f 6015/3179/2800 6016/3181/2801 6376/3415/2995 +f 6377/3416/2996 6017/3185/2802 6378/3417/2997 +f 6017/3185/2802 6019/3191/2802 6378/3417/2997 +f 6019/3191/2802 6021/3418/2998 6378/3417/2997 +f 6378/3417/2997 6021/3418/2998 6379/3419/2999 +f 6379/3419/2999 6021/3418/2998 6381/3420/3000 +f 6022/3193/2804 6023/3194/2805 6380/3421/3001 +f 6381/3420/3000 6383/3422/3002 6384/3423/3003 +f 6384/3423/3003 6383/3422/3002 6385/3424/3002 +f 6383/3422/3002 6387/3425/3004 6385/3424/3002 +f 6386/3424/3002 6387/3425/3004 6388/3426/3005 +f 6388/3426/3005 6387/3425/3004 6389/3427/3006 +f 6387/3425/3004 6383/3422/3002 6389/3427/3006 +f 6390/3428/3007 6382/3429/3008 6391/3430/3009 +f 6382/3429/3008 6380/3421/3001 6391/3430/3009 +f 6380/3421/3001 6023/3194/2805 6391/3430/3009 +f 6023/3194/2805 6024/3195/2806 6391/3430/3009 +f 6391/3430/3009 6024/3195/2806 6392/3431/3010 +f 6025/3195/2809 6026/3196/2810 6393/3431/3011 +f 6393/3431/3011 6026/3196/2810 6394/3432/3012 +f 6027/3196/2811 6028/3197/2812 6395/3432/3013 +f 6395/3432/3013 6028/3197/2812 6396/3433/3014 +f 6028/3197/2812 6031/3198/2813 6396/3433/3014 +f 6031/3198/2813 6036/3200/3015 6396/3433/3014 +f 6397/3434/3016 6037/3203/1398 6398/3435/3017 +f 6037/3203/1398 6038/3203/2814 6398/3435/3017 +f 6038/3203/2814 6039/3205/2815 6398/3435/3017 +f 6398/3435/3017 6039/3205/2815 6399/3436/3018 +f 6039/3205/2815 6040/3206/2816 6399/3436/3018 +f 6399/3436/3018 6040/3206/2816 6400/3437/3019 +f 6041/3207/2817 6042/3208/2818 6402/3438/3020 +f 6042/3208/2818 6044/3208/2819 6402/3438/3020 +f 6402/3438/3020 6044/3208/2819 6403/3439/3021 +f 6043/3440/3022 6045/3441/1426 6404/3442/1427 +f 6045/3441/1426 6048/3210/1426 6404/3442/1427 +f 6048/3210/1426 6051/3212/1425 6404/3442/1427 +f 6051/3212/1425 6053/3443/1580 
6404/3442/1427 +f 6053/3443/1580 6055/3443/1579 6404/3442/1427 +f 6404/3442/1427 6055/3443/1579 6405/3444/3023 +f 6410/3445/3024 6412/3446/3025 6414/3447/3026 +f 6411/3444/3025 6407/3444/3027 6413/3448/3028 +f 6405/3444/3023 6055/3443/1579 6415/3448/3029 +f 6055/3443/1579 6057/3449/3030 6415/3448/3029 +f 6057/3449/3030 6060/3215/1773 6415/3448/3029 +f 6415/3448/3029 6060/3215/1773 6417/3448/1573 +f 6060/3215/1773 6062/2998/2831 6417/3448/1573 +f 6417/3448/1573 6062/2998/2831 6418/3450/19 +f 6062/2998/2831 6063/3216/19 6418/3450/19 +f 6064/3219/2833 6068/3221/2835 6419/3451/3031 +f 6419/3451/3031 6068/3221/2835 6420/3221/3032 +f 6421/3452/3033 6069/3222/2836 6422/3453/3018 +f 6069/3222/2836 6070/3223/2838 6422/3453/3018 +f 6423/3453/3034 6071/3223/3035 6425/3454/3036 +f 6072/3223/1400 6074/3224/1400 6424/3454/3037 +f 6075/3225/1400 6077/3226/3038 6427/3455/3038 +f 6077/3226/3038 6079/3227/1561 6427/3455/3038 +f 6427/3455/3038 6079/3227/1561 6428/3456/1561 +f 6079/3227/1561 6083/3229/23 6428/3456/1561 +f 6083/3229/23 6092/3233/23 6428/3456/1561 +f 6429/3456/3039 6091/3233/3040 6430/3457/3041 +f 6430/3457/3041 6091/3233/3040 6431/3458/3042 +f 6091/3233/3040 6094/3234/75 6431/3458/3042 +f 6431/3458/3042 6094/3234/75 6432/3459/1385 +f 6433/3459/3043 6095/3234/3044 6434/3459/3045 +f 6434/3459/3045 6095/3234/3044 6435/3459/1425 +f 6096/3235/2844 6097/3237/2846 6437/3460/1425 +f 6097/3237/2846 6098/3238/2847 6437/3460/1425 +f 6098/3238/2847 6127/3241/1559 6437/3460/1425 +f 6437/3460/1425 6127/3241/1559 6438/3461/1559 +f 6128/3259/3046 6130/3259/1579 6441/3462/1559 +f 6441/3462/1559 6130/3259/1579 6443/3463/1579 +f 6130/3259/1579 6132/3261/1662 6443/3463/1579 +f 6443/3463/1579 6132/3261/1662 6445/3464/3047 +f 6445/3464/3047 6132/3261/1662 67/41/1773 +f 66/41/3048 6133/3261/2869 6448/40/3049 +f 6133/3261/2869 6134/3262/2870 6448/40/3049 +f 6135/3263/3050 6139/3264/3050 6447/3465/3050 +f 6446/3465/2 6140/3264/2 6450/3465/3051 +f 6449/3465/2 6141/3264/2 6454/3466/2 +f 
6453/3466/2 6142/3264/2 6455/3467/2 +f 6458/3467/2 6143/3264/2 6459/3468/2 +f 6144/3264/2862 6145/3266/1255 6463/3468/2862 +f 6145/3266/1255 6146/3268/1256 6463/3468/2862 +f 6463/3468/2862 6146/3268/1256 6465/3469/2862 +f 6146/3268/1256 6148/3269/2872 6465/3469/2862 +f 6149/3270/2874 6152/3271/2 6464/3470/2 +f 6468/3470/2 6150/3271/2 6470/3471/2 +f 6473/3472/3052 6472/3471/3053 6474/3473/2528 +f 6475/3473/1255 6469/3471/3054 6476/3474/1255 +f 6469/3471/3054 6151/3271/3055 6476/3474/1255 +f 6476/3474/1255 6151/3271/3055 6477/3475/1256 +f 6151/3271/3055 6153/3272/3056 6477/3475/1256 +f 6477/3475/1256 6153/3272/3056 6478/3476/3057 +f 6153/3272/3056 6155/3273/3058 6478/3476/3057 +f 6478/3476/3057 6155/3273/3058 6479/3477/2600 +f 6155/3273/3058 6157/3274/3059 6479/3477/2600 +f 6479/3477/2600 6157/3274/3059 6480/3274/3060 +f 6157/3274/3059 6159/3275/3061 6480/3274/3060 +f 6480/3274/3060 6159/3275/3061 6481/3478/3062 +f 6159/3275/3061 6162/3276/3063 6481/3478/3062 +f 6482/3478/3064 6163/3276/2882 6483/3276/3065 +f 6163/3276/2882 6165/3277/2883 6483/3276/3065 +f 6484/3276/3066 6164/3277/3067 6485/3479/3068 +f 6164/3277/3067 6166/3027/3069 6485/3479/3068 +f 6485/3479/3068 6166/3027/3069 6486/3480/3070 +f 6166/3027/3069 6170/3278/2888 6486/3480/3070 +f 6170/3278/2888 6171/3279/2887 6486/3480/3070 +f 6486/3480/3070 6171/3279/2887 6487/3481/3071 +f 6171/3279/2887 6172/3280/2889 6487/3481/3071 +f 6487/3481/3071 6172/3280/2889 6488/3482/3072 +f 6488/3482/3072 6172/3280/2889 6489/3483/3073 +f 6172/3280/2889 6173/3281/2890 6489/3483/3073 +f 6489/3483/3073 6490/3484/3074 6491/3484/3075 +f 6491/3484/3075 6490/3484/3074 6492/3485/3076 +f 6492/3485/3076 6490/3484/3074 6493/3485/3077 +f 6494/3485/3078 6493/3485/3077 6495/3486/3079 +f 6493/3485/3077 6490/3484/3074 6496/3486/3080 +f 6496/3486/3080 6490/3484/3074 6497/3487/3081 +f 6490/3484/3074 6489/3483/3073 6497/3487/3081 +f 6497/3487/3081 6489/3483/3073 6498/3488/3082 +f 6489/3483/3073 6173/3281/2890 6498/3488/3082 +f 6173/3281/2890 
6174/3282/2891 6498/3488/3082 +f 6174/3282/2891 6175/3283/2892 6498/3488/3082 +f 6498/3488/3082 6175/3283/2892 6499/3489/2892 +f 6501/3489/3083 6500/3489/3084 6502/3283/3085 +f 6500/3489/3084 6176/3283/2893 6502/3283/3085 +f 6176/3283/2893 6177/3030/2894 6502/3283/3085 +f 6502/3283/3085 6177/3030/2894 6503/3490/3086 +f 6506/3490/3087 6178/3030/2897 6509/3491/3088 +f 6178/3030/2897 6179/3284/2898 6509/3491/3088 +f 6179/3284/2898 6181/3285/2901 6509/3491/3088 +f 6509/3491/3088 6181/3285/2901 6513/3492/3089 +f 6181/3285/2901 6182/3286/2903 6513/3492/3089 +f 6513/3492/3089 6182/3286/2903 6514/3493/2727 +f 6182/3286/2903 6183/3287/2905 6514/3493/2727 +f 6184/3288/2906 6185/3290/2410 6518/3494/3090 +f 6518/3494/3090 6185/3290/2410 6519/3293/2550 +f 6185/3290/2410 6186/3292/2907 6519/3293/2550 +f 6187/3292/2908 6189/3293/2015 6522/3293/3091 +f 6522/3293/3091 6189/3293/2015 6523/3495/3092 +f 6526/3496/59 6525/3495/3093 6527/3497/3094 +f 6528/3497/3095 6523/3495/3092 6529/3497/3096 +f 6523/3495/3092 6189/3293/2015 6529/3497/3096 +f 6188/3293/1842 6190/3292/2909 6530/3497/1841 +f 6190/3292/2909 6191/3294/2909 6530/3497/1841 +f 6191/3294/2909 6192/3295/3097 6530/3497/1841 +f 6530/3497/1841 6192/3295/3097 6533/3498/3098 +f 6192/3295/3097 6194/3296/3042 6533/3498/3098 +f 6533/3498/3098 6194/3296/3042 6535/3499/3099 +f 6194/3296/3042 6196/3297/14 6535/3499/3099 +f 6535/3499/3099 6196/3297/14 6536/3500/12 +f 6196/3297/14 6199/3297/3100 6536/3500/12 +f 6538/3501/14 6200/3044/2935 6539/3502/3101 +f 6200/3044/2935 6203/3298/1050 6539/3502/3101 +f 6203/3298/1050 6204/3299/1135 6539/3502/3101 +f 6540/3502/3102 6205/3299/2918 6541/3503/3103 +f 6205/3299/2918 6206/3300/2919 6541/3503/3103 +f 6207/3301/1419 6208/3303/1419 6543/3504/3104 +f 6543/3504/3104 6208/3303/1419 6546/3505/3105 +f 6546/3505/3105 6208/3303/1419 6547/3506/1419 +f 6208/3303/1419 6209/3304/2783 6547/3506/1419 +f 6209/3304/2783 6210/3305/2783 6547/3506/1419 +f 6211/3307/2921 6213/3306/2920 6548/3507/3106 +f 
6548/3507/3106 6213/3306/2920 6549/3508/3107 +f 6212/3306/1841 6214/3308/1841 6550/3508/1842 +f 6214/3308/1841 6216/3310/1841 6550/3508/1842 +f 6216/3310/1841 6218/3312/3108 6550/3508/1842 +f 6550/3508/1842 6218/3312/3108 6552/3509/3108 +f 6218/3312/3108 6220/3314/3109 6552/3509/3108 +f 6552/3509/3108 6220/3314/3109 6554/3510/2963 +f 6220/3314/3109 6222/3316/3110 6554/3510/2963 +f 6554/3510/2963 6222/3316/3110 6555/3318/3111 +f 6222/3316/3110 6225/3316/2935 6555/3318/3111 +f 6225/3316/2935 6227/3318/2935 6555/3318/3111 +f 6556/3318/3112 6226/3318/3113 6559/3318/3103 +f 6559/3318/3103 6226/3318/3113 6560/3511/3114 +f 6226/3318/3113 6229/3319/2937 6560/3511/3114 +f 6229/3319/2937 6230/3320/2936 6560/3511/3114 +f 6563/3511/3115 6230/3320/2936 6564/3512/3116 +f 6230/3320/2936 6232/3321/2715 6564/3512/3116 +f 6232/3321/2715 6234/3322/2938 6564/3512/3116 +f 6235/3324/47 6236/3323/59 6565/3513/3117 +f 6565/3513/3117 6236/3323/59 6566/3514/3118 +f 6236/3323/59 6241/3325/3119 6566/3514/3118 +f 6241/3325/3119 6243/3515/2940 6566/3514/3118 +f 6243/3515/2940 6246/3462/2941 6566/3514/3118 +f 6566/3514/3118 6246/3462/2941 6567/3516/3120 +f 6568/3517/3121 6247/3518/3122 6571/3519/3121 +f 6248/3328/2942 6250/3329/2944 6570/3520/3121 +f 6573/3521/95 6572/3522/3123 6574/3523/3124 +f 6575/3524/3125 6569/3520/3120 6576/3525/3126 +f 6569/3520/3120 6249/3329/3127 6576/3525/3126 +f 6249/3329/3127 6252/3330/2948 6576/3525/3126 +f 6252/3330/2948 6253/3332/2949 6576/3525/3126 +f 6576/3525/3126 6253/3332/2949 6577/3526/3128 +f 6253/3332/2949 6254/3333/2950 6577/3526/3128 +f 6578/3527/3129 6255/3528/3130 6579/3529/2945 +f 6255/3528/3130 6258/3337/2742 6579/3529/2945 +f 6258/3337/2742 6260/3336/2742 6579/3529/2945 +f 6579/3529/2945 6260/3336/2742 6580/3530/2945 +f 6581/3530/2948 6259/3336/2946 6582/3531/2745 +f 6259/3336/2946 6262/3338/2744 6582/3531/2745 +f 6262/3338/2744 6263/3339/2953 6582/3531/2745 +f 6263/3339/2953 6266/3532/3131 6582/3531/2745 +f 6266/3532/3131 6268/3533/3132 
6582/3531/2745 +f 6582/3531/2745 6268/3533/3132 6583/3534/3133 +f 6269/3343/2741 6270/3344/2742 6584/3535/3130 +f 6584/3535/3130 6270/3344/2742 6585/3536/2945 +f 6586/3537/3134 6271/3345/2956 6587/3538/3135 +f 6271/3345/2956 6272/3346/2957 6587/3538/3135 +f 6587/3538/3135 6272/3346/2957 6588/3539/3136 +f 6272/3346/2957 6274/3347/2958 6588/3539/3136 +f 6589/3539/3137 6273/3347/3138 6591/3540/3139 +f 6591/3540/3139 6273/3347/3138 6592/3541/3140 +f 6273/3347/3138 6276/3348/2960 6592/3541/3140 +f 6276/3348/2960 6278/3349/2961 6592/3541/3140 +f 6593/3542/3141 6279/3543/2962 6594/3544/3142 +f 6279/3543/2962 6284/3352/2962 6594/3544/3142 +f 6284/3352/2962 6285/3353/2762 6594/3544/3142 +f 6594/3544/3142 6285/3353/2762 6595/3545/2763 +f 6285/3353/2762 6287/3354/2763 6595/3545/2763 +f 6288/3355/23 6291/3356/23 6598/3546/23 +f 6597/3547/23 6292/3548/23 6599/3549/23 +f 6293/3356/58 6294/3357/83 6602/3550/3143 +f 6605/3551/3144 6602/3550/3143 6606/3552/3145 +f 6602/3550/3143 6294/3357/83 6606/3552/3145 +f 6294/3357/83 6297/3358/2766 6606/3552/3145 +f 6297/3358/2766 6299/3553/3146 6606/3552/3145 +f 6299/3553/3146 6301/3554/3147 6606/3552/3145 +f 6606/3552/3145 6301/3554/3147 6607/3555/3148 +f 6302/3365/2969 6304/3367/3149 6608/3556/3148 +f 6608/3556/3148 6304/3367/3149 6609/3557/2187 +f 6304/3367/3149 6307/3369/3150 6609/3557/2187 +f 6307/3369/3150 6311/3371/2773 6609/3557/2187 +f 6609/3557/2187 6311/3371/2773 6610/3558/3151 +f 6312/3371/3150 6314/3372/2773 6611/3558/3151 +f 6611/3558/3151 6314/3372/2773 6612/3559/3151 +f 6612/3559/3151 6314/3372/2773 6615/3560/2773 +f 6315/3372/2971 6320/3373/2971 6614/3560/2971 +f 6321/3373/2971 6324/3561/2971 6613/3560/2971 +f 6616/3560/2971 6325/3561/2971 6619/3562/3152 +f 6619/3562/3152 6325/3561/2971 6621/3563/3153 +f 6326/3376/2972 6327/3377/2973 6620/3564/3154 +f 6327/3377/2973 6328/3378/2785 6622/3564/3155 +f 6328/3378/2785 6329/3380/2774 6622/3564/3155 +f 6622/3564/3155 6329/3380/2774 6624/3565/3156 +f 6624/3565/3156 6329/3380/2774 
6626/3566/2962 +f 6626/3566/2962 6329/3380/2774 6630/3567/2962 +f 6329/3380/2774 6330/3382/2975 6630/3567/2962 +f 6630/3567/2962 6330/3382/2975 6631/3568/2761 +f 6330/3382/2975 6331/3383/2975 6631/3568/2761 +f 6331/3383/2975 6333/3384/2976 6631/3568/2761 +f 6634/3569/3157 6332/3570/3158 6637/3571/3157 +f 6637/3571/3157 6332/3570/3158 6638/3572/2014 +f 6332/3570/3158 6335/3387/2979 6638/3572/2014 +f 6335/3387/2979 6337/3386/2978 6638/3572/2014 +f 6640/3573/3159 6336/3574/3160 6641/3575/3161 +f 6336/3574/3160 6338/3576/3162 6641/3575/3161 +f 6641/3575/3161 6338/3576/3162 6642/3577/3163 +f 6338/3576/3162 6341/3390/2982 6642/3577/3163 +f 6341/3390/2982 6342/3392/61 6642/3577/3163 +f 6342/3392/61 6343/3393/60 6642/3577/3163 +f 6343/3393/60 6345/3394/2984 6642/3577/3163 +f 6645/3578/3164 6642/3577/3163 6646/3579/3165 +f 6642/3577/3163 6345/3394/2984 6646/3579/3165 +f 6649/3580/23 6344/3581/23 6652/3582/23 +f 6344/3581/23 6346/3583/23 6652/3582/23 +f 6347/3583/23 6350/3397/23 6651/3582/23 +f 6351/3397/23 6354/3398/23 6650/3582/23 +f 6654/3582/88 6355/3398/88 6655/3584/2785 +f 6355/3398/88 6356/3399/2784 6655/3584/2785 +f 6356/3399/2784 6656/3585/3166 6655/3584/2785 +f 6655/3584/2785 6656/3585/3166 6657/3586/3151 +f 6656/3585/3166 6356/3399/2784 6657/3586/3151 +f 6356/3399/2784 6357/3400/2985 6657/3586/3151 +f 6357/3400/2985 6359/3587/1579 6657/3586/3151 +f 6658/3588/3167 6360/3401/2986 6660/3589/1577 +f 6360/3401/2986 6361/3403/2971 6660/3589/1577 +f 6361/3403/2971 6364/3590/2971 6660/3589/1577 +f 6660/3589/1577 6364/3590/2971 6662/3591/3168 +f 6364/3590/2971 6366/3592/2800 6662/3591/3168 +f 6662/3591/3168 6366/3592/2800 6663/3593/3169 +f 6664/3594/3170 6367/3406/2989 6665/3595/3171 +f 6367/3406/2989 6368/3408/2991 6665/3595/3171 +f 6665/3595/3171 6368/3408/2991 6666/3596/3172 +f 6669/3597/2999 6667/3598/3173 6670/3599/2996 +f 6667/3598/3173 6369/3410/2802 6670/3599/2996 +f 6369/3410/2802 6370/3411/2802 6670/3599/2996 +f 6371/3412/1819 6374/3179/2993 6671/3600/3174 +f 
6374/3179/2993 6375/3414/2994 6671/3600/3174 +f 6673/3601/2419 6671/3600/3174 6674/3602/3175 +f 6671/3600/3174 6375/3414/2994 6674/3602/3175 +f 6674/3602/3175 6375/3414/2994 6675/3603/3176 +f 6375/3414/2994 6376/3415/2995 6675/3603/3176 +f 6377/3416/2996 6378/3417/2997 6676/3604/3000 +f 6378/3417/2997 6379/3419/2999 6676/3604/3000 +f 6379/3419/2999 6381/3420/3000 6676/3604/3000 +f 6381/3420/3000 6384/3423/3003 6676/3604/3000 +f 6676/3604/3000 6384/3423/3003 6678/3605/3002 +f 6384/3423/3003 6386/3424/3002 6678/3605/3002 +f 6386/3424/3002 6388/3426/3005 6678/3605/3002 +f 6678/3605/3002 6388/3426/3005 6680/3606/3177 +f 6680/3606/3177 6388/3426/3005 6681/3607/3178 +f 6388/3426/3005 6389/3427/3006 6681/3607/3178 +f 6682/3608/3179 6390/3428/3007 6683/3609/3180 +f 6390/3428/3007 6391/3430/3009 6683/3609/3180 +f 6391/3430/3009 6392/3431/3010 6683/3609/3180 +f 6683/3609/3180 6392/3431/3010 6684/3610/3181 +f 6393/3431/3011 6394/3432/3012 6685/3610/3182 +f 6685/3610/3182 6394/3432/3012 6687/3611/3183 +f 6395/3432/3013 6396/3433/3014 6686/3611/3184 +f 6689/3612/3185 6397/3434/3016 6690/3613/3186 +f 6397/3434/3016 6398/3435/3017 6690/3613/3186 +f 6398/3435/3017 6399/3436/3018 6690/3613/3186 +f 6399/3436/3018 6400/3437/3019 6690/3613/3186 +f 6693/3614/3187 6401/3615/3187 6694/3616/3187 +f 6402/3438/3020 6403/3439/3021 6697/3617/3188 +f 6403/3439/3021 6406/3446/3189 6697/3617/3188 +f 6697/3617/3188 6406/3446/3189 6698/3618/3190 +f 6406/3446/3189 6412/3446/3025 6698/3618/3190 +f 6412/3446/3025 6410/3445/3024 6698/3618/3190 +f 6701/3619/3191 6408/3620/3191 6702/3621/3191 +f 6707/3621/3191 6409/3620/3191 6708/3622/3191 +f 6410/3445/3024 6414/3447/3026 6711/3445/3192 +f 6414/3447/3026 6416/3623/3193 6711/3445/3192 +f 6711/3445/3192 6416/3623/3193 6712/3624/3194 +f 6416/3623/3193 6419/3451/3031 6712/3624/3194 +f 6419/3451/3031 6420/3221/3032 6712/3624/3194 +f 6715/3625/3195 6421/3452/3033 6716/3626/3196 +f 6421/3452/3033 6422/3453/3018 6716/3626/3196 +f 6719/3626/3186 6423/3453/3034 
6722/3627/3197 +f 6722/3627/3197 6423/3453/3034 6723/3628/3198 +f 6423/3453/3034 6425/3454/3036 6723/3628/3198 +f 6426/3455/3036 6429/3456/3039 6724/3629/3199 +f 6429/3456/3039 6430/3457/3041 6724/3629/3199 +f 6724/3629/3199 6430/3457/3041 6725/3630/3200 +f 6430/3457/3041 6431/3458/3042 6725/3630/3200 +f 6725/3630/3200 6431/3458/3042 6726/3459/3201 +f 6431/3458/3042 6432/3459/1385 6726/3459/3201 +f 6433/3459/3043 6434/3459/3045 6727/3459/3202 +f 6727/3459/3202 6434/3459/3045 6730/3631/3203 +f 6434/3459/3045 6436/3459/116 6730/3631/3203 +f 6436/3459/116 6439/3632/3204 6730/3631/3203 +f 6730/3631/3203 6439/3632/3204 6733/3632/3178 +f 6733/3632/3178 6439/3632/3204 6734/3633/3205 +f 6440/3462/3204 6442/3463/3169 6736/3463/3206 +f 6442/3463/3169 6444/3464/3207 6736/3463/3206 +f 6736/3463/3206 6444/3464/3207 6738/3464/3208 +f 6738/3464/3208 6444/3464/3207 6739/3464/3209 +f 6445/3464/3047 67/41/1773 6742/3464/1773 +f 67/41/1773 38/28/19 6742/3464/1773 +f 6742/3464/1773 38/28/19 6744/3634/19 +f 39/28/19 42/29/19 6743/3634/19 +f 6745/3634/3210 43/29/21 6746/3635/3211 +f 43/29/21 45/30/22 6746/3635/3211 +f 6747/3635/1385 46/30/75 6748/3636/1385 +f 46/30/75 48/31/75 6748/3636/1385 +f 6748/3636/1385 48/31/75 6749/33/81 +f 48/31/75 51/32/75 6749/33/81 +f 52/32/25 54/33/24 6750/33/3212 +f 6750/33/3212 54/33/24 6751/3637/3213 +f 53/33/1425 55/34/1426 6752/3637/1426 +f 55/34/1426 57/35/1426 6752/3637/1426 +f 6752/3637/1426 57/35/1426 6754/3638/1427 +f 58/35/29 61/35/30 6753/3638/3214 +f 61/35/30 6451/3639/3215 6753/3638/3214 +f 6452/3466/3215 6456/3467/3216 6755/3466/3217 +f 6755/3466/3217 6456/3467/3216 6757/3640/3218 +f 6771/3641/3219 6761/3642/2827 6763/3643/3220 +f 6762/3644/3220 6758/3644/3221 6766/3644/3222 +f 6758/3644/3221 6757/3640/3218 6766/3644/3222 +f 6757/3640/3218 6456/3467/3216 6766/3644/3222 +f 6457/3467/2 6460/3468/2 6765/3644/2 +f 6764/3644/2 6461/3468/2 6770/3645/2 +f 6462/3468/2 6466/3469/2 6769/3645/2 +f 6467/3470/3050 6471/3471/3050 6768/3646/3050 +f 
6767/3646/36 6472/3471/3053 6773/3641/3223 +f 6773/3641/3223 6472/3471/3053 6774/3647/19 +f 6472/3471/3053 6473/3472/3052 6774/3647/19 +f 6473/3472/3052 6776/3648/19 6774/3647/19 +f 6774/3647/19 6776/3648/19 6777/3649/3224 +f 6777/3649/3224 6775/3648/3225 6778/3650/3226 +f 6778/3650/3226 6775/3648/3225 6779/3648/10 +f 6775/3648/3225 6473/3472/3052 6779/3648/10 +f 6473/3472/3052 6474/3473/2528 6779/3648/10 +f 6475/3473/1255 6476/3474/1255 6781/3648/2843 +f 6781/3648/2843 6476/3474/1255 6786/3651/15 +f 6476/3474/1255 6477/3475/1256 6786/3651/15 +f 6477/3475/1256 6478/3476/3057 6786/3651/15 +f 6786/3651/15 6478/3476/3057 6790/3652/3227 +f 6478/3476/3057 6479/3477/2600 6790/3652/3227 +f 6790/3652/3227 6479/3477/2600 6794/3653/3228 +f 6479/3477/2600 6480/3274/3060 6794/3653/3228 +f 6794/3653/3228 6480/3274/3060 6795/3654/3229 +f 6480/3274/3060 6481/3478/3062 6795/3654/3229 +f 6482/3478/3064 6483/3276/3065 6797/3654/3230 +f 6797/3654/3230 6483/3276/3065 6798/3654/3231 +f 6484/3276/3066 6485/3479/3068 6799/3654/3066 +f 6799/3654/3066 6485/3479/3068 6800/3655/3232 +f 6485/3479/3068 6486/3480/3070 6800/3655/3232 +f 6486/3480/3070 6487/3481/3071 6800/3655/3232 +f 6487/3481/3071 6488/3482/3072 6800/3655/3232 +f 6800/3655/3232 6488/3482/3072 6801/3656/3233 +f 6801/3656/3233 6488/3482/3072 6802/3657/3234 +f 6488/3482/3072 6489/3483/3073 6802/3657/3234 +f 6489/3483/3073 6491/3484/3075 6802/3657/3234 +f 6802/3657/3234 6491/3484/3075 6803/3485/3235 +f 6491/3484/3075 6492/3485/3076 6803/3485/3235 +f 6803/3485/3235 6492/3485/3076 6804/3658/3236 +f 6492/3485/3076 6493/3485/3077 6804/3658/3236 +f 6493/3485/3077 6494/3485/3078 6804/3658/3236 +f 6494/3485/3078 20/19/7 6804/3658/3236 +f 20/19/7 19/19/6 6804/3658/3236 +f 6804/3658/3236 19/19/6 6805/3658/3237 +f 19/19/6 17/17/4 6805/3658/3237 +f 6805/3658/3237 17/17/4 6806/3659/3238 +f 6807/3660/3239 6806/3659/3238 6808/16/3240 +f 6806/3659/3238 17/17/4 6808/16/3240 +f 17/17/4 16/16/3 6808/16/3240 +f 6808/16/3240 16/16/3 6809/3661/3241 +f 
6809/3661/3241 16/16/3 6811/3662/3242 +f 16/16/3 18/18/5 6811/3662/3242 +f 6811/3662/3242 18/18/5 6813/3663/3242 +f 18/18/5 20/19/7 6813/3663/3242 +f 20/19/7 6494/3485/3078 6813/3663/3242 +f 6494/3485/3078 6495/3486/3079 6813/3663/3242 +f 6813/3663/3242 6495/3486/3079 6814/3664/3243 +f 6495/3486/3079 6497/3487/3081 6814/3664/3243 +f 6497/3487/3081 6498/3488/3082 6814/3664/3243 +f 6498/3488/3082 6499/3489/2892 6814/3664/3243 +f 6500/3489/3084 6501/3489/3083 6816/3664/3244 +f 6816/3664/3244 6501/3489/3083 6818/3665/3245 +f 6818/3665/3245 6501/3489/3083 6819/3665/3246 +f 6501/3489/3083 6502/3283/3085 6819/3665/3246 +f 6502/3283/3085 6503/3490/3086 6819/3665/3246 +f 6821/3665/23 6504/3490/23 6824/3666/23 +f 6505/3490/23 6507/3491/23 6823/3666/23 +f 6508/3491/23 6510/3492/23 6822/3666/23 +f 6826/3666/23 6511/3492/23 6828/3667/23 +f 6512/3492/23 6515/3493/23 6827/3667/23 +f 6830/3667/23 6516/3493/23 6834/3668/23 +f 6517/3494/23 6520/3293/23 6833/3669/23 +f 6521/3293/23 6524/3495/23 6832/3669/23 +f 6525/3495/3093 6526/3496/59 6831/3669/60 +f 6836/3669/47 6526/3496/59 6838/3670/3247 +f 6838/3670/3247 6526/3496/59 6839/3671/94 +f 6526/3496/59 6527/3497/3094 6839/3671/94 +f 6839/3671/94 6527/3497/3094 6841/3672/3105 +f 6528/3497/3095 6529/3497/3096 6840/3672/3248 +f 6529/3497/3096 6531/3498/3249 6840/3672/3248 +f 6843/3673/3250 6532/3674/3251 6846/3675/3252 +f 6846/3675/3252 6532/3674/3251 6850/3675/3253 +f 6532/3674/3251 6534/3674/3254 6850/3675/3253 +f 6534/3674/3254 6537/3501/3255 6850/3675/3253 +f 6850/3675/3253 6537/3501/3255 6853/3502/3256 +f 6853/3502/3256 6537/3501/3255 6855/3502/3257 +f 6537/3501/3255 6540/3502/3102 6855/3502/3257 +f 6540/3502/3102 6541/3503/3103 6855/3502/3257 +f 6542/3504/3258 6544/3505/3258 6854/3676/3258 +f 6858/3676/3259 6545/3505/3259 6860/3677/3259 +f 6546/3505/3105 6547/3506/1419 6859/3677/3105 +f 6548/3507/3106 6549/3508/3107 6863/3678/3260 +f 6549/3508/3107 6551/3509/3261 6863/3678/3260 +f 6863/3678/3260 6551/3509/3261 6866/3509/3262 +f 
6551/3509/3261 6553/3510/3263 6866/3509/3262 +f 6866/3509/3262 6553/3510/3263 6869/3510/3264 +f 6553/3510/3263 6556/3318/3112 6869/3510/3264 +f 6869/3510/3264 6556/3318/3112 6870/3318/3265 +f 6556/3318/3112 6559/3318/3103 6870/3318/3265 +f 6873/3679/3266 6557/3680/3266 6876/3681/3266 +f 6558/3680/3258 6561/3682/3258 6875/3681/3258 +f 6562/3683/3105 6565/3513/3117 6874/3684/3267 +f 6565/3513/3117 6566/3514/3118 6874/3684/3267 +f 6566/3514/3118 6567/3516/3120 6874/3684/3267 +f 6568/3517/3121 6571/3519/3121 6879/3681/3258 +f 6879/3681/3258 6571/3519/3121 6880/3685/3258 +f 6572/3522/3123 6573/3521/95 6882/3686/3267 +f 6882/3686/3267 6573/3521/95 6884/3687/1419 +f 6884/3687/1419 6573/3521/95 6885/3688/59 +f 6573/3521/95 6574/3523/3124 6885/3688/59 +f 6885/3688/59 6574/3523/3124 6886/3689/3268 +f 6889/3690/3268 6575/3524/3125 6890/3691/3269 +f 6575/3524/3125 6576/3525/3126 6890/3691/3269 +f 6576/3525/3126 6577/3526/3128 6890/3691/3269 +f 6578/3527/3129 6579/3529/2945 6891/3692/3270 +f 6579/3529/2945 6892/3693/3271 6891/3692/3270 +f 6891/3692/3270 6892/3693/3271 6893/3694/2944 +f 6892/3693/3271 6579/3529/2945 6893/3694/2944 +f 6579/3529/2945 6580/3530/2945 6893/3694/2944 +f 6894/3694/3127 6581/3530/2948 6895/3695/3272 +f 6581/3530/2948 6582/3531/2745 6895/3695/3272 +f 6582/3531/2745 6583/3534/3133 6895/3695/3272 +f 6895/3695/3272 6583/3534/3133 6896/3696/3128 +f 6584/3535/3130 6585/3536/2945 6897/3697/3103 +f 6897/3697/3103 6585/3536/2945 6898/3698/3273 +f 6899/3699/3274 6586/3537/3134 6900/3700/3275 +f 6586/3537/3134 6587/3538/3135 6900/3700/3275 +f 6900/3700/3275 6587/3538/3135 6901/3701/3276 +f 6587/3538/3135 6588/3539/3136 6901/3701/3276 +f 6903/3701/3277 6589/3539/3137 6904/3702/3278 +f 6589/3539/3137 6591/3540/3139 6904/3702/3278 +f 6906/3703/3279 6590/3704/3280 6907/3705/3281 +f 6590/3704/3280 6593/3542/3141 6907/3705/3281 +f 6907/3705/3281 6593/3542/3141 6910/3706/3282 +f 6593/3542/3141 6594/3544/3142 6910/3706/3282 +f 6594/3544/3142 6595/3545/2763 6910/3706/3282 
+f 6596/3547/23 6600/3549/23 6909/3707/23 +f 6601/3549/23 6603/3708/23 6908/3707/23 +f 6911/3707/23 6604/3708/23 6913/3709/3283 +f 6605/3551/3144 6606/3552/3145 6912/3710/3284 +f 6606/3552/3145 6607/3555/3148 6912/3710/3284 +f 6913/3709/3283 6608/3556/3148 6915/3711/3285 +f 6608/3556/3148 6609/3557/2187 6915/3711/3285 +f 6920/3712/88 6915/3711/3285 6921/3713/3286 +f 6915/3711/3285 6609/3557/2187 6921/3713/3286 +f 6609/3557/2187 6610/3558/3151 6921/3713/3286 +f 6611/3558/3151 6612/3559/3151 6922/3713/2785 +f 6612/3559/3151 6615/3560/2773 6922/3713/2785 +f 6922/3713/2785 6615/3560/2773 6923/3714/3287 +f 6615/3560/2773 6617/3562/3288 6923/3714/3287 +f 6924/3715/3287 6618/3716/3289 6925/3717/3290 +f 6619/3562/3152 6621/3563/3153 6926/3718/3291 +f 6926/3718/3291 6621/3563/3153 6927/3719/3292 +f 6621/3563/3153 6623/3720/3293 6927/3719/3292 +f 6930/3721/3294 6929/3722/3292 6931/3723/3295 +f 6927/3719/3292 6623/3720/3293 6932/3724/1280 +f 6623/3720/3293 6625/3725/3296 6932/3724/1280 +f 6932/3724/1280 6625/3725/3296 6933/3726/2961 +f 6625/3725/3296 6627/3727/3157 6933/3726/2961 +f 6935/3728/3157 6628/3729/3157 6938/3730/3157 +f 6629/3729/3157 6632/3569/3157 6937/3730/3157 +f 6633/3569/3157 6635/3571/3157 6936/3730/3157 +f 6939/3731/2962 6636/3732/2962 6940/3733/1767 +f 6636/3732/2962 6639/3734/3159 6940/3733/1767 +f 6940/3733/1767 6639/3734/3159 6941/3735/1767 +f 6640/3573/3159 6641/3575/3161 6942/3736/2774 +f 6942/3736/2774 6641/3575/3161 6943/3737/3297 +f 6641/3575/3161 6642/3577/3163 6943/3737/3297 +f 6642/3577/3163 6645/3578/3164 6943/3737/3297 +f 6946/3738/23 6643/3739/23 6949/3740/23 +f 6644/3739/23 6647/3580/23 6948/3740/23 +f 6648/3580/23 6653/3582/23 6947/3740/23 +f 6951/3740/3298 6654/3582/88 6952/3741/3299 +f 6654/3582/88 6655/3584/2785 6952/3741/3299 +f 6655/3584/2785 6657/3586/3151 6952/3741/3299 +f 6657/3586/3151 6659/3742/1579 6952/3741/3299 +f 6659/3742/1579 6661/3743/2412 6952/3741/3299 +f 6952/3741/3299 6661/3743/2412 6953/3744/3300 +f 6662/3591/3168 
6663/3593/3169 6954/3745/2805 +f 6954/3745/2805 6663/3593/3169 6955/3746/3301 +f 6956/3747/3302 6664/3594/3170 6957/3748/3303 +f 6664/3594/3170 6665/3595/3171 6957/3748/3303 +f 6665/3595/3171 6666/3596/3172 6957/3748/3303 +f 6666/3596/3172 6668/3749/3304 6957/3748/3303 +f 6957/3748/3303 6668/3749/3304 6958/3750/3305 +f 6669/3597/2999 6670/3599/2996 6959/3751/3003 +f 6670/3599/2996 6672/3752/3003 6959/3751/3003 +f 6959/3751/3003 6672/3752/3003 6961/3753/3306 +f 6961/3753/3306 6672/3752/3003 6962/3754/3002 +f 6963/3755/3307 6673/3601/2419 6964/3756/3308 +f 6673/3601/2419 6674/3602/3175 6964/3756/3308 +f 6674/3602/3175 6675/3603/3176 6964/3756/3308 +f 6675/3603/3176 6677/3757/3309 6964/3756/3308 +f 6964/3756/3308 6677/3757/3309 6965/3758/3310 +f 6677/3757/3309 6679/3759/3311 6965/3758/3310 +f 6967/3760/3312 6966/3761/3313 6969/3762/3314 +f 6965/3758/3310 6679/3759/3311 6968/3763/3315 +f 6680/3606/3177 6681/3607/3178 6970/3764/3316 +f 6970/3764/3316 6681/3607/3178 6971/3765/3317 +f 6972/3766/3318 6682/3608/3179 6973/3767/3319 +f 6682/3608/3179 6683/3609/3180 6973/3767/3319 +f 6683/3609/3180 6684/3610/3181 6973/3767/3319 +f 6973/3767/3319 6684/3610/3181 6979/3768/3320 +f 6685/3610/3182 6687/3611/3183 6978/3768/3321 +f 6688/3769/3322 6691/3614/3323 6977/3770/3324 +f 6692/3614/3325 6695/3616/3325 6976/3770/3325 +f 6696/3616/3191 6699/3771/3191 6975/3770/3191 +f 6700/3771/3191 6703/3772/3191 6974/3770/3191 +f 6981/3770/3191 6704/3772/3191 6982/3773/3191 +f 6985/3774/3191 6705/3775/3191 6990/3776/3191 +f 6706/3621/3191 6709/3622/3191 6989/3777/3191 +f 6710/3622/3191 6713/3625/3191 6988/3777/3191 +f 6714/3625/3191 6717/3626/3191 6987/3777/3191 +f 6718/3626/3191 6720/3627/3191 6986/3777/3191 +f 6994/3777/3191 6721/3627/3191 6998/3778/3191 +f 6722/3627/3197 6723/3628/3198 6997/3778/3326 +f 6724/3629/3199 6725/3630/3200 6996/3630/3326 +f 6725/3630/3200 6726/3459/3201 6996/3630/3326 +f 6726/3459/3201 6728/3631/3327 6996/3630/3326 +f 6729/3631/3191 6731/3632/3191 6995/3630/3191 
+f 7001/3630/3325 6732/3632/3325 7003/3633/3325 +f 6733/3632/3178 6734/3633/3205 7002/3633/3328 +f 6735/3779/3329 6737/3780/3330 7006/3781/3331 +f 7006/3781/3331 6737/3780/3330 7009/3782/3186 +f 6737/3780/3330 6740/3780/3332 7009/3782/3186 +f 7009/3782/3186 6740/3780/3332 7011/3780/3333 +f 7011/3780/3333 6740/3780/3332 7013/3783/3334 +f 6741/3464/3335 6745/3634/3210 7012/3784/3336 +f 6745/3634/3210 6746/3635/3211 7012/3784/3336 +f 6747/3635/1385 6748/3636/1385 7015/3784/3327 +f 7015/3784/3327 6748/3636/1385 7016/3785/3327 +f 6748/3636/1385 6749/33/81 7016/3785/3327 +f 6750/33/3212 6751/3637/3213 7019/3785/3203 +f 7019/3785/3203 6751/3637/3213 7020/3786/3337 +f 6752/3637/1426 6754/3638/1427 7022/3786/1580 +f 6754/3638/1427 6756/3787/3338 7022/3786/1580 +f 7022/3786/1580 6756/3787/3338 7025/3786/1762 +f 6756/3787/3338 6759/3788/3339 7025/3786/1762 +f 7025/3786/1762 6759/3788/3339 7026/3789/94 +f 6760/3642/1662 6772/3641/1773 7029/3790/94 +f 7029/3790/94 6772/3641/1773 7031/3790/1573 +f 6772/3641/1773 6774/3647/19 7031/3790/1573 +f 7031/3790/1573 6774/3647/19 7033/3791/19 +f 6774/3647/19 6777/3649/3224 7033/3791/19 +f 7033/3791/19 6777/3649/3224 7034/3792/3340 +f 6777/3649/3224 6778/3650/3226 7034/3792/3340 +f 7034/3792/3340 6778/3650/3226 7036/3792/3341 +f 7036/3792/3341 6778/3650/3226 7038/3650/3342 +f 6778/3650/3226 6779/3648/10 7038/3650/3342 +f 6780/3648/23 6782/3651/23 7037/3650/23 +f 7040/3650/23 6783/3651/23 7041/3793/23 +f 7043/3793/23 6784/3651/23 7045/3794/23 +f 6785/3651/23 6787/3652/23 7044/3794/23 +f 7047/3794/23 6788/3652/23 7049/3795/23 +f 6789/3652/23 6791/3653/23 7048/3795/23 +f 7051/3795/23 6792/3653/23 7053/3796/23 +f 6793/3653/23 6796/3654/23 7052/3796/23 +f 7055/3796/3343 6797/3654/3230 7056/3796/3238 +f 6797/3654/3230 6798/3654/3231 7056/3796/3238 +f 7056/3796/3238 6798/3654/3231 7057/3797/3344 +f 6799/3654/3066 6800/3655/3232 7058/3797/3066 +f 7058/3797/3066 6800/3655/3232 7060/3797/3066 +f 6800/3655/3232 6801/3656/3233 7060/3797/3066 +f 
7060/3797/3066 6801/3656/3233 7061/3798/3345 +f 6801/3656/3233 6802/3657/3234 7061/3798/3345 +f 6802/3657/3234 6803/3485/3235 7061/3798/3345 +f 7061/3798/3345 6803/3485/3235 7062/3799/3346 +f 7062/3799/3346 6803/3485/3235 7063/3658/3347 +f 6803/3485/3235 6804/3658/3236 7063/3658/3347 +f 6804/3658/3236 6805/3658/3237 7063/3658/3347 +f 7063/3658/3347 6805/3658/3237 7064/3800/3348 +f 6805/3658/3237 6806/3659/3238 7064/3800/3348 +f 7064/3800/3348 6806/3659/3238 7065/3801/3349 +f 6806/3659/3238 6807/3660/3239 7065/3801/3349 +f 7065/3801/3349 6807/3660/3239 7066/3802/3350 +f 7066/3802/3350 6807/3660/3239 7067/3803/3351 +f 6807/3660/3239 6808/16/3240 7067/3803/3351 +f 7067/3803/3351 6808/16/3240 7068/3804/3352 +f 6808/16/3240 6809/3661/3241 7068/3804/3352 +f 7068/3804/3352 6809/3661/3241 7069/3805/1945 +f 6809/3661/3241 6811/3662/3242 7069/3805/1945 +f 7070/3805/3353 6810/3662/3354 7072/3806/3355 +f 6810/3662/3354 6812/3663/3356 7072/3806/3355 +f 7072/3806/3355 6812/3663/3356 7073/3807/3357 +f 6812/3663/3356 6815/3664/3358 7073/3807/3357 +f 7075/3807/3359 6816/3664/3244 7076/3808/3360 +f 6816/3664/3244 6818/3665/3245 7076/3808/3360 +f 7078/3808/3361 6817/3665/3362 7079/3809/3363 +f 6817/3665/3362 6820/3665/3364 7079/3809/3363 +f 6820/3665/3364 6825/3666/3365 7079/3809/3363 +f 7079/3809/3363 6825/3666/3365 7080/3810/3366 +f 6825/3666/3365 6829/3667/3367 7080/3810/3366 +f 7080/3810/3366 6829/3667/3367 7081/3811/3368 +f 6829/3667/3367 6835/3668/47 7081/3811/3368 +f 6835/3668/47 6837/3812/3369 7081/3811/3368 +f 7081/3811/3368 6837/3812/3369 7082/3813/3370 +f 6838/3670/3247 6839/3671/94 7085/3814/3370 +f 7085/3814/3370 6839/3671/94 7088/3815/3267 +f 6839/3671/94 6841/3672/3105 7088/3815/3267 +f 6842/3816/3258 6844/3817/3258 7087/3818/3258 +f 6845/3817/3266 6847/3819/3266 7086/3818/3259 +f 7091/3818/3258 6848/3819/3258 7100/3820/3258 +f 6849/3819/3258 6851/3821/3258 7099/3820/3258 +f 6852/3821/3258 6856/3676/3258 7098/3820/3259 +f 6857/3676/3258 6861/3677/3258 7097/3820/3258 +f 
6862/3677/3258 6864/3822/3258 7096/3820/3258 +f 6865/3822/3371 6867/3823/3372 7095/3820/3372 +f 6868/3823/3258 6871/3679/3258 7094/3820/3258 +f 6872/3679/3258 6877/3681/3258 7093/3820/3258 +f 6878/3681/3258 6881/3685/3258 7092/3820/3258 +f 6882/3686/3267 6884/3687/1419 7102/3824/3373 +f 6884/3687/1419 7103/3687/3374 7102/3824/3373 +f 7102/3824/3373 7103/3687/3374 7104/3825/3375 +f 7103/3687/3374 6883/3687/3117 7104/3825/3375 +f 7104/3825/3375 6883/3687/3117 7106/3825/1968 +f 7106/3825/1968 6883/3687/3117 7107/3826/59 +f 6884/3687/1419 6885/3688/59 7107/3826/59 +f 7107/3826/59 6885/3688/59 7108/3688/3376 +f 6885/3688/59 6886/3689/3268 7108/3688/3376 +f 7111/3827/3258 6887/3828/3121 7112/3829/3121 +f 6888/3830/3121 6891/3692/3270 7113/3831/3121 +f 6891/3692/3270 6893/3694/2944 7113/3831/3121 +f 7113/3831/3121 6893/3694/2944 7114/3832/3121 +f 7118/3832/3377 6894/3694/3127 7119/3833/3126 +f 6894/3694/3127 6895/3695/3272 7119/3833/3126 +f 6895/3695/3272 6896/3696/3128 7119/3833/3126 +f 7119/3833/3126 6896/3696/3128 7120/3834/3378 +f 6897/3697/3103 6898/3698/3273 7121/3835/3273 +f 7121/3835/3273 6898/3698/3273 7122/3836/3121 +f 7123/3837/3379 6899/3699/3274 7124/3838/3380 +f 6899/3699/3274 6900/3700/3275 7124/3838/3380 +f 6900/3700/3275 6901/3701/3276 7124/3838/3380 +f 7125/3839/3381 6902/3840/3277 7126/3841/2781 +f 6902/3840/3277 6905/3842/3382 7126/3841/2781 +f 7128/3843/3383 6906/3703/3279 7129/3844/3384 +f 6906/3703/3279 6907/3705/3281 7129/3844/3384 +f 7131/3845/1291 7130/3846/3385 7135/3847/3386 +f 7129/3844/3384 6907/3705/3281 7134/3848/45 +f 6907/3705/3281 6910/3706/3282 7134/3848/45 +f 6911/3707/23 6913/3709/3283 7133/3849/23 +f 6913/3709/3283 6915/3711/3285 7133/3849/23 +f 6914/3711/23 6916/3712/23 7132/3849/23 +f 7136/3849/23 6917/3712/23 7137/3850/23 +f 7140/3851/23 6918/3852/23 7142/3853/23 +f 6919/3712/88 6922/3713/2785 7141/3854/88 +f 6922/3713/2785 6923/3714/3287 7141/3854/88 +f 7145/3855/88 6924/3715/3287 7148/3856/3387 +f 7148/3856/3387 6924/3715/3287 
7149/3857/3388 +f 6924/3715/3287 6925/3717/3290 7149/3857/3388 +f 6925/3717/3290 6928/3722/3389 7149/3857/3388 +f 6928/3722/3389 6930/3721/3294 7149/3857/3388 +f 7149/3857/3388 6930/3721/3294 7150/3858/3390 +f 6930/3721/3294 6931/3723/3295 7150/3858/3390 +f 7150/3858/3390 6931/3723/3295 7151/3859/2310 +f 6931/3723/3295 6934/3860/2962 7151/3859/2310 +f 7151/3859/2310 6934/3860/2962 7152/3861/1767 +f 6934/3860/2962 6939/3731/2962 7152/3861/1767 +f 7152/3861/1767 6939/3731/2962 7154/3862/2763 +f 6939/3731/2962 6940/3733/1767 7154/3862/2763 +f 7154/3862/2763 6940/3733/1767 7156/3863/2763 +f 6940/3733/1767 6941/3735/1767 7156/3863/2763 +f 6942/3736/2774 6943/3737/3297 7155/3864/2763 +f 7159/3865/23 6944/3738/23 7161/3866/23 +f 6945/3738/23 6950/3740/23 7160/3866/23 +f 7166/3867/3391 7164/3866/82 7167/3868/1557 +f 7164/3866/82 6951/3740/3298 7167/3868/1557 +f 6951/3740/3298 6952/3741/3299 7167/3868/1557 +f 7167/3868/1557 6952/3741/3299 7168/3869/3392 +f 6952/3741/3299 6953/3744/3300 7168/3869/3392 +f 7168/3869/3392 6953/3744/3300 7169/3870/3393 +f 6954/3745/2805 6955/3746/3301 7170/3871/3238 +f 7172/3872/3394 7170/3871/3238 7173/3873/3395 +f 7170/3871/3238 6955/3746/3301 7173/3873/3395 +f 7174/3874/3396 6956/3747/3302 7175/3875/3397 +f 6956/3747/3302 6957/3748/3303 7175/3875/3397 +f 6957/3748/3303 6958/3750/3305 7175/3875/3397 +f 6958/3750/3305 6960/3876/3398 7175/3875/3397 +f 7175/3875/3397 6960/3876/3398 7176/3877/3399 +f 6961/3753/3306 6962/3754/3002 7177/3878/3004 +f 7179/3879/3178 7177/3878/3004 7181/3880/3177 +f 7177/3878/3004 6962/3754/3002 7181/3880/3177 +f 6963/3755/3307 6964/3756/3308 7180/3881/3400 +f 6964/3756/3308 6965/3758/3310 7180/3881/3400 +f 6966/3761/3313 6967/3760/3312 7182/3882/3401 +f 7182/3882/3401 6967/3760/3312 7184/3883/3402 +f 7184/3883/3402 6967/3760/3312 7185/3884/3403 +f 7186/3885/3404 7185/3884/3403 7187/3886/3405 +f 7189/3887/3406 7190/3888/3407 7191/3888/3408 +f 7190/3888/3407 7187/3886/3405 7191/3888/3408 +f 7187/3886/3405 7185/3884/3403 
7191/3888/3408 +f 7185/3884/3403 6967/3760/3312 7191/3888/3408 +f 6967/3760/3312 6969/3762/3314 7191/3888/3408 +f 6970/3764/3316 6971/3765/3317 7194/3889/3409 +f 7194/3889/3409 6971/3765/3317 7195/3890/3409 +f 7197/3891/3410 6972/3766/3318 7199/3892/3411 +f 7199/3892/3411 6972/3766/3318 7200/3892/3412 +f 6972/3766/3318 6973/3767/3319 7200/3892/3412 +f 6973/3767/3319 6979/3768/3320 7200/3892/3412 +f 7201/3893/3413 6980/3770/3414 7202/3773/3415 +f 6980/3770/3414 6983/3773/3327 7202/3773/3415 +f 7203/3774/3416 6984/3774/3327 7204/3894/3416 +f 6984/3774/3327 6991/3776/3327 7204/3894/3416 +f 7204/3894/3416 6991/3776/3327 7205/3895/3327 +f 7207/3895/3187 6992/3776/3187 7211/3896/3187 +f 6993/3777/3417 6999/3778/3417 7210/3897/3417 +f 7000/3778/3191 7004/3898/3191 7209/3897/3191 +f 7005/3781/3191 7007/3782/3191 7208/3896/3191 +f 7008/3782/3418 7010/3780/3419 7212/3896/3191 +f 7010/3780/3419 7014/3783/3420 7212/3896/3191 +f 7014/3783/3420 7017/3899/3421 7212/3896/3191 +f 7212/3896/3191 7017/3899/3421 7213/3900/3191 +f 7018/3899/3203 7021/3901/3422 7214/3900/3188 +f 7214/3900/3188 7021/3901/3422 7216/3902/3331 +f 7021/3901/3422 7023/3902/3423 7216/3902/3331 +f 7216/3902/3331 7023/3902/3423 7217/3903/3424 +f 7024/3786/3425 7027/3789/3426 7220/3904/3427 +f 7220/3904/3427 7027/3789/3426 7221/3789/3428 +f 7028/3790/3106 7030/3790/3429 7224/3905/3428 +f 7224/3905/3428 7030/3790/3429 7227/3906/3430 +f 7227/3906/3430 7030/3790/3429 7228/3907/3431 +f 7030/3790/3429 7032/3791/3432 7228/3907/3431 +f 7032/3791/3432 7034/3792/3340 7228/3907/3431 +f 7034/3792/3340 7036/3792/3341 7228/3907/3431 +f 7231/3907/3327 7035/3792/1385 7234/3908/3433 +f 7234/3908/3433 7035/3792/1385 7235/3909/1385 +f 7035/3792/1385 7039/3650/75 7235/3909/1385 +f 7039/3650/75 7042/3793/75 7235/3909/1385 +f 7235/3909/1385 7042/3793/75 7236/3910/3434 +f 7042/3793/75 7046/3794/3435 7236/3910/3434 +f 7236/3910/3434 7046/3794/3435 7237/3911/3436 +f 7046/3794/3435 7050/3795/3437 7237/3911/3436 +f 7237/3911/3436 
7050/3795/3437 7238/3912/3438 +f 7238/3912/3438 7050/3795/3437 7239/3913/3439 +f 7050/3795/3437 7054/3796/3440 7239/3913/3439 +f 7055/3796/3343 7056/3796/3238 7240/3913/3441 +f 7056/3796/3238 7057/3797/3344 7240/3913/3441 +f 7057/3797/3344 7059/3797/3442 7240/3913/3441 +f 7240/3913/3441 7059/3797/3442 7243/3913/3443 +f 7243/3913/3443 7059/3797/3442 7246/3914/3444 +f 7246/3914/3444 7059/3797/3442 7247/3915/3445 +f 7060/3797/3066 7061/3798/3345 7248/3915/3446 +f 7061/3798/3345 7062/3799/3346 7248/3915/3446 +f 7062/3799/3346 7063/3658/3347 7248/3915/3446 +f 7063/3658/3347 7064/3800/3348 7248/3915/3446 +f 7248/3915/3446 7064/3800/3348 7250/3916/3066 +f 7064/3800/3348 7065/3801/3349 7250/3916/3066 +f 7065/3801/3349 7066/3802/3350 7250/3916/3066 +f 7250/3916/3066 7066/3802/3350 7252/3917/3447 +f 7066/3802/3350 7067/3803/3351 7252/3917/3447 +f 7252/3917/3447 7067/3803/3351 7254/3918/3448 +f 7254/3918/3448 7067/3803/3351 7255/3919/3449 +f 7067/3803/3351 7068/3804/3352 7255/3919/3449 +f 7255/3919/3449 7068/3804/3352 7256/3920/3450 +f 7068/3804/3352 7069/3805/1945 7256/3920/3450 +f 7257/3920/3451 7070/3805/3353 7258/3921/3452 +f 7070/3805/3353 7072/3806/3355 7258/3921/3452 +f 7259/3921/3453 7071/3806/3454 7260/3922/3455 +f 7260/3922/3455 7071/3806/3454 7261/3923/3456 +f 7071/3806/3454 7074/3807/3457 7261/3923/3456 +f 7074/3807/3457 7077/3808/3458 7261/3923/3456 +f 7078/3808/3361 7079/3809/3363 7262/3923/3459 +f 7079/3809/3363 7080/3810/3366 7262/3923/3459 +f 7262/3923/3459 7080/3810/3366 7263/3924/3460 +f 7080/3810/3366 7081/3811/3368 7263/3924/3460 +f 7081/3811/3368 7082/3813/3370 7263/3924/3460 +f 7264/3924/3461 7083/3813/3462 7265/3925/3463 +f 7268/3926/3463 7084/3927/3462 7270/3928/3462 +f 7270/3928/3462 7084/3927/3462 7272/3929/3258 +f 7084/3927/3462 7089/3818/3258 7272/3929/3258 +f 7090/3818/3258 7101/3820/3258 7271/3929/3258 +f 7102/3824/3373 7104/3825/3375 7273/3930/3267 +f 7273/3930/3267 7104/3825/3375 7274/3931/95 +f 7104/3825/3375 7105/3825/95 7274/3931/95 +f 
7274/3931/95 7105/3825/95 7275/3932/59 +f 7278/3933/3464 7275/3932/59 7280/3934/59 +f 7275/3932/59 7105/3825/95 7280/3934/59 +f 7106/3825/1968 7107/3826/59 7279/3934/3268 +f 7107/3826/59 7108/3688/3376 7279/3934/3268 +f 7283/3935/3258 7109/3827/3258 7284/3936/3258 +f 7288/3936/3258 7110/3827/3258 7290/3937/3258 +f 7111/3827/3258 7112/3829/3121 7289/3937/3258 +f 7112/3829/3121 7115/3938/3121 7289/3937/3258 +f 7291/3937/3373 7116/3938/3120 7292/3939/3465 +f 7293/3940/3466 7117/3941/3120 7294/3942/3381 +f 7118/3832/3377 7119/3833/3126 7295/3943/3381 +f 7295/3943/3381 7119/3833/3126 7296/3944/3464 +f 7119/3833/3126 7120/3834/3378 7296/3944/3464 +f 7121/3835/3273 7122/3836/3121 7298/3945/3121 +f 7298/3945/3121 7122/3836/3121 7300/3946/3258 +f 7123/3837/3379 7124/3838/3380 7299/3947/3467 +f 7124/3838/3380 7306/3948/3468 7299/3947/3467 +f 7302/3946/3469 7303/3949/3468 7308/3950/3470 +f 7308/3950/3470 7303/3949/3468 7309/3951/3471 +f 7312/3952/96 7304/3953/96 7314/3954/96 +f 7305/3953/96 7125/3839/3381 7313/3954/96 +f 7125/3839/3381 7126/3841/2781 7313/3954/96 +f 7127/3841/3472 7130/3846/3385 7317/3954/62 +f 7130/3846/3385 7131/3845/1291 7317/3954/62 +f 7317/3954/62 7131/3845/1291 7318/3955/1291 +f 7131/3845/1291 7135/3847/3386 7318/3955/1291 +f 7135/3847/3386 7138/3851/45 7318/3955/1291 +f 7318/3955/1291 7138/3851/45 7319/3956/2777 +f 7319/3956/2777 7138/3851/45 7322/3957/45 +f 7139/3851/23 7143/3853/23 7321/3957/23 +f 7144/3853/23 7146/3958/23 7320/3957/23 +f 7325/3957/23 7147/3958/23 7326/3959/23 +f 7148/3856/3387 7149/3857/3388 7329/3960/3473 +f 7149/3857/3388 7150/3858/3390 7329/3960/3473 +f 7329/3960/3473 7150/3858/3390 7332/3961/3474 +f 7150/3858/3390 7151/3859/2310 7332/3961/3474 +f 7332/3961/3474 7151/3859/2310 7333/3962/2763 +f 7151/3859/2310 7152/3861/1767 7333/3962/2763 +f 7152/3861/1767 7154/3862/2763 7333/3962/2763 +f 7335/3963/23 7153/3964/23 7337/3965/23 +f 7153/3964/23 7157/3966/23 7337/3965/23 +f 7158/3966/23 7162/3967/23 7336/3965/23 +f 7339/3965/3475 
7163/3967/82 7340/3968/3391 +f 7163/3967/82 7165/3969/3476 7340/3968/3391 +f 7340/3968/3391 7165/3969/3476 7342/3970/3477 +f 7166/3867/3391 7167/3868/1557 7341/3971/3478 +f 7167/3868/1557 7168/3869/3392 7341/3971/3478 +f 7168/3869/3392 7169/3870/3393 7341/3971/3478 +f 7169/3870/3393 7171/3972/3479 7341/3971/3478 +f 7343/3970/3480 7172/3872/3394 7344/3973/3481 +f 7172/3872/3394 7173/3873/3395 7344/3973/3481 +f 7345/3974/3482 7174/3874/3396 7346/3975/3483 +f 7174/3874/3396 7175/3875/3397 7346/3975/3483 +f 7175/3875/3397 7176/3877/3399 7346/3975/3483 +f 7176/3877/3399 7178/3976/3484 7346/3975/3483 +f 7346/3975/3483 7178/3976/3484 7347/3977/3485 +f 7179/3879/3178 7181/3880/3177 7348/3978/3316 +f 7181/3880/3177 7183/3979/3486 7348/3978/3316 +f 7348/3978/3316 7183/3979/3486 7349/3980/3409 +f 7184/3883/3402 7185/3884/3403 7350/3885/3487 +f 7185/3884/3403 7186/3885/3404 7350/3885/3487 +f 7350/3885/3487 7186/3885/3404 7351/3981/3488 +f 7186/3885/3404 7187/3886/3405 7351/3981/3488 +f 7351/3981/3488 7187/3886/3405 7352/3887/3042 +f 7187/3886/3405 7190/3888/3407 7352/3887/3042 +f 7190/3888/3407 7189/3887/3406 7352/3887/3042 +f 7353/3982/3489 7188/3983/3490 7355/3984/3491 +f 7188/3983/3490 7192/3985/3492 7355/3984/3491 +f 7360/3986/3191 7356/3987/3191 7363/3988/3191 +f 7354/3987/3191 7193/3989/3409 7362/3988/3191 +f 7194/3889/3409 7195/3890/3409 7361/3990/3191 +f 7196/3991/3410 7198/3992/3411 7366/3993/3487 +f 7366/3993/3487 7198/3992/3411 7368/3994/3493 +f 7198/3992/3411 7201/3893/3413 7368/3994/3493 +f 7368/3994/3493 7201/3893/3413 7369/3995/1385 +f 7201/3893/3413 7202/3773/3415 7369/3995/1385 +f 7203/3774/3416 7204/3894/3416 7370/3996/1385 +f 7370/3996/1385 7204/3894/3416 7371/3997/1385 +f 7204/3894/3416 7205/3895/3327 7371/3997/1385 +f 7371/3997/1385 7205/3895/3327 7372/3998/1385 +f 7372/3998/1385 7205/3895/3327 7373/3999/3327 +f 7206/3895/3191 7212/3896/3191 7374/3999/3191 +f 7212/3896/3191 7213/3900/3191 7374/3999/3191 +f 7213/3900/3191 7215/3902/3191 7374/3999/3191 +f 
7215/3902/3191 7218/3903/3191 7374/3999/3191 +f 7374/3999/3191 7218/3903/3191 7379/4000/3191 +f 7219/3903/3191 7222/4001/3191 7378/4000/3191 +f 7223/4001/3191 7225/4002/3191 7377/4000/3187 +f 7226/4002/3191 7229/4003/3191 7376/4000/3191 +f 7230/4003/3191 7232/4004/3191 7375/4000/3191 +f 7382/4000/3191 7233/4004/3191 7383/4005/3191 +f 7234/3908/3433 7235/3909/1385 7386/4006/3494 +f 7235/3909/1385 7236/3910/3434 7386/4006/3494 +f 7236/3910/3434 7237/3911/3436 7386/4006/3494 +f 7386/4006/3494 7237/3911/3436 7388/4007/3495 +f 7237/3911/3436 7238/3912/3438 7388/4007/3495 +f 7388/4007/3495 7238/3912/3438 7390/4008/3496 +f 7238/3912/3438 7239/3913/3439 7390/4008/3496 +f 7239/3913/3439 7241/3913/3497 7390/4008/3496 +f 7242/3913/3498 7244/3914/3499 7389/4008/3500 +f 7391/4008/3501 7245/3914/3502 7392/4009/3503 +f 7246/3914/3444 7247/3915/3445 7393/4009/3504 +f 7247/3915/3445 7249/3916/3505 7393/4009/3504 +f 7249/3916/3505 7251/3917/3506 7393/4009/3504 +f 7251/3917/3506 7253/3918/3507 7393/4009/3504 +f 7393/4009/3504 7253/3918/3507 7396/4010/3508 +f 7254/3918/3448 7255/3919/3449 7395/4010/3509 +f 7255/3919/3449 7256/3920/3450 7395/4010/3509 +f 7257/3920/3451 7258/3921/3452 7394/4010/3510 +f 7259/3921/3453 7260/3922/3455 7397/4010/3511 +f 7397/4010/3511 7260/3922/3455 7399/4011/3512 +f 7399/4011/3512 7260/3922/3455 7401/4012/3452 +f 7260/3922/3455 7261/3923/3456 7401/4012/3452 +f 7401/4012/3452 7261/3923/3456 7402/4013/3193 +f 7261/3923/3456 7264/3924/3461 7402/4013/3193 +f 7264/3924/3461 7265/3925/3463 7402/4013/3193 +f 7403/4013/3513 7266/3925/3514 7404/4014/3515 +f 7405/4015/3516 7267/4016/3517 7406/4017/3518 +f 7267/4016/3517 7269/4018/3519 7406/4017/3518 +f 7269/4018/3519 7273/3930/3267 7406/4017/3518 +f 7273/3930/3267 7274/3931/95 7406/4017/3518 +f 7274/3931/95 7275/3932/59 7406/4017/3518 +f 7406/4017/3518 7275/3932/59 7407/4019/3268 +f 7275/3932/59 7278/3933/3464 7407/4019/3268 +f 7408/4020/3520 7276/4021/3258 7409/4022/3462 +f 7409/4022/3462 7276/4021/3258 
7412/4023/3258 +f 7277/4021/3258 7281/4024/3258 7411/4023/3258 +f 7282/4024/3258 7285/4025/3258 7410/4023/3258 +f 7414/4023/3267 7286/4025/3267 7415/4026/95 +f 7417/4027/3521 7287/3936/3373 7418/4028/95 +f 7287/3936/3373 7291/3937/3373 7418/4028/95 +f 7291/3937/3373 7292/3939/3465 7418/4028/95 +f 7418/4028/95 7292/3939/3465 7419/4029/59 +f 7293/3940/3466 7294/3942/3381 7420/4030/3522 +f 7294/3942/3381 7297/3945/3268 7420/4030/3522 +f 7420/4030/3522 7297/3945/3268 7423/4031/3376 +f 7298/3945/3121 7300/3946/3258 7422/4031/3258 +f 7428/4032/3258 7424/4031/3258 7430/4033/3258 +f 7421/4031/3258 7301/3946/3258 7429/4033/3258 +f 7302/3946/3469 7308/3950/3470 7433/4033/3523 +f 7308/3950/3470 7434/4034/3524 7433/4033/3523 +f 7437/4035/3523 7433/4033/3523 7438/4036/3525 +f 7433/4033/3523 7434/4034/3524 7438/4036/3525 +f 7438/4036/3525 7434/4034/3524 7439/4037/3470 +f 7434/4034/3524 7307/3950/3470 7439/4037/3470 +f 7439/4037/3470 7307/3950/3470 7441/4038/3471 +f 7307/3950/3470 7310/3951/3468 7441/4038/3471 +f 7311/3952/96 7315/3954/96 7440/4039/96 +f 7443/4039/3117 7316/3954/3117 7444/4040/3117 +f 7317/3954/62 7318/3955/1291 7446/4040/62 +f 7446/4040/62 7318/3955/1291 7447/4041/3526 +f 7318/3955/1291 7319/3956/2777 7447/4041/3526 +f 7447/4041/3526 7319/3956/2777 7448/4042/2777 +f 7319/3956/2777 7322/3957/45 7448/4042/2777 +f 7449/4043/91 7323/4044/45 7452/4045/3527 +f 7324/4044/23 7327/4046/23 7451/4045/23 +f 7328/4046/23 7330/4047/23 7450/4045/23 +f 7453/4045/23 7331/4047/23 7454/4048/23 +f 7331/4047/23 7334/3963/23 7454/4048/23 +f 7334/3963/23 7338/3965/23 7454/4048/23 +f 7455/4048/82 7339/3965/3475 7456/4049/3391 +f 7456/4049/3391 7339/3965/3475 7460/4050/2766 +f 7460/4050/2766 7339/3965/3475 7461/4051/2766 +f 7339/3965/3475 7340/3968/3391 7461/4051/2766 +f 7340/3968/3391 7342/3970/3477 7461/4051/2766 +f 7463/4051/2963 7343/3970/3480 7464/4052/3480 +f 7343/3970/3480 7344/3973/3481 7464/4052/3480 +f 7465/4053/3528 7345/3974/3482 7466/4054/3529 +f 7345/3974/3482 
7346/3975/3483 7466/4054/3529 +f 7466/4054/3529 7346/3975/3483 7467/4055/3530 +f 7346/3975/3483 7347/3977/3485 7467/4055/3530 +f 7348/3978/3316 7349/3980/3409 7468/4056/3409 +f 7471/4057/3191 7468/4056/3409 7472/4058/3191 +f 7468/4056/3409 7349/3980/3409 7472/4058/3191 +f 7350/3885/3487 7351/3981/3488 7475/4059/3487 +f 7475/4059/3487 7351/3981/3488 7479/4060/3487 +f 7479/4060/3487 7351/3981/3488 7480/4061/3042 +f 7351/3981/3488 7352/3887/3042 7480/4061/3042 +f 7353/3982/3489 7355/3984/3491 7481/4062/3042 +f 7355/3984/3491 7357/4063/3531 7481/4062/3042 +f 7481/4062/3042 7357/4063/3531 7482/4064/3531 +f 7484/4065/3191 7358/3986/3191 7486/4066/3191 +f 7359/3986/3191 7364/3988/3191 7485/4066/3191 +f 7365/4067/3487 7367/3996/3532 7489/4068/3410 +f 7367/3996/3532 7370/3996/1385 7489/4068/3410 +f 7370/3996/1385 7371/3997/1385 7489/4068/3410 +f 7489/4068/3410 7371/3997/1385 7493/4069/3533 +f 7371/3997/1385 7372/3998/1385 7493/4069/3533 +f 7372/3998/1385 7494/4070/3493 7493/4069/3533 +f 7493/4069/3533 7494/4070/3493 7496/4071/3534 +f 7494/4070/3493 7497/4072/3532 7496/4071/3534 +f 7496/4071/3534 7497/4072/3532 7498/4073/1385 +f 7497/4072/3532 7494/4070/3493 7498/4073/1385 +f 7494/4070/3493 7372/3998/1385 7498/4073/1385 +f 7372/3998/1385 7373/3999/3327 7498/4073/1385 +f 7373/3999/3327 7380/4000/3433 7498/4073/1385 +f 7498/4073/1385 7380/4000/3433 7500/4074/3535 +f 7381/4000/3191 7384/4005/3191 7499/4074/3536 +f 7502/4075/3537 7385/4006/3538 7504/4076/3539 +f 7385/4006/3538 7387/4007/3540 7504/4076/3539 +f 7504/4076/3539 7387/4007/3540 7505/4077/3541 +f 7387/4007/3540 7391/4008/3501 7505/4077/3541 +f 7391/4008/3501 7392/4009/3503 7505/4077/3541 +f 7392/4009/3503 7397/4010/3511 7505/4077/3541 +f 7397/4010/3511 7399/4011/3512 7505/4077/3541 +f 7507/4078/3542 7506/4077/3543 7508/4079/3544 +f 7506/4077/3543 7398/4011/3545 7508/4079/3544 +f 7398/4011/3545 7400/4012/3546 7508/4079/3544 +f 7508/4079/3544 7400/4012/3546 7509/4080/3547 +f 7400/4012/3546 7403/4013/3513 7509/4080/3547 
+f 7403/4013/3513 7404/4014/3515 7509/4080/3547 +f 7509/4080/3547 7404/4014/3515 7511/4081/3548 +f 7405/4015/3516 7406/4017/3518 7510/4082/3549 +f 7406/4017/3518 7407/4019/3268 7510/4082/3549 +f 7513/4083/3550 7512/4084/3551 7514/4085/3463 +f 7512/4084/3551 7408/4020/3520 7514/4085/3463 +f 7408/4020/3520 7409/4022/3462 7514/4085/3463 +f 7514/4085/3463 7409/4022/3462 7515/4086/3462 +f 7409/4022/3462 7412/4023/3258 7515/4086/3462 +f 7517/4087/3552 7516/4088/3370 7518/4089/95 +f 7516/4088/3370 7413/4090/3553 7518/4089/95 +f 7413/4090/3553 7416/4091/3521 7518/4089/95 +f 7518/4089/95 7416/4091/3521 7521/4091/59 +f 7417/4027/3521 7418/4028/95 7520/4092/3554 +f 7418/4028/95 7419/4029/59 7520/4092/3554 +f 7420/4030/3522 7423/4031/3376 7519/4093/3555 +f 7423/4031/3376 7425/4032/3376 7519/4093/3555 +f 7522/4091/3555 7426/4094/3376 7525/4095/3376 +f 7427/4032/3258 7431/4033/3258 7524/4096/3258 +f 7432/4033/3258 7435/4035/3258 7523/4096/3258 +f 7527/4096/3258 7436/4035/3258 7528/4097/3462 +f 7437/4035/3523 7438/4036/3525 7529/4097/3556 +f 7529/4097/3556 7438/4036/3525 7530/4098/3557 +f 7438/4036/3525 7439/4037/3470 7530/4098/3557 +f 7439/4037/3470 7441/4038/3471 7530/4098/3557 +f 7530/4098/3557 7441/4038/3471 7531/4099/3558 +f 7442/4039/94 7445/4040/3117 7532/4100/67 +f 7532/4100/67 7445/4040/3117 7534/4101/3559 +f 7446/4040/62 7447/4041/3526 7533/4101/3560 +f 7447/4041/3526 7448/4042/2777 7533/4101/3560 +f 7537/4102/3561 7539/4103/3562 7540/4104/3563 +f 7539/4103/3562 7535/4105/3560 7540/4104/3563 +f 7535/4105/3560 7449/4043/91 7540/4104/3563 +f 7449/4043/91 7452/4045/3527 7540/4104/3563 +f 7540/4104/3563 7452/4045/3527 7541/4106/3564 +f 7452/4045/3527 7455/4048/82 7541/4106/3564 +f 7455/4048/82 7456/4049/3391 7541/4106/3564 +f 7541/4106/3564 7456/4049/3391 7543/4107/2766 +f 7456/4049/3391 7460/4050/2766 7543/4107/2766 +f 7546/4108/3565 7544/4109/3480 7548/4110/3480 +f 7542/4107/2963 7457/4050/2963 7547/4111/2963 +f 7550/4111/2966 7458/4050/2966 7553/4112/2966 +f 
7459/4050/2963 7462/4051/2963 7552/4112/2963 +f 7463/4051/2963 7464/4052/3480 7551/4112/2963 +f 7554/4113/3566 7465/4053/3528 7555/4114/3567 +f 7465/4053/3528 7466/4054/3529 7555/4114/3567 +f 7466/4054/3529 7467/4055/3530 7555/4114/3567 +f 7467/4055/3530 7469/4115/3568 7555/4114/3567 +f 7555/4114/3567 7469/4115/3568 7558/4116/3568 +f 7470/4057/3187 7473/4058/3187 7557/4117/3187 +f 7474/4058/3417 7476/4118/3417 7556/4117/3417 +f 7560/4117/3191 7477/4118/3191 7561/4119/3191 +f 7478/4120/3410 7481/4062/3042 7564/4121/3410 +f 7564/4121/3410 7481/4062/3042 7565/4122/1385 +f 7481/4062/3042 7482/4064/3531 7565/4122/1385 +f 7565/4122/1385 7482/4064/3531 7568/4123/3531 +f 7483/4065/3191 7487/4066/3191 7567/4124/3191 +f 7488/4066/3191 7490/4125/3191 7566/4124/3191 +f 7569/4124/3191 7491/4125/3191 7570/4126/3191 +f 7492/4125/3191 7495/4127/3191 7572/4126/3191 +f 7572/4126/3191 7495/4127/3191 7573/4128/3569 +f 7496/4071/3534 7498/4073/1385 7574/4129/111 +f 7574/4129/111 7498/4073/1385 7575/4130/3570 +f 7498/4073/1385 7500/4074/3535 7575/4130/3570 +f 7501/4075/3571 7503/4076/3572 7576/4131/3573 +f 7576/4131/3573 7503/4076/3572 7577/4132/3574 +f 7503/4076/3572 7506/4077/3543 7577/4132/3574 +f 7506/4077/3543 7507/4078/3542 7577/4132/3574 +f 7577/4132/3574 7507/4078/3542 7579/4133/3575 +f 7579/4133/3575 7507/4078/3542 7580/4134/3576 +f 7507/4078/3542 7508/4079/3544 7580/4134/3576 +f 7508/4079/3544 7509/4080/3547 7580/4134/3576 +f 7582/4135/3512 7581/4136/3512 7584/4137/3452 +f 7580/4134/3576 7509/4080/3547 7583/4138/3577 +f 7509/4080/3547 7511/4081/3548 7583/4138/3577 +f 7584/4137/3452 7512/4084/3551 7585/4139/3578 +f 7512/4084/3551 7513/4083/3550 7585/4139/3578 +f 7585/4139/3578 7513/4083/3550 7586/4140/3551 +f 7513/4083/3550 7514/4085/3463 7586/4140/3551 +f 7514/4085/3463 7515/4086/3462 7586/4140/3551 +f 7587/4141/3579 7516/4088/3370 7588/4142/3580 +f 7516/4088/3370 7517/4087/3552 7588/4142/3580 +f 7588/4142/3580 7517/4087/3552 7589/4143/3581 +f 7517/4087/3552 7518/4089/95 
7589/4143/3581 +f 7518/4089/95 7521/4091/59 7589/4143/3581 +f 7521/4091/59 7526/4095/3582 7589/4143/3581 +f 7589/4143/3581 7526/4095/3582 7590/4144/3376 +f 7527/4096/3258 7528/4097/3462 7591/4145/3462 +f 7591/4145/3462 7528/4097/3462 7592/4146/3463 +f 7529/4097/3556 7530/4098/3557 7594/4146/3583 +f 7594/4146/3583 7530/4098/3557 7595/4147/3584 +f 7530/4098/3557 7531/4099/3558 7595/4147/3584 +f 7532/4100/67 7534/4101/3559 7596/4148/3585 +f 7534/4101/3559 7538/4149/3586 7596/4148/3585 +f 7596/4148/3585 7538/4149/3586 7598/4150/3587 +f 7538/4149/3586 7536/4151/3588 7598/4150/3587 +f 7598/4150/3587 7536/4151/3588 7601/4152/3589 +f 7601/4152/3589 7536/4151/3588 7603/4153/3590 +f 7537/4102/3561 7540/4104/3563 7602/4154/3591 +f 7540/4104/3563 7541/4106/3564 7602/4154/3591 +f 7541/4106/3564 7543/4107/2766 7602/4154/3591 +f 7603/4153/3590 7544/4109/3480 7604/4155/3592 +f 7544/4109/3480 7546/4108/3565 7604/4155/3592 +f 7605/4156/3593 7545/4157/3594 7606/4158/3595 +f 7606/4158/3595 7545/4157/3594 7607/4159/3596 +f 7545/4157/3594 7549/4160/3566 7607/4159/3596 +f 7549/4160/3566 7554/4113/3566 7607/4159/3596 +f 7554/4113/3566 7555/4114/3567 7607/4159/3596 +f 7607/4159/3596 7555/4114/3567 7609/4161/3597 +f 7555/4114/3567 7558/4116/3568 7609/4161/3597 +f 7559/4162/3191 7562/4121/3191 7608/4163/3191 +f 7611/4163/3191 7563/4121/3191 7612/4164/3191 +f 7564/4121/3410 7565/4122/1385 7614/4164/3410 +f 7614/4164/3410 7565/4122/1385 7616/4165/1385 +f 7565/4122/1385 7568/4123/3531 7616/4165/1385 +f 7568/4123/3531 7571/4166/3327 7616/4165/1385 +f 7616/4165/1385 7571/4166/3327 7617/4167/3571 +f 7572/4126/3191 7573/4128/3569 7619/4168/3536 +f 7619/4168/3536 7573/4128/3569 7622/4169/3598 +f 7574/4129/111 7575/4130/3570 7621/4170/3599 +f 7576/4131/3573 7577/4132/3574 7620/4171/3599 +f 7577/4132/3574 7579/4133/3575 7620/4171/3599 +f 7622/4169/3598 7578/4172/3600 7623/4173/3600 +f 7578/4172/3600 7581/4136/3512 7623/4173/3600 +f 7581/4136/3512 7582/4135/3512 7623/4173/3600 +f 7623/4173/3600 
7582/4135/3512 7625/4174/3601 +f 7625/4174/3601 7582/4135/3512 7626/4175/3452 +f 7582/4135/3512 7584/4137/3452 7626/4175/3452 +f 7584/4137/3452 7585/4139/3578 7626/4175/3452 +f 7585/4139/3578 7586/4140/3551 7626/4175/3452 +f 7627/4176/3602 7587/4141/3579 7628/4177/3603 +f 7587/4141/3579 7588/4142/3580 7628/4177/3603 +f 7628/4177/3603 7588/4142/3580 7630/4178/3604 +f 7588/4142/3580 7589/4143/3581 7630/4178/3604 +f 7589/4143/3581 7590/4144/3376 7630/4178/3604 +f 7591/4145/3462 7592/4146/3463 7629/4179/3463 +f 7631/4180/3605 7593/4181/3463 7632/4182/3193 +f 7633/4183/3076 7594/4146/3583 7634/4184/3606 +f 7594/4146/3583 7595/4147/3584 7634/4184/3606 +f 7595/4147/3584 7597/4185/3607 7634/4184/3606 +f 7634/4184/3606 7597/4185/3607 7635/4186/3608 +f 7597/4185/3607 7599/4187/3609 7635/4186/3608 +f 7636/4188/3608 7600/4189/3609 7638/4190/3610 +f 7601/4152/3589 7603/4153/3590 7637/4191/3611 +f 7603/4153/3590 7604/4155/3592 7637/4191/3611 +f 7605/4156/3593 7606/4158/3595 7638/4190/3610 +f 7638/4190/3610 7606/4158/3595 7640/4192/3612 +f 7640/4192/3612 7606/4158/3595 7641/4193/3613 +f 7606/4158/3595 7607/4159/3596 7641/4193/3613 +f 7607/4159/3596 7609/4161/3597 7641/4193/3613 +f 7642/4194/3536 7610/4163/3191 7644/4195/3569 +f 7610/4163/3191 7613/4164/3191 7644/4195/3569 +f 7614/4164/3410 7616/4165/1385 7643/4195/111 +f 7645/4196/111 7615/4197/1385 7646/4198/3614 +f 7615/4197/1385 7618/4199/3535 7646/4198/3614 +f 7646/4198/3614 7618/4199/3535 7647/4200/3615 +f 7619/4168/3536 7622/4169/3598 7648/4201/3616 +f 7622/4169/3598 7623/4173/3600 7648/4201/3616 +f 7648/4201/3616 7623/4173/3600 7649/4202/3600 +f 7623/4173/3600 7625/4174/3601 7649/4202/3600 +f 7650/4203/3617 7624/4204/3618 7651/4205/3544 +f 7651/4205/3544 7624/4204/3618 7652/4206/3619 +f 7624/4204/3618 7627/4176/3602 7652/4206/3619 +f 7627/4176/3602 7628/4177/3603 7652/4206/3619 +f 7652/4206/3619 7628/4177/3603 7653/4207/3620 +f 7628/4177/3603 7630/4178/3604 7653/4207/3620 +f 7631/4180/3605 7632/4182/3193 7654/4208/3621 +f 
7654/4208/3621 7632/4182/3193 7655/4209/3452 +f 7633/4183/3076 7634/4184/3606 7656/4210/3622 +f 7634/4184/3606 7635/4186/3608 7656/4210/3622 +f 7656/4210/3622 7635/4186/3608 7657/4211/3623 +f 7659/4212/3623 7636/4188/3608 7660/4213/3624 +f 7636/4188/3608 7638/4190/3610 7660/4213/3624 +f 7638/4190/3610 7640/4192/3612 7660/4213/3624 +f 7661/4214/3600 7639/4215/3625 7662/4216/3626 +f 7639/4215/3625 7642/4194/3536 7662/4216/3626 +f 7642/4194/3536 7644/4195/3569 7662/4216/3626 +f 7645/4196/111 7646/4198/3614 7663/4217/3627 +f 7663/4217/3627 7646/4198/3614 7664/4218/3574 +f 7646/4198/3614 7647/4200/3615 7664/4218/3574 +f 7647/4200/3615 7650/4203/3617 7664/4218/3574 +f 7664/4218/3574 7650/4203/3617 7665/4219/3628 +f 7650/4203/3617 7651/4205/3544 7665/4219/3628 +f 7651/4205/3544 7652/4206/3619 7665/4219/3628 +f 7665/4219/3628 7652/4206/3619 7666/4220/3629 +f 7652/4206/3619 7653/4207/3620 7666/4220/3629 +f 7654/4208/3621 7655/4209/3452 7667/4221/3512 +f 7655/4209/3452 7658/4222/3601 7667/4221/3512 +f 7658/4222/3601 7661/4214/3600 7667/4221/3512 +f 7667/4221/3512 7661/4214/3600 7668/4223/3600 +f 7661/4214/3600 7662/4216/3626 7668/4223/3600 +f 7663/4217/3627 7664/4218/3574 7669/4224/3630 +f 7664/4218/3574 7665/4219/3628 7669/4224/3630 +f 7666/4220/3629 7669/4224/3630 7665/4219/3628 diff --git a/maniskill3_environment_assets/racks/uppsnofsad_box.mtl b/maniskill3_environment_assets/racks/uppsnofsad_box.mtl new file mode 100644 index 0000000000000000000000000000000000000000..72fdd763c00f660562fef51f703ef79cda5683ad --- /dev/null +++ b/maniskill3_environment_assets/racks/uppsnofsad_box.mtl @@ -0,0 +1,12 @@ +# Blender 4.3.2 MTL File: 'None' +# www.blender.org + +newmtl Material_0.004 +Ns 250.000000 +Ka 1.000000 1.000000 1.000000 +Kd 0.800000 0.800000 0.800000 +Ks 0.500000 0.500000 0.500000 +Ke 0.000000 0.000000 0.000000 +Ni 1.500000 +d 1.000000 +illum 2 diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/README.md 
b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/README.md new file mode 100644 index 0000000000000000000000000000000000000000..85abb266da1d8c6dcfc121d805491593c4a4bbee --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/README.md @@ -0,0 +1,9 @@ +# ManiSkill Baselines + +This folder contains code for all implemented ManiSkill baselines which currently include online Reinforcement Learning and Imitation Learning. All baseline results are published to our [public wandb page](https://wandb.ai/stonet2000/ManiSkill). + +For more details on baselines (e.g. how to setup maniskill for RL, run baselines etc.) follow the links below in our documentation: + +- Online Reinforcement Learning: https://maniskill.readthedocs.io/en/latest/user_guide/reinforcement_learning/index.html +- Learning From Demonstrations / Imitation Learning: https://maniskill.readthedocs.io/en/latest/user_guide/learning_from_demos/index.html + diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/.gitignore b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..fa81ef3dba456fe7f0a7070233d278b6f8f742b3 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/.gitignore @@ -0,0 +1,2 @@ +runs/ +wandb/ \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/README.md b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/README.md new file mode 100644 index 0000000000000000000000000000000000000000..95f69b2a074131219f81167b599aba771c7b1955 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/README.md @@ -0,0 +1,61 @@ +# Action Chunking with Transformers (ACT) + +Code for running the ACT algorithm based on ["Learning Fine-Grained Bimanual Manipulation with Low-Cost 
Hardware"](https://arxiv.org/pdf/2304.13705). It is adapted from the [original code](https://github.com/tonyzhaozh/act). + +## Installation + +To get started, we recommend using conda/mamba to create a new environment and install the dependencies + +```bash +conda create -n act-ms python=3.9 +conda activate act-ms +pip install -e . +``` + +## Setup + +Read through the [imitation learning setup documentation](https://maniskill.readthedocs.io/en/latest/user_guide/learning_from_demos/setup.html) which details everything you need to know regarding running imitation learning baselines in ManiSkill. It includes details on how to download demonstration datasets, preprocess them, evaluate policies fairly for comparison, as well as suggestions to improve performance and avoid bugs. + +## Training + +We provide scripts to train ACT on demonstrations. + +Note that some demonstrations are slow (e.g. motion planning or human teleoperated) and can exceed the default max episode steps which can be an issue as imitation learning algorithms learn to solve the task at the same speed the demonstrations solve it. In this case, you can use the `--max-episode-steps` flag to set a higher value so that the policy can solve the task in time. General recommendation is to set `--max-episode-steps` to about 2x the length of the mean demonstrations length you are using for training. We have tuned baselines in the `baselines.sh` script that set a recommended `--max-episode-steps` for each task. + +Example state-based training, learning from 100 demonstrations generated via motionplanning in the PickCube-v1 task. 
+ +```bash +seed=1 +demos=100 +python train.py --env-id PickCube-v1 \ + --demo-path ~/.maniskill/demos/PickCube-v1/motionplanning/trajectory.state.pd_ee_delta_pos.physx_cpu.h5 \ + --control-mode "pd_ee_delta_pos" --sim-backend "physx_cpu" --num_demos $demos --max_episode_steps 100 \ + --total_iters 30000 --log_freq 100 --eval_freq 5000 \ + --exp-name=act-PickCube-v1-state-${demos}_motionplanning_demos-$seed \ + --track # track training on wandb +``` + +## Citation + +If you use this baseline please cite the following +``` +@inproceedings{DBLP:conf/rss/ZhaoKLF23, + author = {Tony Z. Zhao and + Vikash Kumar and + Sergey Levine and + Chelsea Finn}, + editor = {Kostas E. Bekris and + Kris Hauser and + Sylvia L. Herbert and + Jingjin Yu}, + title = {Learning Fine-Grained Bimanual Manipulation with Low-Cost Hardware}, + booktitle = {Robotics: Science and Systems XIX, Daegu, Republic of Korea, July + 10-14, 2023}, + year = {2023}, + url = {https://doi.org/10.15607/RSS.2023.XIX.016}, + doi = {10.15607/RSS.2023.XIX.016}, + timestamp = {Thu, 20 Jul 2023 15:37:49 +0200}, + biburl = {https://dblp.org/rec/conf/rss/ZhaoKLF23.bib}, + bibsource = {dblp computer science bibliography, https://dblp.org} +} +``` diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/detr/backbone.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/detr/backbone.py new file mode 100644 index 0000000000000000000000000000000000000000..d1328e43558ebdad681c9e8b74618a55977e4c38 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/detr/backbone.py @@ -0,0 +1,129 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Backbone modules. 
+""" +from collections import OrderedDict + +import torch +import torch.nn.functional as F +import torchvision +from torch import nn +from torchvision.models._utils import IntermediateLayerGetter +from typing import Dict, List + +from act.utils import NestedTensor, is_main_process +from act.detr.position_encoding import build_position_encoding + +import IPython +e = IPython.embed + +class FrozenBatchNorm2d(torch.nn.Module): + """ + BatchNorm2d where the batch statistics and the affine parameters are fixed. + + Copy-paste from torchvision.misc.ops with added eps before rqsrt, + without which any other policy_models than torchvision.policy_models.resnet[18,34,50,101] + produce nans. + """ + + def __init__(self, n): + super(FrozenBatchNorm2d, self).__init__() + self.register_buffer("weight", torch.ones(n)) + self.register_buffer("bias", torch.zeros(n)) + self.register_buffer("running_mean", torch.zeros(n)) + self.register_buffer("running_var", torch.ones(n)) + + def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict, + missing_keys, unexpected_keys, error_msgs): + num_batches_tracked_key = prefix + 'num_batches_tracked' + if num_batches_tracked_key in state_dict: + del state_dict[num_batches_tracked_key] + + super(FrozenBatchNorm2d, self)._load_from_state_dict( + state_dict, prefix, local_metadata, strict, + missing_keys, unexpected_keys, error_msgs) + + def forward(self, x): + # move reshapes to the beginning + # to make it fuser-friendly + w = self.weight.reshape(1, -1, 1, 1) + b = self.bias.reshape(1, -1, 1, 1) + rv = self.running_var.reshape(1, -1, 1, 1) + rm = self.running_mean.reshape(1, -1, 1, 1) + eps = 1e-5 + scale = w * (rv + eps).rsqrt() + bias = b - rm * scale + return x * scale + bias + + +class BackboneBase(nn.Module): + + def __init__(self, backbone: nn.Module, train_backbone: bool, num_channels: int, return_interm_layers: bool): + super().__init__() + # for name, parameter in backbone.named_parameters(): # only train later layers # 
TODO do we want this? + # if not train_backbone or 'layer2' not in name and 'layer3' not in name and 'layer4' not in name: + # parameter.requires_grad_(False) + if return_interm_layers: + return_layers = {"layer1": "0", "layer2": "1", "layer3": "2", "layer4": "3"} + else: + return_layers = {'layer4': "0"} + self.body = IntermediateLayerGetter(backbone, return_layers=return_layers) + self.num_channels = num_channels + + def forward(self, tensor): + xs = self.body(tensor) + return xs + # out: Dict[str, NestedTensor] = {} + # for name, x in xs.items(): + # m = tensor_list.mask + # assert m is not None + # mask = F.interpolate(m[None].float(), size=x.shape[-2:]).to(torch.bool)[0] + # out[name] = NestedTensor(x, mask) + # return out + + +class Backbone(BackboneBase): + """ResNet backbone with frozen BatchNorm.""" + def __init__(self, name: str, + train_backbone: bool, + return_interm_layers: bool, + dilation: bool, + include_depth: bool): + backbone = getattr(torchvision.models, name)( + replace_stride_with_dilation=[False, False, dilation], + pretrained=is_main_process(), norm_layer=FrozenBatchNorm2d) # pretrained # TODO do we want frozen batch_norm?? 
class Backbone(BackboneBase):
    """ResNet backbone with frozen BatchNorm."""

    def __init__(self, name: str,
                 train_backbone: bool,
                 return_interm_layers: bool,
                 dilation: bool,
                 include_depth: bool):
        # Instantiate the requested torchvision ResNet with BatchNorm swapped
        # for FrozenBatchNorm2d; pretrained weights are only fetched on the
        # main process. NOTE(review): `pretrained=` is deprecated in newer
        # torchvision in favor of `weights=` — confirm the pinned version.
        # pretrained # TODO do we want frozen batch_norm??
        resnet_factory = getattr(torchvision.models, name)
        backbone = resnet_factory(
            replace_stride_with_dilation=[False, False, dilation],
            pretrained=is_main_process(), norm_layer=FrozenBatchNorm2d)

        # for rgbd data: widen conv1 with a zero-initialized 4th input channel
        if include_depth:
            rgb_weight = backbone.conv1.weight
            depth_weight = torch.zeros((64, 1, 7, 7), dtype=rgb_weight.dtype)
            backbone.conv1.weight = nn.Parameter(
                torch.cat([rgb_weight, depth_weight], dim=1))

        channels = 512 if name in ('resnet18', 'resnet34') else 2048
        super().__init__(backbone, train_backbone, channels, return_interm_layers)


class Joiner(nn.Sequential):
    """Pairs a backbone with its position-embedding module.

    forward() returns (feature_maps, position_encodings) as parallel lists.
    """

    def __init__(self, backbone, position_embedding):
        super().__init__(backbone, position_embedding)

    def forward(self, tensor_list: NestedTensor):
        feature_maps = self[0](tensor_list)
        features: List[NestedTensor] = []
        positions = []
        for fmap in feature_maps.values():
            features.append(fmap)
            # position encoding, cast to the feature dtype
            positions.append(self[1](fmap).to(fmap.dtype))
        return features, positions


def build_backbone(args):
    """Construct the Joiner(backbone, position embedding) from parsed args."""
    position_embedding = build_position_encoding(args)
    backbone = Backbone(args.backbone, args.lr_backbone > 0, args.masks,
                        args.dilation, args.include_depth)
    joiner = Joiner(backbone, position_embedding)
    joiner.num_channels = backbone.num_channels
    return joiner
+""" +import torch +from torch import nn +from torch.autograd import Variable +from act.detr.transformer import build_transformer, TransformerEncoder, TransformerEncoderLayer + +import numpy as np + +import IPython +e = IPython.embed + + +def reparametrize(mu, logvar): + std = logvar.div(2).exp() + eps = Variable(std.data.new(std.size()).normal_()) + return mu + std * eps + + +def get_sinusoid_encoding_table(n_position, d_hid): + def get_position_angle_vec(position): + return [position / np.power(10000, 2 * (hid_j // 2) / d_hid) for hid_j in range(d_hid)] + + sinusoid_table = np.array([get_position_angle_vec(pos_i) for pos_i in range(n_position)]) + sinusoid_table[:, 0::2] = np.sin(sinusoid_table[:, 0::2]) # dim 2i + sinusoid_table[:, 1::2] = np.cos(sinusoid_table[:, 1::2]) # dim 2i+1 + + return torch.FloatTensor(sinusoid_table).unsqueeze(0) + + +class DETRVAE(nn.Module): + """ This is the DETR module that performs object detection """ + def __init__(self, backbones, transformer, encoder, state_dim, action_dim, num_queries): + super().__init__() + self.num_queries = num_queries + self.transformer = transformer + self.encoder = encoder + hidden_dim = transformer.d_model + self.action_head = nn.Linear(hidden_dim, action_dim) + self.query_embed = nn.Embedding(num_queries, hidden_dim) + if backbones is not None: + self.input_proj = nn.Conv2d(backbones[0].num_channels, hidden_dim, kernel_size=1) + self.backbones = nn.ModuleList(backbones) + self.input_proj_robot_state = nn.Linear(state_dim, hidden_dim) + else: + self.input_proj_robot_state = nn.Linear(state_dim, hidden_dim) + self.backbones = None + + # encoder extra parameters + self.latent_dim = 32 # size of latent z + self.cls_embed = nn.Embedding(1, hidden_dim) # extra cls token embedding + self.encoder_state_proj = nn.Linear(state_dim, hidden_dim) # project state to embedding + self.encoder_action_proj = nn.Linear(action_dim, hidden_dim) # project action to embedding + self.latent_proj = nn.Linear(hidden_dim, 
self.latent_dim*2) # project hidden state to latent std, var + self.register_buffer('pos_table', get_sinusoid_encoding_table(1+1+num_queries, hidden_dim)) # [CLS], state, actions + + # decoder extra parameters + self.latent_out_proj = nn.Linear(self.latent_dim, hidden_dim) # project latent sample to embedding + self.additional_pos_embed = nn.Embedding(2, hidden_dim) # learned position embedding for state and proprio + + def forward(self, obs, actions=None): + is_training = actions is not None + state = obs['state'] if self.backbones is not None else obs + bs = state.shape[0] + + if is_training: + # project CLS token, state sequence, and action sequence to embedding dim + cls_embed = self.cls_embed.weight # (1, hidden_dim) + cls_embed = torch.unsqueeze(cls_embed, axis=0).repeat(bs, 1, 1) # (bs, 1, hidden_dim) + state_embed = self.encoder_state_proj(state) # (bs, hidden_dim) + state_embed = torch.unsqueeze(state_embed, axis=1) # (bs, 1, hidden_dim) + action_embed = self.encoder_action_proj(actions) # (bs, seq, hidden_dim) + # concat them together to form an input to the CVAE encoder + encoder_input = torch.cat([cls_embed, state_embed, action_embed], axis=1) # (bs, seq+2, hidden_dim) + encoder_input = encoder_input.permute(1, 0, 2) # (seq+2, bs, hidden_dim) + # no masking is applied to all parts of the CVAE encoder input + is_pad = torch.full((bs, encoder_input.shape[0]), False).to(state.device) # False: not a padding + # obtain position embedding + pos_embed = self.pos_table.clone().detach() + pos_embed = pos_embed.permute(1, 0, 2) # (seq+2, 1, hidden_dim) + # query CVAE encoder + encoder_output = self.encoder(encoder_input, pos=pos_embed, src_key_padding_mask=is_pad) + encoder_output = encoder_output[0] # take cls output only + latent_info = self.latent_proj(encoder_output) + mu = latent_info[:, :self.latent_dim] + logvar = latent_info[:, self.latent_dim:] + latent_sample = reparametrize(mu, logvar) + latent_input = self.latent_out_proj(latent_sample) + else: + mu = 
logvar = None + latent_sample = torch.zeros([bs, self.latent_dim], dtype=torch.float32).to(state.device) + latent_input = self.latent_out_proj(latent_sample) + + # CVAE decoder + if self.backbones is not None: + vis_data = obs['rgb'] + if "depth" in obs: + vis_data = torch.cat([vis_data, obs['depth']], dim=2) + num_cams = vis_data.shape[1] + + # Image observation features and position embeddings + all_cam_features = [] + all_cam_pos = [] + for cam_id in range(num_cams): + features, pos = self.backbones[0](vis_data[:, cam_id]) # HARDCODED + features = features[0] # take the last layer feature # (batch, hidden_dim, H, W) + pos = pos[0] # (1, hidden_dim, H, W) + all_cam_features.append(self.input_proj(features)) + all_cam_pos.append(pos) + + # proprioception features (state) + proprio_input = self.input_proj_robot_state(state) + # fold camera dimension into width dimension + src = torch.cat(all_cam_features, axis=3) # (batch, hidden_dim, 4, 8) + pos = torch.cat(all_cam_pos, axis=3) # (batch, hidden_dim, 4, 8) + hs = self.transformer(src, None, self.query_embed.weight, pos, latent_input, proprio_input, self.additional_pos_embed.weight)[0] # (batch, num_queries, hidden_dim) + else: + state = self.input_proj_robot_state(state) + hs = self.transformer(None, None, self.query_embed.weight, None, latent_input, state, self.additional_pos_embed.weight)[0] + + a_hat = self.action_head(hs) + return a_hat, [mu, logvar] + + +def build_encoder(args): + d_model = args.hidden_dim # 256 + dropout = args.dropout # 0.1 + nhead = args.nheads # 8 + dim_feedforward = args.dim_feedforward # 2048 + num_encoder_layers = args.enc_layers # 4 # TODO shared with VAE decoder + normalize_before = args.pre_norm # False + activation = "relu" + + encoder_layer = TransformerEncoderLayer(d_model, nhead, dim_feedforward, + dropout, activation, normalize_before) + encoder_norm = nn.LayerNorm(d_model) if normalize_before else None + encoder = TransformerEncoder(encoder_layer, num_encoder_layers, 
encoder_norm) + + return encoder diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/detr/position_encoding.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/detr/position_encoding.py new file mode 100644 index 0000000000000000000000000000000000000000..f7585ccd33e8c7b2dc7584c26df571416ef65778 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/detr/position_encoding.py @@ -0,0 +1,93 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Various positional encodings for the transformer. +""" +import math +import torch +from torch import nn + +from act.utils import NestedTensor + +import IPython +e = IPython.embed + +class PositionEmbeddingSine(nn.Module): + """ + This is a more standard version of the position embedding, very similar to the one + used by the Attention is all you need paper, generalized to work on images. + """ + def __init__(self, num_pos_feats=64, temperature=10000, normalize=False, scale=None): + super().__init__() + self.num_pos_feats = num_pos_feats + self.temperature = temperature + self.normalize = normalize + if scale is not None and normalize is False: + raise ValueError("normalize should be True if scale is passed") + if scale is None: + scale = 2 * math.pi + self.scale = scale + + def forward(self, tensor): + x = tensor + # mask = tensor_list.mask + # assert mask is not None + # not_mask = ~mask + + not_mask = torch.ones_like(x[0, [0]]) + y_embed = not_mask.cumsum(1, dtype=torch.float32) + x_embed = not_mask.cumsum(2, dtype=torch.float32) + if self.normalize: + eps = 1e-6 + y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale + x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale + + dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device) + dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats) + + pos_x = x_embed[:, :, :, None] / dim_t + pos_y = y_embed[:, :, :, 
None] / dim_t + pos_x = torch.stack((pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4).flatten(3) + pos_y = torch.stack((pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4).flatten(3) + pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) + return pos + + +class PositionEmbeddingLearned(nn.Module): + """ + Absolute pos embedding, learned. + """ + def __init__(self, num_pos_feats=256): + super().__init__() + self.row_embed = nn.Embedding(50, num_pos_feats) + self.col_embed = nn.Embedding(50, num_pos_feats) + self.reset_parameters() + + def reset_parameters(self): + nn.init.uniform_(self.row_embed.weight) + nn.init.uniform_(self.col_embed.weight) + + def forward(self, tensor_list: NestedTensor): + x = tensor_list.tensors + h, w = x.shape[-2:] + i = torch.arange(w, device=x.device) + j = torch.arange(h, device=x.device) + x_emb = self.col_embed(i) + y_emb = self.row_embed(j) + pos = torch.cat([ + x_emb.unsqueeze(0).repeat(h, 1, 1), + y_emb.unsqueeze(1).repeat(1, w, 1), + ], dim=-1).permute(2, 0, 1).unsqueeze(0).repeat(x.shape[0], 1, 1, 1) + return pos + + +def build_position_encoding(args): + N_steps = args.hidden_dim // 2 + if args.position_embedding in ('v2', 'sine'): + # TODO find a better way of exposing other arguments + position_embedding = PositionEmbeddingSine(N_steps, normalize=True) + elif args.position_embedding in ('v3', 'learned'): + position_embedding = PositionEmbeddingLearned(N_steps) + else: + raise ValueError(f"not supported {args.position_embedding}") + + return position_embedding diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/detr/transformer.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/detr/transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..335a414062e2e8f1fa474540225a1c13395f94d8 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/detr/transformer.py @@ -0,0 +1,313 @@ +# 
Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +DETR Transformer class. + +Copy-paste from torch.nn.Transformer with modifications: + * positional encodings are passed in MHattention + * extra LN at the end of encoder is removed + * decoder returns a stack of activations from all decoding layers +""" +import copy +from typing import Optional, List + +import torch +import torch.nn.functional as F +from torch import nn, Tensor + +import IPython +e = IPython.embed + +class Transformer(nn.Module): + + def __init__(self, d_model=512, nhead=8, num_encoder_layers=6, + num_decoder_layers=6, dim_feedforward=2048, dropout=0.1, + activation="relu", normalize_before=False, + return_intermediate_dec=False): + super().__init__() + + encoder_layer = TransformerEncoderLayer(d_model, nhead, dim_feedforward, + dropout, activation, normalize_before) + encoder_norm = nn.LayerNorm(d_model) if normalize_before else None + self.encoder = TransformerEncoder(encoder_layer, num_encoder_layers, encoder_norm) + + decoder_layer = TransformerDecoderLayer(d_model, nhead, dim_feedforward, + dropout, activation, normalize_before) + decoder_norm = nn.LayerNorm(d_model) + self.decoder = TransformerDecoder(decoder_layer, num_decoder_layers, decoder_norm, + return_intermediate=return_intermediate_dec) + + self._reset_parameters() + + self.d_model = d_model + self.nhead = nhead + + def _reset_parameters(self): + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + + def forward(self, src, mask, query_embed, pos_embed, latent_input=None, proprio_input=None, additional_pos_embed=None): + if src is None: + bs = proprio_input.shape[0] + query_embed = query_embed.unsqueeze(1).repeat(1, bs, 1) + pos_embed = additional_pos_embed.unsqueeze(1).repeat(1, bs, 1) # seq, bs, dim + src = torch.stack([latent_input, proprio_input], axis=0) + # TODO flatten only when input has H and W + elif len(src.shape) == 4: # has H and W + # flatten NxCxHxW to HWxNxC + bs, c, h, w 
= src.shape + src = src.flatten(2).permute(2, 0, 1) + pos_embed = pos_embed.flatten(2).permute(2, 0, 1).repeat(1, bs, 1) + query_embed = query_embed.unsqueeze(1).repeat(1, bs, 1) + # mask = mask.flatten(1) + + additional_pos_embed = additional_pos_embed.unsqueeze(1).repeat(1, bs, 1) # seq, bs, dim + pos_embed = torch.cat([additional_pos_embed, pos_embed], axis=0) + + addition_input = torch.stack([latent_input, proprio_input], axis=0) + src = torch.cat([addition_input, src], axis=0) + + tgt = torch.zeros_like(query_embed) + memory = self.encoder(src, src_key_padding_mask=mask, pos=pos_embed) + hs = self.decoder(tgt, memory, memory_key_padding_mask=mask, + pos=pos_embed, query_pos=query_embed) + hs = hs.transpose(1, 2) + return hs + + +class TransformerEncoder(nn.Module): + + def __init__(self, encoder_layer, num_layers, norm=None): + super().__init__() + self.layers = _get_clones(encoder_layer, num_layers) + self.num_layers = num_layers + self.norm = norm + + def forward(self, src, + mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None): + output = src + + for layer in self.layers: + output = layer(output, src_mask=mask, + src_key_padding_mask=src_key_padding_mask, pos=pos) + + if self.norm is not None: + output = self.norm(output) + + return output + + +class TransformerDecoder(nn.Module): + + def __init__(self, decoder_layer, num_layers, norm=None, return_intermediate=False): + super().__init__() + self.layers = _get_clones(decoder_layer, num_layers) + self.num_layers = num_layers + self.norm = norm + self.return_intermediate = return_intermediate + + def forward(self, tgt, memory, + tgt_mask: Optional[Tensor] = None, + memory_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + memory_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + output = tgt + + intermediate = [] + + for layer in self.layers: + output = 
layer(output, memory, tgt_mask=tgt_mask, + memory_mask=memory_mask, + tgt_key_padding_mask=tgt_key_padding_mask, + memory_key_padding_mask=memory_key_padding_mask, + pos=pos, query_pos=query_pos) + if self.return_intermediate: + intermediate.append(self.norm(output)) + + if self.norm is not None: + output = self.norm(output) + if self.return_intermediate: + intermediate.pop() + intermediate.append(output) + + if self.return_intermediate: + return torch.stack(intermediate) + + return output.unsqueeze(0) + + +class TransformerEncoderLayer(nn.Module): + + def __init__(self, d_model, nhead, dim_feedforward=2048, dropout=0.1, + activation="relu", normalize_before=False): + super().__init__() + self.self_attn = nn.MultiheadAttention(d_model, nhead, dropout=dropout) + # Implementation of Feedforward model + self.linear1 = nn.Linear(d_model, dim_feedforward) + self.dropout = nn.Dropout(dropout) + self.linear2 = nn.Linear(dim_feedforward, d_model) + + self.norm1 = nn.LayerNorm(d_model) + self.norm2 = nn.LayerNorm(d_model) + self.dropout1 = nn.Dropout(dropout) + self.dropout2 = nn.Dropout(dropout) + + self.activation = _get_activation_fn(activation) + self.normalize_before = normalize_before + + def with_pos_embed(self, tensor, pos: Optional[Tensor]): + return tensor if pos is None else tensor + pos + + def forward_post(self, + src, + src_mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None): + q = k = self.with_pos_embed(src, pos) + src2 = self.self_attn(q, k, value=src, attn_mask=src_mask, + key_padding_mask=src_key_padding_mask)[0] + src = src + self.dropout1(src2) + src = self.norm1(src) + src2 = self.linear2(self.dropout(self.activation(self.linear1(src)))) + src = src + self.dropout2(src2) + src = self.norm2(src) + return src + + def forward_pre(self, src, + src_mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None): + src2 = self.norm1(src) + q = k = 
self.with_pos_embed(src2, pos) + src2 = self.self_attn(q, k, value=src2, attn_mask=src_mask, + key_padding_mask=src_key_padding_mask)[0] + src = src + self.dropout1(src2) + src2 = self.norm2(src) + src2 = self.linear2(self.dropout(self.activation(self.linear1(src2)))) + src = src + self.dropout2(src2) + return src + + def forward(self, src, + src_mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None): + if self.normalize_before: + return self.forward_pre(src, src_mask, src_key_padding_mask, pos) + return self.forward_post(src, src_mask, src_key_padding_mask, pos) + + +class TransformerDecoderLayer(nn.Module): + + def __init__(self, d_model, nhead, dim_feedforward=2048, dropout=0.1, + activation="relu", normalize_before=False): + super().__init__() + self.self_attn = nn.MultiheadAttention(d_model, nhead, dropout=dropout) + self.multihead_attn = nn.MultiheadAttention(d_model, nhead, dropout=dropout) + # Implementation of Feedforward model + self.linear1 = nn.Linear(d_model, dim_feedforward) + self.dropout = nn.Dropout(dropout) + self.linear2 = nn.Linear(dim_feedforward, d_model) + + self.norm1 = nn.LayerNorm(d_model) + self.norm2 = nn.LayerNorm(d_model) + self.norm3 = nn.LayerNorm(d_model) + self.dropout1 = nn.Dropout(dropout) + self.dropout2 = nn.Dropout(dropout) + self.dropout3 = nn.Dropout(dropout) + + self.activation = _get_activation_fn(activation) + self.normalize_before = normalize_before + + def with_pos_embed(self, tensor, pos: Optional[Tensor]): + return tensor if pos is None else tensor + pos + + def forward_post(self, tgt, memory, + tgt_mask: Optional[Tensor] = None, + memory_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + memory_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + q = k = self.with_pos_embed(tgt, query_pos) + tgt2 = self.self_attn(q, k, value=tgt, attn_mask=tgt_mask, + 
key_padding_mask=tgt_key_padding_mask)[0] + tgt = tgt + self.dropout1(tgt2) + tgt = self.norm1(tgt) + tgt2 = self.multihead_attn(query=self.with_pos_embed(tgt, query_pos), + key=self.with_pos_embed(memory, pos), + value=memory, attn_mask=memory_mask, + key_padding_mask=memory_key_padding_mask)[0] + tgt = tgt + self.dropout2(tgt2) + tgt = self.norm2(tgt) + tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt)))) + tgt = tgt + self.dropout3(tgt2) + tgt = self.norm3(tgt) + return tgt + + def forward_pre(self, tgt, memory, + tgt_mask: Optional[Tensor] = None, + memory_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + memory_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + tgt2 = self.norm1(tgt) + q = k = self.with_pos_embed(tgt2, query_pos) + tgt2 = self.self_attn(q, k, value=tgt2, attn_mask=tgt_mask, + key_padding_mask=tgt_key_padding_mask)[0] + tgt = tgt + self.dropout1(tgt2) + tgt2 = self.norm2(tgt) + tgt2 = self.multihead_attn(query=self.with_pos_embed(tgt2, query_pos), + key=self.with_pos_embed(memory, pos), + value=memory, attn_mask=memory_mask, + key_padding_mask=memory_key_padding_mask)[0] + tgt = tgt + self.dropout2(tgt2) + tgt2 = self.norm3(tgt) + tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt2)))) + tgt = tgt + self.dropout3(tgt2) + return tgt + + def forward(self, tgt, memory, + tgt_mask: Optional[Tensor] = None, + memory_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + memory_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + if self.normalize_before: + return self.forward_pre(tgt, memory, tgt_mask, memory_mask, + tgt_key_padding_mask, memory_key_padding_mask, pos, query_pos) + return self.forward_post(tgt, memory, tgt_mask, memory_mask, + tgt_key_padding_mask, memory_key_padding_mask, pos, query_pos) + + +def _get_clones(module, 
N): + return nn.ModuleList([copy.deepcopy(module) for i in range(N)]) + + +def build_transformer(args): + return Transformer( + d_model=args.hidden_dim, + dropout=args.dropout, + nhead=args.nheads, + dim_feedforward=args.dim_feedforward, + num_encoder_layers=args.enc_layers, + num_decoder_layers=args.dec_layers, + normalize_before=args.pre_norm, + return_intermediate_dec=True, + ) + + +def _get_activation_fn(activation): + """Return an activation function given a string""" + if activation == "relu": + return F.relu + if activation == "gelu": + return F.gelu + if activation == "glu": + return F.glu + raise RuntimeError(F"activation should be relu/gelu, not {activation}.") diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/evaluate.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/evaluate.py new file mode 100644 index 0000000000000000000000000000000000000000..b31cdc46b25fc20a11a79a5f28f1a4c8a12fe92b --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/evaluate.py @@ -0,0 +1,98 @@ +from collections import defaultdict +import gymnasium +import numpy as np +import torch + +from mani_skill.utils import common + +def evaluate(n: int, agent, eval_envs, eval_kwargs): + stats, num_queries, temporal_agg, max_timesteps, device, sim_backend = eval_kwargs.values() + + use_visual_obs = isinstance(eval_envs.single_observation_space.sample(), dict) + delta_control = not stats + if not delta_control: + if sim_backend == "physx_cpu": + pre_process = lambda s_obs: (s_obs - stats['state_mean'].cpu().numpy()) / stats['state_std'].cpu().numpy() + else: + pre_process = lambda s_obs: (s_obs - stats['state_mean']) / stats['state_std'] + post_process = lambda a: a * stats['action_std'] + stats['action_mean'] + + # create action table for temporal ensembling + action_dim = eval_envs.action_space.shape[-1] + num_envs = eval_envs.num_envs + if temporal_agg: + query_frequency = 1 + 
all_time_actions = torch.zeros([num_envs, max_timesteps, max_timesteps+num_queries, action_dim], device=device) + else: + query_frequency = num_queries + actions_to_take = torch.zeros([num_envs, num_queries, action_dim], device=device) + + agent.eval() + with torch.no_grad(): + eval_metrics = defaultdict(list) + obs, info = eval_envs.reset() + ts, eps_count = 0, 0 + while eps_count < n: + # pre-process obs + if use_visual_obs: + obs['state'] = pre_process(obs['state']) if not delta_control else obs['state'] # (num_envs, obs_dim) + obs = {k: common.to_tensor(v, device) for k, v in obs.items()} + else: + obs = pre_process(obs) if not delta_control else obs # (num_envs, obs_dim) + obs = common.to_tensor(obs, device) + + # query policy + if ts % query_frequency == 0: + action_seq = agent.get_action(obs) # (num_envs, num_queries, action_dim) + + # we assume ignore_terminations=True. Otherwise, some envs could be done + # earlier, so we would need to temporally ensemble at corresponding timestep + # for each env. + if temporal_agg: + assert query_frequency == 1, "query_frequency != 1 has not been implemented for temporal_agg==1." + all_time_actions[:, ts, ts:ts+num_queries] = action_seq # (num_envs, num_queries, act_dim) + actions_for_curr_step = all_time_actions[:, :, ts] # (num_envs, max_timesteps, act_dim) + # since we pad the action with 0 in 'delta_pos' control mode, this causes error. 
+ #actions_populated = torch.all(actions_for_curr_step[0] != 0, axis=1) # (max_timesteps,) + actions_populated = torch.zeros(max_timesteps, dtype=torch.bool, device=device) # (max_timesteps,) + actions_populated[max(0, ts + 1 - num_queries):ts+1] = True + actions_for_curr_step = actions_for_curr_step[:, actions_populated] # (num_envs, num_populated, act_dim) + k = 0.01 + if ts < num_queries: + exp_weights = torch.exp(-k * torch.arange(len(actions_for_curr_step[0]), device=device)) # (num_populated,) + exp_weights = exp_weights / exp_weights.sum() # (num_populated,) + exp_weights = torch.tile(exp_weights, (num_envs, 1)) # (num_envs, num_populated) + exp_weights = torch.unsqueeze(exp_weights, -1) # (num_envs, num_populated, 1) + raw_action = (actions_for_curr_step * exp_weights).sum(dim=1) # (num_envs, act_dim) + else: + if ts % query_frequency == 0: + actions_to_take = action_seq + raw_action = actions_to_take[:, ts % query_frequency] + + action = post_process(raw_action) if not delta_control else raw_action # (num_envs, act_dim) + if sim_backend == "physx_cpu": + action = action.cpu().numpy() + + # step the environment + obs, rew, terminated, truncated, info = eval_envs.step(action) + ts += 1 + + # collect episode info + if truncated.any(): + assert truncated.all() == truncated.any(), "all episodes should truncate at the same time for fair evaluation with other algorithms" + if isinstance(info["final_info"], dict): + for k, v in info["final_info"]["episode"].items(): + eval_metrics[k].append(v.float().cpu().numpy()) + else: + for final_info in info["final_info"]: + for k, v in final_info["episode"].items(): + eval_metrics[k].append(v) + # new episodes begin + eps_count += num_envs + ts = 0 + all_time_actions = torch.zeros([num_envs, max_timesteps, max_timesteps+num_queries, action_dim], device=device) + + agent.train() + for k in eval_metrics.keys(): + eval_metrics[k] = np.stack(eval_metrics[k]) + return eval_metrics diff --git 
a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/make_env.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/make_env.py new file mode 100644 index 0000000000000000000000000000000000000000..55db1629f8e645f093b277a16cc8dd265088e104 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/make_env.py @@ -0,0 +1,46 @@ +from typing import Optional +import gymnasium as gym +import mani_skill.envs +from mani_skill.utils import gym_utils +from mani_skill.vector.wrappers.gymnasium import ManiSkillVectorEnv +from mani_skill.utils.wrappers import RecordEpisode, CPUGymWrapper + + +def make_eval_envs(env_id, num_envs: int, sim_backend: str, env_kwargs: dict, other_kwargs: dict, video_dir: Optional[str] = None, wrappers: list[gym.Wrapper] = []): + """Create vectorized environment for evaluation and/or recording videos. + For CPU vectorized environments only the first parallel environment is used to record videos. + For GPU vectorized environments all parallel environments are used to record videos. + + Args: + env_id: the environment id + num_envs: the number of parallel environments + sim_backend: the simulation backend to use. Can be "physx_cpu" or a GPU backend such as "physx_cuda". + env_kwargs: the environment kwargs. You can also pass in max_episode_steps in env_kwargs to override the default max episode steps for the environment. + video_dir: the directory to save the videos. If None no videos are recorded. + wrappers: the list of wrappers to apply to the environment. 
+ """ + if sim_backend == "physx_cpu": + def cpu_make_env(env_id, seed, video_dir=None, env_kwargs = dict(), other_kwargs = dict()): + def thunk(): + env = gym.make(env_id, reconfiguration_freq=1, **env_kwargs) + for wrapper in wrappers: + env = wrapper(env) + env = CPUGymWrapper(env, ignore_terminations=True, record_metrics=True) + if video_dir: + env = RecordEpisode(env, output_dir=video_dir, save_trajectory=False, info_on_video=True, source_type="act", source_desc="act evaluation rollout") + env.action_space.seed(seed) + env.observation_space.seed(seed) + return env + + return thunk + vector_cls = gym.vector.SyncVectorEnv if num_envs == 1 else lambda x : gym.vector.AsyncVectorEnv(x, context="forkserver") + env = vector_cls([cpu_make_env(env_id, seed, video_dir if seed == 0 else None, env_kwargs, other_kwargs) for seed in range(num_envs)]) + else: + env = gym.make(env_id, num_envs=num_envs, sim_backend=sim_backend, reconfiguration_freq=1, **env_kwargs) + max_episode_steps = gym_utils.find_max_episode_steps_value(env) + for wrapper in wrappers: + env = wrapper(env) + if video_dir: + env = RecordEpisode(env, output_dir=video_dir, save_trajectory=False, save_video=True, source_type="act", source_desc="act evaluation rollout", max_steps_per_video=max_episode_steps) + env = ManiSkillVectorEnv(env, ignore_terminations=True, record_metrics=True) + return env diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/utils.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c6da0285639ada0f42945d87f0c8d27b9e86df49 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/act/act/utils.py @@ -0,0 +1,161 @@ +from torch.utils.data.sampler import Sampler +import numpy as np +import torch +import torch.distributed as dist +from torch import Tensor +from h5py import File, Group, Dataset +from typing import Optional + + 
class NestedTensor(object):
    """Container pairing a batched tensor with an optional padding mask."""

    def __init__(self, tensors, mask: Optional[Tensor]):
        self.tensors = tensors
        self.mask = mask

    def to(self, device):
        # type: (Device) -> NestedTensor # noqa
        """Move the tensor and (if present) the mask to `device`."""
        cast_tensor = self.tensors.to(device)
        mask = self.mask
        # The mask is optional; only cast it when it exists.
        cast_mask = mask.to(device) if mask is not None else None
        return NestedTensor(cast_tensor, cast_mask)

    def decompose(self):
        """Return the underlying (tensors, mask) pair."""
        return self.tensors, self.mask

    def __repr__(self):
        return str(self.tensors)


def is_dist_avail_and_initialized():
    """True iff torch.distributed is both available and initialized."""
    if not dist.is_available():
        return False
    if not dist.is_initialized():
        return False
    return True


def get_rank():
    """Rank of the current process, or 0 when not running distributed."""
    if not is_dist_avail_and_initialized():
        return 0
    return dist.get_rank()


def is_main_process():
    """True for rank 0 (the process that should log / save checkpoints)."""
    return get_rank() == 0


class IterationBasedBatchSampler(Sampler):
    """Wraps a BatchSampler.
    Resampling from it until a specified number of iterations have been sampled
    References:
        https://github.com/facebookresearch/maskrcnn-benchmark/blob/master/maskrcnn_benchmark/data/samplers/iteration_based_batch_sampler.py
    """

    def __init__(self, batch_sampler, num_iterations, start_iter=0):
        self.batch_sampler = batch_sampler
        self.num_iterations = num_iterations
        self.start_iter = start_iter

    def __iter__(self):
        iteration = self.start_iter
        while iteration < self.num_iterations:
            # if the underlying sampler has a set_epoch method, like
            # DistributedSampler, used for making each process see
            # a different split of the dataset, then set it
            if hasattr(self.batch_sampler.sampler, "set_epoch"):
                self.batch_sampler.sampler.set_epoch(iteration)
            for batch in self.batch_sampler:
                yield batch
                iteration += 1
                if iteration >= self.num_iterations:
                    break

    def __len__(self):
        return self.num_iterations - self.start_iter


def worker_init_fn(worker_id, base_seed=None):
    """The function is designed for pytorch multi-process dataloader.
    Note that we use the pytorch random generator to generate a base_seed.
    Please try to be consistent.
    References:
        https://pytorch.org/docs/stable/notes/faq.html#dataloader-workers-random-seed
    """
    if base_seed is None:
        base_seed = torch.IntTensor(1).random_().item()
    # Each worker gets a distinct, deterministic numpy seed.
    np.random.seed(base_seed + worker_id)


# Maps the keys this codebase uses to the key names stored in ManiSkill h5 files.
TARGET_KEY_TO_SOURCE_KEY = {
    'states': 'env_states',
    'observations': 'obs',
    'success': 'success',
    'next_observations': 'obs',
    # 'dones': 'dones',
    # 'rewards': 'rewards',
    'actions': 'actions',
}


def load_content_from_h5_file(file):
    """Recursively materialize an h5py File/Group/Dataset into nested dicts / numpy arrays."""
    if isinstance(file, (File, Group)):
        return {key: load_content_from_h5_file(file[key]) for key in list(file.keys())}
    elif isinstance(file, Dataset):
        return file[()]
    else:
        raise NotImplementedError(f"Unsupported h5 file type: {type(file)}")


def load_hdf5(path, ):
    """Load an entire HDF5 file into memory as nested dicts/arrays."""
    print('Loading HDF5 file', path)
    file = File(path, 'r')
    ret = load_content_from_h5_file(file)
    file.close()
    print('Loaded')
    return ret


def load_traj_hdf5(path, num_traj=None):
    """Load up to `num_traj` trajectories (keys 'traj_<i>', sorted by index) from an HDF5 file."""
    print('Loading HDF5 file', path)
    file = File(path, 'r')
    keys = list(file.keys())
    if num_traj is not None:
        assert num_traj <= len(keys), f"num_traj: {num_traj} > len(keys): {len(keys)}"
    keys = sorted(keys, key=lambda x: int(x.split('_')[-1]))
    keys = keys[:num_traj]
    ret = {
        key: load_content_from_h5_file(file[key]) for key in keys
    }
    file.close()
    print('Loaded')
    return ret


def load_demo_dataset(path, keys=('observations', 'actions'), num_traj=None, concat=True):
    """Load demo trajectories and regroup them per target key.

    Args:
        path: HDF5 trajectory file path.
        keys: target keys to extract (see TARGET_KEY_TO_SOURCE_KEY). A tuple
            default avoids the shared-mutable-default pitfall; lists still work.
        num_traj: optional cap on the number of trajectories.
        concat: if True, concatenate trajectories into single arrays. Since
            'obs' has one more step than 'actions', observations drop the last
            step and next_observations drop the first so lengths align.
    """
    raw_data = load_traj_hdf5(path, num_traj)
    # raw_data has keys like: ['traj_0', 'traj_1', ...]
    # raw_data['traj_0'] has keys like: ['actions', 'dones', 'env_states', 'infos', ...]
    _traj = raw_data['traj_0']
    for key in keys:
        source_key = TARGET_KEY_TO_SOURCE_KEY[key]
        assert source_key in _traj, f"key: {source_key} not in traj_0: {_traj.keys()}"
    dataset = {}
    for target_key in keys:
        # if 'next' in target_key:
        #     raise NotImplementedError('Please carefully deal with the length of trajectory')
        source_key = TARGET_KEY_TO_SOURCE_KEY[target_key]
        dataset[target_key] = [raw_data[idx][source_key] for idx in raw_data]
        if isinstance(dataset[target_key][0], np.ndarray) and concat:
            if target_key in ['observations', 'states'] and \
                    len(dataset[target_key][0]) > len(raw_data['traj_0']['actions']):
                dataset[target_key] = np.concatenate([
                    t[:-1] for t in dataset[target_key]
                ], axis=0)
            elif target_key in ['next_observations', 'next_states'] and \
                    len(dataset[target_key][0]) > len(raw_data['traj_0']['actions']):
                dataset[target_key] = np.concatenate([
                    t[1:] for t in dataset[target_key]
                ], axis=0)
            else:
                dataset[target_key] = np.concatenate(dataset[target_key], axis=0)

            print('Load', target_key, dataset[target_key].shape)
        else:
            print('Load', target_key, len(dataset[target_key]), type(dataset[target_key][0]))
    return dataset
--exp-name=act-PickCube-v1-state-${demos}_motionplanning_demos-$seed \ + --demo_type motionplanning --track + + python train.py --env-id PushCube-v1 \ + --demo-path ~/.maniskill/demos/PushCube-v1/motionplanning/trajectory.state.pd_ee_delta_pos.physx_cpu.h5 \ + --control-mode "pd_ee_delta_pos" --sim-backend "physx_cpu" --num_demos $demos --max_episode_steps 100 \ + --total_iters 30000 --log_freq 100 --eval_freq 5000 \ + --exp-name=act-PushCube-v1-state-${demos}_motionplanning_demos-$seed \ + --demo_type motionplanning --track + + python train.py --env-id StackCube-v1 \ + --demo-path ~/.maniskill/demos/StackCube-v1/motionplanning/trajectory.state.pd_ee_delta_pos.physx_cpu.h5 \ + --control-mode "pd_ee_delta_pos" --sim-backend "physx_cpu" --num_demos $demos --max_episode_steps 200 \ + --total_iters 30000 --log_freq 100 --eval_freq 5000 \ + --exp-name=act-StackCube-v1-state-${demos}_motionplanning_demos-$seed \ + --demo_type motionplanning --track + + python train.py --env-id PegInsertionSide-v1 \ + --demo-path ~/.maniskill/demos/PegInsertionSide-v1/motionplanning/trajectory.state.pd_ee_delta_pose.physx_cpu.h5 \ + --control-mode "pd_ee_delta_pose" --sim-backend "physx_cpu" --num_demos $demos --max_episode_steps 300 \ + --total_iters 100000 --log_freq 100 --eval_freq 5000 \ + --exp-name=act-PegInsertionSide-v1-state-${demos}_motionplanning_demos-$seed \ + --demo_type motionplanning --track + + python train.py --env-id PushT-v1 \ + --demo-path ~/.maniskill/demos/PushT-v1/rl/trajectory.state.pd_ee_delta_pose.physx_cuda.h5 \ + --control-mode "pd_ee_delta_pose" --sim-backend "physx_cuda" --num_demos $demos --max_episode_steps 150 \ + --total_iters 100000 --log_freq 100 --eval_freq 5000 \ + --exp-name=act-PushT-v1-state-${demos}_rl_demos-$seed \ + --demo_type rl --track +done + +# RGB based + +for demos in 100; do + python train_rgbd.py --env-id PickCube-v1 --no_include_depth \ + --demo-path 
~/.maniskill/demos/PickCube-v1/motionplanning/trajectory.rgb.pd_ee_delta_pos.physx_cpu.h5 \ + --control-mode "pd_ee_delta_pos" --sim-backend "physx_cuda" --num_demos $demos --max_episode_steps 100 --num_eval_envs 100 --no-capture-video \ + --total_iters 30000 --log_freq 100 --eval_freq 5000 \ + --exp-name=act-PickCube-v1-state-${demos}_motionplanning_demos-$seed \ + --demo_type motionplanning --track + + python train_rgbd.py --env-id PushCube-v1 --no_include_depth \ + --demo-path ~/.maniskill/demos/PushCube-v1/motionplanning/trajectory.rgb.pd_ee_delta_pos.physx_cpu.h5 \ + --control-mode "pd_ee_delta_pos" --sim-backend "physx_cuda" --num_demos $demos --max_episode_steps 100 --num_eval_envs 100 --no-capture-video \ + --total_iters 30000 --log_freq 100 --eval_freq 5000 \ + --exp-name=act-PushCube-v1-state-${demos}_motionplanning_demos-$seed \ + --demo_type motionplanning --track + + python train_rgbd.py --env-id StackCube-v1 --no_include_depth \ + --demo-path ~/.maniskill/demos/StackCube-v1/motionplanning/trajectory.rgb.pd_ee_delta_pos.physx_cpu.h5 \ + --control-mode "pd_ee_delta_pos" --sim-backend "physx_cuda" --num_demos $demos --max_episode_steps 200 --num_eval_envs 100 --no-capture-video \ + --total_iters 30000 --log_freq 100 --eval_freq 5000 \ + --exp-name=act-StackCube-v1-state-${demos}_motionplanning_demos-$seed \ + --demo_type motionplanning --track + + python train_rgbd.py --env-id PegInsertionSide-v1 --no_include_depth \ + --demo-path ~/.maniskill/demos/PegInsertionSide-v1/motionplanning/trajectory.rgb.pd_ee_delta_pose.physx_cpu.h5 \ + --control-mode "pd_ee_delta_pose" --sim-backend "physx_cpu" --num_demos $demos --max_episode_steps 300 \ + --total_iters 100000 --log_freq 100 --eval_freq 5000 \ + --exp-name=act-PegInsertionSide-v1-state-${demos}_motionplanning_demos-$seed \ + --demo_type motionplanning --track + + python train_rgbd.py --env-id PushT-v1 --no_include_depth \ + --demo-path 
from pathlib import Path

from setuptools import find_packages, setup

# Read the long description without leaking a file handle
# (the original `open("README.md").read()` never closed the file).
setup(
    name="act",
    version="0.1.0",
    packages=find_packages(),
    install_requires=[
        "torchvision",
        "diffusers",
        "tensorboard",
        "wandb",
        "mani_skill",
    ],
    description="A minimal setup for ACT for ManiSkill",
    long_description=Path("README.md").read_text(encoding="utf-8"),
    long_description_content_type="text/markdown",
)
@dataclass
class Args:
    exp_name: Optional[str] = None
    """the name of this experiment"""
    seed: int = 1
    """seed of the experiment"""
    torch_deterministic: bool = True
    """if toggled, `torch.backends.cudnn.deterministic=False`"""
    cuda: bool = True
    """if toggled, cuda will be enabled by default"""
    track: bool = False
    """if toggled, this experiment will be tracked with Weights and Biases"""
    wandb_project_name: str = "ManiSkill"
    """the wandb's project name"""
    wandb_entity: Optional[str] = None
    """the entity (team) of wandb's project"""
    capture_video: bool = True
    """whether to capture videos of the agent performances (check out `videos` folder)"""

    env_id: str = "PickCube-v1"
    """the id of the environment"""
    demo_path: str = 'pickcube.trajectory.state.pd_joint_delta_pos.cpu.h5'
    """the path of demo dataset (pkl or h5)"""
    num_demos: Optional[int] = None
    """number of trajectories to load from the demo dataset"""
    total_iters: int = 1_000_000
    """total timesteps of the experiment"""
    batch_size: int = 1024
    """the batch size of sample from the replay memory"""

    # ACT specific arguments
    lr: float = 1e-4
    """the learning rate of the Action Chunking with Transformers"""
    kl_weight: float = 10
    """weight for the kl loss term"""
    temporal_agg: bool = True
    """if toggled, temporal ensembling will be performed"""

    # Backbone
    position_embedding: str = 'sine'
    backbone: str = 'resnet18'
    lr_backbone: float = 1e-5
    masks: bool = False
    dilation: bool = False

    # Transformer
    enc_layers: int = 2
    dec_layers: int = 4
    dim_feedforward: int = 512
    hidden_dim: int = 256
    dropout: float = 0.1
    nheads: int = 4
    num_queries: int = 30
    pre_norm: bool = False

    # Environment/experiment specific arguments
    max_episode_steps: Optional[int] = None
    """Change the environments' max_episode_steps to this value. Sometimes necessary if the demonstrations being imitated are too short. Typically the default
    max episode steps of environments in ManiSkill are tuned lower so reinforcement learning agents can learn faster."""
    log_freq: int = 1000
    """the frequency of logging the training metrics"""
    eval_freq: int = 5000
    """the frequency of evaluating the agent on the evaluation environments"""
    save_freq: Optional[int] = None
    """the frequency of saving the model checkpoints. By default this is None and will only save checkpoints based on the best evaluation metrics."""
    num_eval_episodes: int = 100
    """the number of episodes to evaluate the agent on"""
    num_eval_envs: int = 10
    """the number of parallel environments to evaluate the agent on"""
    sim_backend: str = "physx_cpu"
    """the simulation backend to use for evaluation environments. can be "physx_cpu" or "physx_cuda" """
    num_dataload_workers: int = 0
    """the number of workers to use for loading the training data in the torch dataloader"""
    control_mode: str = 'pd_joint_delta_pos'
    """the control mode to use for the evaluation environments. Must match the control mode of the demonstration dataset."""

    # additional tags/configs for logging purposes to wandb and shared comparisons with other algorithms
    demo_type: Optional[str] = None


class SmallDemoDataset_ACTPolicy(Dataset):  # Load everything into GPU memory
    """In-memory demo dataset yielding (observation, num_queries-length action chunk) pairs."""

    def __init__(self, data_path, num_queries, device, num_traj):
        if data_path[-4:] == '.pkl':
            raise NotImplementedError()
        else:
            from act.utils import load_demo_dataset
            trajectories = load_demo_dataset(data_path, num_traj=num_traj, concat=False)
            # trajectories['observations'] is a list of np.ndarray (L+1, obs_dim)
            # trajectories['actions'] is a list of np.ndarray (L, act_dim)

        for k, v in trajectories.items():
            for i in range(len(v)):
                trajectories[k][i] = torch.Tensor(v[i]).to(device)

        # When the robot reaches the goal state, its joints and gripper fingers need to remain stationary
        if 'delta_pos' in args.control_mode or args.control_mode == 'base_pd_joint_vel_arm_pd_joint_vel':
            self.pad_action_arm = torch.zeros((trajectories['actions'][0].shape[1]-1,), device=device)
            # to make the arm stay still, we pad the action with 0 in 'delta_pos' control mode
            # gripper action needs to be copied from the last action
        # else:
        #     raise NotImplementedError(f'Control Mode {args.control_mode} not supported')

        # Each slice is one (trajectory, timestep) sample start.
        self.slices = []
        self.num_traj = len(trajectories['actions'])
        for traj_idx in range(self.num_traj):
            episode_len = trajectories['actions'][traj_idx].shape[0]
            self.slices += [
                (traj_idx, ts) for ts in range(episode_len)
            ]

        print(f"Length of Dataset: {len(self.slices)}")

        self.num_queries = num_queries
        self.trajectories = trajectories
        self.delta_control = 'delta' in args.control_mode
        # Absolute-target control modes need dataset-wide normalization stats.
        self.norm_stats = self.get_norm_stats() if not self.delta_control else None

    def __getitem__(self, index):
        traj_idx, ts = self.slices[index]

        # get observation at ts only
        obs = self.trajectories['observations'][traj_idx][ts]
        # get num_queries actions
        act_seq = self.trajectories['actions'][traj_idx][ts:ts+self.num_queries]
        action_len = act_seq.shape[0]

        # Pad after the trajectory, so all the observations are utilized in training
        if action_len < self.num_queries:
            if 'delta_pos' in args.control_mode or args.control_mode == 'base_pd_joint_vel_arm_pd_joint_vel':
                gripper_action = act_seq[-1, -1]
                pad_action = torch.cat((self.pad_action_arm, gripper_action[None]), dim=0)
                act_seq = torch.cat([act_seq, pad_action.repeat(self.num_queries-action_len, 1)], dim=0)
                # making the robot (arm and gripper) stay still
            elif not self.delta_control:
                target = act_seq[-1]
                act_seq = torch.cat([act_seq, target.repeat(self.num_queries-action_len, 1)], dim=0)

        # normalize obs and act_seq
        if not self.delta_control:
            obs = (obs - self.norm_stats["state_mean"][0]) / self.norm_stats["state_std"][0]
            act_seq = (act_seq - self.norm_stats["action_mean"]) / self.norm_stats["action_std"]

        return {
            'observations': obs,
            'actions': act_seq,
        }

    def __len__(self):
        return len(self.slices)

    def get_norm_stats(self):
        """Compute per-dimension mean/std over all states and actions.

        Only used for non-delta (absolute-target) control modes. Stats have
        shape (1, dim) so `stats["state_mean"][0]` yields a (dim,) vector and
        the action stats broadcast against (num_queries, act_dim) chunks.

        NOTE(review): the original implementation was a copy-paste of
        __getitem__ that referenced an undefined `index` and read
        `self.norm_stats` before it existed — it crashed on first use.
        """
        all_states = torch.cat(self.trajectories['observations'], dim=0)
        all_actions = torch.cat(self.trajectories['actions'], dim=0)
        state_mean = all_states.mean(dim=0, keepdim=True)
        # Clamp std so near-constant dimensions do not blow up the division.
        state_std = all_states.std(dim=0, keepdim=True).clamp(min=1e-2)
        action_mean = all_actions.mean(dim=0, keepdim=True)
        action_std = all_actions.std(dim=0, keepdim=True).clamp(min=1e-2)
        return {
            "state_mean": state_mean,
            "state_std": state_std,
            "action_mean": action_mean,
            "action_std": action_std,
        }


class Agent(nn.Module):
    """ACT agent: CVAE encoder + transformer decoder (no CNN backbone in state mode)."""

    def __init__(self, env, args):
        super().__init__()
        assert len(env.single_observation_space.shape) == 1  # (obs_dim,)
        assert len(env.single_action_space.shape) == 1  # (act_dim,)
        # assert (env.single_action_space.high == 1).all() and (env.single_action_space.low == -1).all()

        self.kl_weight = args.kl_weight
        self.state_dim = env.single_observation_space.shape[0]
        self.act_dim = env.single_action_space.shape[0]

        # CNN backbone (None: state-only inputs, no images)
        backbones = None

        # CVAE decoder
        transformer = build_transformer(args)

        # CVAE encoder
        encoder = build_encoder(args)

        # ACT ( CVAE encoder + (CNN backbones + CVAE decoder) )
        self.model = DETRVAE(
            backbones,
            transformer,
            encoder,
            state_dim=self.state_dim,
            action_dim=self.act_dim,
            num_queries=args.num_queries,
        )

    def compute_loss(self, obs, action_seq):
        """Return dict with 'l1' reconstruction loss, 'kl' term and the weighted total 'loss'."""
        # forward pass
        a_hat, (mu, logvar) = self.model(obs, action_seq)

        # compute l1 loss and kl loss
        total_kld, dim_wise_kld, mean_kld = kl_divergence(mu, logvar)
        all_l1 = F.l1_loss(action_seq, a_hat, reduction='none')
        l1 = all_l1.mean()

        # store all loss
        loss_dict = dict()
        loss_dict['l1'] = l1
        loss_dict['kl'] = total_kld[0]
        loss_dict['loss'] = loss_dict['l1'] + loss_dict['kl'] * self.kl_weight
        return loss_dict

    def get_action(self, obs):
        """Predict an action chunk from the prior (no target actions at inference)."""
        a_hat, (_, _) = self.model(obs)  # no action, sample from prior
        return a_hat


def kl_divergence(mu, logvar):
    """KL(q(z|x) || N(0, I)) for a diagonal Gaussian given mean and log-variance.

    Returns (total_kld, dimension_wise_kld, mean_kld) as in the ACT reference code.
    """
    batch_size = mu.size(0)
    assert batch_size != 0
    if mu.data.ndimension() == 4:
        mu = mu.view(mu.size(0), mu.size(1))
    if logvar.data.ndimension() == 4:
        logvar = logvar.view(logvar.size(0), logvar.size(1))

    klds = -0.5 * (1 + logvar - mu.pow(2) - logvar.exp())
    total_kld = klds.sum(1).mean(0, True)
    dimension_wise_kld = klds.mean(0)
    mean_kld = klds.mean(1).mean(0, True)

    return total_kld, dimension_wise_kld, mean_kld
def save_ckpt(run_name, tag):
    # Save a checkpoint; relies on the module-level `ema`, `ema_agent`,
    # `agent` and `dataset` bound in the __main__ block below.
    os.makedirs(f'runs/{run_name}/checkpoints', exist_ok=True)
    ema.copy_to(ema_agent.parameters())  # sync EMA weights into ema_agent before serializing
    torch.save({
        'norm_stats': dataset.norm_stats,
        'agent': agent.state_dict(),
        'ema_agent': ema_agent.state_dict(),
    }, f'runs/{run_name}/checkpoints/{tag}.pt')

if __name__ == "__main__":
    args = tyro.cli(Args)
    # Derive a unique run name when none was given; otherwise use it verbatim.
    if args.exp_name is None:
        args.exp_name = os.path.basename(__file__)[: -len(".py")]
        run_name = f"{args.env_id}__{args.exp_name}__{args.seed}__{int(time.time())}"
    else:
        run_name = args.exp_name

    # Sanity-check that the demo dataset's control mode matches the one requested.
    if args.demo_path.endswith('.h5'):
        import json
        json_file = args.demo_path[:-2] + 'json'  # swap the trailing 'h5' for 'json'
        with open(json_file, 'r') as f:
            demo_info = json.load(f)
        if 'control_mode' in demo_info['env_info']['env_kwargs']:
            control_mode = demo_info['env_info']['env_kwargs']['control_mode']
        elif 'control_mode' in demo_info['episodes'][0]:
            control_mode = demo_info['episodes'][0]['control_mode']
        else:
            raise Exception('Control mode not found in json')
        assert control_mode == args.control_mode, f"Control mode mismatched. Dataset has control mode {control_mode}, but args has control mode {args.control_mode}"

    # TRY NOT TO MODIFY: seeding
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    torch.backends.cudnn.deterministic = args.torch_deterministic

    device = torch.device("cuda" if torch.cuda.is_available() and args.cuda else "cpu")

    # env setup
    env_kwargs = dict(control_mode=args.control_mode, reward_mode="sparse", obs_mode="state", render_mode="rgb_array")
    if args.max_episode_steps is not None:
        env_kwargs["max_episode_steps"] = args.max_episode_steps
    other_kwargs = None
    envs = make_eval_envs(args.env_id, args.num_eval_envs, args.sim_backend, env_kwargs, other_kwargs, video_dir=f'runs/{run_name}/videos' if args.capture_video else None)

    # dataloader setup: sample fixed-size batches for exactly total_iters iterations
    dataset = SmallDemoDataset_ACTPolicy(args.demo_path, args.num_queries, device, num_traj=args.num_demos)
    sampler = RandomSampler(dataset, replacement=False)
    batch_sampler = BatchSampler(sampler, batch_size=args.batch_size, drop_last=True)
    batch_sampler = IterationBasedBatchSampler(batch_sampler, args.total_iters)
    train_dataloader = DataLoader(
        dataset,
        batch_sampler=batch_sampler,
        num_workers=args.num_dataload_workers,
        worker_init_fn=lambda worker_id: worker_init_fn(worker_id, base_seed=args.seed),
    )
    if args.num_demos is None:
        args.num_demos = dataset.num_traj

    if args.track:
        import wandb
        config = vars(args)
        config["eval_env_cfg"] = dict(**env_kwargs, num_envs=args.num_eval_envs, env_id=args.env_id, env_horizon=args.max_episode_steps)
        wandb.init(
            project=args.wandb_project_name,
            entity=args.wandb_entity,
            sync_tensorboard=True,
            config=config,
            name=run_name,
            save_code=True,
            group="ACT",
            tags=["act"]
        )
    writer = SummaryWriter(f"runs/{run_name}")
    writer.add_text(
        "hyperparameters",
        "|param|value|\n|-|-|\n%s" % ("\n".join([f"|{key}|{value}|" for key, value in vars(args).items()])),
    )

    # agent setup
    agent = Agent(envs, args).to(device)

    # optimizer setup: backbone params (none in state mode) get a separate, lower lr
    param_dicts = [
        {"params": [p for n, p in agent.named_parameters() if "backbone" not in n and p.requires_grad]},
        {
            "params": [p for n, p in agent.named_parameters() if "backbone" in n and p.requires_grad],
            "lr": args.lr_backbone,
        },
    ]
    optimizer = optim.AdamW(param_dicts, lr=args.lr, weight_decay=1e-4)

    # LR drop by a factor of 10 after lr_drop iters
    lr_drop = int((2/3)*args.total_iters)
    lr_scheduler = optim.lr_scheduler.StepLR(optimizer, lr_drop)

    # Exponential Moving Average
    # accelerates training and improves stability
    # holds a copy of the model weights
    ema = EMAModel(parameters=agent.parameters(), power=0.75)
    ema_agent = Agent(envs, args).to(device)

    # Evaluation
    # eval_kwargs = dict(
    #     stats=dataset.norm_stats, num_queries=args.num_queries, temporal_agg=args.temporal_agg,
    #     max_timesteps=gym_utils.find_max_episode_steps_value(envs), device=device, sim_backend=args.sim_backend
    # )
    eval_kwargs = dict(
        stats=dataset.norm_stats, num_queries=args.num_queries, temporal_agg=args.temporal_agg,
        max_timesteps=args.max_episode_steps, device=device, sim_backend=args.sim_backend
    )

    # ---------------------------------------------------------------------------- #
    # Training begins.
    # ---------------------------------------------------------------------------- #
    print("Training begins...")
    agent.train()

    best_eval_metrics = defaultdict(float)
    timings = defaultdict(float)

    for cur_iter, data_batch in enumerate(train_dataloader):
        last_tick = time.time()
        # forward and compute loss
        loss_dict = agent.compute_loss(
            obs=data_batch['observations'],  # (B, obs_dim)
            action_seq=data_batch['actions'],  # (B, num_queries, act_dim)
        )
        total_loss = loss_dict['loss']  # total_loss = l1 + kl * self.kl_weight

        # backward
        optimizer.zero_grad()
        total_loss.backward()
        optimizer.step()
        lr_scheduler.step()  # step lr scheduler every batch, this is different from standard pytorch behavior

        # update Exponential Moving Average of the model weights
        ema.step(agent.parameters())
        timings["update"] += time.time() - last_tick

        # Evaluation (uses the EMA weights, which are copied into ema_agent first)
        if cur_iter % args.eval_freq == 0:
            last_tick = time.time()

            ema.copy_to(ema_agent.parameters())

            eval_metrics = evaluate(args.num_eval_episodes, ema_agent, envs, eval_kwargs)
            timings["eval"] += time.time() - last_tick

            print(f"Evaluated {len(eval_metrics['success_at_end'])} episodes")
            for k in eval_metrics.keys():
                eval_metrics[k] = np.mean(eval_metrics[k])
                writer.add_scalar(f"eval/{k}", eval_metrics[k], cur_iter)
                print(f"{k}: {eval_metrics[k]:.4f}")

            # Keep a checkpoint whenever either success metric improves.
            save_on_best_metrics = ["success_once", "success_at_end"]
            for k in save_on_best_metrics:
                if k in eval_metrics and eval_metrics[k] > best_eval_metrics[k]:
                    best_eval_metrics[k] = eval_metrics[k]
                    save_ckpt(run_name, f"best_eval_{k}")
                    print(f'New best {k}_rate: {eval_metrics[k]:.4f}. Saving checkpoint.')

        if cur_iter % args.log_freq == 0:
            print(f"Iteration {cur_iter}, loss: {total_loss.item()}")
            writer.add_scalar("charts/learning_rate", optimizer.param_groups[0]["lr"], cur_iter)
            writer.add_scalar("losses/total_loss", total_loss.item(), cur_iter)
            for k, v in timings.items():
                writer.add_scalar(f"time/{k}", v, cur_iter)

        # Checkpoint
        if args.save_freq is not None and cur_iter % args.save_freq == 0:
            save_ckpt(run_name, str(cur_iter))

    envs.close()
    writer.close()
@dataclass
class Args:
    exp_name: Optional[str] = None
    """the name of this experiment"""
    seed: int = 1
    """seed of the experiment"""
    torch_deterministic: bool = True
    """if toggled, `torch.backends.cudnn.deterministic=False`"""
    cuda: bool = True
    """if toggled, cuda will be enabled by default"""
    track: bool = False
    """if toggled, this experiment will be tracked with Weights and Biases"""
    wandb_project_name: str = "ManiSkill"
    """the wandb's project name"""
    wandb_entity: Optional[str] = None
    """the entity (team) of wandb's project"""
    capture_video: bool = True
    """whether to capture videos of the agent performances (check out `videos` folder)"""

    env_id: str = "PickCube-v1"
    """the id of the environment"""
    demo_path: str = 'pickcube.trajectory.rgbd.pd_joint_delta_pos.cpu.h5'
    """the path of demo dataset (pkl or h5)"""
    num_demos: Optional[int] = None
    """number of trajectories to load from the demo dataset"""
    total_iters: int = 1_000_000
    """total timesteps of the experiment"""
    batch_size: int = 256
    """the batch size of sample from the replay memory"""

    # ACT specific arguments
    lr: float = 1e-4
    """the learning rate of the Action Chunking with Transformers"""
    kl_weight: float = 10
    """weight for the kl loss term"""
    temporal_agg: bool = True
    """if toggled, temporal ensembling will be performed"""

    # Backbone
    position_embedding: str = 'sine'
    backbone: str = 'resnet18'
    lr_backbone: float = 1e-5
    masks: bool = False
    dilation: bool = False
    include_depth: bool = True

    # Transformer
    enc_layers: int = 2
    dec_layers: int = 4
    dim_feedforward: int = 512
    hidden_dim: int = 256
    dropout: float = 0.1
    nheads: int = 8
    num_queries: int = 30
    pre_norm: bool = False

    # Environment/experiment specific arguments
    max_episode_steps: Optional[int] = None
    """Change the environments' max_episode_steps to this value. Sometimes necessary if the demonstrations being imitated are too short. Typically the default
    max episode steps of environments in ManiSkill are tuned lower so reinforcement learning agents can learn faster."""
    log_freq: int = 1000
    """the frequency of logging the training metrics"""
    eval_freq: int = 5000
    """the frequency of evaluating the agent on the evaluation environments"""
    save_freq: Optional[int] = None
    """the frequency of saving the model checkpoints. By default this is None and will only save checkpoints based on the best evaluation metrics."""
    num_eval_episodes: int = 100
    """the number of episodes to evaluate the agent on"""
    num_eval_envs: int = 10
    """the number of parallel environments to evaluate the agent on"""
    sim_backend: str = "cpu"
    """the simulation backend to use for evaluation environments: "cpu" or "gpu" """
    num_dataload_workers: int = 0
    """the number of workers to use for loading the training data in the torch dataloader"""
    control_mode: str = 'pd_joint_delta_pos'
    """the control mode to use for the evaluation environments. Must match the control mode of the demonstration dataset."""

    # additional tags/configs for logging purposes to wandb and shared comparisons with other algorithms
    demo_type: Optional[str] = None


class FlattenRGBDObservationWrapper(gym.ObservationWrapper):
    """
    Flattens rgbd-mode observations into a dict with (up to) "state", "rgb" and "depth" keys.

    Args:
        rgb (bool): Whether to include rgb images in the observation
        depth (bool): Whether to include depth images in the observation
        state (bool): Whether to include state data in the observation

    Note that the returned observations will have "rgb" and/or "depth" keys
    depending on the rgb/depth bool flags.
    """

    def __init__(self, env, rgb=True, depth=True, state=True) -> None:
        self.base_env: BaseEnv = env.unwrapped
        super().__init__(env)
        self.include_rgb = rgb
        self.include_depth = depth
        self.include_state = state
        self.transforms = T.Compose(
            [
                T.Resize((224, 224), antialias=True),
            ]
        )  # resize the input image to be at least 224x224
        new_obs = self.observation(self.base_env._init_raw_obs)
        self.base_env.update_obs_space(new_obs)

    def observation(self, observation: Dict):
        sensor_data = observation.pop("sensor_data")
        del observation["sensor_param"]
        images_rgb = []
        images_depth = []
        for cam_data in sensor_data.values():
            if self.include_rgb:
                resized_rgb = self.transforms(
                    cam_data["rgb"].permute(0, 3, 1, 2)
                )  # (1, 3, 224, 224)
                images_rgb.append(resized_rgb)
            if self.include_depth:
                # Scale raw depth to roughly [0, 1] and shrink to float16 to save memory.
                depth = (cam_data["depth"].to(torch.float32) / 1024).to(torch.float16)
                resized_depth = self.transforms(
                    depth.permute(0, 3, 1, 2)
                )  # (1, 1, 224, 224)
                images_depth.append(resized_depth)

        # FIX: only stack when the corresponding modality was collected —
        # the original stacked images_rgb unconditionally, which crashed on an
        # empty list when rgb=False (depth was already correctly guarded).
        if self.include_rgb:
            rgb = torch.stack(images_rgb, dim=1)  # (1, num_cams, C, 224, 224), uint8
        if self.include_depth:
            depth = torch.stack(images_depth, dim=1)  # (1, num_cams, C, 224, 224), float16

        # flatten the rest of the data which should just be state data
        observation = common.flatten_state_dict(observation, use_torch=True)
        ret = dict()
        if self.include_state:
            ret["state"] = observation
        if self.include_rgb and not self.include_depth:
            ret["rgb"] = rgb
        elif self.include_rgb and self.include_depth:
            ret["rgb"] = rgb
            ret["depth"] = depth
        elif self.include_depth and not self.include_rgb:
            ret["depth"] = depth
        return ret
concat=False) + # trajectories['observations'] is a list of np.ndarray (L+1, obs_dim) + # trajectories['actions'] is a list of np.ndarray (L, act_dim) + print('Raw trajectory loaded, start to pre-process the observations...') + + self.include_depth = include_depth + self.transforms = T.Compose( + [ + T.Resize((224, 224), antialias=True), + ] + ) # pre-trained models from torchvision.models expect input image to be at least 224x224 + + # Pre-process the observations, make them align with the obs returned by the FlattenRGBDObservationWrapper + obs_traj_dict_list = [] + for obs_traj_dict in trajectories['observations']: + obs_traj_dict = self.process_obs(obs_traj_dict) + obs_traj_dict_list.append(obs_traj_dict) + trajectories['observations'] = obs_traj_dict_list + self.obs_keys = list(obs_traj_dict.keys()) + + # Pre-process the actions + for i in range(len(trajectories['actions'])): + trajectories['actions'][i] = torch.Tensor(trajectories['actions'][i]) + print('Obs/action pre-processing is done.') + + # When the robot reaches the goal state, its joints and gripper fingers need to remain stationary + if 'delta_pos' in args.control_mode or args.control_mode == 'base_pd_joint_vel_arm_pd_joint_vel': + self.pad_action_arm = torch.zeros((trajectories['actions'][0].shape[1]-1,)) + # to make the arm stay still, we pad the action with 0 in 'delta_pos' control mode + # gripper action needs to be copied from the last action + # else: + # raise NotImplementedError(f'Control Mode {args.control_mode} not supported') + + self.slices = [] + self.num_traj = len(trajectories['actions']) + for traj_idx in range(self.num_traj): + episode_len = trajectories['actions'][traj_idx].shape[0] + self.slices += [ + (traj_idx, ts) for ts in range(episode_len) + ] + + print(f"Length of Dataset: {len(self.slices)}") + + self.num_queries = num_queries + self.trajectories = trajectories + self.delta_control = 'delta' in args.control_mode + self.norm_stats = self.get_norm_stats() if not 
self.delta_control else None + + def __getitem__(self, index): + traj_idx, ts = self.slices[index] + + # get state at start_ts only + state = self.trajectories['observations'][traj_idx]['state'][ts] + # get num_queries actions + act_seq = self.trajectories['actions'][traj_idx][ts:ts+self.num_queries] + action_len = act_seq.shape[0] + + # Pad after the trajectory, so all the observations are utilized in training + if action_len < self.num_queries: + if 'delta_pos' in args.control_mode or args.control_mode == 'base_pd_joint_vel_arm_pd_joint_vel': + gripper_action = act_seq[-1, -1] + pad_action = torch.cat((self.pad_action_arm, gripper_action[None]), dim=0) + act_seq = torch.cat([act_seq, pad_action.repeat(self.num_queries-action_len, 1)], dim=0) + # making the robot (arm and gripper) stay still + elif not self.delta_control: + target = act_seq[-1] + act_seq = torch.cat([act_seq, target.repeat(self.num_queries-action_len, 1)], dim=0) + + # normalize state and act_seq + if not self.delta_control: + state = (state - self.norm_stats["state_mean"][0]) / self.norm_stats["state_std"][0] + act_seq = (act_seq - self.norm_stats["action_mean"]) / self.norm_stats["action_std"] + + # get rgb or rgbd data at start_ts and combine with state to form obs + if self.include_depth: + rgb = self.trajectories['observations'][traj_idx]['rgb'][ts] + depth = self.trajectories['observations'][traj_idx]['depth'][ts] + obs = dict(state=state, rgb=rgb, depth=depth) + else: + rgb = self.trajectories['observations'][traj_idx]['rgb'][ts] + obs = dict(state=state, rgb=rgb) + + return { + 'observations': obs, + 'actions': act_seq, + } + + def __len__(self): + return len(self.slices) + + def process_obs(self, obs_dict): + # get rgbd data + sensor_data = obs_dict.pop("sensor_data") + del obs_dict["sensor_param"] + images_rgb = [] + images_depth = [] + for cam_data in sensor_data.values(): + rgb = torch.from_numpy(cam_data["rgb"]) # (ep_len, H, W, 3) + resized_rgb = self.transforms( + rgb.permute(0, 3, 
1, 2) + ) # (ep_len, 3, 224, 224); pre-trained models from torchvision.models expect input image to be at least 224x224 + images_rgb.append(resized_rgb) + if self.include_depth: + depth = torch.Tensor(cam_data["depth"].astype(np.float32) / 1024).to(torch.float16) # (ep_len, H, W, 1) + resized_depth = self.transforms( + depth.permute(0, 3, 1, 2) + ) # (ep_len, 1, 224, 224); pre-trained models from torchvision.models expect input image to be at least 224x224 + images_depth.append(resized_depth) + rgb = torch.stack(images_rgb, dim=1) # (ep_len, num_cams, 3, 224, 224) # still uint8 + if self.include_depth: + depth = torch.stack(images_depth, dim=1) # (ep_len, num_cams, 1, 224, 224) # float16 + + # flatten the rest of the data which should just be state data + obs_dict['extra'] = {k: v[:, None] if len(v.shape) == 1 else v for k, v in obs_dict['extra'].items()} # dirty fix for data that has one dimension (e.g. is_grasped) + obs_dict = common.flatten_state_dict(obs_dict, use_torch=True) + + processed_obs = dict(state=obs_dict, rgb=rgb, depth=depth) if self.include_depth else dict(state=obs_dict, rgb=rgb) + + return processed_obs + + def get_norm_stats(self): + all_state_data = [] + all_action_data = [] + for traj_idx, ts in self.slices: + state = self.trajectories['observations'][traj_idx]['state'][ts] + act_seq = self.trajectories['actions'][traj_idx][ts:ts+self.num_queries] + action_len = act_seq.shape[0] + if action_len < self.num_queries: + target_pos = act_seq[-1] + act_seq = torch.cat([act_seq, target_pos.repeat(self.num_queries-action_len, 1)], dim=0) + all_state_data.append(state) + all_action_data.append(act_seq) + + all_state_data = torch.stack(all_state_data) + all_action_data = torch.concatenate(all_action_data) + + # normalize obs (state) data + state_mean = all_state_data.mean(dim=0, keepdim=True) + state_std = all_state_data.std(dim=0, keepdim=True) + state_std = torch.clip(state_std, 1e-2, np.inf) # clipping + + # normalize action data + action_mean = 
all_action_data.mean(dim=0, keepdim=True) + action_std = all_action_data.std(dim=0, keepdim=True) + action_std = torch.clip(action_std, 1e-2, np.inf) # clipping + + stats = {"action_mean": action_mean, "action_std": action_std, + "state_mean": state_mean, "state_std": state_std, + "example_state": state} + + return stats + + +class Agent(nn.Module): + def __init__(self, env, args): + super().__init__() + assert len(env.single_observation_space['state'].shape) == 1 # (obs_dim,) + assert len(env.single_observation_space['rgb'].shape) == 4 # (num_cams, C, H, W) + assert len(env.single_action_space.shape) == 1 # (act_dim,) + #assert (env.single_action_space.high == 1).all() and (env.single_action_space.low == -1).all() + + self.state_dim = env.single_observation_space['state'].shape[0] + self.act_dim = env.single_action_space.shape[0] + self.kl_weight = args.kl_weight + self.normalize = T.Normalize(mean=[0.485, 0.456, 0.406], + std=[0.229, 0.224, 0.225]) + + # CNN backbone + backbones = [] + backbone = build_backbone(args) + backbones.append(backbone) + + # CVAE decoder + transformer = build_transformer(args) + + # CVAE encoder + encoder = build_encoder(args) + + # ACT ( CVAE encoder + (CNN backbones + CVAE decoder) ) + self.model = DETRVAE( + backbones, + transformer, + encoder, + state_dim=self.state_dim, + action_dim=self.act_dim, + num_queries=args.num_queries, + ) + + def compute_loss(self, obs, action_seq): + # normalize rgb data + obs['rgb'] = obs['rgb'].float() / 255.0 + obs['rgb'] = self.normalize(obs['rgb']) + + # depth data + if args.include_depth: + obs['depth'] = obs['depth'].float() + + # forward pass + a_hat, (mu, logvar) = self.model(obs, action_seq) + + # compute l1 loss and kl loss + total_kld, dim_wise_kld, mean_kld = kl_divergence(mu, logvar) + all_l1 = F.l1_loss(action_seq, a_hat, reduction='none') + l1 = all_l1.mean() + + # store all loss + loss_dict = dict() + loss_dict['l1'] = l1 + loss_dict['kl'] = total_kld[0] + loss_dict['loss'] = 
loss_dict['l1'] + loss_dict['kl'] * self.kl_weight + return loss_dict + + def get_action(self, obs): + # normalize rgb data + obs['rgb'] = obs['rgb'].float() / 255.0 + obs['rgb'] = self.normalize(obs['rgb']) + + # depth data + if args.include_depth: + obs['depth'] = obs['depth'].float() + + # forward pass + a_hat, (_, _) = self.model(obs) # no action, sample from prior + + return a_hat + + +def kl_divergence(mu, logvar): + batch_size = mu.size(0) + assert batch_size != 0 + if mu.data.ndimension() == 4: + mu = mu.view(mu.size(0), mu.size(1)) + if logvar.data.ndimension() == 4: + logvar = logvar.view(logvar.size(0), logvar.size(1)) + + klds = -0.5 * (1 + logvar - mu.pow(2) - logvar.exp()) + total_kld = klds.sum(1).mean(0, True) + dimension_wise_kld = klds.mean(0) + mean_kld = klds.mean(1).mean(0, True) + + return total_kld, dimension_wise_kld, mean_kld + +def save_ckpt(run_name, tag): + os.makedirs(f'runs/{run_name}/checkpoints', exist_ok=True) + ema.copy_to(ema_agent.parameters()) + torch.save({ + 'norm_stats': dataset.norm_stats, + 'agent': agent.state_dict(), + 'ema_agent': ema_agent.state_dict(), + }, f'runs/{run_name}/checkpoints/{tag}.pt') + +if __name__ == "__main__": + args = tyro.cli(Args) + + if args.exp_name is None: + args.exp_name = os.path.basename(__file__)[: -len(".py")] + run_name = f"{args.env_id}__{args.exp_name}__{args.seed}__{int(time.time())}" + else: + run_name = args.exp_name + + if args.demo_path.endswith('.h5'): + import json + json_file = args.demo_path[:-2] + 'json' + with open(json_file, 'r') as f: + demo_info = json.load(f) + if 'control_mode' in demo_info['env_info']['env_kwargs']: + control_mode = demo_info['env_info']['env_kwargs']['control_mode'] + elif 'control_mode' in demo_info['episodes'][0]: + control_mode = demo_info['episodes'][0]['control_mode'] + else: + raise Exception('Control mode not found in json') + assert control_mode == args.control_mode, f"Control mode mismatched. 
Dataset has control mode {control_mode}, but args has control mode {args.control_mode}" + + # TRY NOT TO MODIFY: seeding + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + torch.backends.cudnn.deterministic = args.torch_deterministic + + device = torch.device("cuda" if torch.cuda.is_available() and args.cuda else "cpu") + + # env setup + env_kwargs = dict(control_mode=args.control_mode, reward_mode="sparse", obs_mode="rgbd" if args.include_depth else "rgb", render_mode="rgb_array") + if args.max_episode_steps is not None: + env_kwargs["max_episode_steps"] = args.max_episode_steps + other_kwargs = None + wrappers = [partial(FlattenRGBDObservationWrapper, depth=args.include_depth)] + envs = make_eval_envs(args.env_id, args.num_eval_envs, args.sim_backend, env_kwargs, other_kwargs, video_dir=f'runs/{run_name}/videos' if args.capture_video else None, wrappers=wrappers) + + # dataloader setup + dataset = SmallDemoDataset_ACTPolicy(args.demo_path, args.num_queries, num_traj=args.num_demos, include_depth=args.include_depth) + sampler = RandomSampler(dataset, replacement=False) + batch_sampler = BatchSampler(sampler, batch_size=args.batch_size, drop_last=True) + batch_sampler = IterationBasedBatchSampler(batch_sampler, args.total_iters) + train_dataloader = DataLoader( + dataset, + batch_sampler=batch_sampler, + num_workers=args.num_dataload_workers, + worker_init_fn=lambda worker_id: worker_init_fn(worker_id, base_seed=args.seed), + ) + if args.num_demos is None: + args.num_demos = dataset.num_traj + + obs_mode = "rgb+depth" if args.include_depth else "rgb" + + if args.track: + import wandb + config = vars(args) + config["eval_env_cfg"] = dict(**env_kwargs, num_envs=args.num_eval_envs, env_id=args.env_id, env_horizon=args.max_episode_steps) + wandb.init( + project=args.wandb_project_name, + entity=args.wandb_entity, + sync_tensorboard=True, + config=config, + name=run_name, + save_code=True, + group="ACT", + tags=["act"] + ) + writer = 
SummaryWriter(f"runs/{run_name}") + writer.add_text( + "hyperparameters", + "|param|value|\n|-|-|\n%s" % ("\n".join([f"|{key}|{value}|" for key, value in vars(args).items()])), + ) + + # agent setup + agent = Agent(envs, args).to(device) + + # optimizer setup + param_dicts = [ + {"params": [p for n, p in agent.named_parameters() if "backbone" not in n and p.requires_grad]}, + { + "params": [p for n, p in agent.named_parameters() if "backbone" in n and p.requires_grad], + "lr": args.lr_backbone, + }, + ] + optimizer = optim.AdamW(param_dicts, lr=args.lr, weight_decay=1e-4) + + # LR drop by a factor of 10 after lr_drop iters + lr_drop = int((2/3)*args.total_iters) + lr_scheduler = optim.lr_scheduler.StepLR(optimizer, lr_drop) + + # Exponential Moving Average + # accelerates training and improves stability + # holds a copy of the model weights + ema = EMAModel(parameters=agent.parameters(), power=0.75) + ema_agent = Agent(envs, args).to(device) + + # Evaluation + #eval_kwargs = dict( + # stats=dataset.norm_stats, num_queries=args.num_queries, temporal_agg=args.temporal_agg, + # max_timesteps=gym_utils.find_max_episode_steps_value(envs), device=device, sim_backend=args.sim_backend + #) + eval_kwargs = dict( + stats=dataset.norm_stats, num_queries=args.num_queries, temporal_agg=args.temporal_agg, + max_timesteps=args.max_episode_steps, device=device, sim_backend=args.sim_backend + ) + + # ---------------------------------------------------------------------------- # + # Training begins. 
+ # ---------------------------------------------------------------------------- # + agent.train() + + best_eval_metrics = defaultdict(float) + timings = defaultdict(float) + + for cur_iter, data_batch in enumerate(train_dataloader): + last_tick = time.time() + # copy data from cpu to gpu + obs_batch_dict = data_batch['observations'] + obs_batch_dict = {k: v.cuda(non_blocking=True) for k, v in obs_batch_dict.items()} + act_batch = data_batch['actions'].cuda(non_blocking=True) + + # forward and compute loss + loss_dict = agent.compute_loss( + obs=obs_batch_dict, # obs_batch_dict['state'] is (B, obs_dim) + action_seq=act_batch, # (B, num_queries, act_dim) + ) + total_loss = loss_dict['loss'] # total_loss = l1 + kl * self.kl_weight + + # backward + optimizer.zero_grad() + total_loss.backward() + optimizer.step() + lr_scheduler.step() # step lr scheduler every batch, this is different from standard pytorch behavior + + # update Exponential Moving Average of the model weights + ema.step(agent.parameters()) + timings["update"] += time.time() - last_tick + + # Evaluation + if cur_iter % args.eval_freq == 0: + last_tick = time.time() + + ema.copy_to(ema_agent.parameters()) + + eval_metrics = evaluate(args.num_eval_episodes, ema_agent, envs, eval_kwargs) + timings["eval"] += time.time() - last_tick + + print(f"Evaluated {len(eval_metrics['success_at_end'])} episodes") + for k in eval_metrics.keys(): + eval_metrics[k] = np.mean(eval_metrics[k]) + writer.add_scalar(f"eval/{k}", eval_metrics[k], cur_iter) + print(f"{k}: {eval_metrics[k]:.4f}") + + save_on_best_metrics = ["success_once", "success_at_end"] + for k in save_on_best_metrics: + if k in eval_metrics and eval_metrics[k] > best_eval_metrics[k]: + best_eval_metrics[k] = eval_metrics[k] + save_ckpt(run_name, f"best_eval_{k}") + print(f'New best {k}_rate: {eval_metrics[k]:.4f}. 
Saving checkpoint.') + + if cur_iter % args.log_freq == 0: + print(f"Iteration {cur_iter}, loss: {total_loss.item()}") + writer.add_scalar("charts/learning_rate", optimizer.param_groups[0]["lr"], cur_iter) + writer.add_scalar("charts/backbone_learning_rate", optimizer.param_groups[1]["lr"], cur_iter) + writer.add_scalar("losses/total_loss", total_loss.item(), cur_iter) + for k, v in timings.items(): + writer.add_scalar(f"time/{k}", v, cur_iter) + + # Checkpoint + if args.save_freq is not None and cur_iter % args.save_freq == 0: + save_ckpt(run_name, str(cur_iter)) + + envs.close() + writer.close() diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/bc/.gitignore b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/bc/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..6f999795602e2dce809eb628584a67d19b4ba04b --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/bc/.gitignore @@ -0,0 +1,4 @@ +__pycache__/ +runs/ +wandb/ +*.egg-info/ diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/.gitignore b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..6f999795602e2dce809eb628584a67d19b4ba04b --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/.gitignore @@ -0,0 +1,4 @@ +__pycache__/ +runs/ +wandb/ +*.egg-info/ diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/README.md b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08b7f2580af7596dc6180eb241cbfc9dce8530d8 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/README.md @@ -0,0 +1,64 @@ +# Diffusion Policy + +Code for 
running the Diffusion Policy algorithm based on ["Diffusion Policy: Visuomotor Policy Learning via Action Diffusion"](https://arxiv.org/abs/2303.04137v4). It is adapted from the [original code](https://github.com/real-stanford/diffusion_policy). + +## Installation + +To get started, we recommend using conda/mamba to create a new environment and install the dependencies + +```bash +conda create -n diffusion-policy-ms python=3.9 +conda activate diffusion-policy-ms +pip install -e . +``` + +## Setup + +Read through the [imitation learning setup documentation](https://maniskill.readthedocs.io/en/latest/user_guide/learning_from_demos/setup.html) which details everything you need to know regarding running imitation learning baselines in ManiSkill. It includes details on how to download demonstration datasets, preprocess them, evaluate policies fairly for comparison, as well as suggestions to improve performance and avoid bugs. + +## Training + +We provide scripts to train Diffusion Policy on demonstrations. + +Note that some demonstrations are slow (e.g. motion planning or human teleoperated) and can exceed the default max episode steps which can be an issue as imitation learning algorithms learn to solve the task at the same speed the demonstrations solve it. In this case, you can use the `--max-episode-steps` flag to set a higher value so that the policy can solve the task in time. General recommendation is to set `--max-episode-steps` to about 2x the length of the mean demonstrations length you are using for training. We have tuned baselines in the `baselines.sh` script that set a recommended `--max-episode-steps` for each task. 
+ +Example state based training, learning from 100 demonstrations generated via motionplanning in the PickCube-v1 task + +```bash +seed=1 +demos=100 +python train.py --env-id PickCube-v1 \ + --demo-path ~/.maniskill/demos/PickCube-v1/motionplanning/trajectory.state.pd_ee_delta_pos.physx_cpu.h5 \ + --control-mode "pd_ee_delta_pos" --sim-backend "physx_cpu" --num-demos ${demos} --max_episode_steps 100 \ + --total_iters 30000 \ + --exp-name diffusion_policy-PickCube-v1-state-${demos}_motionplanning_demos-${seed} \ + --track # track training on wandb +``` + +## Citation + +If you use this baseline please cite the following +``` +@inproceedings{DBLP:conf/rss/ChiFDXCBS23, + author = {Cheng Chi and + Siyuan Feng and + Yilun Du and + Zhenjia Xu and + Eric Cousineau and + Benjamin Burchfiel and + Shuran Song}, + editor = {Kostas E. Bekris and + Kris Hauser and + Sylvia L. Herbert and + Jingjin Yu}, + title = {Diffusion Policy: Visuomotor Policy Learning via Action Diffusion}, + booktitle = {Robotics: Science and Systems XIX, Daegu, Republic of Korea, July + 10-14, 2023}, + year = {2023}, + url = {https://doi.org/10.15607/RSS.2023.XIX.026}, + doi = {10.15607/RSS.2023.XIX.026}, + timestamp = {Mon, 29 Apr 2024 21:28:50 +0200}, + biburl = {https://dblp.org/rec/conf/rss/ChiFDXCBS23.bib}, + bibsource = {dblp computer science bibliography, https://dblp.org} +} +``` \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/baselines.sh b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/baselines.sh new file mode 100644 index 0000000000000000000000000000000000000000..c0ec6d98430dd32de78cae15b3c9da87cb44accd --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/baselines.sh @@ -0,0 +1,78 @@ +# Baseline scripts + +# state based baselines +seed=1 +demos=100 +for demo_type in "motionplanning" "rl" +do + 
            --exp-name diffusion_policy-PickCube-v1-state-${demos}_${demo_type}_demos-${seed} \
            --exp-name diffusion_policy-PegInsertionSide-v1-state-${demos}_${demo_type}_demos-${seed} \
+ fi +done diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/diffusion_policy/evaluate.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/diffusion_policy/evaluate.py new file mode 100644 index 0000000000000000000000000000000000000000..8ae5331654823801626e9f73c6a534164b08b62e --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/diffusion_policy/evaluate.py @@ -0,0 +1,52 @@ +from collections import defaultdict +import gymnasium +import numpy as np +import torch + +from mani_skill.utils import common + +def collect_episode_info(infos, result): + if "final_info" in infos: # infos is a dict + + indices = np.where(infos["_final_info"])[0] # not all envs are done at the same time + for i in indices: + info = infos["final_info"][i] # info is also a dict + ep = info['episode'] + result['return'].append(ep['r'][0]) + result['episode_len'].append(ep["l"][0]) + if "success" in info: + result['success'].append(info['success']) + if "fail" in info: + result['fail'].append(info['fail']) + return result + +def evaluate(n: int, agent, eval_envs, device, sim_backend: str): + agent.eval() + with torch.no_grad(): + eval_metrics = defaultdict(list) + obs, info = eval_envs.reset() + eps_count = 0 + while eps_count < n: + obs = common.to_tensor(obs, device) + action_seq = agent.get_action(obs) + if sim_backend == "cpu": + action_seq = action_seq.cpu().numpy() + for i in range(action_seq.shape[1]): + obs, rew, terminated, truncated, info = eval_envs.step(action_seq[:, i]) + if truncated.any(): + break + + if truncated.any(): + assert truncated.all() == truncated.any(), "all episodes should truncate at the same time for fair evaluation with other algorithms" + if isinstance(info["final_info"], dict): + for k, v in info["final_info"]["episode"].items(): + eval_metrics[k].append(v.float().cpu().numpy()) + else: + for final_info in info["final_info"]: + for k, v 
        sim_backend: the simulation backend to use. can be "cpu" or "gpu"
+ """ + if sim_backend == "cpu": + def cpu_make_env(env_id, seed, video_dir=None, env_kwargs = dict(), other_kwargs = dict()): + def thunk(): + env = gym.make(env_id, reconfiguration_freq=1, **env_kwargs) + for wrapper in wrappers: + env = wrapper(env) + env = CPUGymWrapper(env, ignore_terminations=True, record_metrics=True) + if video_dir: + env = RecordEpisode(env, output_dir=video_dir, save_trajectory=False, info_on_video=True, source_type="diffusion_policy", source_desc="diffusion_policy evaluation rollout") + env = gym.wrappers.FrameStack(env, other_kwargs['obs_horizon']) + env.action_space.seed(seed) + env.observation_space.seed(seed) + return env + + return thunk + vector_cls = gym.vector.SyncVectorEnv if num_envs == 1 else lambda x : gym.vector.AsyncVectorEnv(x, context="forkserver") + env = vector_cls([cpu_make_env(env_id, seed, video_dir if seed == 0 else None, env_kwargs, other_kwargs) for seed in range(num_envs)]) + else: + env = gym.make(env_id, num_envs=num_envs, sim_backend=sim_backend, reconfiguration_freq=1, **env_kwargs) + max_episode_steps = gym_utils.find_max_episode_steps_value(env) + for wrapper in wrappers: + env = wrapper(env) + env = FrameStack(env, num_stack=other_kwargs['obs_horizon']) + if video_dir: + env = RecordEpisode(env, output_dir=video_dir, save_trajectory=False, save_video=True, source_type="diffusion_policy", source_desc="diffusion_policy evaluation rollout", max_steps_per_video=max_episode_steps) + env = ManiSkillVectorEnv(env, ignore_terminations=True, record_metrics=True) + return env diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/diffusion_policy/utils.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/diffusion_policy/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..af3876fca4a4dcd63b235aa2192c31fd89ac0304 --- /dev/null +++ 
b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/diffusion_policy/utils.py @@ -0,0 +1,119 @@ +from torch.utils.data.sampler import Sampler +import numpy as np +import torch +from h5py import File, Group, Dataset + +class IterationBasedBatchSampler(Sampler): + """Wraps a BatchSampler. + Resampling from it until a specified number of iterations have been sampled + References: + https://github.com/facebookresearch/maskrcnn-benchmark/blob/master/maskrcnn_benchmark/data/samplers/iteration_based_batch_sampler.py + """ + + def __init__(self, batch_sampler, num_iterations, start_iter=0): + self.batch_sampler = batch_sampler + self.num_iterations = num_iterations + self.start_iter = start_iter + + def __iter__(self): + iteration = self.start_iter + while iteration < self.num_iterations: + # if the underlying sampler has a set_epoch method, like + # DistributedSampler, used for making each process see + # a different split of the dataset, then set it + if hasattr(self.batch_sampler.sampler, "set_epoch"): + self.batch_sampler.sampler.set_epoch(iteration) + for batch in self.batch_sampler: + yield batch + iteration += 1 + if iteration >= self.num_iterations: + break + + def __len__(self): + return self.num_iterations - self.start_iter + + +def worker_init_fn(worker_id, base_seed=None): + """The function is designed for pytorch multi-process dataloader. + Note that we use the pytorch random generator to generate a base_seed. + Please try to be consistent. 
        raise NotImplementedError(f"Unsupported h5 file type: {type(file)}")
+ _traj = raw_data['traj_0'] + for key in keys: + source_key = TARGET_KEY_TO_SOURCE_KEY[key] + assert source_key in _traj, f"key: {source_key} not in traj_0: {_traj.keys()}" + dataset = {} + for target_key in keys: + # if 'next' in target_key: + # raise NotImplementedError('Please carefully deal with the length of trajectory') + source_key = TARGET_KEY_TO_SOURCE_KEY[target_key] + dataset[target_key] = [ raw_data[idx][source_key] for idx in raw_data ] + if isinstance(dataset[target_key][0], np.ndarray) and concat: + if target_key in ['observations', 'states'] and \ + len(dataset[target_key][0]) > len(raw_data['traj_0']['actions']): + dataset[target_key] = np.concatenate([ + t[:-1] for t in dataset[target_key] + ], axis=0) + elif target_key in ['next_observations', 'next_states'] and \ + len(dataset[target_key][0]) > len(raw_data['traj_0']['actions']): + dataset[target_key] = np.concatenate([ + t[1:] for t in dataset[target_key] + ], axis=0) + else: + dataset[target_key] = np.concatenate(dataset[target_key], axis=0) + + print('Load', target_key, dataset[target_key].shape) + else: + print('Load', target_key, len(dataset[target_key]), type(dataset[target_key][0])) + return dataset diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/train.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/train.py new file mode 100644 index 0000000000000000000000000000000000000000..58b487a7061b6fa0f94dcd0e352cc4f6fe510aa3 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/diffusion_policy/train.py @@ -0,0 +1,418 @@ +ALGO_NAME = 'BC_Diffusion_state_UNet' + +import argparse +import os +import random +from distutils.util import strtobool +import time +import gymnasium as gym +import numpy as np +import torch +import torch.nn as nn +import torch.optim as optim +import torch.nn.functional as F +from torch.utils.tensorboard import SummaryWriter +from diffusion_policy.evaluate 
import evaluate +from mani_skill.utils import gym_utils +from mani_skill.utils.registration import REGISTERED_ENVS + +from collections import defaultdict + +from torch.utils.data.dataset import Dataset +from torch.utils.data.sampler import RandomSampler, BatchSampler +from torch.utils.data.dataloader import DataLoader +from diffusion_policy.utils import IterationBasedBatchSampler, worker_init_fn +from diffusion_policy.make_env import make_eval_envs +from diffusers.schedulers.scheduling_ddpm import DDPMScheduler +from diffusers.training_utils import EMAModel +from diffusers.optimization import get_scheduler +from diffusion_policy.conditional_unet1d import ConditionalUnet1D +from dataclasses import dataclass, field +from typing import Optional, List +import tyro + +@dataclass +class Args: + exp_name: Optional[str] = None + """the name of this experiment""" + seed: int = 1 + """seed of the experiment""" + torch_deterministic: bool = True + """if toggled, `torch.backends.cudnn.deterministic=False`""" + cuda: bool = True + """if toggled, cuda will be enabled by default""" + track: bool = False + """if toggled, this experiment will be tracked with Weights and Biases""" + wandb_project_name: str = "ManiSkill" + """the wandb's project name""" + wandb_entity: Optional[str] = None + """the entity (team) of wandb's project""" + capture_video: bool = True + """whether to capture videos of the agent performances (check out `videos` folder)""" + + env_id: str = "PegInsertionSide-v0" + """the id of the environment""" + demo_path: str = 'data/ms2_official_demos/rigid_body/PegInsertionSide-v0/trajectory.state.pd_ee_delta_pose.h5' + """the path of demo dataset (pkl or h5)""" + num_demos: Optional[int] = None + """number of trajectories to load from the demo dataset""" + total_iters: int = 1_000_000 + """total timesteps of the experiment""" + batch_size: int = 1024 + """the batch size of sample from the replay memory""" + + # Diffusion Policy specific arguments + lr: float = 1e-4 + 
"""the learning rate of the diffusion policy""" + obs_horizon: int = 2 # Seems not very important in ManiSkill, 1, 2, 4 work well + act_horizon: int = 8 # Seems not very important in ManiSkill, 4, 8, 15 work well + pred_horizon: int = 16 # 16->8 leads to worse performance, maybe it is like generate a half image; 16->32, improvement is very marginal + diffusion_step_embed_dim: int = 64 # not very important + unet_dims: List[int] = field(default_factory=lambda: [64, 128, 256]) # default setting is about ~4.5M params + n_groups: int = 8 # jigu says it is better to let each group have at least 8 channels; it seems 4 and 8 are simila + + # Environment/experiment specific arguments + max_episode_steps: Optional[int] = None + """Change the environments' max_episode_steps to this value. Sometimes necessary if the demonstrations being imitated are too short. Typically the default + max episode steps of environments in ManiSkill are tuned lower so reinforcement learning agents can learn faster.""" + log_freq: int = 1000 + """the frequency of logging the training metrics""" + eval_freq: int = 5000 + """the frequency of evaluating the agent on the evaluation environments""" + save_freq: Optional[int] = None + """the frequency of saving the model checkpoints. By default this is None and will only save checkpoints based on the best evaluation metrics.""" + num_eval_episodes: int = 100 + """the number of episodes to evaluate the agent on""" + num_eval_envs: int = 10 + """the number of parallel environments to evaluate the agent on""" + sim_backend: str = "cpu" + """the simulation backend to use for evaluation environments. can be "cpu" or "gpu""" + num_dataload_workers: int = 0 + """the number of workers to use for loading the training data in the torch dataloader""" + control_mode: str = 'pd_joint_delta_pos' + """the control mode to use for the evaluation environments. 
Must match the control mode of the demonstration dataset.""" + + # additional tags/configs for logging purposes to wandb and shared comparisons with other algorithms + demo_type: Optional[str] = None + + +class SmallDemoDataset_DiffusionPolicy(Dataset): # Load everything into GPU memory + def __init__(self, data_path, device, num_traj): + if data_path[-4:] == '.pkl': + raise NotImplementedError() + else: + from diffusion_policy.utils import load_demo_dataset + trajectories = load_demo_dataset(data_path, num_traj=num_traj, concat=False) + # trajectories['observations'] is a list of np.ndarray (L+1, obs_dim) + # trajectories['actions'] is a list of np.ndarray (L, act_dim) + + for k, v in trajectories.items(): + for i in range(len(v)): + trajectories[k][i] = torch.Tensor(v[i]).to(device) + + # Pre-compute all possible (traj_idx, start, end) tuples, this is very specific to Diffusion Policy + if 'delta_pos' in args.control_mode or args.control_mode == 'base_pd_joint_vel_arm_pd_joint_vel': + self.pad_action_arm = torch.zeros((trajectories['actions'][0].shape[1]-1,), device=device) + # to make the arm stay still, we pad the action with 0 in 'delta_pos' control mode + # gripper action needs to be copied from the last action + # else: + # raise NotImplementedError(f'Control Mode {args.control_mode} not supported') + self.obs_horizon, self.pred_horizon = obs_horizon, pred_horizon = args.obs_horizon, args.pred_horizon + self.slices = [] + num_traj = len(trajectories['actions']) + total_transitions = 0 + for traj_idx in range(num_traj): + L = trajectories['actions'][traj_idx].shape[0] + assert trajectories['observations'][traj_idx].shape[0] == L + 1 + total_transitions += L + + # |o|o| observations: 2 + # | |a|a|a|a|a|a|a|a| actions executed: 8 + # |p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p| actions predicted: 16 + pad_before = obs_horizon - 1 + # Pad before the trajectory, so the first action of an episode is in "actions executed" + # obs_horizon - 1 is the number of "not used 
actions" + pad_after = pred_horizon - obs_horizon + # Pad after the trajectory, so all the observations are utilized in training + # Note that in the original code, pad_after = act_horizon - 1, but I think this is not the best choice + self.slices += [ + (traj_idx, start, start + pred_horizon) for start in range(-pad_before, L - pred_horizon + pad_after) + ] # slice indices follow convention [start, end) + + print(f"Total transitions: {total_transitions}, Total obs sequences: {len(self.slices)}") + + self.trajectories = trajectories + + def __getitem__(self, index): + traj_idx, start, end = self.slices[index] + L, act_dim = self.trajectories['actions'][traj_idx].shape + + obs_seq = self.trajectories['observations'][traj_idx][max(0, start):start+self.obs_horizon] + # start+self.obs_horizon is at least 1 + act_seq = self.trajectories['actions'][traj_idx][max(0, start):end] + if start < 0: # pad before the trajectory + obs_seq = torch.cat([obs_seq[0].repeat(-start, 1), obs_seq], dim=0) + act_seq = torch.cat([act_seq[0].repeat(-start, 1), act_seq], dim=0) + if end > L: # pad after the trajectory + gripper_action = act_seq[-1, -1] + pad_action = torch.cat((self.pad_action_arm, gripper_action[None]), dim=0) + act_seq = torch.cat([act_seq, pad_action.repeat(end-L, 1)], dim=0) + # making the robot (arm and gripper) stay still + assert obs_seq.shape[0] == self.obs_horizon and act_seq.shape[0] == self.pred_horizon + return { + 'observations': obs_seq, + 'actions': act_seq, + } + + def __len__(self): + return len(self.slices) + + +class Agent(nn.Module): + def __init__(self, env, args): + super().__init__() + self.obs_horizon = args.obs_horizon + self.act_horizon = args.act_horizon + self.pred_horizon = args.pred_horizon + assert len(env.single_observation_space.shape) == 2 # (obs_horizon, obs_dim) + assert len(env.single_action_space.shape) == 1 # (act_dim, ) + assert (env.single_action_space.high == 1).all() and (env.single_action_space.low == -1).all() + # denoising 
results will be clipped to [-1,1], so the action should be in [-1,1] as well + self.act_dim = env.single_action_space.shape[0] + + self.noise_pred_net = ConditionalUnet1D( + input_dim=self.act_dim, # act_horizon is not used (U-Net doesn't care) + global_cond_dim=np.prod(env.single_observation_space.shape), # obs_horizon * obs_dim + diffusion_step_embed_dim=args.diffusion_step_embed_dim, + down_dims=args.unet_dims, + n_groups=args.n_groups, + ) + self.num_diffusion_iters = 100 + self.noise_scheduler = DDPMScheduler( + num_train_timesteps=self.num_diffusion_iters, + beta_schedule='squaredcos_cap_v2', # has big impact on performance, try not to change + clip_sample=True, # clip output to [-1,1] to improve stability + prediction_type='epsilon' # predict noise (instead of denoised action) + ) + + def compute_loss(self, obs_seq, action_seq): + B = obs_seq.shape[0] + + # observation as FiLM conditioning + obs_cond = obs_seq.flatten(start_dim=1) # (B, obs_horizon * obs_dim) + + # sample noise to add to actions + noise = torch.randn((B, self.pred_horizon, self.act_dim), device=device) + + # sample a diffusion iteration for each data point + timesteps = torch.randint( + 0, self.noise_scheduler.config.num_train_timesteps, + (B,), device=device + ).long() + + # add noise to the clean images(actions) according to the noise magnitude at each diffusion iteration + # (this is the forward diffusion process) + noisy_action_seq = self.noise_scheduler.add_noise( + action_seq, noise, timesteps) + + # predict the noise residual + noise_pred = self.noise_pred_net( + noisy_action_seq, timesteps, global_cond=obs_cond) + + return F.mse_loss(noise_pred, noise) + + def get_action(self, obs_seq): + # init scheduler + # self.noise_scheduler.set_timesteps(self.num_diffusion_iters) + # set_timesteps will change noise_scheduler.timesteps is only used in noise_scheduler.step() + # noise_scheduler.step() is only called during inference + # if we use DDPM, and inference_diffusion_steps == 
train_diffusion_steps, then we can skip this + + # obs_seq: (B, obs_horizon, obs_dim) + B = obs_seq.shape[0] + with torch.no_grad(): + obs_cond = obs_seq.flatten(start_dim=1) # (B, obs_horizon * obs_dim) + + # initialize action from Gaussian noise + noisy_action_seq = torch.randn((B, self.pred_horizon, self.act_dim), device=obs_seq.device) + + for k in self.noise_scheduler.timesteps: + # predict noise + noise_pred = self.noise_pred_net( + sample=noisy_action_seq, + timestep=k, + global_cond=obs_cond, + ) + + # inverse diffusion step (remove noise) + noisy_action_seq = self.noise_scheduler.step( + model_output=noise_pred, + timestep=k, + sample=noisy_action_seq, + ).prev_sample + + # only take act_horizon number of actions + start = self.obs_horizon - 1 + end = start + self.act_horizon + return noisy_action_seq[:, start:end] # (B, act_horizon, act_dim) + +def save_ckpt(run_name, tag): + os.makedirs(f'runs/{run_name}/checkpoints', exist_ok=True) + ema.copy_to(ema_agent.parameters()) + torch.save({ + 'agent': agent.state_dict(), + 'ema_agent': ema_agent.state_dict(), + }, f'runs/{run_name}/checkpoints/{tag}.pt') + +if __name__ == "__main__": + args = tyro.cli(Args) + if args.exp_name is None: + args.exp_name = os.path.basename(__file__)[: -len(".py")] + run_name = f"{args.env_id}__{args.exp_name}__{args.seed}__{int(time.time())}" + else: + run_name = args.exp_name + + if args.demo_path.endswith('.h5'): + import json + json_file = args.demo_path[:-2] + 'json' + with open(json_file, 'r') as f: + demo_info = json.load(f) + if 'control_mode' in demo_info['env_info']['env_kwargs']: + control_mode = demo_info['env_info']['env_kwargs']['control_mode'] + elif 'control_mode' in demo_info['episodes'][0]: + control_mode = demo_info['episodes'][0]['control_mode'] + else: + raise Exception('Control mode not found in json') + assert control_mode == args.control_mode, f"Control mode mismatched. 
Dataset has control mode {control_mode}, but args has control mode {args.control_mode}" + assert args.obs_horizon + args.act_horizon - 1 <= args.pred_horizon + assert args.obs_horizon >= 1 and args.act_horizon >= 1 and args.pred_horizon >= 1 + + # TRY NOT TO MODIFY: seeding + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + torch.backends.cudnn.deterministic = args.torch_deterministic + + device = torch.device("cuda" if torch.cuda.is_available() and args.cuda else "cpu") + + # env setup + env_kwargs = dict(control_mode=args.control_mode, reward_mode="sparse", obs_mode="state", render_mode="rgb_array") + if args.max_episode_steps is not None: + env_kwargs["max_episode_steps"] = args.max_episode_steps + other_kwargs = dict(obs_horizon=args.obs_horizon) + envs = make_eval_envs(args.env_id, args.num_eval_envs, args.sim_backend, env_kwargs, other_kwargs, video_dir=f'runs/{run_name}/videos' if args.capture_video else None) + + if args.track: + import wandb + config = vars(args) + config["eval_env_cfg"] = dict(**env_kwargs, num_envs=args.num_eval_envs, env_id=args.env_id, env_horizon=args.max_episode_steps or gym_utils.find_max_episode_steps_value(envs)) + wandb.init( + project=args.wandb_project_name, + entity=args.wandb_entity, + sync_tensorboard=True, + config=config, + name=run_name, + save_code=True, + group="DiffusionPolicy", + tags=["diffusion_policy"] + ) + writer = SummaryWriter(f"runs/{run_name}") + writer.add_text( + "hyperparameters", + "|param|value|\n|-|-|\n%s" % ("\n".join([f"|{key}|{value}|" for key, value in vars(args).items()])), + ) + + # dataloader setup + dataset = SmallDemoDataset_DiffusionPolicy(args.demo_path, device, num_traj=args.num_demos) + sampler = RandomSampler(dataset, replacement=False) + batch_sampler = BatchSampler(sampler, batch_size=args.batch_size, drop_last=True) + batch_sampler = IterationBasedBatchSampler(batch_sampler, args.total_iters) + train_dataloader = DataLoader( + dataset, + 
batch_sampler=batch_sampler, + num_workers=args.num_dataload_workers, + worker_init_fn=lambda worker_id: worker_init_fn(worker_id, base_seed=args.seed), + ) + if args.num_demos is None: + args.num_demos = len(dataset) + + # agent setup + agent = Agent(envs, args).to(device) + optimizer = optim.AdamW(params=agent.parameters(), + lr=args.lr, betas=(0.95, 0.999), weight_decay=1e-6) + + # Cosine LR schedule with linear warmup + lr_scheduler = get_scheduler( + name='cosine', + optimizer=optimizer, + num_warmup_steps=500, + num_training_steps=args.total_iters, + ) + + # Exponential Moving Average + # accelerates training and improves stability + # holds a copy of the model weights + ema = EMAModel(parameters=agent.parameters(), power=0.75) + ema_agent = Agent(envs, args).to(device) + + # ---------------------------------------------------------------------------- # + # Training begins. + # ---------------------------------------------------------------------------- # + agent.train() + + best_eval_metrics = defaultdict(float) + timings = defaultdict(float) + + for iteration, data_batch in enumerate(train_dataloader): + # # copy data from cpu to gpu + # data_batch = {k: v.cuda(non_blocking=True) for k, v in data_batch.items()} + + # forward and compute loss + total_loss = agent.compute_loss( + obs_seq=data_batch['observations'], # (B, L, obs_dim) + action_seq=data_batch['actions'], # (B, L, act_dim) + ) + + # backward + optimizer.zero_grad() + total_loss.backward() + optimizer.step() + lr_scheduler.step() # step lr scheduler every batch, this is different from standard pytorch behavior + last_tick = time.time() + + # update Exponential Moving Average of the model weights + ema.step(agent.parameters()) + # TRY NOT TO MODIFY: record rewards for plotting purposes + if iteration % args.log_freq == 0: + print(f"Iteration {iteration}, loss: {total_loss.item()}") + writer.add_scalar("charts/learning_rate", optimizer.param_groups[0]["lr"], iteration) + 
writer.add_scalar("losses/total_loss", total_loss.item(), iteration) + for k, v in timings.items(): + writer.add_scalar(f"time/{k}", v, iteration) + # Evaluation + if iteration % args.eval_freq == 0: + last_tick = time.time() + + ema.copy_to(ema_agent.parameters()) + # def sample_fn(obs): + + eval_metrics = evaluate(args.num_eval_episodes, ema_agent, envs, device, args.sim_backend) + timings["eval"] += time.time() - last_tick + + print(f"Evaluated {len(eval_metrics['success_at_end'])} episodes") + for k in eval_metrics.keys(): + eval_metrics[k] = np.mean(eval_metrics[k]) + writer.add_scalar(f"eval/{k}", eval_metrics[k], iteration) + print(f"{k}: {eval_metrics[k]:.4f}") + + save_on_best_metrics = ["success_once", "success_at_end"] + for k in save_on_best_metrics: + if k in eval_metrics and eval_metrics[k] > best_eval_metrics[k]: + best_eval_metrics[k] = eval_metrics[k] + save_ckpt(run_name, f"best_eval_{k}") + print(f'New best {k}_rate: {eval_metrics[k]:.4f}. Saving checkpoint.') + # Checkpoint + if args.save_freq is not None and iteration % args.save_freq == 0: + save_ckpt(run_name, str(iteration)) + envs.close() + writer.close() diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/.gitignore b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..07c07300e18ad72495711f7ec85fd71d210d5190 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/.gitignore @@ -0,0 +1,4 @@ +/runs +/videos +/pretrained +/wandb \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/README.md b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e36cb69300369eb1bf7739b051e27861b0c8c64f --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/README.md @@ 
-0,0 +1,119 @@ +# Proximal Policy Optimization (PPO) + +Code for running the PPO RL algorithm is adapted from [CleanRL](https://github.com/vwxyzjn/cleanrl/) and [LeanRL](https://github.com/pytorch-labs/LeanRL/). It is written to be single-file and easy to follow/read, and supports state-based RL and visual-based RL code. + +Note that ManiSkill is still in beta, so we have not finalized training scripts for every pre-built task (some of which are simply too hard to solve with RL anyway). + +Official baseline results can be run by using the scripts in the baselines.sh file. Results are organized and published to our [wandb report](https://api.wandb.ai/links/stonet2000/k6lz966q) + +There is also now experimental support for PPO compiled and with CUDA Graphs enabled based on LeanRL. The code is in ppo_fast.py and you need to install [torchrl](https://github.com/pytorch/rl) and [tensordict](https://github.com/pytorch/tensordict/): + +```bash +pip install torchrl tensordict +``` + +## State Based RL + +Below is a sample of various commands you can run to train a state-based policy to solve various tasks with PPO that are lightly tuned already. The fastest one is the PushCube-v1 task which can take less than a minute to train on the GPU and the PickCube-v1 task which can take 2-5 minutes on the GPU. + +The PPO baseline is not guaranteed to work for all tasks as some tasks do not have dense rewards yet or well tuned ones, or simply are too hard with standard PPO. + + +```bash +python ppo.py --env_id="PushCube-v1" \ + --num_envs=2048 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=2_000_000 --eval_freq=10 --num-steps=20 +``` + +To evaluate, you can run +```bash +python ppo.py --env_id="PushCube-v1" \ + --evaluate --checkpoint=path/to/model.pt \ + --num_eval_envs=1 --num-eval-steps=1000 +``` + +Note that with `--evaluate`, trajectories are saved from a GPU simulation. 
In order to support replaying these trajectories correctly with the `maniskill.trajectory.replay_trajectory` tool for some task, the number of evaluation environments must be fixed to `1`. This is necessary in order to ensure reproducibility for tasks that have randomizations on geometry (e.g. PickSingleYCB). Other tasks without geometrical randomization like PushCube are fine and you can increase the number of evaluation environments. + +The examples.sh file has a full list of tested commands for running state based PPO successfully on many tasks. + +The results of running the baseline scripts for state based PPO are here: https://api.wandb.ai/links/stonet2000/k6lz966q. + +## Visual (RGB) Based RL + +Below is a sample of various commands for training a image-based policy with PPO that are lightly tuned. The fastest again is also PushCube-v1 which can take about 1-5 minutes and PickCube-v1 which takes 15-45 minutes. You will need to tune the `--num_envs` argument according to how much GPU memory you have as rendering visual observations uses a lot of memory. The settings below should all take less than 15GB of GPU memory. The examples.sh file has a full list of tested commands for running visual based PPO successfully on many tasks. 
+ + +```bash +python ppo_rgb.py --env_id="PushCube-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=8 \ + --total_timesteps=1_000_000 --eval_freq=10 --num-steps=20 +python ppo_rgb.py --env_id="PickCube-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=8 \ + --total_timesteps=10_000_000 +python ppo_rgb.py --env_id="AnymalC-Reach-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=10_000_000 --num-steps=200 --num-eval-steps=200 \ + --gamma=0.99 --gae_lambda=0.95 +``` + +To evaluate a trained policy you can run + +```bash +python ppo_rgb.py --env_id="PickCube-v1" \ + --evaluate --checkpoint=path/to/model.pt \ + --num_eval_envs=1 --num-eval-steps=1000 +``` + +and it will save videos to the `path/to/test_videos`. + +The examples.sh file has a full list of tested commands for running RGB based PPO successfully on many tasks. + +The results of running the baseline scripts for RGB based PPO are here: https://api.wandb.ai/links/stonet2000/k6lz966q + +## Visual (RGB+Depth) Based RL + +WIP + +## Visual (Pointcloud) Based RL + +WIP + +## Replaying Evaluation Trajectories + +It might be useful to get some nicer looking videos. A simple way to do that is to first use the evaluation scripts provided above. It will then save a .h5 and .json file with a name equal to the date and time that you can then replay with different settings as so + +```bash +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path=path/to/trajectory.h5 --use-env-states --shader="rt-fast" \ + --save-video --allow-failure -o "none" +``` + +This will use environment states to replay trajectories, turn on the ray-tracer (There is also "rt" which is higher quality but slower), and save all videos including failed trajectories. + +## Some Notes + +- Evaluation with GPU simulation (especially with randomized objects) is a bit tricky. 
We recommend reading through [our docs](https://maniskill.readthedocs.io/en/latest/user_guide/reinforcement_learning/baselines.html#evaluation) on online RL evaluation in order to understand how to fairly evaluate policies with GPU simulation. +- Many tasks support visual observations, however we have not carefully verified yet if the camera poses for the tasks are setup in a way that makes it possible to solve some tasks from visual observations. + +## Citation + +If you use this baseline please cite the following +``` +@article{DBLP:journals/corr/SchulmanWDRK17, + author = {John Schulman and + Filip Wolski and + Prafulla Dhariwal and + Alec Radford and + Oleg Klimov}, + title = {Proximal Policy Optimization Algorithms}, + journal = {CoRR}, + volume = {abs/1707.06347}, + year = {2017}, + url = {http://arxiv.org/abs/1707.06347}, + eprinttype = {arXiv}, + eprint = {1707.06347}, + timestamp = {Mon, 13 Aug 2018 16:47:34 +0200}, + biburl = {https://dblp.org/rec/journals/corr/SchulmanWDRK17.bib}, + bibsource = {dblp computer science bibliography, https://dblp.org} +} +``` \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/baselines.sh b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/baselines.sh new file mode 100644 index 0000000000000000000000000000000000000000..e8693f4ed1212d5ee85ed035676fbfa0c4153e19 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/baselines.sh @@ -0,0 +1,185 @@ +# Baseline results for PPO + +seeds=(9351 4796 1788) + +### State Based PPO Baselines ### +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="PushCube-v1" --seed=${seed} \ + --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-PushCube-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + 
python ppo_fast.py --env_id="PickCube-v1" --seed=${seed} \ + --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-PickCube-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="PushT-v1" --seed=${seed} \ + --num_envs=4096 --num-steps=16 --update_epochs=8 --num_minibatches=32 --gamma=0.99 \ + --total_timesteps=50_000_000 --num_eval_steps=100 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-PushT-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="StackCube-v1" --seed=${seed} \ + --num_envs=4096 --num-steps=16 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-StackCube-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="RollBall-v1" --seed=${seed} \ + --num_envs=4096 --num-steps=16 --update_epochs=8 --num_minibatches=32 --gamma=0.95 \ + --total_timesteps=50_000_000 --num-eval-steps=80 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-RollBall-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="PullCube-v1" --seed=${seed} \ + --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-PullCube-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="PokeCube-v1" --seed=${seed} \ + --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \ + 
--total_timesteps=50_000_000 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-PokeCube-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="LiftPegUpright-v1" --seed=${seed} \ + --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-LiftPegUpright-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="AnymalC-Reach-v1" --seed=${seed} \ + --num_envs=4096 --update_epochs=8 --num_minibatches=32 --gamma=0.99 --gae_lambda=0.95 \ + --total_timesteps=50_000_000 --num-steps=16 --num-eval-steps=200 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-AnymalC-Reach-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="PegInsertionSide-v1" --seed=${seed} \ + --num_envs=2048 --update_epochs=8 --num_minibatches=32 --gamma=0.97 --gae_lambda=0.95 \ + --total_timesteps=75_000_000 --num-steps=16 --num-eval-steps=100 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-PegInsertionSide-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="TwoRobotPickCube-v1" --seed=${seed} \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 --num-steps=100 --num-eval-steps=100 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-TwoRobotPickCube-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="UnitreeG1PlaceAppleInBowl-v1" --seed=${seed} \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 
--num-steps=32 --num-eval-steps=100 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-UnitreeG1PlaceAppleInBowl-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="UnitreeG1TransportBox-v1" --seed=${seed} \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=100_000_000 --num-steps=32 --num-eval-steps=100 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-UnitreeG1TransportBox-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_fast.py --env_id="OpenCabinetDrawer-v1" --seed=${seed} \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 --num-steps=16 --num-eval-steps=100 \ + --num_eval_envs=16 \ + --save-model --cudagraphs --exp-name="ppo-OpenCabinetDrawer-v1-state-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +### RGB Based PPO Baselines ### +for seed in ${seeds[@]} +do + python ppo_rgb.py --env_id="PushCube-v1" --seed=${seed} \ + --num_envs=256 --update_epochs=8 --num_minibatches=8 \ + --total_timesteps=50_000_000 \ + --num_eval_envs=16 \ + --exp-name="ppo-PushCube-v1-rgb-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_rgb.py --env_id="PickCube-v1" --seed=${seed} \ + --num_envs=1024 --num-steps=16 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 \ + --num_eval_envs=16 \ + --exp-name="ppo-PickCube-v1-rgb-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_rgb.py --env_id="PushT-v1" --seed=${seed} \ + --num_envs=1024 --num-steps=16 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 --num_eval_steps=100 --gamma=0.99 \ + --num_eval_envs=16 \ + 
--exp-name="ppo-PushT-v1-rgb-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done + +for seed in ${seeds[@]} +do + python ppo_rgb.py --env_id="AnymalC-Reach-v1" --seed=${seed} \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 --gamma=0.99 --gae_lambda=0.95 \ + --total_timesteps=50_000_000 --num-steps=16 --num-eval-steps=200 \ + --num_eval_envs=16 --eval-reconfiguration-freq=0 \ + --exp-name="ppo-AnymalC-Reach-v1-rgb-${seed}-walltime_efficient" \ + --wandb_entity="stonet2000" --track +done \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/examples.sh b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/examples.sh new file mode 100644 index 0000000000000000000000000000000000000000..b0958b55cc89a3bd0cbd95eaa640471c4cb65ea8 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/examples.sh @@ -0,0 +1,133 @@ +# This file is a giant collection of tested example commands for PPO +# Note these are tuned for wall time speed. 
For official baseline results which run +# more fair comparisons of RL algorithms see the baselines.sh file + +### State Based PPO ### +python ppo.py --env_id="PickCube-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=10_000_000 +python ppo.py --env_id="StackCube-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=25_000_000 +python ppo.py --env_id="PushT-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=25_000_000 --num-steps=100 --num_eval_steps=100 --gamma=0.99 +python ppo.py --env_id="PickSingleYCB-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=25_000_000 +python ppo.py --env_id="PegInsertionSide-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=250_000_000 --num-steps=100 --num-eval-steps=100 +python ppo.py --env_id="TwoRobotPickCube-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=20_000_000 --num-steps=100 --num-eval-steps=100 +python ppo.py --env_id="TwoRobotStackCube-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=40_000_000 --num-steps=100 --num-eval-steps=100 +python ppo.py --env_id="TriFingerRotateCubeLevel0-v1" \ + --num_envs=128 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 --num-steps=250 --num-eval-steps=250 +python ppo.py --env_id="TriFingerRotateCubeLevel1-v1" \ + --num_envs=128 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 --num-steps=250 --num-eval-steps=250 +python ppo.py --env_id="TriFingerRotateCubeLevel2-v1" \ + --num_envs=128 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 --num-steps=250 --num-eval-steps=250 +python ppo.py --env_id="TriFingerRotateCubeLevel3-v1" \ + --num_envs=128 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 --num-steps=250 --num-eval-steps=250 +python ppo.py 
--env_id="TriFingerRotateCubeLevel4-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=500_000_000 --num-steps=250 --num-eval-steps=250 +python ppo.py --env_id="PokeCube-v1" --update_epochs=8 --num_minibatches=32 \ + --num_envs=1024 --total_timesteps=5_000_000 --eval_freq=10 --num-steps=20 +python ppo.py --env_id="MS-CartpoleBalance-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=4_000_000 --num-steps=250 --num-eval-steps=1000 \ + --gamma=0.99 --gae_lambda=0.95 \ + --eval_freq=5 + +python ppo.py --env_id="MS-CartpoleSwingUp-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=10_000_000 --num-steps=250 --num-eval-steps=1000 \ + --gamma=0.99 --gae_lambda=0.95 \ + --eval_freq=5 +python ppo.py --env_id="MS-AntWalk-v1" --num_envs=2048 --eval_freq=10 \ + --update_epochs=8 --num_minibatches=32 --total_timesteps=20_000_000 \ + --num_eval_steps=1000 --num_steps=200 --gamma=0.97 --ent_coef=1e-3 +python ppo.py --env_id="MS-AntRun-v1" --num_envs=2048 --eval_freq=10 \ + --update_epochs=8 --num_minibatches=32 --total_timesteps=20_000_000 \ + --num_eval_steps=1000 --num_steps=200 --gamma=0.97 --ent_coef=1e-3 +python ppo.py --env_id="MS-HumanoidStand-v1" --num_envs=2048 --eval_freq=10 \ + --update_epochs=8 --num_minibatches=32 --total_timesteps=40_000_000 \ + --num_eval_steps=1000 --num_steps=200 --gamma=0.95 +python ppo.py --env_id="MS-HumanoidWalk-v1" --num_envs=2048 --eval_freq=10 \ + --update_epochs=8 --num_minibatches=32 --total_timesteps=80_000_000 \ + --num_eval_steps=1000 --num_steps=200 --gamma=0.97 --ent_coef=1e-3 +python ppo.py --env_id="MS-HumanoidRun-v1" --num_envs=2048 --eval_freq=10 \ + --update_epochs=8 --num_minibatches=32 --total_timesteps=60_000_000 \ + --num_eval_steps=1000 --num_steps=200 --gamma=0.97 --ent_coef=1e-3 +python ppo.py --env_id="UnitreeG1PlaceAppleInBowl-v1" \ + --num_envs=512 --update_epochs=8 --num_minibatches=32 \ + 
--total_timesteps=50_000_000 --num-steps=100 --num-eval-steps=100 +python ppo.py --env_id="AnymalC-Reach-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=25_000_000 --num-steps=200 --num-eval-steps=200 \ + --gamma=0.99 --gae_lambda=0.95 +python ppo.py --env_id="AnymalC-Spin-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 --num-steps=200 --num-eval-steps=200 \ + --gamma=0.99 --gae_lambda=0.95 +python ppo.py --env_id="UnitreeGo2-Reach-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=50_000_000 --num-steps=200 --num-eval-steps=200 \ + --gamma=0.99 --gae_lambda=0.95 +python ppo.py --env_id="UnitreeH1Stand-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=100_000_000 --num-steps=100 --num-eval-steps=1000 \ + --gamma=0.99 --gae_lambda=0.95 +python ppo.py --env_id="UnitreeG1Stand-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=100_000_000 --num-steps=100 --num-eval-steps=1000 \ + --gamma=0.99 --gae_lambda=0.95 + +python ppo.py --env_id="OpenCabinetDrawer-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=10_000_000 --num-steps=100 --num-eval-steps=100 + +python ppo.py --env_id="RollBall-v1" \ + --num_envs=1024 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=20_000_000 --num-steps=80 --num_eval_steps=80 --gamma=0.95 + +### RGB Based PPO ### +python ppo_rgb.py --env_id="PushCube-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=8 \ + --total_timesteps=1_000_000 --eval_freq=10 --num-steps=20 +python ppo_rgb.py --env_id="PickCube-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=8 \ + --total_timesteps=10_000_000 +python ppo_rgb.py --env_id="AnymalC-Reach-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=10_000_000 --num-steps=200 --num-eval-steps=200 \ + --gamma=0.99 --gae_lambda=0.95 
+python ppo_rgb.py --env_id="PickSingleYCB-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=8 \ + --total_timesteps=10_000_000 +python ppo_rgb.py --env_id="PushT-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=8 \ + --total_timesteps=25_000_000 --num-steps=100 --num_eval_steps=100 --gamma=0.99 +python ppo_rgb.py --env_id="MS-AntWalk-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=5_000_000 --eval_freq=15 --num_eval_steps=1000 \ + --num_steps=200 --gamma=0.97 --no-include-state --render_mode="rgb_array" \ + --ent_coef=1e-3 +python ppo_rgb.py --env_id="MS-AntRun-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=15_000_000 --eval_freq=15 --num_eval_steps=1000 \ + --num_steps=200 --gamma=0.97 --no-include-state --render_mode="rgb_array" \ + --ent_coef=1e-3 +python ppo_rgb.py --env_id="MS-HumanoidRun-v1" \ + --num_envs=256 --update_epochs=8 --num_minibatches=32 \ + --total_timesteps=80_000_000 --eval_freq=15 --num_eval_steps=1000 \ + --num_steps=200 --gamma=0.98 --no-include-state --render_mode="rgb_array" \ + --ent_coef=1e-3 diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/ppo.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/ppo.py new file mode 100644 index 0000000000000000000000000000000000000000..66802de0653061da550d6600d6346be374dbf974 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/ppo.py @@ -0,0 +1,470 @@ +from collections import defaultdict +import os +import random +import time +from dataclasses import dataclass +from typing import Optional + +import gymnasium as gym +import numpy as np +import torch +import torch.nn as nn +import torch.optim as optim +import tyro +from torch.distributions.normal import Normal +from torch.utils.tensorboard import SummaryWriter + +# ManiSkill specific imports +import mani_skill.envs +from mani_skill.utils import gym_utils +from 
mani_skill.utils.wrappers.flatten import FlattenActionSpaceWrapper +from mani_skill.utils.wrappers.record import RecordEpisode +from mani_skill.vector.wrappers.gymnasium import ManiSkillVectorEnv + +@dataclass +class Args: + exp_name: Optional[str] = None + """the name of this experiment""" + seed: int = 1 + """seed of the experiment""" + torch_deterministic: bool = True + """if toggled, `torch.backends.cudnn.deterministic=True`""" + cuda: bool = True + """if toggled, cuda will be enabled by default""" + track: bool = False + """if toggled, this experiment will be tracked with Weights and Biases""" + wandb_project_name: str = "ManiSkill" + """the wandb's project name""" + wandb_entity: Optional[str] = None + """the entity (team) of wandb's project""" + capture_video: bool = True + """whether to capture videos of the agent performances (check out `videos` folder)""" + save_model: bool = True + """whether to save model into the `runs/{run_name}` folder""" + evaluate: bool = False + """if toggled, only runs evaluation with the given model checkpoint and saves the evaluation trajectories""" + checkpoint: Optional[str] = None + """path to a pretrained checkpoint file to start evaluation/training from""" + + # Algorithm specific arguments + env_id: str = "PickCube-v1" + """the id of the environment""" + total_timesteps: int = 10000000 + """total timesteps of the experiments""" + learning_rate: float = 3e-4 + """the learning rate of the optimizer""" + num_envs: int = 512 + """the number of parallel environments""" + num_eval_envs: int = 8 + """the number of parallel evaluation environments""" + partial_reset: bool = True + """whether to let parallel environments reset upon termination instead of truncation""" + eval_partial_reset: bool = False + """whether to let parallel evaluation environments reset upon termination instead of truncation""" + num_steps: int = 50 + """the number of steps to run in each environment per policy rollout""" + num_eval_steps: int = 50 + """the 
number of steps to run in each evaluation environment during evaluation""" + reconfiguration_freq: Optional[int] = None + """how often to reconfigure the environment during training""" + eval_reconfiguration_freq: Optional[int] = 1 + """for benchmarking purposes we want to reconfigure the eval environment each reset to ensure objects are randomized in some tasks""" + control_mode: Optional[str] = "pd_joint_delta_pos" + """the control mode to use for the environment""" + anneal_lr: bool = False + """Toggle learning rate annealing for policy and value networks""" + gamma: float = 0.8 + """the discount factor gamma""" + gae_lambda: float = 0.9 + """the lambda for the general advantage estimation""" + num_minibatches: int = 32 + """the number of mini-batches""" + update_epochs: int = 4 + """the K epochs to update the policy""" + norm_adv: bool = True + """Toggles advantages normalization""" + clip_coef: float = 0.2 + """the surrogate clipping coefficient""" + clip_vloss: bool = False + """Toggles whether or not to use a clipped loss for the value function, as per the paper.""" + ent_coef: float = 0.0 + """coefficient of the entropy""" + vf_coef: float = 0.5 + """coefficient of the value function""" + max_grad_norm: float = 0.5 + """the maximum norm for the gradient clipping""" + target_kl: float = 0.1 + """the target KL divergence threshold""" + reward_scale: float = 1.0 + """Scale the reward by this factor""" + eval_freq: int = 25 + """evaluation frequency in terms of iterations""" + save_train_video_freq: Optional[int] = None + """frequency to save training videos in terms of iterations""" + finite_horizon_gae: bool = False + + + # to be filled in runtime + batch_size: int = 0 + """the batch size (computed in runtime)""" + minibatch_size: int = 0 + """the mini-batch size (computed in runtime)""" + num_iterations: int = 0 + """the number of iterations (computed in runtime)""" + +def layer_init(layer, std=np.sqrt(2), bias_const=0.0): + 
torch.nn.init.orthogonal_(layer.weight, std) + torch.nn.init.constant_(layer.bias, bias_const) + return layer + + +# Actor-critic for state-based PPO: two independent 3-hidden-layer (256-unit, tanh) MLPs — one for the policy mean, one for the scalar state value — plus a state-independent learnable log-std. +class Agent(nn.Module): + def __init__(self, envs): + super().__init__() + self.critic = nn.Sequential( + layer_init(nn.Linear(np.array(envs.single_observation_space.shape).prod(), 256)), + nn.Tanh(), + layer_init(nn.Linear(256, 256)), + nn.Tanh(), + layer_init(nn.Linear(256, 256)), + nn.Tanh(), + layer_init(nn.Linear(256, 1)), + ) + self.actor_mean = nn.Sequential( + layer_init(nn.Linear(np.array(envs.single_observation_space.shape).prod(), 256)), + nn.Tanh(), + layer_init(nn.Linear(256, 256)), + nn.Tanh(), + layer_init(nn.Linear(256, 256)), + nn.Tanh(), + # small orthogonal gain on the output layer keeps the initial policy mean near zero + layer_init(nn.Linear(256, np.prod(envs.single_action_space.shape)), std=0.01*np.sqrt(2)), + ) + # shared (state-independent) log standard deviation; init -0.5 => std = exp(-0.5) ≈ 0.61 + self.actor_logstd = nn.Parameter(torch.ones(1, np.prod(envs.single_action_space.shape)) * -0.5) + + def get_value(self, x): + # critic-only forward pass; returns values of shape (batch, 1) + return self.critic(x) + def get_action(self, x, deterministic=False): + # policy-only forward pass for rollouts/eval: returns the mean action when deterministic, else a sample from the diagonal Gaussian + action_mean = self.actor_mean(x) + if deterministic: + return action_mean + action_logstd = self.actor_logstd.expand_as(action_mean) + action_std = torch.exp(action_logstd) + probs = Normal(action_mean, action_std) + return probs.sample() + def get_action_and_value(self, x, action=None): + # joint forward pass for the PPO update: returns (action, summed log-prob, summed entropy, value); if `action` is provided it is re-evaluated under the current policy instead of sampled + action_mean = self.actor_mean(x) + action_logstd = self.actor_logstd.expand_as(action_mean) + action_std = torch.exp(action_logstd) + probs = Normal(action_mean, action_std) + if action is None: + action = probs.sample() + return action, probs.log_prob(action).sum(1), probs.entropy().sum(1), self.critic(x) + +# Thin logging facade: always writes scalars to TensorBoard and, when log_wandb is set, mirrors each scalar to Weights & Biases at the same step. +class Logger: + def __init__(self, log_wandb=False, tensorboard: SummaryWriter = None) -> None: + self.writer = tensorboard + self.log_wandb = log_wandb + def add_scalar(self, tag, scalar_value, step): + if self.log_wandb: + wandb.log({tag: scalar_value}, step=step) + self.writer.add_scalar(tag, scalar_value, step) + def close(self): + self.writer.close() + +if __name__ == "__main__": + args = tyro.cli(Args) + 
args.batch_size = int(args.num_envs * args.num_steps) + args.minibatch_size = int(args.batch_size // args.num_minibatches) + args.num_iterations = args.total_timesteps // args.batch_size + if args.exp_name is None: + args.exp_name = os.path.basename(__file__)[: -len(".py")] + run_name = f"{args.env_id}__{args.exp_name}__{args.seed}__{int(time.time())}" + else: + run_name = args.exp_name + + + # TRY NOT TO MODIFY: seeding + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + torch.backends.cudnn.deterministic = args.torch_deterministic + + device = torch.device("cuda" if torch.cuda.is_available() and args.cuda else "cpu") + + # env setup + env_kwargs = dict(obs_mode="state", render_mode="rgb_array", sim_backend="physx_cuda") + if args.control_mode is not None: + env_kwargs["control_mode"] = args.control_mode + envs = gym.make(args.env_id, num_envs=args.num_envs if not args.evaluate else 1, reconfiguration_freq=args.reconfiguration_freq, **env_kwargs) + eval_envs = gym.make(args.env_id, num_envs=args.num_eval_envs, reconfiguration_freq=args.eval_reconfiguration_freq, **env_kwargs) + if isinstance(envs.action_space, gym.spaces.Dict): + envs = FlattenActionSpaceWrapper(envs) + eval_envs = FlattenActionSpaceWrapper(eval_envs) + if args.capture_video: + eval_output_dir = f"runs/{run_name}/videos" + if args.evaluate: + eval_output_dir = f"{os.path.dirname(args.checkpoint)}/test_videos" + print(f"Saving eval videos to {eval_output_dir}") + if args.save_train_video_freq is not None: + save_video_trigger = lambda x : (x // args.num_steps) % args.save_train_video_freq == 0 + envs = RecordEpisode(envs, output_dir=f"runs/{run_name}/train_videos", save_trajectory=False, save_video_trigger=save_video_trigger, max_steps_per_video=args.num_steps, video_fps=30) + eval_envs = RecordEpisode(eval_envs, output_dir=eval_output_dir, save_trajectory=args.evaluate, trajectory_name="trajectory", max_steps_per_video=args.num_eval_steps, video_fps=30) + envs = 
ManiSkillVectorEnv(envs, args.num_envs, ignore_terminations=not args.partial_reset, record_metrics=True) + eval_envs = ManiSkillVectorEnv(eval_envs, args.num_eval_envs, ignore_terminations=not args.eval_partial_reset, record_metrics=True) + assert isinstance(envs.single_action_space, gym.spaces.Box), "only continuous action space is supported" + + max_episode_steps = gym_utils.find_max_episode_steps_value(envs._env) + logger = None + if not args.evaluate: + print("Running training") + if args.track: + import wandb + config = vars(args) + config["env_cfg"] = dict(**env_kwargs, num_envs=args.num_envs, env_id=args.env_id, reward_mode="normalized_dense", env_horizon=max_episode_steps, partial_reset=args.partial_reset) + config["eval_env_cfg"] = dict(**env_kwargs, num_envs=args.num_eval_envs, env_id=args.env_id, reward_mode="normalized_dense", env_horizon=max_episode_steps, partial_reset=False) + wandb.init( + project=args.wandb_project_name, + entity=args.wandb_entity, + sync_tensorboard=False, + config=config, + name=run_name, + save_code=True, + group="PPO", + tags=["ppo", "walltime_efficient"] + ) + writer = SummaryWriter(f"runs/{run_name}") + writer.add_text( + "hyperparameters", + "|param|value|\n|-|-|\n%s" % ("\n".join([f"|{key}|{value}|" for key, value in vars(args).items()])), + ) + logger = Logger(log_wandb=args.track, tensorboard=writer) + else: + print("Running evaluation") + + agent = Agent(envs).to(device) + optimizer = optim.Adam(agent.parameters(), lr=args.learning_rate, eps=1e-5) + + # ALGO Logic: Storage setup + obs = torch.zeros((args.num_steps, args.num_envs) + envs.single_observation_space.shape).to(device) + actions = torch.zeros((args.num_steps, args.num_envs) + envs.single_action_space.shape).to(device) + logprobs = torch.zeros((args.num_steps, args.num_envs)).to(device) + rewards = torch.zeros((args.num_steps, args.num_envs)).to(device) + dones = torch.zeros((args.num_steps, args.num_envs)).to(device) + values = torch.zeros((args.num_steps, 
args.num_envs)).to(device) + + # TRY NOT TO MODIFY: start the game + global_step = 0 + start_time = time.time() + next_obs, _ = envs.reset(seed=args.seed) + eval_obs, _ = eval_envs.reset(seed=args.seed) + next_done = torch.zeros(args.num_envs, device=device) + print(f"####") + print(f"args.num_iterations={args.num_iterations} args.num_envs={args.num_envs} args.num_eval_envs={args.num_eval_envs}") + print(f"args.minibatch_size={args.minibatch_size} args.batch_size={args.batch_size} args.update_epochs={args.update_epochs}") + print(f"####") + action_space_low, action_space_high = torch.from_numpy(envs.single_action_space.low).to(device), torch.from_numpy(envs.single_action_space.high).to(device) + def clip_action(action: torch.Tensor): + return torch.clamp(action.detach(), action_space_low, action_space_high) + + if args.checkpoint: + agent.load_state_dict(torch.load(args.checkpoint)) + + for iteration in range(1, args.num_iterations + 1): + print(f"Epoch: {iteration}, global_step={global_step}") + final_values = torch.zeros((args.num_steps, args.num_envs), device=device) + agent.eval() + if iteration % args.eval_freq == 1: + print("Evaluating") + eval_obs, _ = eval_envs.reset() + eval_metrics = defaultdict(list) + num_episodes = 0 + for _ in range(args.num_eval_steps): + with torch.no_grad(): + eval_obs, eval_rew, eval_terminations, eval_truncations, eval_infos = eval_envs.step(agent.get_action(eval_obs, deterministic=True)) + if "final_info" in eval_infos: + mask = eval_infos["_final_info"] + num_episodes += mask.sum() + for k, v in eval_infos["final_info"]["episode"].items(): + eval_metrics[k].append(v) + print(f"Evaluated {args.num_eval_steps * args.num_eval_envs} steps resulting in {num_episodes} episodes") + for k, v in eval_metrics.items(): + mean = torch.stack(v).float().mean() + if logger is not None: + logger.add_scalar(f"eval/{k}", mean, global_step) + print(f"eval_{k}_mean={mean}") + if args.evaluate: + break + if args.save_model and iteration % 
args.eval_freq == 1: + model_path = f"runs/{run_name}/ckpt_{iteration}.pt" + torch.save(agent.state_dict(), model_path) + print(f"model saved to {model_path}") + # Annealing the rate if instructed to do so. + if args.anneal_lr: + frac = 1.0 - (iteration - 1.0) / args.num_iterations + lrnow = frac * args.learning_rate + optimizer.param_groups[0]["lr"] = lrnow + + rollout_time = time.time() + for step in range(0, args.num_steps): + global_step += args.num_envs + obs[step] = next_obs + dones[step] = next_done + + # ALGO LOGIC: action logic + with torch.no_grad(): + action, logprob, _, value = agent.get_action_and_value(next_obs) + values[step] = value.flatten() + actions[step] = action + logprobs[step] = logprob + + # TRY NOT TO MODIFY: execute the game and log data. + next_obs, reward, terminations, truncations, infos = envs.step(clip_action(action)) + next_done = torch.logical_or(terminations, truncations).to(torch.float32) + rewards[step] = reward.view(-1) * args.reward_scale + + if "final_info" in infos: + final_info = infos["final_info"] + done_mask = infos["_final_info"] + for k, v in final_info["episode"].items(): + logger.add_scalar(f"train/{k}", v[done_mask].float().mean(), global_step) + with torch.no_grad(): + final_values[step, torch.arange(args.num_envs, device=device)[done_mask]] = agent.get_value(infos["final_observation"][done_mask]).view(-1) + rollout_time = time.time() - rollout_time + # bootstrap value according to termination and truncation + with torch.no_grad(): + next_value = agent.get_value(next_obs).reshape(1, -1) + advantages = torch.zeros_like(rewards).to(device) + lastgaelam = 0 + for t in reversed(range(args.num_steps)): + if t == args.num_steps - 1: + next_not_done = 1.0 - next_done + nextvalues = next_value + else: + next_not_done = 1.0 - dones[t + 1] + nextvalues = values[t + 1] + real_next_values = next_not_done * nextvalues + final_values[t] # t instead of t+1 + # next_not_done means nextvalues is computed from the correct next_obs + 
# if next_not_done is 1, final_values is always 0 + # if next_not_done is 0, then use final_values, which is computed according to bootstrap_at_done + if args.finite_horizon_gae: + """ + See GAE paper equation(16) line 1, we will compute the GAE based on this line only + 1 *( -V(s_t) + r_t + gamma * V(s_{t+1}) ) + lambda *( -V(s_t) + r_t + gamma * r_{t+1} + gamma^2 * V(s_{t+2}) ) + lambda^2 *( -V(s_t) + r_t + gamma * r_{t+1} + gamma^2 * r_{t+2} + ... ) + lambda^3 *( -V(s_t) + r_t + gamma * r_{t+1} + gamma^2 * r_{t+2} + gamma^3 * r_{t+3} + We then normalize it by the sum of the lambda^i (instead of 1-lambda) + """ + if t == args.num_steps - 1: # initialize + lam_coef_sum = 0. + reward_term_sum = 0. # the sum of the second term + value_term_sum = 0. # the sum of the third term + lam_coef_sum = lam_coef_sum * next_not_done + reward_term_sum = reward_term_sum * next_not_done + value_term_sum = value_term_sum * next_not_done + + lam_coef_sum = 1 + args.gae_lambda * lam_coef_sum + reward_term_sum = args.gae_lambda * args.gamma * reward_term_sum + lam_coef_sum * rewards[t] + value_term_sum = args.gae_lambda * args.gamma * value_term_sum + args.gamma * real_next_values + + advantages[t] = (reward_term_sum + value_term_sum) / lam_coef_sum - values[t] + else: + delta = rewards[t] + args.gamma * real_next_values - values[t] + advantages[t] = lastgaelam = delta + args.gamma * args.gae_lambda * next_not_done * lastgaelam # Here actually we should use next_not_terminated, but we don't have lastgamlam if terminated + returns = advantages + values + + # flatten the batch + b_obs = obs.reshape((-1,) + envs.single_observation_space.shape) + b_logprobs = logprobs.reshape(-1) + b_actions = actions.reshape((-1,) + envs.single_action_space.shape) + b_advantages = advantages.reshape(-1) + b_returns = returns.reshape(-1) + b_values = values.reshape(-1) + + # Optimizing the policy and value network + agent.train() + b_inds = np.arange(args.batch_size) + clipfracs = [] + update_time = 
time.time() + for epoch in range(args.update_epochs): + np.random.shuffle(b_inds) + for start in range(0, args.batch_size, args.minibatch_size): + end = start + args.minibatch_size + mb_inds = b_inds[start:end] + + _, newlogprob, entropy, newvalue = agent.get_action_and_value(b_obs[mb_inds], b_actions[mb_inds]) + logratio = newlogprob - b_logprobs[mb_inds] + ratio = logratio.exp() + + with torch.no_grad(): + # calculate approx_kl http://joschu.net/blog/kl-approx.html + old_approx_kl = (-logratio).mean() + approx_kl = ((ratio - 1) - logratio).mean() + clipfracs += [((ratio - 1.0).abs() > args.clip_coef).float().mean().item()] + + if args.target_kl is not None and approx_kl > args.target_kl: + break + + mb_advantages = b_advantages[mb_inds] + if args.norm_adv: + mb_advantages = (mb_advantages - mb_advantages.mean()) / (mb_advantages.std() + 1e-8) + + # Policy loss + pg_loss1 = -mb_advantages * ratio + pg_loss2 = -mb_advantages * torch.clamp(ratio, 1 - args.clip_coef, 1 + args.clip_coef) + pg_loss = torch.max(pg_loss1, pg_loss2).mean() + + # Value loss + newvalue = newvalue.view(-1) + if args.clip_vloss: + v_loss_unclipped = (newvalue - b_returns[mb_inds]) ** 2 + v_clipped = b_values[mb_inds] + torch.clamp( + newvalue - b_values[mb_inds], + -args.clip_coef, + args.clip_coef, + ) + v_loss_clipped = (v_clipped - b_returns[mb_inds]) ** 2 + v_loss_max = torch.max(v_loss_unclipped, v_loss_clipped) + v_loss = 0.5 * v_loss_max.mean() + else: + v_loss = 0.5 * ((newvalue - b_returns[mb_inds]) ** 2).mean() + + entropy_loss = entropy.mean() + loss = pg_loss - args.ent_coef * entropy_loss + v_loss * args.vf_coef + + optimizer.zero_grad() + loss.backward() + nn.utils.clip_grad_norm_(agent.parameters(), args.max_grad_norm) + optimizer.step() + + if args.target_kl is not None and approx_kl > args.target_kl: + break + + update_time = time.time() - update_time + + y_pred, y_true = b_values.cpu().numpy(), b_returns.cpu().numpy() + var_y = np.var(y_true) + explained_var = np.nan if 
var_y == 0 else 1 - np.var(y_true - y_pred) / var_y + + logger.add_scalar("charts/learning_rate", optimizer.param_groups[0]["lr"], global_step) + logger.add_scalar("losses/value_loss", v_loss.item(), global_step) + logger.add_scalar("losses/policy_loss", pg_loss.item(), global_step) + logger.add_scalar("losses/entropy", entropy_loss.item(), global_step) + logger.add_scalar("losses/old_approx_kl", old_approx_kl.item(), global_step) + logger.add_scalar("losses/approx_kl", approx_kl.item(), global_step) + logger.add_scalar("losses/clipfrac", np.mean(clipfracs), global_step) + logger.add_scalar("losses/explained_variance", explained_var, global_step) + print("SPS:", int(global_step / (time.time() - start_time))) + logger.add_scalar("charts/SPS", int(global_step / (time.time() - start_time)), global_step) + logger.add_scalar("time/step", global_step, global_step) + logger.add_scalar("time/update_time", update_time, global_step) + logger.add_scalar("time/rollout_time", rollout_time, global_step) + logger.add_scalar("time/rollout_fps", args.num_envs * args.num_steps / rollout_time, global_step) + if not args.evaluate: + if args.save_model: + model_path = f"runs/{run_name}/final_ckpt.pt" + torch.save(agent.state_dict(), model_path) + print(f"model saved to {model_path}") + logger.close() + envs.close() + eval_envs.close() diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/ppo_fast.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/ppo_fast.py new file mode 100644 index 0000000000000000000000000000000000000000..e8436903c46e2fb1a4ad735f71ebde3218a38476 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/ppo_fast.py @@ -0,0 +1,522 @@ +import os + +from mani_skill.utils import gym_utils +from mani_skill.utils.wrappers.flatten import FlattenActionSpaceWrapper +from mani_skill.utils.wrappers.record import RecordEpisode +from mani_skill.vector.wrappers.gymnasium import ManiSkillVectorEnv + 
+os.environ["TORCHDYNAMO_INLINE_INBUILT_NN_MODULES"] = "1" + +import math +import os +import random +import time +from collections import defaultdict +from dataclasses import dataclass +from typing import Optional, Tuple + +import gymnasium as gym +import numpy as np +import tensordict +import torch +import torch.nn as nn +import torch.optim as optim +import tqdm +import tyro +from torch.utils.tensorboard import SummaryWriter +import wandb +from tensordict import from_module +from tensordict.nn import CudaGraphModule +from torch.distributions.normal import Normal + + +@dataclass +class Args: + exp_name: Optional[str] = None + """the name of this experiment""" + seed: int = 1 + """seed of the experiment""" + torch_deterministic: bool = True + """if toggled, `torch.backends.cudnn.deterministic=True`""" + cuda: bool = True + """if toggled, cuda will be enabled by default""" + track: bool = False + """if toggled, this experiment will be tracked with Weights and Biases""" + wandb_project_name: str = "ManiSkill" + """the wandb's project name""" + wandb_entity: Optional[str] = None + """the entity (team) of wandb's project""" + wandb_group: str = "PPO" + """the group of the run for wandb""" + capture_video: bool = True + """whether to capture videos of the agent performances (check out `videos` folder)""" + save_trajectory: bool = False + """whether to save trajectory data into the `videos` folder""" + save_model: bool = False + """whether to save model into the `runs/{run_name}` folder""" + evaluate: bool = False + """if toggled, only runs evaluation with the given model checkpoint and saves the evaluation trajectories""" + checkpoint: Optional[str] = None + """path to a pretrained checkpoint file to start evaluation/training from""" + + # Environment specific arguments + env_id: str = "PickCube-v1" + """the id of the environment""" + env_vectorization: str = "gpu" + """the type of environment vectorization to use""" + num_envs: int = 512 + """the number of parallel
environments""" + num_eval_envs: int = 16 + """the number of parallel evaluation environments""" + partial_reset: bool = True + """whether to let parallel environments reset upon termination instead of truncation""" + eval_partial_reset: bool = False + """whether to let parallel evaluation environments reset upon termination instead of truncation""" + num_steps: int = 50 + """the number of steps to run in each environment per policy rollout""" + num_eval_steps: int = 50 + """the number of steps to run in each evaluation environment during evaluation""" + reconfiguration_freq: Optional[int] = None + """how often to reconfigure the environment during training""" + eval_reconfiguration_freq: Optional[int] = 1 + """for benchmarking purposes we want to reconfigure the eval environment each reset to ensure objects are randomized in some tasks""" + eval_freq: int = 25 + """evaluation frequency in terms of iterations""" + save_train_video_freq: Optional[int] = None + """frequency to save training videos in terms of iterations""" + control_mode: Optional[str] = "pd_joint_delta_pos" + """the control mode to use for the environment""" + + # Algorithm specific arguments + total_timesteps: int = 10000000 + """total timesteps of the experiments""" + learning_rate: float = 3e-4 + """the learning rate of the optimizer""" + anneal_lr: bool = False + """Toggle learning rate annealing for policy and value networks""" + gamma: float = 0.8 + """the discount factor gamma""" + gae_lambda: float = 0.9 + """the lambda for the general advantage estimation""" + num_minibatches: int = 32 + """the number of mini-batches""" + update_epochs: int = 4 + """the K epochs to update the policy""" + norm_adv: bool = True + """Toggles advantages normalization""" + clip_coef: float = 0.2 + """the surrogate clipping coefficient""" + clip_vloss: bool = False + """Toggles whether or not to use a clipped loss for the value function, as per the paper.""" + ent_coef: float = 0.0 + """coefficient of the 
entropy""" + vf_coef: float = 0.5 + """coefficient of the value function""" + max_grad_norm: float = 0.5 + """the maximum norm for the gradient clipping""" + target_kl: float = 0.1 + """the target KL divergence threshold""" + reward_scale: float = 1.0 + """Scale the reward by this factor""" + finite_horizon_gae: bool = False + + # to be filled in runtime + batch_size: int = 0 + """the batch size (computed in runtime)""" + minibatch_size: int = 0 + """the mini-batch size (computed in runtime)""" + num_iterations: int = 0 + """the number of iterations (computed in runtime)""" + + # Torch optimizations + compile: bool = False + """whether to use torch.compile.""" + cudagraphs: bool = False + """whether to use cudagraphs on top of compile.""" + +def layer_init(layer, std=np.sqrt(2), bias_const=0.0): + torch.nn.init.orthogonal_(layer.weight, std) + torch.nn.init.constant_(layer.bias, bias_const) + return layer + + +class Agent(nn.Module): + def __init__(self, n_obs, n_act, device=None): + super().__init__() + self.critic = nn.Sequential( + layer_init(nn.Linear(n_obs, 256, device=device)), + nn.Tanh(), + layer_init(nn.Linear(256, 256, device=device)), + nn.Tanh(), + layer_init(nn.Linear(256, 256, device=device)), + nn.Tanh(), + layer_init(nn.Linear(256, 1, device=device)), + ) + self.actor_mean = nn.Sequential( + layer_init(nn.Linear(n_obs, 256, device=device)), + nn.Tanh(), + layer_init(nn.Linear(256, 256, device=device)), + nn.Tanh(), + layer_init(nn.Linear(256, 256, device=device)), + nn.Tanh(), + layer_init(nn.Linear(256, n_act, device=device), std=0.01*np.sqrt(2)), + ) + self.actor_logstd = nn.Parameter(torch.zeros(1, n_act, device=device)) + + def get_value(self, x): + return self.critic(x) + + def get_action_and_value(self, obs, action=None): + action_mean = self.actor_mean(obs) + action_logstd = self.actor_logstd.expand_as(action_mean) + action_std = torch.exp(action_logstd) + probs = Normal(action_mean, action_std) + if action is None: + action = action_mean + 
action_std * torch.randn_like(action_mean) + return action, probs.log_prob(action).sum(1), probs.entropy().sum(1), self.critic(obs) + +# Thin logging facade: always writes scalars to TensorBoard and, when log_wandb is set, mirrors each scalar to Weights & Biases at the same step. +class Logger: + def __init__(self, log_wandb=False, tensorboard: SummaryWriter = None) -> None: + self.writer = tensorboard + self.log_wandb = log_wandb + def add_scalar(self, tag, scalar_value, step): + if self.log_wandb: + wandb.log({tag: scalar_value}, step=step) + self.writer.add_scalar(tag, scalar_value, step) + def close(self): + self.writer.close() + +# Compute GAE advantages and returns over the stored rollout, walking time in reverse. +# `final_values` holds, per step, the bootstrap value of the final observation for any env that finished at that step (zero elsewhere), so episodes that end mid-rollout are still bootstrapped. +# NOTE(review): depends on module-level `get_value` and `args` bound elsewhere in this file — confirm before reusing this function standalone. +def gae(next_obs, next_done, container, final_values): + # bootstrap value if not done + next_value = get_value(next_obs).reshape(-1) + lastgaelam = 0 + nextnonterminals = (~container["dones"]).float().unbind(0) + vals = container["vals"] + vals_unbind = vals.unbind(0) + rewards = container["rewards"].unbind(0) + + advantages = [] + nextnonterminal = (~next_done).float() + nextvalues = next_value + # iterate t = T-1 .. 0; lastgaelam is the running exponentially-weighted sum of TD errors + for t in range(args.num_steps - 1, -1, -1): + cur_val = vals_unbind[t] + # real_next_values = nextvalues * nextnonterminal + real_next_values = nextnonterminal * nextvalues + final_values[t] # t instead of t+1 + delta = rewards[t] + args.gamma * real_next_values - cur_val + advantages.append(delta + args.gamma * args.gae_lambda * nextnonterminal * lastgaelam) + lastgaelam = advantages[-1] + + nextnonterminal = nextnonterminals[t] + nextvalues = cur_val + + # advantages were appended in reverse time order, so flip before stacking + advantages = container["advantages"] = torch.stack(list(reversed(advantages))) + container["returns"] = advantages + vals + return container + + +# Collect args.num_steps transitions from the vectorized env into a stacked TensorDict, recording bootstrap values of final observations for episodes that end mid-rollout. +# NOTE(review): `policy`, `step_func`, `agent`, `logger`, `global_step` and `device` appear to be module-level names bound in the __main__ block — confirm before reuse. +def rollout(obs, done): + ts = [] + final_values = torch.zeros((args.num_steps, args.num_envs), device=device) + for step in range(args.num_steps): + # ALGO LOGIC: action logic + action, logprob, _, value = policy(obs=obs) + + # TRY NOT TO MODIFY: execute the game and log data. 
+ next_obs, reward, next_done, infos = step_func(action) + + if "final_info" in infos: + final_info = infos["final_info"] + done_mask = infos["_final_info"] + for k, v in final_info["episode"].items(): + logger.add_scalar(f"train/{k}", v[done_mask].float().mean(), global_step) + with torch.no_grad(): + final_values[step, torch.arange(args.num_envs, device=device)[done_mask]] = agent.get_value(infos["final_observation"][done_mask]).view(-1) + + ts.append( + tensordict.TensorDict._new_unsafe( + obs=obs, + # cleanrl ppo examples associate the done with the previous obs (not the done resulting from action) + dones=done, + vals=value.flatten(), + actions=action, + logprobs=logprob, + rewards=reward, + batch_size=(args.num_envs,), + ) + ) + # NOTE (stao): change here for gpu env + obs = next_obs = next_obs + done = next_done + # NOTE (stao): need to do .to(device) i think? otherwise container.device is None, not sure if this affects anything + container = torch.stack(ts, 0).to(device) + return next_obs, done, container, final_values + + +def update(obs, actions, logprobs, advantages, returns, vals): + optimizer.zero_grad() + _, newlogprob, entropy, newvalue = agent.get_action_and_value(obs, actions) + logratio = newlogprob - logprobs + ratio = logratio.exp() + + with torch.no_grad(): + # calculate approx_kl http://joschu.net/blog/kl-approx.html + old_approx_kl = (-logratio).mean() + approx_kl = ((ratio - 1) - logratio).mean() + clipfrac = ((ratio - 1.0).abs() > args.clip_coef).float().mean() + + if args.norm_adv: + advantages = (advantages - advantages.mean()) / (advantages.std() + 1e-8) + + # Policy loss + pg_loss1 = -advantages * ratio + pg_loss2 = -advantages * torch.clamp(ratio, 1 - args.clip_coef, 1 + args.clip_coef) + pg_loss = torch.max(pg_loss1, pg_loss2).mean() + + # Value loss + newvalue = newvalue.view(-1) + if args.clip_vloss: + v_loss_unclipped = (newvalue - returns) ** 2 + v_clipped = vals + torch.clamp( + newvalue - vals, + -args.clip_coef, + 
args.clip_coef, + ) + v_loss_clipped = (v_clipped - returns) ** 2 + v_loss_max = torch.max(v_loss_unclipped, v_loss_clipped) + v_loss = 0.5 * v_loss_max.mean() + else: + v_loss = 0.5 * ((newvalue - returns) ** 2).mean() + + entropy_loss = entropy.mean() + loss = pg_loss - args.ent_coef * entropy_loss + v_loss * args.vf_coef + + loss.backward() + gn = nn.utils.clip_grad_norm_(agent.parameters(), args.max_grad_norm) + optimizer.step() + + return approx_kl, v_loss.detach(), pg_loss.detach(), entropy_loss.detach(), old_approx_kl, clipfrac, gn + + +update = tensordict.nn.TensorDictModule( + update, + in_keys=["obs", "actions", "logprobs", "advantages", "returns", "vals"], + out_keys=["approx_kl", "v_loss", "pg_loss", "entropy_loss", "old_approx_kl", "clipfrac", "gn"], +) + +if __name__ == "__main__": + args = tyro.cli(Args) + # if not args.evaluate: exit() + + batch_size = int(args.num_envs * args.num_steps) + args.minibatch_size = batch_size // args.num_minibatches + args.batch_size = args.num_minibatches * args.minibatch_size + args.num_iterations = args.total_timesteps // args.batch_size + if args.exp_name is None: + args.exp_name = os.path.basename(__file__)[: -len(".py")] + run_name = f"{args.env_id}__{args.exp_name}__{args.seed}__{int(time.time())}" + else: + run_name = args.exp_name + + # TRY NOT TO MODIFY: seeding + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + torch.backends.cudnn.deterministic = args.torch_deterministic + + device = torch.device("cuda" if torch.cuda.is_available() and args.cuda else "cpu") + + ####### Environment setup ####### + env_kwargs = dict(obs_mode="state", render_mode="rgb_array", sim_backend="physx_cuda") + if args.control_mode is not None: + env_kwargs["control_mode"] = args.control_mode + envs = gym.make(args.env_id, num_envs=args.num_envs if not args.evaluate else 1, reconfiguration_freq=args.reconfiguration_freq, **env_kwargs) + eval_envs = gym.make(args.env_id, num_envs=args.num_eval_envs, 
reconfiguration_freq=args.eval_reconfiguration_freq, human_render_camera_configs=dict(shader_pack="default"), **env_kwargs) + if isinstance(envs.action_space, gym.spaces.Dict): + envs = FlattenActionSpaceWrapper(envs) + eval_envs = FlattenActionSpaceWrapper(eval_envs) + if args.capture_video or args.save_trajectory: + eval_output_dir = f"runs/{run_name}/videos" + if args.evaluate: + eval_output_dir = f"{os.path.dirname(args.checkpoint)}/test_videos" + print(f"Saving eval trajectories/videos to {eval_output_dir}") + if args.save_train_video_freq is not None: + save_video_trigger = lambda x : (x // args.num_steps) % args.save_train_video_freq == 0 + envs = RecordEpisode(envs, output_dir=f"runs/{run_name}/train_videos", save_trajectory=False, save_video_trigger=save_video_trigger, max_steps_per_video=args.num_steps, video_fps=30) + eval_envs = RecordEpisode(eval_envs, output_dir=eval_output_dir, save_trajectory=args.save_trajectory, save_video=args.capture_video, trajectory_name="trajectory", max_steps_per_video=args.num_eval_steps, video_fps=30) + envs = ManiSkillVectorEnv(envs, args.num_envs, ignore_terminations=not args.partial_reset, record_metrics=True) + eval_envs = ManiSkillVectorEnv(eval_envs, args.num_eval_envs, ignore_terminations=not args.eval_partial_reset, record_metrics=True) + assert isinstance(envs.single_action_space, gym.spaces.Box), "only continuous action space is supported" + + max_episode_steps = gym_utils.find_max_episode_steps_value(envs._env) + logger = None + if not args.evaluate: + print("Running training") + if args.track: + import wandb + config = vars(args) + config["env_cfg"] = dict(**env_kwargs, num_envs=args.num_envs, env_id=args.env_id, reward_mode="normalized_dense", env_horizon=max_episode_steps, partial_reset=args.partial_reset) + config["eval_env_cfg"] = dict(**env_kwargs, num_envs=args.num_eval_envs, env_id=args.env_id, reward_mode="normalized_dense", env_horizon=max_episode_steps, partial_reset=False) + wandb.init( + 
project=args.wandb_project_name, + entity=args.wandb_entity, + sync_tensorboard=False, + config=config, + name=run_name, + save_code=True, + group=args.wandb_group, + tags=["ppo", "walltime_efficient", f"GPU:{torch.cuda.get_device_name()}"] + ) + writer = SummaryWriter(f"runs/{run_name}") + writer.add_text( + "hyperparameters", + "|param|value|\n|-|-|\n%s" % ("\n".join([f"|{key}|{value}|" for key, value in vars(args).items()])), + ) + logger = Logger(log_wandb=args.track, tensorboard=writer) + else: + print("Running evaluation") + n_act = math.prod(envs.single_action_space.shape) + n_obs = math.prod(envs.single_observation_space.shape) + assert isinstance(envs.single_action_space, gym.spaces.Box), "only continuous action space is supported" + + # Register step as a special op not to graph break + # @torch.library.custom_op("mylib::step", mutates_args=()) + def step_func(action: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + # NOTE (stao): change here for gpu env + next_obs, reward, terminations, truncations, info = envs.step(action) + next_done = torch.logical_or(terminations, truncations) + return next_obs, reward, next_done, info + + ####### Agent ####### + agent = Agent(n_obs, n_act, device=device) + if args.checkpoint: + agent.load_state_dict(torch.load(args.checkpoint)) + # Make a version of agent with detached params + agent_inference = Agent(n_obs, n_act, device=device) + agent_inference_p = from_module(agent).data + agent_inference_p.to_module(agent_inference) + + ####### Optimizer ####### + optimizer = optim.Adam( + agent.parameters(), + lr=torch.tensor(args.learning_rate, device=device), + eps=1e-5, + capturable=args.cudagraphs and not args.compile, + ) + + ####### Executables ####### + # Define networks: wrapping the policy in a TensorDictModule allows us to use CudaGraphModule + policy = agent_inference.get_action_and_value + get_value = agent_inference.get_value + + # Compile policy + if args.compile: + policy = 
torch.compile(policy) + gae = torch.compile(gae, fullgraph=True) + update = torch.compile(update) + + if args.cudagraphs: + policy = CudaGraphModule(policy) + gae = CudaGraphModule(gae) + update = CudaGraphModule(update) + + global_step = 0 + start_time = time.time() + container_local = None + next_obs = envs.reset()[0] + next_done = torch.zeros(args.num_envs, device=device, dtype=torch.bool) + pbar = tqdm.tqdm(range(1, args.num_iterations + 1)) + + cumulative_times = defaultdict(float) + + for iteration in pbar: + agent.eval() + if iteration % args.eval_freq == 1: + stime = time.perf_counter() + eval_obs, _ = eval_envs.reset() + eval_metrics = defaultdict(list) + num_episodes = 0 + for _ in range(args.num_eval_steps): + with torch.no_grad(): + eval_obs, eval_rew, eval_terminations, eval_truncations, eval_infos = eval_envs.step(agent.actor_mean(eval_obs)) + if "final_info" in eval_infos: + mask = eval_infos["_final_info"] + num_episodes += mask.sum() + for k, v in eval_infos["final_info"]["episode"].items(): + eval_metrics[k].append(v) + eval_metrics_mean = {} + for k, v in eval_metrics.items(): + mean = torch.stack(v).float().mean() + eval_metrics_mean[k] = mean + if logger is not None: + logger.add_scalar(f"eval/{k}", mean, global_step) + pbar.set_description( + f"success_once: {eval_metrics_mean['success_once']:.2f}, " + f"return: {eval_metrics_mean['return']:.2f}" + ) + if logger is not None: + eval_time = time.perf_counter() - stime + cumulative_times["eval_time"] += eval_time + logger.add_scalar("time/eval_time", eval_time, global_step) + if args.evaluate: + break + if args.save_model and iteration % args.eval_freq == 1: + model_path = f"runs/{run_name}/ckpt_{iteration}.pt" + torch.save(agent.state_dict(), model_path) + print(f"model saved to {model_path}") + # Annealing the rate if instructed to do so. 
# One PPO training iteration: optional LR anneal, rollout collection,
# GAE computation, minibatched policy/value updates with early KL stop,
# then scalar logging. Runs inside `for iteration in pbar:`.
if args.anneal_lr:
    # Linear decay from the initial LR down to 0 over all iterations.
    frac = 1.0 - (iteration - 1.0) / args.num_iterations
    lrnow = frac * args.learning_rate
    # `lr` is stored as a tensor (see optimizer setup), so update it
    # in place with copy_ — required for capturable/cudagraph optimizers.
    optimizer.param_groups[0]["lr"].copy_(lrnow)

# Tell the compiler a new cudagraph-safe step begins (no-op without cudagraphs).
torch.compiler.cudagraph_mark_step_begin()
rollout_time = time.perf_counter()
next_obs, next_done, container, final_values = rollout(next_obs, next_done)
rollout_time = time.perf_counter() - rollout_time
cumulative_times["rollout_time"] += rollout_time
# container has batch shape (num_steps, num_envs), so numel() counts env steps.
global_step += container.numel()

update_time = time.perf_counter()
container = gae(next_obs, next_done, container, final_values)
# Flatten (num_steps, num_envs) -> (num_steps * num_envs,) for minibatching.
container_flat = container.view(-1)

# Optimizing the policy and value network
clipfracs = []
for epoch in range(args.update_epochs):
    # Fresh random permutation each epoch, chunked into minibatches.
    # NOTE(review): if num_steps*num_envs is not divisible by minibatch_size,
    # split() yields a smaller trailing chunk — presumably intentional.
    b_inds = torch.randperm(container_flat.shape[0], device=device).split(args.minibatch_size)
    for b in b_inds:
        container_local = container_flat[b]

        out = update(container_local, tensordict_out=tensordict.TensorDict())
        clipfracs.append(out["clipfrac"])
        if args.target_kl is not None and out["approx_kl"] > args.target_kl:
            break
    else:
        # Inner loop finished without tripping the KL threshold: next epoch.
        continue
    # Inner loop broke on target_kl: abort the remaining epochs too.
    break
update_time = time.perf_counter() - update_time
cumulative_times["update_time"] += update_time

# `out` holds stats from the last minibatch update of this iteration.
logger.add_scalar("charts/learning_rate", optimizer.param_groups[0]["lr"], global_step)
logger.add_scalar("losses/value_loss", out["v_loss"].item(), global_step)
logger.add_scalar("losses/policy_loss", out["pg_loss"].item(), global_step)
logger.add_scalar("losses/entropy", out["entropy_loss"].item(), global_step)
logger.add_scalar("losses/old_approx_kl", out["old_approx_kl"].item(), global_step)
logger.add_scalar("losses/approx_kl", out["approx_kl"].item(), global_step)
logger.add_scalar("losses/clipfrac", torch.stack(clipfracs).mean().cpu().item(), global_step)
logger.add_scalar("charts/SPS", int(global_step / (time.time() - start_time)), global_step)
logger.add_scalar("time/step", global_step, global_step)
logger.add_scalar("time/update_time", update_time, global_step)
logger.add_scalar("time/rollout_time", rollout_time, global_step) + logger.add_scalar("time/rollout_fps", args.num_envs * args.num_steps / rollout_time, global_step) + for k, v in cumulative_times.items(): + logger.add_scalar(f"time/total_{k}", v, global_step) + logger.add_scalar("time/total_rollout+update_time", cumulative_times["rollout_time"] + cumulative_times["update_time"], global_step) + if not args.evaluate: + if args.save_model: + model_path = f"runs/{run_name}/final_ckpt.pt" + torch.save(agent.state_dict(), model_path) + print(f"model saved to {model_path}") + logger.close() + envs.close() + eval_envs.close() diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/ppo_rgb.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/ppo_rgb.py new file mode 100644 index 0000000000000000000000000000000000000000..83a1ab7fd2c5910579dba5472eda5e736bc5d232 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/ppo/ppo_rgb.py @@ -0,0 +1,594 @@ +# docs and experiment results can be found at https://docs.cleanrl.dev/rl-algorithms/ppo/#ppo_continuous_actionpy +from collections import defaultdict +import os +import random +import time +from dataclasses import dataclass +from typing import Optional + +import gymnasium as gym +import numpy as np +import torch +import torch.nn as nn +import torch.optim as optim +import tyro +from torch.distributions.normal import Normal +from torch.utils.tensorboard import SummaryWriter + +# ManiSkill specific imports +import mani_skill.envs +from mani_skill.utils import gym_utils +from mani_skill.utils.wrappers.flatten import FlattenActionSpaceWrapper, FlattenRGBDObservationWrapper +from mani_skill.utils.wrappers.record import RecordEpisode +from mani_skill.vector.wrappers.gymnasium import ManiSkillVectorEnv + +@dataclass +class Args: + exp_name: Optional[str] = None + """the name of this experiment""" + seed: int = 1 + """seed of the experiment""" + 
torch_deterministic: bool = True + """if toggled, `torch.backends.cudnn.deterministic=False`""" + cuda: bool = True + """if toggled, cuda will be enabled by default""" + track: bool = False + """if toggled, this experiment will be tracked with Weights and Biases""" + wandb_project_name: str = "ManiSkill" + """the wandb's project name""" + wandb_entity: Optional[str] = None + """the entity (team) of wandb's project""" + wandb_group: str = "PPO" + """the group of the run for wandb""" + capture_video: bool = True + """whether to capture videos of the agent performances (check out `videos` folder)""" + save_model: bool = True + """whether to save model into the `runs/{run_name}` folder""" + evaluate: bool = False + """if toggled, only runs evaluation with the given model checkpoint and saves the evaluation trajectories""" + checkpoint: Optional[str] = None + """path to a pretrained checkpoint file to start evaluation/training from""" + render_mode: str = "all" + """the environment rendering mode""" + + # Algorithm specific arguments + env_id: str = "PickCube-v1" + """the id of the environment""" + include_state: bool = True + """whether to include state information in observations""" + total_timesteps: int = 10000000 + """total timesteps of the experiments""" + learning_rate: float = 3e-4 + """the learning rate of the optimizer""" + num_envs: int = 512 + """the number of parallel environments""" + num_eval_envs: int = 8 + """the number of parallel evaluation environments""" + partial_reset: bool = True + """whether to let parallel environments reset upon termination instead of truncation""" + eval_partial_reset: bool = False + """whether to let parallel evaluation environments reset upon termination instead of truncation""" + num_steps: int = 50 + """the number of steps to run in each environment per policy rollout""" + num_eval_steps: int = 50 + """the number of steps to run in each evaluation environment during evaluation""" + reconfiguration_freq: Optional[int] = 
None + """how often to reconfigure the environment during training""" + eval_reconfiguration_freq: Optional[int] = 1 + """for benchmarking purposes we want to reconfigure the eval environment each reset to ensure objects are randomized in some tasks""" + control_mode: Optional[str] = "pd_joint_delta_pos" + """the control mode to use for the environment""" + anneal_lr: bool = False + """Toggle learning rate annealing for policy and value networks""" + gamma: float = 0.8 + """the discount factor gamma""" + gae_lambda: float = 0.9 + """the lambda for the general advantage estimation""" + num_minibatches: int = 32 + """the number of mini-batches""" + update_epochs: int = 4 + """the K epochs to update the policy""" + norm_adv: bool = True + """Toggles advantages normalization""" + clip_coef: float = 0.2 + """the surrogate clipping coefficient""" + clip_vloss: bool = False + """Toggles whether or not to use a clipped loss for the value function, as per the paper.""" + ent_coef: float = 0.0 + """coefficient of the entropy""" + vf_coef: float = 0.5 + """coefficient of the value function""" + max_grad_norm: float = 0.5 + """the maximum norm for the gradient clipping""" + target_kl: float = 0.2 + """the target KL divergence threshold""" + reward_scale: float = 1.0 + """Scale the reward by this factor""" + eval_freq: int = 25 + """evaluation frequency in terms of iterations""" + save_train_video_freq: Optional[int] = None + """frequency to save training videos in terms of iterations""" + finite_horizon_gae: bool = False + + # to be filled in runtime + batch_size: int = 0 + """the batch size (computed in runtime)""" + minibatch_size: int = 0 + """the mini-batch size (computed in runtime)""" + num_iterations: int = 0 + """the number of iterations (computed in runtime)""" + +def layer_init(layer, std=np.sqrt(2), bias_const=0.0): + torch.nn.init.orthogonal_(layer.weight, std) + torch.nn.init.constant_(layer.bias, bias_const) + return layer + +class DictArray(object): + def 
__init__(self, buffer_shape, element_space, data_dict=None, device=None): + self.buffer_shape = buffer_shape + if data_dict: + self.data = data_dict + else: + assert isinstance(element_space, gym.spaces.dict.Dict) + self.data = {} + for k, v in element_space.items(): + if isinstance(v, gym.spaces.dict.Dict): + self.data[k] = DictArray(buffer_shape, v, device=device) + else: + dtype = (torch.float32 if v.dtype in (np.float32, np.float64) else + torch.uint8 if v.dtype == np.uint8 else + torch.int16 if v.dtype == np.int16 else + torch.int32 if v.dtype == np.int32 else + v.dtype) + self.data[k] = torch.zeros(buffer_shape + v.shape, dtype=dtype, device=device) + + def keys(self): + return self.data.keys() + + def __getitem__(self, index): + if isinstance(index, str): + return self.data[index] + return { + k: v[index] for k, v in self.data.items() + } + + def __setitem__(self, index, value): + if isinstance(index, str): + self.data[index] = value + for k, v in value.items(): + self.data[k][index] = v + + @property + def shape(self): + return self.buffer_shape + + def reshape(self, shape): + t = len(self.buffer_shape) + new_dict = {} + for k,v in self.data.items(): + if isinstance(v, DictArray): + new_dict[k] = v.reshape(shape) + else: + new_dict[k] = v.reshape(shape + v.shape[t:]) + new_buffer_shape = next(iter(new_dict.values())).shape[:len(shape)] + return DictArray(new_buffer_shape, None, data_dict=new_dict) + +class NatureCNN(nn.Module): + def __init__(self, sample_obs): + super().__init__() + + extractors = {} + + self.out_features = 0 + feature_size = 256 + in_channels=sample_obs["rgb"].shape[-1] + image_size=(sample_obs["rgb"].shape[1], sample_obs["rgb"].shape[2]) + + + # here we use a NatureCNN architecture to process images, but any architecture is permissble here + cnn = nn.Sequential( + nn.Conv2d( + in_channels=in_channels, + out_channels=32, + kernel_size=8, + stride=4, + padding=0, + ), + nn.ReLU(), + nn.Conv2d( + in_channels=32, out_channels=64, 
kernel_size=4, stride=2, padding=0 + ), + nn.ReLU(), + nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=0 + ), + nn.ReLU(), + nn.Flatten(), + ) + + # to easily figure out the dimensions after flattening, we pass a test tensor + with torch.no_grad(): + n_flatten = cnn(sample_obs["rgb"].float().permute(0,3,1,2).cpu()).shape[1] + fc = nn.Sequential(nn.Linear(n_flatten, feature_size), nn.ReLU()) + extractors["rgb"] = nn.Sequential(cnn, fc) + self.out_features += feature_size + + if "state" in sample_obs: + # for state data we simply pass it through a single linear layer + state_size = sample_obs["state"].shape[-1] + extractors["state"] = nn.Linear(state_size, 256) + self.out_features += 256 + + self.extractors = nn.ModuleDict(extractors) + + def forward(self, observations) -> torch.Tensor: + encoded_tensor_list = [] + # self.extractors contain nn.Modules that do all the processing. + for key, extractor in self.extractors.items(): + obs = observations[key] + if key == "rgb": + obs = obs.float().permute(0,3,1,2) + obs = obs / 255 + encoded_tensor_list.append(extractor(obs)) + return torch.cat(encoded_tensor_list, dim=1) + +class Agent(nn.Module): + def __init__(self, envs, sample_obs): + super().__init__() + self.feature_net = NatureCNN(sample_obs=sample_obs) + # latent_size = np.array(envs.unwrapped.single_observation_space.shape).prod() + latent_size = self.feature_net.out_features + self.critic = nn.Sequential( + layer_init(nn.Linear(latent_size, 512)), + nn.ReLU(inplace=True), + layer_init(nn.Linear(512, 1)), + ) + self.actor_mean = nn.Sequential( + layer_init(nn.Linear(latent_size, 512)), + nn.ReLU(inplace=True), + layer_init(nn.Linear(512, np.prod(envs.unwrapped.single_action_space.shape)), std=0.01*np.sqrt(2)), + ) + self.actor_logstd = nn.Parameter(torch.ones(1, np.prod(envs.unwrapped.single_action_space.shape)) * -0.5) + def get_features(self, x): + return self.feature_net(x) + def get_value(self, x): + x = self.feature_net(x) + 
return self.critic(x) + def get_action(self, x, deterministic=False): + x = self.feature_net(x) + action_mean = self.actor_mean(x) + if deterministic: + return action_mean + action_logstd = self.actor_logstd.expand_as(action_mean) + action_std = torch.exp(action_logstd) + probs = Normal(action_mean, action_std) + return probs.sample() + def get_action_and_value(self, x, action=None): + x = self.feature_net(x) + action_mean = self.actor_mean(x) + action_logstd = self.actor_logstd.expand_as(action_mean) + action_std = torch.exp(action_logstd) + probs = Normal(action_mean, action_std) + if action is None: + action = probs.sample() + return action, probs.log_prob(action).sum(1), probs.entropy().sum(1), self.critic(x) + +class Logger: + def __init__(self, log_wandb=False, tensorboard: SummaryWriter = None) -> None: + self.writer = tensorboard + self.log_wandb = log_wandb + def add_scalar(self, tag, scalar_value, step): + if self.log_wandb: + wandb.log({tag: scalar_value}, step=step) + self.writer.add_scalar(tag, scalar_value, step) + def close(self): + self.writer.close() + +if __name__ == "__main__": + args = tyro.cli(Args) + args.batch_size = int(args.num_envs * args.num_steps) + args.minibatch_size = int(args.batch_size // args.num_minibatches) + args.num_iterations = args.total_timesteps // args.batch_size + if args.exp_name is None: + args.exp_name = os.path.basename(__file__)[: -len(".py")] + run_name = f"{args.env_id}__{args.exp_name}__{args.seed}__{int(time.time())}" + else: + run_name = args.exp_name + + # TRY NOT TO MODIFY: seeding + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + torch.backends.cudnn.deterministic = args.torch_deterministic + + device = torch.device("cuda" if torch.cuda.is_available() and args.cuda else "cpu") + + # env setup + env_kwargs = dict(obs_mode="rgb", render_mode=args.render_mode, sim_backend="physx_cuda") + if args.control_mode is not None: + env_kwargs["control_mode"] = args.control_mode + 
eval_envs = gym.make(args.env_id, num_envs=args.num_eval_envs, reconfiguration_freq=args.eval_reconfiguration_freq, **env_kwargs) + envs = gym.make(args.env_id, num_envs=args.num_envs if not args.evaluate else 1, reconfiguration_freq=args.reconfiguration_freq, **env_kwargs) + + # rgbd obs mode returns a dict of data, we flatten it so there is just a rgbd key and state key + envs = FlattenRGBDObservationWrapper(envs, rgb=True, depth=False, state=args.include_state) + eval_envs = FlattenRGBDObservationWrapper(eval_envs, rgb=True, depth=False, state=args.include_state) + + if isinstance(envs.action_space, gym.spaces.Dict): + envs = FlattenActionSpaceWrapper(envs) + eval_envs = FlattenActionSpaceWrapper(eval_envs) + if args.capture_video: + eval_output_dir = f"runs/{run_name}/videos" + if args.evaluate: + eval_output_dir = f"{os.path.dirname(args.checkpoint)}/test_videos" + print(f"Saving eval videos to {eval_output_dir}") + if args.save_train_video_freq is not None: + save_video_trigger = lambda x : (x // args.num_steps) % args.save_train_video_freq == 0 + envs = RecordEpisode(envs, output_dir=f"runs/{run_name}/train_videos", save_trajectory=False, save_video_trigger=save_video_trigger, max_steps_per_video=args.num_steps, video_fps=30) + eval_envs = RecordEpisode(eval_envs, output_dir=eval_output_dir, save_trajectory=args.evaluate, trajectory_name="trajectory", max_steps_per_video=args.num_eval_steps, video_fps=30) + envs = ManiSkillVectorEnv(envs, args.num_envs, ignore_terminations=not args.partial_reset, record_metrics=True) + eval_envs = ManiSkillVectorEnv(eval_envs, args.num_eval_envs, ignore_terminations=not args.eval_partial_reset, record_metrics=True) + assert isinstance(envs.single_action_space, gym.spaces.Box), "only continuous action space is supported" + + max_episode_steps = gym_utils.find_max_episode_steps_value(envs._env) + logger = None + if not args.evaluate: + print("Running training") + if args.track: + import wandb + config = vars(args) + 
config["env_cfg"] = dict(**env_kwargs, num_envs=args.num_envs, env_id=args.env_id, reward_mode="normalized_dense", env_horizon=max_episode_steps, partial_reset=args.partial_reset) + config["eval_env_cfg"] = dict(**env_kwargs, num_envs=args.num_eval_envs, env_id=args.env_id, reward_mode="normalized_dense", env_horizon=max_episode_steps, partial_reset=args.partial_reset) + wandb.init( + project=args.wandb_project_name, + entity=args.wandb_entity, + sync_tensorboard=False, + config=config, + name=run_name, + save_code=True, + group=args.wandb_group, + tags=["ppo", "walltime_efficient"] + ) + writer = SummaryWriter(f"runs/{run_name}") + writer.add_text( + "hyperparameters", + "|param|value|\n|-|-|\n%s" % ("\n".join([f"|{key}|{value}|" for key, value in vars(args).items()])), + ) + logger = Logger(log_wandb=args.track, tensorboard=writer) + else: + print("Running evaluation") + + # ALGO Logic: Storage setup + obs = DictArray((args.num_steps, args.num_envs), envs.single_observation_space, device=device) + actions = torch.zeros((args.num_steps, args.num_envs) + envs.single_action_space.shape).to(device) + logprobs = torch.zeros((args.num_steps, args.num_envs)).to(device) + rewards = torch.zeros((args.num_steps, args.num_envs)).to(device) + dones = torch.zeros((args.num_steps, args.num_envs)).to(device) + values = torch.zeros((args.num_steps, args.num_envs)).to(device) + + # TRY NOT TO MODIFY: start the game + global_step = 0 + start_time = time.time() + next_obs, _ = envs.reset(seed=args.seed) + eval_obs, _ = eval_envs.reset(seed=args.seed) + next_done = torch.zeros(args.num_envs, device=device) + print(f"####") + print(f"args.num_iterations={args.num_iterations} args.num_envs={args.num_envs} args.num_eval_envs={args.num_eval_envs}") + print(f"args.minibatch_size={args.minibatch_size} args.batch_size={args.batch_size} args.update_epochs={args.update_epochs}") + print(f"####") + agent = Agent(envs, sample_obs=next_obs).to(device) + optimizer = 
optim.Adam(agent.parameters(), lr=args.learning_rate, eps=1e-5) + + if args.checkpoint: + agent.load_state_dict(torch.load(args.checkpoint)) + + cumulative_times = defaultdict(float) + + for iteration in range(1, args.num_iterations + 1): + print(f"Epoch: {iteration}, global_step={global_step}") + final_values = torch.zeros((args.num_steps, args.num_envs), device=device) + agent.eval() + if iteration % args.eval_freq == 1: + print("Evaluating") + stime = time.perf_counter() + eval_obs, _ = eval_envs.reset() + eval_metrics = defaultdict(list) + num_episodes = 0 + for _ in range(args.num_eval_steps): + with torch.no_grad(): + eval_obs, eval_rew, eval_terminations, eval_truncations, eval_infos = eval_envs.step(agent.get_action(eval_obs, deterministic=True)) + if "final_info" in eval_infos: + mask = eval_infos["_final_info"] + num_episodes += mask.sum() + for k, v in eval_infos["final_info"]["episode"].items(): + eval_metrics[k].append(v) + print(f"Evaluated {args.num_eval_steps * args.num_eval_envs} steps resulting in {num_episodes} episodes") + for k, v in eval_metrics.items(): + mean = torch.stack(v).float().mean() + if logger is not None: + logger.add_scalar(f"eval/{k}", mean, global_step) + print(f"eval_{k}_mean={mean}") + if logger is not None: + eval_time = time.perf_counter() - stime + cumulative_times["eval_time"] += eval_time + logger.add_scalar("time/eval_time", eval_time, global_step) + if args.evaluate: + break + if args.save_model and iteration % args.eval_freq == 1: + model_path = f"runs/{run_name}/ckpt_{iteration}.pt" + torch.save(agent.state_dict(), model_path) + print(f"model saved to {model_path}") + # Annealing the rate if instructed to do so. 
+ if args.anneal_lr: + frac = 1.0 - (iteration - 1.0) / args.num_iterations + lrnow = frac * args.learning_rate + optimizer.param_groups[0]["lr"] = lrnow + rollout_time = time.perf_counter() + for step in range(0, args.num_steps): + global_step += args.num_envs + obs[step] = next_obs + dones[step] = next_done + + # ALGO LOGIC: action logic + with torch.no_grad(): + action, logprob, _, value = agent.get_action_and_value(next_obs) + values[step] = value.flatten() + actions[step] = action + logprobs[step] = logprob + + # TRY NOT TO MODIFY: execute the game and log data. + next_obs, reward, terminations, truncations, infos = envs.step(action) + next_done = torch.logical_or(terminations, truncations).to(torch.float32) + rewards[step] = reward.view(-1) * args.reward_scale + + if "final_info" in infos: + final_info = infos["final_info"] + done_mask = infos["_final_info"] + for k, v in final_info["episode"].items(): + logger.add_scalar(f"train/{k}", v[done_mask].float().mean(), global_step) + + for k in infos["final_observation"]: + infos["final_observation"][k] = infos["final_observation"][k][done_mask] + with torch.no_grad(): + final_values[step, torch.arange(args.num_envs, device=device)[done_mask]] = agent.get_value(infos["final_observation"]).view(-1) + rollout_time = time.perf_counter() - rollout_time + cumulative_times["rollout_time"] += rollout_time + # bootstrap value according to termination and truncation + with torch.no_grad(): + next_value = agent.get_value(next_obs).reshape(1, -1) + advantages = torch.zeros_like(rewards).to(device) + lastgaelam = 0 + for t in reversed(range(args.num_steps)): + if t == args.num_steps - 1: + next_not_done = 1.0 - next_done + nextvalues = next_value + else: + next_not_done = 1.0 - dones[t + 1] + nextvalues = values[t + 1] + real_next_values = next_not_done * nextvalues + final_values[t] # t instead of t+1 + # next_not_done means nextvalues is computed from the correct next_obs + # if next_not_done is 1, final_values is always 0 
+ # if next_not_done is 0, then use final_values, which is computed according to bootstrap_at_done + if args.finite_horizon_gae: + """ + See GAE paper equation(16) line 1, we will compute the GAE based on this line only + 1 *( -V(s_t) + r_t + gamma * V(s_{t+1}) ) + lambda *( -V(s_t) + r_t + gamma * r_{t+1} + gamma^2 * V(s_{t+2}) ) + lambda^2 *( -V(s_t) + r_t + gamma * r_{t+1} + gamma^2 * r_{t+2} + ... ) + lambda^3 *( -V(s_t) + r_t + gamma * r_{t+1} + gamma^2 * r_{t+2} + gamma^3 * r_{t+3} + We then normalize it by the sum of the lambda^i (instead of 1-lambda) + """ + if t == args.num_steps - 1: # initialize + lam_coef_sum = 0. + reward_term_sum = 0. # the sum of the second term + value_term_sum = 0. # the sum of the third term + lam_coef_sum = lam_coef_sum * next_not_done + reward_term_sum = reward_term_sum * next_not_done + value_term_sum = value_term_sum * next_not_done + + lam_coef_sum = 1 + args.gae_lambda * lam_coef_sum + reward_term_sum = args.gae_lambda * args.gamma * reward_term_sum + lam_coef_sum * rewards[t] + value_term_sum = args.gae_lambda * args.gamma * value_term_sum + args.gamma * real_next_values + + advantages[t] = (reward_term_sum + value_term_sum) / lam_coef_sum - values[t] + else: + delta = rewards[t] + args.gamma * real_next_values - values[t] + advantages[t] = lastgaelam = delta + args.gamma * args.gae_lambda * next_not_done * lastgaelam # Here actually we should use next_not_terminated, but we don't have lastgamlam if terminated + returns = advantages + values + + # flatten the batch + b_obs = obs.reshape((-1,)) + b_logprobs = logprobs.reshape(-1) + b_actions = actions.reshape((-1,) + envs.single_action_space.shape) + b_advantages = advantages.reshape(-1) + b_returns = returns.reshape(-1) + b_values = values.reshape(-1) + + # Optimizing the policy and value network + agent.train() + b_inds = np.arange(args.batch_size) + clipfracs = [] + update_time = time.perf_counter() + for epoch in range(args.update_epochs): + np.random.shuffle(b_inds) + 
for start in range(0, args.batch_size, args.minibatch_size): + end = start + args.minibatch_size + mb_inds = b_inds[start:end] + + _, newlogprob, entropy, newvalue = agent.get_action_and_value(b_obs[mb_inds], b_actions[mb_inds]) + logratio = newlogprob - b_logprobs[mb_inds] + ratio = logratio.exp() + + with torch.no_grad(): + # calculate approx_kl http://joschu.net/blog/kl-approx.html + old_approx_kl = (-logratio).mean() + approx_kl = ((ratio - 1) - logratio).mean() + clipfracs += [((ratio - 1.0).abs() > args.clip_coef).float().mean().item()] + + if args.target_kl is not None and approx_kl > args.target_kl: + break + + mb_advantages = b_advantages[mb_inds] + if args.norm_adv: + mb_advantages = (mb_advantages - mb_advantages.mean()) / (mb_advantages.std() + 1e-8) + + # Policy loss + pg_loss1 = -mb_advantages * ratio + pg_loss2 = -mb_advantages * torch.clamp(ratio, 1 - args.clip_coef, 1 + args.clip_coef) + pg_loss = torch.max(pg_loss1, pg_loss2).mean() + + # Value loss + newvalue = newvalue.view(-1) + if args.clip_vloss: + v_loss_unclipped = (newvalue - b_returns[mb_inds]) ** 2 + v_clipped = b_values[mb_inds] + torch.clamp( + newvalue - b_values[mb_inds], + -args.clip_coef, + args.clip_coef, + ) + v_loss_clipped = (v_clipped - b_returns[mb_inds]) ** 2 + v_loss_max = torch.max(v_loss_unclipped, v_loss_clipped) + v_loss = 0.5 * v_loss_max.mean() + else: + v_loss = 0.5 * ((newvalue - b_returns[mb_inds]) ** 2).mean() + + entropy_loss = entropy.mean() + loss = pg_loss - args.ent_coef * entropy_loss + v_loss * args.vf_coef + + optimizer.zero_grad() + loss.backward() + nn.utils.clip_grad_norm_(agent.parameters(), args.max_grad_norm) + optimizer.step() + + if args.target_kl is not None and approx_kl > args.target_kl: + break + update_time = time.perf_counter() - update_time + cumulative_times["update_time"] += update_time + y_pred, y_true = b_values.cpu().numpy(), b_returns.cpu().numpy() + var_y = np.var(y_true) + explained_var = np.nan if var_y == 0 else 1 - np.var(y_true - 
y_pred) / var_y + + logger.add_scalar("charts/learning_rate", optimizer.param_groups[0]["lr"], global_step) + logger.add_scalar("losses/value_loss", v_loss.item(), global_step) + logger.add_scalar("losses/policy_loss", pg_loss.item(), global_step) + logger.add_scalar("losses/entropy", entropy_loss.item(), global_step) + logger.add_scalar("losses/old_approx_kl", old_approx_kl.item(), global_step) + logger.add_scalar("losses/approx_kl", approx_kl.item(), global_step) + logger.add_scalar("losses/clipfrac", np.mean(clipfracs), global_step) + logger.add_scalar("losses/explained_variance", explained_var, global_step) + print("SPS:", int(global_step / (time.time() - start_time))) + logger.add_scalar("charts/SPS", int(global_step / (time.time() - start_time)), global_step) + logger.add_scalar("time/step", global_step, global_step) + logger.add_scalar("time/update_time", update_time, global_step) + logger.add_scalar("time/rollout_time", rollout_time, global_step) + logger.add_scalar("time/rollout_fps", args.num_envs * args.num_steps / rollout_time, global_step) + for k, v in cumulative_times.items(): + logger.add_scalar(f"time/total_{k}", v, global_step) + logger.add_scalar("time/total_rollout+update_time", cumulative_times["rollout_time"] + cumulative_times["update_time"], global_step) + if args.save_model and not args.evaluate: + model_path = f"runs/{run_name}/final_ckpt.pt" + torch.save(agent.state_dict(), model_path) + print(f"model saved to {model_path}") + + envs.close() + if logger is not None: logger.close() diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/README.md b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/README.md new file mode 100644 index 0000000000000000000000000000000000000000..734f9ecf229094ca8f87d7cd98d1b65ebb6b32f9 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/README.md @@ -0,0 +1,77 @@ +# Temporal Difference Learning for Model Predictive 
Control 2 (TD-MPC2) + +Scalable, robust model-based RL algorithm based on ["TD-MPC2: Scalable, Robust World Models for Continuous Control"](https://arxiv.org/abs/2310.16828). Code adapted from https://github.com/nicklashansen/tdmpc2. It is written to work with the new ManiSkill update, and supports vectorized state-based and visual-based RL environments. + +## Installation +We recommend using conda/mamba and you can install the dependencies as follows: + +```bash +conda env create -f environment.yaml +conda activate tdmpc2-ms +``` + +or follow the [original repo](https://github.com/nicklashansen/tdmpc2)'s guide to build the docker image. + + +## State Based RL + +Simple command to run the algorithm with default configs (5M params, 1M steps, default control mode, 32 envs, state obs mode): +```bash +python train.py env_id=PushCube-v1 +``` + +More advanced command with optional configs: (more can be found in config.yaml) +```bash +python train.py model_size=5 steps=1_000_000 seed=1 exp_name=default \ + env_id=PushCube-v1 env_type=gpu num_envs=32 control_mode=pd_ee_delta_pose obs=state \ + save_video_local=false wandb=true wandb_entity=??? wandb_project=??? wandb_group=??? wandb_name=??? setting_tag=??? +``` +(*) The optional *setting_tag* is for adding a specific tag in the wandb log (e.g. sample_efficient, walltime_efficient, etc.) + +## Visual (RGB) Based RL + +The visual-based RL expects model_size = 5. Also, make sure you have sufficient CPU memory; otherwise lower the buffer_size and use the gpu env. +```bash +python train.py buffer_size=500_000 steps=5_000_000 seed=1 exp_name=default \ + env_id=PushCube-v1 env_type=gpu num_envs=32 control_mode=pd_ee_delta_pose obs=rgb \ + save_video_local=false wandb=true wandb_entity=??? wandb_project=??? wandb_group=??? wandb_name=??? setting_tag=??? +``` + +## Replaying Evaluation Trajectories + +To create videos of a checkpoint model, use the following command. 
+ +```bash +python evaluate.py model_size=5 seed=1 exp_name=default \ + env_id=PushCube-v1 control_mode=pd_ee_delta_pose obs=state \ + save_video_local=true checkpoint=/absolute/path/to/checkpoint.pt +``` + +* Make sure you specify the same control_mode the model was trained on if it's not default. +* The videos are saved under ```logs/{env_id}/{seed}/{exp_name}/videos``` +* The number of videos saved is determined by ```num_envs * eval_episodes_per_env``` + +## Some Notes + +- Multi-task TD-MPC2 isn't supported for ManiSkill at the moment. + +## Citation + +If you use this baseline, please cite the following: +``` +@inproceedings{hansen2024tdmpc2, + title={TD-MPC2: Scalable, Robust World Models for Continuous Control}, + author={Nicklas Hansen and Hao Su and Xiaolong Wang}, + booktitle={International Conference on Learning Representations (ICLR)}, + year={2024} +} +``` +as well as the original TD-MPC paper: +``` +@inproceedings{hansen2022tdmpc, + title={Temporal Difference Learning for Model Predictive Control}, + author={Nicklas Hansen and Xiaolong Wang and Hao Su}, + booktitle={International Conference on Machine Learning (ICML)}, + year={2022} +} +``` \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/buffer.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/buffer.py new file mode 100644 index 0000000000000000000000000000000000000000..5a1510cd74d34fd908eb76e41856249c4d2870d3 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/buffer.py @@ -0,0 +1,101 @@ +import torch +from tensordict.tensordict import TensorDict +from torchrl.data.replay_buffers import ReplayBuffer, LazyTensorStorage +from torchrl.data.replay_buffers.samplers import SliceSampler + + +class Buffer(): + """ + Replay buffer for TD-MPC2 training. Based on torchrl. + Uses CUDA memory if available, and CPU memory otherwise. 
+ """ + + def __init__(self, cfg): + self.cfg = cfg + self._device = torch.device('cuda') + self._capacity = min(cfg.buffer_size, cfg.steps) + self._sampler = SliceSampler( + num_slices=self.cfg.batch_size, + end_key=None, + traj_key='episode', + truncated_key=None, + strict_length=True, + ) + self._batch_size = cfg.batch_size * (cfg.horizon+1) + self._num_eps = 0 + + @property + def capacity(self): + """Return the capacity of the buffer.""" + return self._capacity + + @property + def num_eps(self): + """Return the number of episodes in the buffer.""" + return self._num_eps + + def _reserve_buffer(self, storage): + """ + Reserve a buffer with the given storage. + """ + return ReplayBuffer( + storage=storage, + sampler=self._sampler, + pin_memory=True, + prefetch=int(self.cfg.num_envs / self.cfg.steps_per_update), + batch_size=self._batch_size, + ) + + def _init(self, tds): + """Initialize the replay buffer. Use the first episode to estimate storage requirements.""" + print(f'Buffer capacity: {self._capacity:,}') + mem_free, _ = torch.cuda.mem_get_info() + bytes_per_step = sum([ + (v.numel()*v.element_size() if not isinstance(v, TensorDict) \ + else sum([x.numel()*x.element_size() for x in v.values()])) \ + for v in tds.values() + ]) / len(tds) + total_bytes = bytes_per_step*self._capacity + print(f'Storage required: {total_bytes/1e9:.2f} GB') + # Heuristic: decide whether to use CUDA or CPU memory + storage_device = 'cuda' if 2.5*total_bytes < mem_free else 'cpu' + print(f'Using {storage_device.upper()} memory for storage.') + return self._reserve_buffer( + LazyTensorStorage(self._capacity, device=torch.device(storage_device)) + ) + + def _to_device(self, *args, device=None): + if device is None: + device = self._device + return (arg.to(device, non_blocking=True) \ + if arg is not None else None for arg in args) + + def _prepare_batch(self, td): + """ + Prepare a sampled batch for training (post-processing). + Expects `td` to be a TensorDict with batch size TxB. 
+ """ + obs = td['obs'] + action = td['action'][1:] + reward = td['reward'][1:].unsqueeze(-1) + task = td['task'][0] if 'task' in td.keys() else None + return self._to_device(obs, action, reward, task) + + def add(self, td): + """Add an episode to the buffer. + Before vec: td[episode_len+1, ..] ..=act_dim, obs_dim, None + After: add num_env to the batch dimension + Note: for official vec code @51d6b8d, it seems to have batch dimension [episode_len+1, num_env]""" + + for _td in td: + _td['episode'] = torch.ones_like(_td['reward'], dtype=torch.int64) * self._num_eps + if self._num_eps == 0: + self._buffer = self._init(_td) + self._buffer.extend(_td) + self._num_eps += 1 + return self._num_eps + + def sample(self): + """Sample a batch of subsequences from the buffer.""" + td = self._buffer.sample().view(-1, self.cfg.horizon+1).permute(1, 0) + return self._prepare_batch(td) diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/init.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/init.py new file mode 100644 index 0000000000000000000000000000000000000000..45a3f5e9eb73dbffa57dbab0399ced9dbc600e09 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/init.py @@ -0,0 +1,22 @@ +import torch.nn as nn + + +def weight_init(m): + """Custom weight initialization for TD-MPC2.""" + if isinstance(m, nn.Linear): + nn.init.trunc_normal_(m.weight, std=0.02) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Embedding): + nn.init.uniform_(m.weight, -0.02, 0.02) + elif isinstance(m, nn.ParameterList): + for i,p in enumerate(m): + if p.dim() == 3: # Linear + nn.init.trunc_normal_(p, std=0.02) # Weight + nn.init.constant_(m[i+1], 0) # Bias + + +def zero_(params): + """Initialize parameters to zero.""" + for p in params: + p.data.fill_(0) diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/logger.py 
b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..3f68696c41e8b063e6dbb2ff8a29b40a015a7db2 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/logger.py @@ -0,0 +1,208 @@ +import os +import datetime +import re +import numpy as np +import pandas as pd +from termcolor import colored +from omegaconf import OmegaConf +from mani_skill.utils.visualization.misc import tile_images +import wandb +from common import TASK_SET + + +CONSOLE_FORMAT = [ + ("iteration", "I", "int"), + ("episode", "E", "int"), + ("step", "I", "int"), + ("return", "R", "float"), + ("success_once", "S", "float"), + ("fail_once", "F", "float"), + ("total_time", "T", "time"), + # Added for maniskill rl baselines metrics + # ("reward", "RET", "float"), + # ("episode_len", "L", "int"), + # ("rollout_time", "RT", "float"), + # ("rollout_fps", "RF", "float"), + # ("update_time", "U", "float"), + +] + +CAT_TO_COLOR = { + "pretrain": "yellow", + "train": "blue", + "eval": "green", + # Added for maniskill rl baselines metrics + "time" : "magenta", +} + + +def make_dir(dir_path): + """Create directory if it does not already exist.""" + try: + os.makedirs(dir_path) + except OSError: + pass + return dir_path + + +def print_run(cfg): # this function has to be called after make_env + """ + Pretty-printing of current run information. + Logger calls this method at initialization. + """ + prefix, color, attrs = " ", "green", ["bold"] + + def _limstr(s, maxlen=36): + return str(s[:maxlen]) + "..." 
if len(str(s)) > maxlen else s + + def _pprint(k, v): + print( + prefix + colored(f'{k.capitalize()+":":<15}', color, attrs=attrs), _limstr(v) + ) + + observations = ", ".join([str(v) for v in cfg.obs_shape.values()]) + kvs = [ + ("task", cfg.env_id), + ("sim backend", cfg.env_type), + ("steps", f"{int(cfg.steps):,}"), + ("observations", observations), + ("actions", cfg.action_dim), + ("experiment", cfg.exp_name), + ] + w = np.max([len(_limstr(str(kv[1]))) for kv in kvs]) + 25 + div = "-" * w + print(div) + for k, v in kvs: + _pprint(k, v) + print(div) + + +def cfg_to_group(cfg, return_list=False): + """ + Return a wandb-safe group name for logging. + Optionally returns group name as list. + """ + lst = [cfg.env_id, re.sub("[^0-9a-zA-Z]+", "-", cfg.exp_name)] + return lst if return_list else "-".join(lst) +class Logger: + """Primary logging object. Logs either locally or using wandb.""" + + def __init__(self, cfg, manager = None): + self.cfg = cfg + self._log_dir = make_dir(cfg.work_dir) + self._model_dir = make_dir(self._log_dir / "models") + self._save_csv = cfg.save_csv + self._save_agent = cfg.save_agent + self._group = cfg_to_group(cfg) + self._seed = cfg.seed + self._eval = [] + self.save_video_local = cfg.save_video_local + # Set up wandb + self.project = cfg.get("wandb_project", "none") + self.entity = cfg.get("wandb_entity", "none") + self.name = cfg.get("wandb_name", "none") + self.group = cfg.get("wandb_group", "none") + if not cfg.wandb or self.project == "none" or self.entity == "none": + print(colored("Wandb disabled.", "blue", attrs=["bold"])) + self._wandb = None + else: + print(colored("Logs will be synced with wandb.", "blue", attrs=["bold"])) + os.environ["WANDB_SILENT"] = "true" if cfg.wandb_silent else "false" + # Modified for Maniskill RL Baseline Logging Convention + wandb_tags = cfg_to_group(cfg, return_list=True) + [f"seed:{cfg.seed}"] + ["tdmpc2"] + if cfg.setting_tag != 'none': + wandb_tags += [cfg.setting_tag] + self._wandb = wandb.init( 
+ project=self.project, + entity=self.entity, + name=self.name, + group=self.group, + tags=wandb_tags, + dir=self._log_dir, + config=OmegaConf.to_container(cfg, resolve=True), + ) + + self.wandb_videos = manager.list() + self.lock = manager.Lock() + + @property + def model_dir(self): + return self._model_dir + + def save_agent(self, agent=None, identifier='final'): + if self._save_agent and agent: + fp = self._model_dir / f'{str(identifier)}.pt' + agent.save(fp) + if self._wandb: + artifact = wandb.Artifact( + self.group + '-' + str(self._seed) + '-' + str(identifier), + type='model', + ) + artifact.add_file(fp) + self._wandb.log_artifact(artifact) + + def finish(self, agent=None): + try: + self.save_agent(agent) + except Exception as e: + print(colored(f"Failed to save model: {e}", "red")) + if self._wandb: + self._wandb.finish() + + def _format(self, key, value, ty): + if ty == "int": + return f'{colored(key+":", "blue")} {int(value):,}' + elif ty == "float": + return f'{colored(key+":", "blue")} {value:.02f}' + elif ty == "time": + value = str(datetime.timedelta(seconds=int(value))) + return f'{colored(key+":", "blue")} {value}' + else: + raise f"invalid log format type: {ty}" + + def _print(self, d, category): + category = colored(category, CAT_TO_COLOR[category]) + pieces = [f" {category:<14}"] + for k, disp_k, ty in CONSOLE_FORMAT: + if k in d: + pieces.append(f"{self._format(disp_k, d[k], ty):<22}") + print(" ".join(pieces)) + + def add_wandb_video(self, frames: np.ndarray): # (num_envs, num_frames, h, w, 3) + with self.lock: + if self.cfg.wandb and len(frames) > 0: + self.wandb_videos.extend(frames) + + + def log_wandb_video(self, step, fps=15, key='videos/eval_video'): + with self.lock: + if self.cfg.wandb and len(self.wandb_videos) > 0 : + nrows = int(np.sqrt(len(self.wandb_videos))) + wandb_video = np.stack(self.wandb_videos) + wandb_video = wandb_video.transpose(1, 0, 2, 3, 4) + wandb_video = [tile_images(rgbs, nrows=nrows) for rgbs in wandb_video] + 
wandb_video = np.stack(wandb_video) + self.wandb_videos[:] = [] + return self._wandb.log( + {key: wandb.Video(wandb_video.transpose(0, 3, 1, 2), fps=fps, format='mp4')}, step=step + ) + + def log(self, d, category="train"): + assert category in CAT_TO_COLOR.keys(), f"invalid category: {category}" + if self._wandb: + if category in {"train", "eval", "time"}: + xkey = "step" + elif category == "pretrain": + xkey = "iteration" + _d = dict() + for k, v in d.items(): + _d[category + "/" + k] = v + self._wandb.log(_d, step=d[xkey]) + if category == "eval" and self._save_csv: + keys = ["step", "return"] + self._eval.append(np.array([d[keys[0]], d[keys[1]]])) + pd.DataFrame(np.array(self._eval)).to_csv( + self._log_dir / "eval.csv", header=keys, index=None + ) + if category != 'time': + self._print(d, category) diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/scale.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/scale.py new file mode 100644 index 0000000000000000000000000000000000000000..63f0bb2e14a14aa017059c5e55124d50873f4d38 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/scale.py @@ -0,0 +1,48 @@ +import torch + + +class RunningScale: + """Running trimmed scale estimator.""" + + def __init__(self, cfg): + self.cfg = cfg + self._value = torch.ones(1, dtype=torch.float32, device=torch.device('cuda')) + self._percentiles = torch.tensor([5, 95], dtype=torch.float32, device=torch.device('cuda')) + + def state_dict(self): + return dict(value=self._value, percentiles=self._percentiles) + + def load_state_dict(self, state_dict): + self._value.data.copy_(state_dict['value']) + self._percentiles.data.copy_(state_dict['percentiles']) + + @property + def value(self): + return self._value.cpu().item() + + def _percentile(self, x): + x_dtype, x_shape = x.dtype, x.shape + x = x.view(x.shape[0], -1) + in_sorted, _ = torch.sort(x, dim=0) + positions = 
self._percentiles * (x.shape[0]-1) / 100 + floored = torch.floor(positions) + ceiled = floored + 1 + ceiled[ceiled > x.shape[0] - 1] = x.shape[0] - 1 + weight_ceiled = positions-floored + weight_floored = 1.0 - weight_ceiled + d0 = in_sorted[floored.long(), :] * weight_floored[:, None] + d1 = in_sorted[ceiled.long(), :] * weight_ceiled[:, None] + return (d0+d1).view(-1, *x_shape[1:]).type(x_dtype) + + def update(self, x): + percentiles = self._percentile(x.detach()) + value = torch.clamp(percentiles[1] - percentiles[0], min=1.) + self._value.data.lerp_(value, self.cfg.tau) + + def __call__(self, x, update=False): + if update: + self.update(x) + return x * (1/self.value) + + def __repr__(self): + return f'RunningScale(S: {self.value})' diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/seed.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/seed.py new file mode 100644 index 0000000000000000000000000000000000000000..5c8972e601d879203fd151286edd57a373f60529 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/seed.py @@ -0,0 +1,12 @@ +import random + +import numpy as np +import torch + + +def set_seed(seed): + """Set seed for reproducibility.""" + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/world_model.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/world_model.py new file mode 100644 index 0000000000000000000000000000000000000000..e9283d30e8e9ddf7c1fe720dc22131aa7b7e5d44 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/common/world_model.py @@ -0,0 +1,178 @@ +from copy import deepcopy + +import numpy as np +import torch +import torch.nn as nn + +from common import layers, math, init + + +class WorldModel(nn.Module): + """ + 
TD-MPC2 implicit world model architecture. + Can be used for both single-task and multi-task experiments. + """ + + def __init__(self, cfg): + super().__init__() + self.cfg = cfg + if cfg.multitask: + self._task_emb = nn.Embedding(len(cfg.tasks), cfg.task_dim, max_norm=1) + self._action_masks = torch.zeros(len(cfg.tasks), cfg.action_dim) + for i in range(len(cfg.tasks)): + self._action_masks[i, :cfg.action_dims[i]] = 1. + self._encoder = layers.enc(cfg) + self._dynamics = layers.mlp(cfg.latent_dim + cfg.action_dim + cfg.task_dim, 2*[cfg.mlp_dim], cfg.latent_dim, act=layers.SimNorm(cfg)) + self._reward = layers.mlp(cfg.latent_dim + cfg.action_dim + cfg.task_dim, 2*[cfg.mlp_dim], max(cfg.num_bins, 1)) + self._pi = layers.mlp(cfg.latent_dim + cfg.task_dim, 2*[cfg.mlp_dim], 2*cfg.action_dim) + self._Qs = layers.Ensemble([layers.mlp(cfg.latent_dim + cfg.action_dim + cfg.task_dim, 2*[cfg.mlp_dim], + max(cfg.num_bins, 1), dropout=cfg.dropout) for _ in range(cfg.num_q)]) + self.apply(init.weight_init) + init.zero_([self._reward[-1].weight, self._Qs.params[-2]]) + self._target_Qs = deepcopy(self._Qs).requires_grad_(False) + self.log_std_min = torch.tensor(cfg.log_std_min) + self.log_std_dif = torch.tensor(cfg.log_std_max) - self.log_std_min + + @property + def total_params(self): + return sum(p.numel() for p in self.parameters() if p.requires_grad) + + def to(self, *args, **kwargs): + """ + Overriding `to` method to also move additional tensors to device. + """ + super().to(*args, **kwargs) + if self.cfg.multitask: + self._action_masks = self._action_masks.to(*args, **kwargs) + self.log_std_min = self.log_std_min.to(*args, **kwargs) + self.log_std_dif = self.log_std_dif.to(*args, **kwargs) + return self + + def train(self, mode=True): + """ + Overriding `train` method to keep target Q-networks in eval mode. + """ + super().train(mode) + self._target_Qs.train(False) + return self + + def track_q_grad(self, mode=True): + """ + Enables/disables gradient tracking of Q-networks. 
+ Avoids unnecessary computation during policy optimization. + This method also enables/disables gradients for task embeddings. + """ + for p in self._Qs.parameters(): + p.requires_grad_(mode) + if self.cfg.multitask: + for p in self._task_emb.parameters(): + p.requires_grad_(mode) + + def soft_update_target_Q(self): + """ + Soft-update target Q-networks using Polyak averaging. + """ + with torch.no_grad(): + for p, p_target in zip(self._Qs.parameters(), self._target_Qs.parameters()): + p_target.data.lerp_(p.data, self.cfg.tau) + + def task_emb(self, x, task): + """ + Continuous task embedding for multi-task experiments. + Retrieves the task embedding for a given task ID `task` + and concatenates it to the input `x`. + """ + if isinstance(task, int): + task = torch.tensor([task], device=x.device) + emb = self._task_emb(task.long()) + if x.ndim == 3: + emb = emb.unsqueeze(0).repeat(x.shape[0], 1, 1) + elif emb.shape[0] == 1: + emb = emb.repeat(x.shape[0], 1) + return torch.cat([x, emb], dim=-1) + + def encode(self, obs, task): + """ + Encodes an observation into its latent representation. Online trainer obs is [1, obs_shape], task is None + This implementation assumes a single state-based observation. Should be already batched. + Should be ok. + """ + if self.cfg.multitask: + obs = self.task_emb(obs, task) + if self.cfg.obs == 'rgb' and obs.ndim == 5: + return torch.stack([self._encoder[self.cfg.obs](o) for o in obs]) + return self._encoder[self.cfg.obs](obs) + + def next(self, z, a, task): + """ + z[] + Predicts the next latent state given the current latent state and action. + """ + if self.cfg.multitask: + z = self.task_emb(z, task) + z = torch.cat([z, a], dim=-1) + return self._dynamics(z) + + def reward(self, z, a, task): + """ + Predicts instantaneous (single-step) reward. 
+ """ + if self.cfg.multitask: + z = self.task_emb(z, task) + z = torch.cat([z, a], dim=-1) + return self._reward(z) + + def pi(self, z, task): + """ + z[~, 1] + Return mu[~, action_dim], pi[~, action_dim], log_pi[~, 1], log_std[~, action_dim] + + Samples an action from the policy prior. + The policy prior is a Gaussian distribution with + mean and (log) std predicted by a neural network. + """ + if self.cfg.multitask: + z = self.task_emb(z, task) + + # Gaussian policy prior + mu, log_std = self._pi(z).chunk(2, dim=-1) + log_std = math.log_std(log_std, self.log_std_min, self.log_std_dif) + eps = torch.randn_like(mu) + + if self.cfg.multitask: # Mask out unused action dimensions + mu = mu * self._action_masks[task] + log_std = log_std * self._action_masks[task] + eps = eps * self._action_masks[task] + action_dims = self._action_masks.sum(-1)[task].unsqueeze(-1) + else: # No masking + action_dims = None + + log_pi = math.gaussian_logprob(eps, log_std, size=action_dims) + pi = mu + eps * log_std.exp() + mu, pi, log_pi = math.squash(mu, pi, log_pi) + + return mu, pi, log_pi, log_std + + def Q(self, z, a, task, return_type='min', target=False): + """ + Predict state-action value. z[~, latent_dim], a[~, action_dim] -> [num_q, ~, num_bins] if all else [~, 1] + `return_type` can be one of [`min`, `avg`, `all`]: + - `min`: return the minimum of two randomly subsampled Q-values. + - `avg`: return the average of two randomly subsampled Q-values. + - `all`: return all Q-values. + `target` specifies whether to use the target Q-networks or not. 
+ """ + assert return_type in {'min', 'avg', 'all'} + + if self.cfg.multitask: + z = self.task_emb(z, task) + + z = torch.cat([z, a], dim=-1) + out = (self._target_Qs if target else self._Qs)(z) + + if return_type == 'all': + return out + + Q1, Q2 = out[np.random.choice(self.cfg.num_q, 2, replace=False)] + Q1, Q2 = math.two_hot_inv(Q1, self.cfg), math.two_hot_inv(Q2, self.cfg) + return torch.min(Q1, Q2) if return_type == 'min' else (Q1 + Q2) / 2 diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/config.yaml b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ccdf8a30b01633db9a1a739ea3a0260a6c59e2d6 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/config.yaml @@ -0,0 +1,120 @@ +defaults: + - override hydra/launcher: submitit_local + +# environment +env_id: PushCube-v1 +obs: state # or rgb +control_mode: default # or pd_joint_delta_pos or pd_ee_delta_pose +num_envs: 32 +num_eval_envs: 4 +env_type: gpu # cpu +render_mode: rgb_array # ['rgb_array' for quality, or 'sensors' for speed] +render_size: 64 +setting_tag: none # ['none', 'walltime_efficient', 'sample_efficient', ...] for wandb tags + +# evaluation +checkpoint: ??? +eval_episodes_per_env: 2 # total (eval_episodes_per_env * num_eval_envs number) of eval episodes +eval_freq: 50000 + +# training +steps: 1_000_000 +batch_size: 256 +reward_coef: 0.1 +value_coef: 0.1 +consistency_coef: 20 +rho: 0.5 +lr: 3e-4 +enc_lr_scale: 0.3 +grad_clip_norm: 20 +tau: 0.01 +discount_denom: 5 +discount_min: 0.95 +discount_max: 0.995 +buffer_size: 1_000_000 +exp_name: default +data_dir: ??? 
+steps_per_update: 1 + +# planning +mpc: true +iterations: 6 +num_samples: 512 +num_elites: 64 +num_pi_trajs: 24 +horizon: 3 +min_std: 0.05 +max_std: 2 +temperature: 0.5 + +# actor +log_std_min: -10 +log_std_max: 2 +entropy_coef: 1e-4 + +# critic +num_bins: 101 +vmin: -10 +vmax: +10 + +# architecture +model_size: ??? +num_enc_layers: 2 +enc_dim: 256 +num_channels: 32 +mlp_dim: 512 +latent_dim: 512 +task_dim: 0 +num_q: 5 +dropout: 0.01 +simnorm_dim: 8 + +# logging +wandb_project: +wandb_group: +wandb_name: +wandb_entity: +wandb_silent: false +wandb: false # enable wandb +save_csv: true + +# misc +save_video_local: false # save video in eval_video for evaluation during training +save_agent: true +seed: 1 + +# convenience +work_dir: ??? +task_title: ??? +multitask: ??? +tasks: ??? +obs_shape: ??? +action_dim: ??? +episode_length: ??? +obs_shapes: ??? +action_dims: ??? +episode_lengths: ??? +seed_steps: ??? +bin_size: ??? + +# Added for Maniskill RL Baselines Config Convention (don't assign to them) +env_cfg: + env_id: ??? + control_mode: ??? # pd_joint_delta_pos or pd_ee_delta_pose + obs_mode: ??? + reward_mode: ??? + num_envs: ??? + sim_backend: ??? # cpu or gpu + partial_reset: false + env_horizon: ??? +eval_env_cfg: + env_id: ??? + control_mode: ??? + obs_mode: ??? + reward_mode: ??? + num_envs: ??? + sim_backend: ??? + env_horizon: ??? + partial_reset: false + num_eval_episodes: ??? +discount: ??? 
\ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/environment.yaml b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/environment.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7c41422ac3737cb57edfcc6ccb9b995baab95822 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/environment.yaml @@ -0,0 +1,67 @@ +name: tdmpc2-ms +channels: + - pytorch-nightly + - nvidia + - conda-forge + - defaults +dependencies: + - cudatoolkit=11.7 + - glew=2.1.0 + - glib=2.68.4 + - pip=21.0 + - python=3.9.0 + - pytorch>=2.2.2 + - torchvision>=0.16.2 + - pip: + - absl-py==2.0.0 + - "cython<3" + - dm-control==1.0.8 + - ffmpeg==1.4 + - glfw==2.6.4 + - hydra-core==1.3.2 + - hydra-submitit-launcher==1.2.0 + - imageio==2.33.1 + - imageio-ffmpeg==0.4.9 + - kornia==0.7.1 + - moviepy==1.0.3 + - mujoco==2.3.1 + - mujoco-py==2.1.2.14 + - numpy==1.23.5 + - omegaconf==2.3.0 + - open3d==0.18.0 + - opencv-contrib-python==4.9.0.80 + - opencv-python==4.9.0.80 + - pandas==2.1.4 + - sapien==3.0.0.b1 + - submitit==1.5.1 + - setuptools==65.5.0 + - patchelf==0.17.2.1 + - protobuf==4.25.2 + - pillow==10.2.0 + - pyquaternion==0.9.9 + - tensordict-nightly==2024.3.26 + - termcolor==2.4.0 + - torchrl-nightly==2024.3.26 + - transforms3d==0.4.1 + - trimesh==4.0.9 + - tqdm==4.66.1 + - wandb==0.16.2 + - wheel==0.38.0 + - mani_skill>=3.0.0b12 + #################### + # Gym: + # (unmaintained but required for maniskill2/meta-world/myosuite) + # - gym==0.21.0 + #################### + # ManiSkill2: + # (requires gym==0.21.0 which occasionally breaks) + # - mani-skill2==0.4.1 + #################### + # Meta-World: + # (requires gym==0.21.0 which occasionally breaks) + # - git+https://github.com/Farama-Foundation/Metaworld.git@04be337a12305e393c0caf0cbf5ec7755c7c8feb + #################### + # MyoSuite: + # (requires gym==0.13 which conflicts with meta-world / mani-skill2) + 
# - myosuite + #################### diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/evaluate.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/evaluate.py new file mode 100644 index 0000000000000000000000000000000000000000..723a69eea26f2d96a25d39febc0e6dcfc64c2986 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/evaluate.py @@ -0,0 +1,124 @@ +import os +os.environ['MUJOCO_GL'] = 'egl' +import warnings +warnings.filterwarnings('ignore') + +import hydra +import imageio +import numpy as np +import torch +from termcolor import colored + +from common.parser import parse_cfg +from common.seed import set_seed +from envs import make_envs +from tdmpc2 import TDMPC2 + +torch.backends.cudnn.benchmark = True + + +@hydra.main(config_name='config', config_path='.') +def evaluate(cfg: dict): + """ + Script for evaluating a single-task / multi-task TD-MPC2 checkpoint. + + Most relevant args: + `env_id`: task name (eg. PickCube-v0) + `model_size`: model size, must be one of `[1, 5, 19, 48, 317]` (default: 5) + `checkpoint`: path to model checkpoint to load + `eval_episodes`: number of episodes to evaluate on per task (default: 10) + `save_video_local`: whether to save a video of the evaluation (default: True) + `seed`: random seed (default: 1) + + See config.yaml for a full list of args. + + Example usage: + ```` + $ python evaluate.py task=mt80 model_size=48 checkpoint=/path/to/mt80-48M.pt + $ python evaluate.py task=mt30 model_size=317 checkpoint=/path/to/mt30-317M.pt + $ python evaluate.py task=dog-run checkpoint=/path/to/dog-1.pt save_video_local=true + ``` + """ + assert torch.cuda.is_available() + assert cfg.eval_episodes_per_env > 0, 'Must evaluate at least 1 episode.' 
+ eval_episodes = cfg.eval_episodes_per_env * cfg.num_eval_envs + cfg.num_envs = 1 # to keep the code similar and logging video simpler + cfg = parse_cfg(cfg) + assert not cfg.multitask, colored('Warning: multi-task models is not currently supported for maniskill.', 'red', attrs=['bold']) + set_seed(cfg.seed) + print(colored(f'Task: {cfg.env_id}', 'blue', attrs=['bold'])) + print(colored(f'Model size: {cfg.get("model_size", "default")}', 'blue', attrs=['bold'])) + print(colored(f'Checkpoint: {cfg.checkpoint}', 'blue', attrs=['bold'])) + + # Make environment + env = make_envs(cfg, cfg.num_envs, is_eval=True) + + # Load agent + agent = TDMPC2(cfg) + assert os.path.exists(cfg.checkpoint), f'Checkpoint {cfg.checkpoint} not found! Must be a valid filepath.' + agent.load(cfg.checkpoint) + + # Evaluate + if cfg.multitask: + print(colored(f'Evaluating agent on {len(cfg.tasks)} tasks:', 'yellow', attrs=['bold'])) + else: + print(colored(f'Evaluating agent on {cfg.env_id}:', 'yellow', attrs=['bold'])) + if cfg.save_video_local: + video_dir = os.path.join(cfg.work_dir, 'videos') + os.makedirs(video_dir, exist_ok=True) + scores = [] + tasks = cfg.tasks if cfg.multitask else [cfg.env_id] + for task_idx, task in enumerate(tasks): + if not cfg.multitask: + task_idx = None + has_success, has_fail = False, False # if task has success or/and fail (added for maniskill) + ep_rewards, ep_successes, ep_fails = [], [], [] + for i in range(eval_episodes): + obs, _ = env.reset() + done = False # ms3: done is truncated since the ms3 ignore_terminations. 
+ ep_reward, t = 0, 0 + if cfg.save_video_local: + frames = [env.render().squeeze()] + while not done: # done is truncated and should be the same + action = agent.act(obs, t0=t==0) + obs, reward, terminated, truncated, info = env.step(action) + done = terminated | truncated + ep_reward += reward + t += 1 + if cfg.save_video_local: + frames.append(env.render().squeeze()) + ep_rewards.append(ep_reward.mean().item()) + if 'success' in info: + has_success = True + ep_successes.append(info['success'].float().mean().item()) + if 'fail' in info: + has_fail = True + ep_fails.append(info['fail'].float().mean().item()) + if cfg.save_video_local: + imageio.mimsave( + os.path.join(video_dir, f'{task}-{i}.mp4'), frames, fps=15) + ep_rewards = np.nanmean(ep_rewards) + ep_successes = np.nanmean(ep_successes) + ep_fails = np.nanmean(ep_fails) + if cfg.multitask: + scores.append(ep_successes*100 if task.startswith('mw-') else ep_rewards/10) + if has_success and has_fail: + print(colored(f' {task:<22}' \ + f'\tR: {ep_rewards:.01f} ' \ + f'\tS: {ep_successes:.02f}' \ + f'\tF: {ep_fails:.02f}', 'yellow')) + elif has_success: + print(colored(f' {task:<22}' \ + f'\tR: {ep_rewards:.01f} ' \ + f'\tS: {ep_successes:.02f}', 'yellow')) + elif has_fail: + print(colored(f' {task:<22}' \ + f'\tR: {ep_rewards:.01f} ' \ + f'\tF: {ep_fails:.02f}', 'yellow')) + + if cfg.multitask: + print(colored(f'Normalized score: {np.mean(scores):.02f}', 'yellow', attrs=['bold'])) + + +if __name__ == '__main__': + evaluate() diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/tdmpc2.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/tdmpc2.py new file mode 100644 index 0000000000000000000000000000000000000000..cd8877b7190a72923cf12a4239877d775262a5d9 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/tdmpc2.py @@ -0,0 +1,313 @@ +import numpy as np +import torch +import torch.nn.functional as F + +from common 
import math +from common.scale import RunningScale +from common.world_model import WorldModel + + +class TDMPC2: + """ + TD-MPC2 agent. Implements training + inference. + Can be used for both single-task and multi-task experiments, + and supports both state and pixel observations. + """ + + def __init__(self, cfg): + self.cfg = cfg + self.device = torch.device('cuda') + self.model = WorldModel(cfg).to(self.device) + self.optim = torch.optim.Adam([ + {'params': self.model._encoder.parameters(), 'lr': self.cfg.lr*self.cfg.enc_lr_scale}, + {'params': self.model._dynamics.parameters()}, + {'params': self.model._reward.parameters()}, + {'params': self.model._Qs.parameters()}, + {'params': self.model._task_emb.parameters() if self.cfg.multitask else []} + ], lr=self.cfg.lr) + self.pi_optim = torch.optim.Adam(self.model._pi.parameters(), lr=self.cfg.lr, eps=1e-5) + self.model.eval() + self.scale = RunningScale(cfg) + self.cfg.iterations += 2*int(cfg.action_dim >= 20) # Heuristic for large action spaces + self.discount = torch.tensor( + [self._get_discount(ep_len) for ep_len in cfg.episode_lengths], device='cuda' + ) if self.cfg.multitask else self._get_discount(cfg.episode_length) + + self.cfg.discount = self.discount + + def _get_discount(self, episode_length): + """ + Returns discount factor for a given episode length. + Simple heuristic that scales discount linearly with episode length. + Default values should work well for most tasks, but can be changed as needed. + + Args: + episode_length (int): Length of the episode. Assumes episodes are of fixed length. + + Returns: + float: Discount factor for the task. + """ + frac = episode_length/self.cfg.discount_denom + return min(max((frac-1)/(frac), self.cfg.discount_min), self.cfg.discount_max) + + def save(self, fp): + """ + Save state dict of the agent to filepath. + + Args: + fp (str): Filepath to save state dict to. 
+ """ + torch.save({"model": self.model.state_dict()}, fp) + + def load(self, fp): + """ + Load a saved state dict from filepath (or dictionary) into current agent. + + Args: + fp (str or dict): Filepath or state dict to load. + """ + state_dict = fp if isinstance(fp, dict) else torch.load(fp) + self.model.load_state_dict(state_dict["model"]) + + @torch.no_grad() + def act(self, obs, t0=False, eval_mode=False, task=None): + """ + Before: obs is 1d, return seems to be mu(1, action_dim) + After: obs is batched with num_env, return still 2d + + Select an action by planning in the latent space of the world model. + + Args: + obs (torch.Tensor): Observation from the environment. 1d for online trainer + t0 (bool): Whether this is the first observation in the episode. + eval_mode (bool): Whether to use the mean of the action distribution. + task (int): Task index (only used for multi-task experiments). + + Returns: + torch.Tensor: Action to take in the environment. + """ + obs = obs.to(self.device, non_blocking=True) + if task is not None: + task = torch.tensor([task], device=self.device) + z = self.model.encode(obs, task) # [num_envs, latent_dim] + if self.cfg.mpc: + a = self.plan(z, t0=t0, eval_mode=eval_mode, task=task) + else: + a = self.model.pi(z, task)[int(not eval_mode)] # [int(not eval_mode)] selects mu or pi + return a.cpu() + + @torch.no_grad() + def _estimate_value(self, z, actions, task): + """z[num_samples, latent_dim], actions[horizon, num_samples, action_dim] -> [num_samples, 1] + Estimate value of a trajectory starting at latent state z and executing given actions.""" + G, discount = 0, 1 + for t in range(self.cfg.horizon): + reward = math.two_hot_inv(self.model.reward(z, actions[:, t], task), self.cfg) + z = self.model.next(z, actions[:, t], task) + G += discount * reward + discount *= self.discount[torch.tensor(task)] if self.cfg.multitask else self.discount + return G + discount * self.model.Q(z, self.model.pi(z, task)[1], task, return_type='avg') + + 
@torch.no_grad() + def plan(self, z, t0=False, eval_mode=False, task=None): + """ + Before: For online, z[1, latent_dim] + After: For online z[num_envs, latent_dim]. Should be ok + Plan a sequence of actions using the learned world model. + + Args: + z (torch.Tensor): Latent state from which to plan. + t0 (bool): Whether this is the first observation in the episode. + eval_mode (bool): Whether to use the mean of the action distribution. + task (Torch.Tensor): Task index (only used for multi-task experiments). + + Returns: + torch.Tensor: Action to take in the environment. + """ + num_envs = self.cfg.num_eval_envs if eval_mode else self.cfg.num_envs + # Sample policy trajectories + if self.cfg.num_pi_trajs > 0: + pi_actions = torch.empty(num_envs, self.cfg.horizon, self.cfg.num_pi_trajs, self.cfg.action_dim, device=self.device) + _z = z.unsqueeze(1).repeat(1, self.cfg.num_pi_trajs, 1) # (num_envs, num_pi_trajs, latent_dim) + for t in range(self.cfg.horizon-1): + pi_actions[:, t] = self.model.pi(_z, task)[1] + _z = self.model.next(_z, pi_actions[:, t], task) + pi_actions[:, -1] = self.model.pi(_z, task)[1] + + # Initialize state and parameters + z = z.unsqueeze(1).repeat(1, self.cfg.num_samples, 1) # (num_envs, num_samples, latent_dim) + mean = torch.zeros(num_envs, self.cfg.horizon, self.cfg.action_dim, device=self.device) + std = self.cfg.max_std*torch.ones(num_envs, self.cfg.horizon, self.cfg.action_dim, device=self.device) + if not t0 and hasattr(self, '_prev_mean'): + if eval_mode: # Added to avoid the problem with shape (num_envs) mismatch with train and eval env + mean[:, :-1] = self._prev_mean_eval[:, 1:] + else: + mean[:, :-1] = self._prev_mean[:, 1:] + actions = torch.empty(num_envs, self.cfg.horizon, self.cfg.num_samples, + self.cfg.action_dim, device=self.device) # # (num_envs, horizon, num_samples, latent_dim) + if self.cfg.num_pi_trajs > 0: + actions[:, :, :self.cfg.num_pi_trajs] = pi_actions + + # Iterate MPPI + for _ in range(self.cfg.iterations): + + 
# Sample actions + actions[:, :, self.cfg.num_pi_trajs:] = (mean.unsqueeze(2) + std.unsqueeze(2) * \ + torch.randn(num_envs, self.cfg.horizon, self.cfg.num_samples-self.cfg.num_pi_trajs, self.cfg.action_dim, device=std.device)) \ + .clamp(-1, 1) + if self.cfg.multitask: + actions = actions * self.model._action_masks[task] + + # Compute elite actions + value = self._estimate_value(z, actions, task).nan_to_num_(0) # (num_envs, num_samples, 1) + elite_idxs = torch.topk(value.squeeze(2), self.cfg.num_elites, dim=1).indices # (num_envs, num_elites) + elite_value = value[torch.arange(num_envs).unsqueeze(1), elite_idxs] # (num_envs, num_elites, 1) + # elite_actions = torch.zeros(num_envs, self.cfg.horizon, self.cfg.num_elites, self.cfg.action_dim, dtype=actions.dtype, device=actions.device) + # for j, curr_elites in enumerate(elite_idxs): + # elite_actions[j] = actions[j, :, curr_elites] + elite_actions = torch.gather(actions, 2, elite_idxs.unsqueeze(1).unsqueeze(3).expand(-1, self.cfg.horizon, -1, self.cfg.action_dim)) + + # Update parameters + max_value = elite_value.max(1)[0] # (num_envs, 1) + score = torch.exp(self.cfg.temperature*(elite_value - max_value.unsqueeze(1))) + score /= score.sum(1, keepdim=True) # (num_envs, num_elites, 1) + mean = torch.sum(score.unsqueeze(1) * elite_actions, dim=2) / (score.sum(1, keepdim=True) + 1e-9) # (num_envs, horizon, action_dim) + std = torch.sqrt(torch.sum(score.unsqueeze(1) * (elite_actions - mean.unsqueeze(2)) ** 2, dim=2) / (score.sum(1, keepdim=True) + 1e-9)) \ + .clamp_(self.cfg.min_std, self.cfg.max_std) # (num_envs, horizon, action_dim) + if self.cfg.multitask: + mean = mean * self.model._action_masks[task] + std = std * self.model._action_masks[task] + + # Select action + score = score.squeeze(2).cpu().numpy() # (num_envs, num_elites) + # (num_envs, horizon, num_elites, action_dim) for elite_actions + actions = torch.zeros(num_envs, self.cfg.horizon, self.cfg.action_dim, dtype=actions.dtype, device=actions.device) + for i 
in range(len(score)): + actions[i] = elite_actions[i, :, np.random.choice(np.arange(score.shape[1]), p=score[i])] + if eval_mode: + self._prev_mean_eval = mean # (num_eval_envs, horizon, action_dim) + else: + self._prev_mean = mean # (num_envs, horizon, action_dim) + a, std = actions[:, 0], std[:, 0] + if not eval_mode: + a += std * torch.randn(num_envs, self.cfg.action_dim, device=std.device) + return a.clamp_(-1, 1) + + def update_pi(self, zs, task): + """ + Update policy using a sequence of latent states. + + Args: + zs (torch.Tensor): Sequence of latent states. + task (torch.Tensor): Task index (only used for multi-task experiments). + + Returns: + float: Loss of the policy update. + """ + self.pi_optim.zero_grad(set_to_none=True) + self.model.track_q_grad(False) + _, pis, log_pis, _ = self.model.pi(zs, task) + qs = self.model.Q(zs, pis, task, return_type='avg') + self.scale.update(qs[0]) + qs = self.scale(qs) + + # Loss is a weighted sum of Q-values + rho = torch.pow(self.cfg.rho, torch.arange(len(qs), device=self.device)) + pi_loss = ((self.cfg.entropy_coef * log_pis - qs).mean(dim=(1,2)) * rho).mean() + pi_loss.backward() + torch.nn.utils.clip_grad_norm_(self.model._pi.parameters(), self.cfg.grad_clip_norm) + self.pi_optim.step() + self.model.track_q_grad(True) + + return pi_loss.item() + + @torch.no_grad() + def _td_target(self, next_z, reward, task): + """ + Compute the TD-target from a reward and the observation at the following time step. + + Args: + next_z (torch.Tensor): Latent state at the following time step. + reward (torch.Tensor): Reward at the current time step. + task (torch.Tensor): Task index (only used for multi-task experiments). + + Returns: + torch.Tensor: TD-target. 
+ """ + pi = self.model.pi(next_z, task)[1] + discount = self.discount[task].unsqueeze(-1) if self.cfg.multitask else self.discount + return reward + discount * self.model.Q(next_z, pi, task, return_type='min', target=True) + + def update(self, buffer): + """ + Main update function. Corresponds to one iteration of model learning. + + Args: + buffer (common.buffer.Buffer): Replay buffer. + + Returns: + dict: Dictionary of training statistics. + """ + obs, action, reward, task = buffer.sample() + + # Compute targets + with torch.no_grad(): + next_z = self.model.encode(obs[1:], task) + td_targets = self._td_target(next_z, reward, task) + + # Prepare for update + self.optim.zero_grad(set_to_none=True) + self.model.train() + + # Latent rollout + zs = torch.empty(self.cfg.horizon+1, self.cfg.batch_size, self.cfg.latent_dim, device=self.device) + z = self.model.encode(obs[0], task) + zs[0] = z + consistency_loss = 0 + for t in range(self.cfg.horizon): + z = self.model.next(z, action[t], task) + consistency_loss += F.mse_loss(z, next_z[t]) * self.cfg.rho**t + zs[t+1] = z + + # Predictions + _zs = zs[:-1] + qs = self.model.Q(_zs, action, task, return_type='all') + reward_preds = self.model.reward(_zs, action, task) + + # Compute losses + reward_loss, value_loss = 0, 0 + for t in range(self.cfg.horizon): + reward_loss += math.soft_ce(reward_preds[t], reward[t], self.cfg).mean() * self.cfg.rho**t + for q in range(self.cfg.num_q): + value_loss += math.soft_ce(qs[q][t], td_targets[t], self.cfg).mean() * self.cfg.rho**t + consistency_loss *= (1/self.cfg.horizon) + reward_loss *= (1/self.cfg.horizon) + value_loss *= (1/(self.cfg.horizon * self.cfg.num_q)) + total_loss = ( + self.cfg.consistency_coef * consistency_loss + + self.cfg.reward_coef * reward_loss + + self.cfg.value_coef * value_loss + ) + + # Update model + total_loss.backward() + grad_norm = torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.cfg.grad_clip_norm) + self.optim.step() + + # Update policy + 
pi_loss = self.update_pi(zs.detach(), task) + + # Update target Q-functions + self.model.soft_update_target_Q() + + # Return training statistics + self.model.eval() + return { + "consistency_loss": float(consistency_loss.mean().item()), + "reward_loss": float(reward_loss.mean().item()), + "value_loss": float(value_loss.mean().item()), + "pi_loss": pi_loss, + "total_loss": float(total_loss.mean().item()), + "grad_norm": float(grad_norm), + "pi_scale": float(self.scale.value), + } diff --git a/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/train.py b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/train.py new file mode 100644 index 0000000000000000000000000000000000000000..704548dcd380ac228d266cc90d49192fb66989c3 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/examples/baselines/tdmpc2/train.py @@ -0,0 +1,86 @@ +import os +os.environ['MUJOCO_GL'] = 'egl' +os.environ['LAZY_LEGACY_OP'] = '0' +import warnings +warnings.filterwarnings('ignore') +import torch + +import hydra +from termcolor import colored +from omegaconf import OmegaConf + +from common.parser import parse_cfg +from common.seed import set_seed +from common.buffer import Buffer +from envs import make_envs +from tdmpc2 import TDMPC2 +from trainer.offline_trainer import OfflineTrainer +from trainer.online_trainer import OnlineTrainer +from common.logger import Logger, print_run +import multiprocessing + +import gymnasium as gym + +torch.backends.cudnn.benchmark = True + + +@hydra.main(config_name='config', config_path='.') +def train(cfg: dict): + """ + Script for training single-task / multi-task TD-MPC2 agents. + + Most relevant args: + `task`: task name (or mt30/mt80 for multi-task training) + `model_size`: model size, must be one of `[1, 5, 19, 48, 317]` (default: 5) + `steps`: number of training/environment steps (default: 10M) + `seed`: random seed (default: 1) + + See config.yaml for a full list of args. 
+ + Example usage: + ``` + $ python train.py task=mt80 model_size=48 + $ python train.py task=mt30 model_size=317 + $ python train.py task=dog-run steps=7000000 + ``` + """ + assert torch.cuda.is_available() + assert cfg.steps > 0, 'Must train for at least 1 step.' + cfg = parse_cfg(cfg) + assert not cfg.multitask, colored('Warning: multi-task models is not currently supported for maniskill.', 'red', attrs=['bold']) + set_seed(cfg.seed) + print(colored('Work dir:', 'yellow', attrs=['bold']), cfg.work_dir) + + # Need to initiate logger before make env to wrap record episode wrapper into async vec cpu env + manager = multiprocessing.Manager() + video_path = cfg.work_dir / 'eval_video' + if cfg.save_video_local: + try: + os.makedirs(video_path) + except: + pass + logger = Logger(cfg, manager) + # Init env + env = make_envs(cfg, cfg.num_envs) + eval_env = make_envs(cfg, cfg.num_eval_envs, video_path=video_path, is_eval=True, logger=logger) + print_run(cfg) + # Init agent + agent = TDMPC2(cfg) + # Update wandb config, for control_mode, env_horizon, discount are set after logger init + if logger._wandb != None: + logger._wandb.config.update(OmegaConf.to_container(cfg, resolve=True), allow_val_change=True) + trainer_cls = OnlineTrainer # OfflineTrainer not available + trainer = trainer_cls( + cfg=cfg, + env=env, + eval_env=eval_env, + agent=agent, + buffer=Buffer(cfg), + logger=logger, + ) + trainer.train() + print('\nTraining completed successfully') + + +if __name__ == '__main__': + train() \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__init__.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..195e3e72a17ddd8b3a97bc5c2c80064eaa53c359 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__init__.py @@ -0,0 +1,46 @@ +from .assembling_kits import 
AssemblingKitsEnv +from .lift_peg_upright import LiftPegUprightEnv +from .peg_insertion_side import PegInsertionSideEnv +from .pick_clutter_ycb import PickClutterYCBEnv +from .pick_cube import PickCubeEnv +from .pick_single_ycb import PickSingleYCBEnv +from .plug_charger import PlugChargerEnv +from .pull_cube import PullCubeEnv +from .push_cube import PushCubeEnv +from .stack_cube import StackCubeEnv +from .turn_faucet import TurnFaucetEnv +from .two_robot_pick_cube import TwoRobotPickCube +from .two_robot_stack_cube import TwoRobotStackCube +from .poke_cube import PokeCubeEnv +from .place_sphere import PlaceSphereEnv +from .roll_ball import RollBallEnv +from .push_t import PushTEnv +from .pull_cube_tool import PullCubeToolEnv +from .mug_on_rack import PlaceMugOnRackEnv +from .stack_mug_on_rack import StackMugOnRackEnv +from .stack_bowl import StackBowlEnv +from .fork_from_rack import PickForkFromRackEnv +from .stack_plate_on_rack import StackPlateOnRackEnv +from .mug_on_coffee_machine import PlaceMugOnCoffeeMachineEnv +from .mug_from_coffee_machine import PickMugFromCoffeeMachineEnv +from .spoon_on_rack import PlaceSpoonOnRackEnv +from .bowl_on_rack import PlaceBowlOnRackEnv +from .bowl_on_rack_v2 import PlaceBowlOnRackEnv +from .bowl_on_rack_v3 import PlaceBowlOnRackEnv +from .bowl_on_rack_v4 import PlaceBowlOnRackEnv +from .plate_on_rack import PlacePlateOnRackEnv +from .plate_on_rack_v2 import PlacePlateOnRackEnv +from .plate_on_rack_v3 import PlacePlateOnRackEnv +from .plate_on_rack_v4 import PlacePlateOnRackEnv +from .fork_on_rack import PlaceForkOnRackEnv +from .fork_on_rack_v2 import PlaceForkOnRackEnv +from .fork_on_rack_v3 import PlaceForkOnRackEnv +from .fork_on_rack_v4 import PlaceForkOnRackEnv +from .knife_on_rack import PlaceKnifeOnRackEnv +from .knife_on_rack_v2 import PlaceKnifeOnRackEnv +from .knife_on_rack_v3 import PlaceKnifeOnRackEnv +from .knife_on_rack_v4 import PlaceKnifeOnRackEnv +from .grasp_fork_v0 import GraspForkEnv +from .grasp_bowl_v0 
import GraspBowlEnv +from .grasp_plate_v0 import GraspPlateEnv +from .grasp_cup_v0 import GraspCupEnv \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/assembling_kits.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/assembling_kits.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e9a266b2916827c64fc1ef083d3b8d4b3e246647 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/assembling_kits.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/bowl_on_rack_v4.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/bowl_on_rack_v4.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d702563ad3158ea32952b2b1e8274da0fd5dc67f Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/bowl_on_rack_v4.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_from_rack.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_from_rack.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bee2c1878e35e3b5764f9d3fc313a113084aec26 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_from_rack.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_on_rack.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_on_rack.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..11c1b5ab9511366afdffe4c70996eca4ab6a470f Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_on_rack.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_on_rack_v3.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_on_rack_v3.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f745c853350e2956c927f581139b85b0365c02bc Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_on_rack_v3.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_on_rack_v4.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_on_rack_v4.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c3d5afdb95b723be1753f43fc6eb6a8587827a9 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/fork_on_rack_v4.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/grasp_bowl_v0.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/grasp_bowl_v0.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3acb4caf153587d07003dd5a30debab918582cb2 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/grasp_bowl_v0.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/grasp_cup_v0.cpython-310.pyc 
b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/grasp_cup_v0.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bca01d7277fdb0eb9ec4c2204602eb2d7c3ebe7d Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/grasp_cup_v0.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/grasp_plate_v0.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/grasp_plate_v0.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..abfebe598f11d80138dbd5b66f276726504a27a2 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/grasp_plate_v0.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa295ae9a0697839f269cc7e62ed515a511b74b9 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack_v2.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack_v2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b8d6d750ed0b366d759dd72eb7f1d8f4f9c57df5 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack_v2.cpython-310.pyc differ diff --git 
a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack_v3.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack_v3.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7fece27023e741acd43ecbd80b568662c401b041 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack_v3.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack_v4.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack_v4.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8ac70d73fd9cdd7665c24ec2466b2592c57bfe06 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/knife_on_rack_v4.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/mug_from_coffee_machine.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/mug_from_coffee_machine.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a878633b6efc23db17012bed199b6c8ade18f044 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/mug_from_coffee_machine.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/mug_on_coffee_machine.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/mug_on_coffee_machine.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..695b5cd4d6e6a61e62dd1cd685c080c8de95c81c Binary files /dev/null and 
b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/mug_on_coffee_machine.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/peg_insertion_side.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/peg_insertion_side.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..50d1c375c63a77f5da4edbd9b5f415c20ec1d482 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/peg_insertion_side.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/place_sphere.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/place_sphere.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..76903124996572c75599ca3bac67d55d6856ce1b Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/place_sphere.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/plate_on_rack_v2.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/plate_on_rack_v2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2eb268df3cfffcac0a701c98482de48c5b00c36 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/plate_on_rack_v2.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/plate_on_rack_v4.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/plate_on_rack_v4.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..78e57878af9649ab1d26a71471b3eb349fd7e0f4 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/plate_on_rack_v4.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/plug_charger.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/plug_charger.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6fff10b28fbf9ae95be0cdd2ee4c93d9161e5b3f Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/plug_charger.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/pull_cube_tool.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/pull_cube_tool.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d5a268fb808582eef53404ee6dcb408557f0f292 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/pull_cube_tool.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/spoon_on_rack.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/spoon_on_rack.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..47f0a97e5fcf74473416238ce91aa3c9394c20c9 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/spoon_on_rack.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/stack_bowl.cpython-310.pyc 
b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/stack_bowl.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..52aa322e659bf074ee629ce4c72fccf577fe3869 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/stack_bowl.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/stack_plate_on_rack.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/stack_plate_on_rack.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..394dce72f505d66ab35ce7eb1900b837f038c2a5 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/stack_plate_on_rack.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/turn_faucet.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/turn_faucet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c47ccd38c5322418f4c44e9df1ada97da27dc30a Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/turn_faucet.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/two_robot_pick_cube.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/two_robot_pick_cube.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53c7e356ae6022515ceadd1d88bd9c326af6bae9 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/two_robot_pick_cube.cpython-310.pyc differ diff --git 
a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/two_robot_stack_cube.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/two_robot_stack_cube.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b48e4a418635ab78b76e98ba53041e8ab9c10760 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/__pycache__/two_robot_stack_cube.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/bowl_on_rack_v4.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/bowl_on_rack_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..45158b6bdd85deb0bdf68eeed037c1add8592f79 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/bowl_on_rack_v4.py @@ -0,0 +1,583 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random +from transforms3d.euler import euler2quat + +from mani_skill.agents.robots import Fetch, Panda, Piper, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb + +import os + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. 
+ q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + + + +@register_env("PlaceBowlOnRack-v4", max_episode_steps=500) +class PlaceBowlOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a bowl vertically on a dish rack. + + **Randomizations:** + - The bowl's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat on the table. + + **Success Conditions:** + - The bowl is placed vertically on the dish rack. 
+ - The bowl is placed in the surface area of the disk rack + - The bowl does not bounce off the rack + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacebowlOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["piper", "panda", "fetch", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, Piper, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + def __init__(self, *args, robot_uids="piper", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.stabilization_steps = 25 # number of steps to stabilize the bowl after placement + self.stabilization_counter = 0 + self.prev_bowl_pose = None # previous bowl pose + # self.max_reward = 6.0 + self.max_reward = 3.0 + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + # import pdb; pdb.set_trace() + if self.robot_uids == "panda": + # PANDA CAMERA + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25], [-1, 0, 0]) + elif self.robot_uids == "piper": + # PIPER CAMERA + # pose = look_at([1, -1, 1], [0.0, 0.0, 0.25],[-1,0,0]) + pose = look_at([1, 0, 1], [0.0, 0.0, 0.25], [-1, 0, 0]) + + elif self.robot_uids in ["noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + + # import pdb; pdb.set_trace() + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", 
"noahbiarm_r","noahbiarm_rc","noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + elif self.robot_uids in ["piper"]: + return sapien.Pose(p=[-0.35, 0, 0]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp", + ) + + self.table_scene.scene.sim_config.scene_config.solver_position_iterations = 25 + self.table_scene.build() + + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + + # init bowl + self.bowl = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_bowl.obj"), + scale=(1, 1, 1), + color=np.array([0, 0, 255, 255], dtype=np.float32) / 255, + name="bowl", + body_type="dynamic", + add_collision=True, + initial_pose=sapien.Pose(p=[0, 0, 0], q=[1, 0, 0, 0]), + ) + self.bowl_extents = get_actor_obb(self.bowl).extents + + # init plate + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate_extents = get_actor_obb(self.plate).extents + + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([255, 0, 0, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = 
get_actor_obb(self.fork).extents + + # init knife + self.knife = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_knife.obj", + scale=(1,1,1), + color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + name="knife", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.knife_extents = get_actor_obb(self.knife).extents + + # # init mug + # self.mug = actors.build_nonconvex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_mug.obj", + # scale=(1,1,1), + # color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + # name="mug", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0, 0, 0]), + # ) + # self.mug_extents = get_actor_obb(self.mug).extents + + # init goalsite + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/4 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[+self.rack_extents[0]*9.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/4 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam", "piper"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + 
################################################################################# + # rack pose initialize + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.20 # 0.20 - 0.30 + y = -side * torch.ones((b, 1)) * 0.175 # 0.15 -> 0.20 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + y = -side * torch.ones((b, 1)) * 0.20 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:,0] = -np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.20 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.175 + y_rand = torch.empty(b, 1).uniform_(-0.025, 0.025) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + y = -side * torch.ones((b, 1)) * 0.20 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:,0] = -np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = -np.pi + torch.empty(b, 1).uniform_(-np.pi/9, np.pi/9) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = -np.pi + torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + ################################################################################## + # bowl pose initialize + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.45 + z = torch.zeros((b, 1)) + 
self.bowl_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.45 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + ## default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.bowl.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # init plate + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.60 + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + + # ################################################################################## + # init fork + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.45 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + # ################################################################################## + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.75 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + 
p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + self.knife.set_pose(Pose.create_from_pq(p=p, q=q)) + + + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + # PANDA BOWL POSITION + if self.robot_uids == "panda": + xyz = torch.rand((b, 3), device=self.device) * 0.2 - 0.1 + # PIPER BOWL POSITION + if self.robot_uids == "piper": + xyz = torch.rand((b, 3), device=self.device) * 0.1 # - 0.05 + xyz[:, 2] = 0 + # randomize bowl position + self.bowl.set_pose( + Pose.create_from_pq( + xyz, + torch.tensor([1, 0, 0, 0], device=self.device).repeat(b, 1), + ) + ) + # piper rack pose + if self.robot_uids == "piper": + self.rack.set_pose( + Pose.create_from_pq( + torch.tensor([0.2, 0.2, 0], device=self.device).repeat(b, 1), + torch.tensor( + euler2quat(0, 0, np.pi), device=self.device + ).repeat(b, 1), + ) + ) + # panda rack pose + elif self.robot_uids == "panda": + self.rack.set_pose( + Pose.create_from_pq( + torch.tensor([0, 0.5, 0], device=self.device).repeat(b, 1), + torch.tensor( + euler2quat(0, 0, np.pi), device=self.device + ).repeat(b, 1), + ) + ) + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). 
+ extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + BOWL_D = self.bowl_extents[0] + goal_extents = torch.from_numpy(self.goal_extents.copy()) + goal_p, _ = self.get_goal_site_pose() + is_bowl_placed = ( + self.is_inside(self.bowl.pose.p, goal_p , goal_extents) + ) + + euler = rotation_conversions.quaternion_to_euler(self.bowl.pose.q) + is_bowl_vertical = ( + ((torch.abs(torch.abs(euler[:, 1])) - np.pi/2) <= np.pi/4 )& + ((torch.abs(torch.abs(euler[:, 2])) - np.pi/2) <= np.pi/4 ) + ) + + not_grasping = torch.logical_not(self.agent.is_grasping(self.bowl)) + + # plate is static + is_bowl_static = self.bowl.is_static() + + + return { + "success": is_bowl_placed & is_bowl_vertical & not_grasping & is_bowl_static + } + + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.bowl.pose.raw_pose, + rack_pose = self.rack.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between bowl direction vectors + # bowl center of mass to edge of bowl, (1,0,0), rotated by bowl pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.bowl.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs sin ce (0,-1,0) is also valid, 
values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # Position reward using improved distance metric + if self.robot_uids == "piper": + # Calculate distance to ideal placement position + target_pos = torch.clone(self.rack.pose.p) + target_pos[:, 0] = self.rack.pose.p[:, 0] / 2 + 0.02 # Adjust for center of rack + dist_to_target = torch.linalg.norm(self.bowl.pose.p - target_pos, dim=1) + pos_rew = 1 - torch.tanh(3 * dist_to_target) + reward += pos_rew + else: + # Distance to rack for panda + target_pos = torch.clone(self.rack.pose.p) + target_pos[:, 1] -= 0.3 # Adjust for position on rack + dist_to_target = torch.linalg.norm(self.bowl.pose.p - target_pos, dim=1) + pos_rew = 1 - torch.tanh(3 * dist_to_target) + reward += pos_rew + + # Height reward - encourage lifting bowl to appropriate height + ideal_height = self.rack.pose.p[:, 2] + 0.05 # Slightly above rack + height_diff = torch.abs(self.bowl.pose.p[:, 2] - ideal_height) + height_rew = 1 - torch.tanh(5 * height_diff) + reward += height_rew * 0.5 # Weight height reward + + # Grasping reward - encourage proper grasping before moving + to_grip_vec = self.bowl.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + + # Improved grasping reward + is_grasping = self.agent.is_grasping(self.bowl) + # Only reward reaching if not grasping + reaching_rew = torch.where(is_grasping, torch.ones_like(reaching_rew), reaching_rew / 5) + reward += reaching_rew * 0.3 # Weight reaching reward + # Stability reward - encourage gentle placement + if self.prev_bowl_pose is not None: + vel_diff = torch.linalg.norm(self.bowl.pose.p - self.prev_bowl_pose[:, :3], dim=1) + stability_rew = 1 - torch.tanh(10 * vel_diff) + # Only apply stability reward when bowl is close to rack + if self.robot_uids == "piper": + near_rack = torch.abs(self.bowl.pose.p[:, 1] - self.rack.pose.p[:, 1]) < 0.2 + else: + near_rack = 
torch.abs(self.bowl.pose.p[:, 1] - (self.rack.pose.p[:, 1] - 0.3)) < 0.15 + stability_rew = torch.where(near_rack, stability_rew, torch.zeros_like(stability_rew)) + reward += stability_rew * 0.5 # Weight stability reward + + # Penalize excessive force/motion + if action is not None: + action_norm = torch.linalg.norm(action, dim=1) + smoothness_rew = -torch.tanh(action_norm - 0.5) * 0.2 + reward += smoothness_rew + + # Success reward + is_success = info.get("success", False) + is_bowl_vertical = info.get("is_bowl_vertical", False) + close_to_rack = info.get("close_to_rack", False) + + # Staged success rewards + if close_to_rack: + reward += 0.5 + if is_bowl_vertical: + reward += 0.5 + if is_success: + reward += 3.0 + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + # z_dist = torch.abs(self.bowl.pose.p[:, 2] - self.rack.pose.p[:, 2]) + # reward += 1 - torch.tanh(5 * z_dist) + + # # small reward to motivate initial reaching + # # initially, we want to reach and grip the bowl + # to_grip_vec = self.bowl.pose.p - self.agent.tcp.pose.p + # to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + # reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # # reaching reward granted if gripping the bowl + # reaching_rew[self.agent.is_grasping(self.bowl)] = 1 + # # weight reaching reward less + # reaching_rew = reaching_rew / 5 + # reward += reaching_rew + # Staged success rewards + if close_to_rack: + reward += 0.5 + if is_bowl_vertical: + reward += 0.5 + if is_success: + reward += 3.0 + # reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + # max_reward = 3.0 + + return self.compute_dense_reward(obs=obs, action=action, info=info) / self.max_reward diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_from_rack.py 
b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_from_rack.py new file mode 100644 index 0000000000000000000000000000000000000000..4b05bbaed312cab4fced68e0549bd50c87b14f08 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_from_rack.py @@ -0,0 +1,554 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + + +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. 
+ q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PickForkFromRack-v1", max_episode_steps=500) +class PickForkFromRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a fork vertically on a dish rack. + + **Randomizations:** + - lvl 0: fixed (R,t) for fork and rack + - lvl 1: + variable t for fork + - lvl 2: + variable R for fork + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The fork is placed vertically on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + self.fork_tip = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 0, 0, 0], + name="fork_tip", + body_type="kinematic", + add_collision=False, + 
            initial_pose=sapien.Pose(p=[0, self.fork_extents[1]/2, 0]),
        )
        # Invisible marker (alpha 0) for the fork's tail end; kinematic and
        # collision-free, used only for pose bookkeeping via get_fork_tail_pose().
        self.fork_tail = actors.build_cube(
            self.scene,
            half_size=max(self.fork_extents)/40,
            color=[1, 1, 0, 0],
            name="fork_tail",
            body_type="kinematic",
            add_collision=False,
            initial_pose=sapien.Pose(p=[0, -1*self.fork_extents[1]/3, 0]),
        )
        # init rack
        self.rack = actors.build_nonconvex_from_mesh(
            self.scene,
            mesh_path="./assets/plates/stamling_rack.obj",
            scale=(1,1,1),
            color=np.array([216, 215, 211, 255], dtype=np.float32) / 255,
            name="rack",
            body_type="dynamic",
            initial_pose=sapien.Pose(p=[0, 0, 0]),
        )
        self.rack_extents = get_actor_obb(self.rack).extents
        # Invisible goal region (alpha 0). Its initial pose is later reused as a
        # rack-local offset by get_goal_site_pose() — assumes it is never re-posed.
        self.goal_site = actors.build_box(
            self.scene,
            half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2],
            color=[0, 1, 0, 0],
            name="goal_site",
            body_type="kinematic",
            add_collision=False,
            initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]),
        )
        # Full (not half) extents of the goal region.
        self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]])

        # Semi-transparent box marking where the fork should end up; evaluate()
        # tests fork containment against this site.
        self.final_site = actors.build_box(
            self.scene,
            half_sizes=[self.fork_extents[0]/2 , self.fork_extents[1]/2, self.fork_extents[2]/2],
            color=[0, 1, 0, 0.5],
            name="final_site",
            body_type="kinematic",
            add_collision=False,
            initial_pose=sapien.Pose(p=[0, 0, 0]),
        )
        self.final_extents = self.fork_extents

    def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict):
        """Episode reset for the noahbiarm robots.

        Places the rack, the fork (above the rack's goal slot) and the final
        site, with progressively more randomization as self.rand_level grows
        (see the class docstring for what each level adds).
        """
        with torch.device(self.device):
            b = len(env_idx)
            self.table_scene.initialize(env_idx)

            # @sajjad: need to get the reach parames of robot dynamically
            # get arm max reach
            max_reach = 0.3
            min_reach = 0.1
            # get arm base positions
            robot_p = self.agent.robot.pose.p
            robot_p[:, 2] = 0
            robot_p[:, 0] = -0.615
            robot_q = self.agent.robot.pose.q

            # side (+1/-1) selects which side of the table objects spawn on;
            # only randomized at the highest randomization level.
            if self.rand_level >= 5:
                side = torch.randint(0, 2, (b, 1)) * 2 - 1
            else:
                side = torch.ones((b, 1))

            #################################################################################
            # rack pose initialize
            # @sajjad: need to get the reach parames of robot dynamically
            # get arm max reach
            # NOTE(review): max_reach < min_reach here (0.28 vs 0.32) — the values look
            # swapped. The derived max_radius/min_radius are never used in this method,
            # so this is currently dead code, but reconcile before reusing it.
            max_reach = 0.28
            min_reach = 0.32

            obb = get_actor_obb(self.rack)
            max_extent = np.max(obb.extents)/2
            max_radius = max_reach - max_extent
            min_radius = min_reach + max_extent

            if self.rand_level < 3:
                # default translation
                x = torch.ones((b, 1)) * 0.30
                y = -side * torch.ones((b, 1)) * 0.2
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 3:
                # default translation plus uniform jitter in x/y
                x = torch.ones((b, 1)) * 0.35
                x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05)
                x += x_rand
                y = -side * torch.ones((b, 1)) * 0.2
                y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1)
                y += y_rand
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 4:
                # rotation
                # NOTE(review): assigning a (b, 1) tensor into euler[:, 0] (shape (b,))
                # relies on broadcasting torch rejects for b > 1 — confirm batch sizes.
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6)
                q = rotation_conversions.euler_to_quaternion(euler)

            self.rack.set_pose(Pose.create_from_pq(p=p, q=q))
            # gp, gq = self.get_goal_site_pose()
            # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq))

            ##################################################################################
            # fork pose initialize
            obb = get_actor_obb(self.fork)
            max_extent = np.max(obb.extents)/2
            max_radius = max_reach - max_extent
            min_radius = min_reach + max_extent

            if self.rand_level < 1:
                # default position: directly above the rack's goal slot
                p, gq = self.get_goal_site_pose()
                p[:, 2] = p[:, 2] + self.fork_extents[2] * 3.5

                # default rotation: fork upright, tines down
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2
                euler[:, 2] = -np.pi/2
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 1:
                # default position with small x/y jitter
                x_rand = torch.empty(b, 1).uniform_(-0.01, 0.01)
                y_rand = torch.empty(b, 1).uniform_(-0.03, 0.03)
                p, gq = self.get_goal_site_pose()
                p[:, 0] = p[:, 0] + x_rand
                p[:, 1] = p[:, 1] + y_rand
                p[:, 2] = p[:, 2] + self.fork_extents[2] * 3.5

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2
                euler[:, 2] = -np.pi/2
                q = rotation_conversions.euler_to_quaternion(euler)


            if self.rand_level >= 2:
                # rotation
                # follow zyx convention for euler
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6)
                # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                q = rotation_conversions.euler_to_quaternion(euler)
            self.fork.set_pose(Pose.create_from_pq(p=p, q=q))


            ##################################################################################
            # final site initialize
            if self.rand_level < 1:
                # default position
                x = torch.ones((b, 1)) * 0.05
                y = -1 * side * torch.ones((b, 1)) * 0.5
                z = torch.zeros((b, 1)) + self.final_extents[2]/2
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 1:
                # default position with x/y jitter
                x = torch.ones((b, 1)) * 0.05
                x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05)
                x += x_rand
                y = -1 * side * torch.ones((b, 1)) * 0.5
                y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05)
                y += y_rand
                z = torch.zeros((b, 1))
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 2:
                # rotation
                # follow zyx convention for euler
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6)
                # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                q = rotation_conversions.euler_to_quaternion(euler)

            self.final_site.set_pose(Pose.create_from_pq(p=p, q=q))



    def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict):
        """Episode reset for the panda robots.

        Samples fork and rack poses inside an annulus around the robot base
        (radius in [min_radius, max_radius]); randomization grows with
        self.rand_level.
        """
        with torch.device(self.device):
            b = len(env_idx)
            self.table_scene.initialize(env_idx)

            # @sajjad: need to get the reach parames of robot dynamically
            # get arm max reach
            max_reach = 0.8
            min_reach = 0.2
            # get arm base positions
            robot_p = self.agent.robot.pose.p
            robot_p[:, 2] = 0
            robot_q = self.agent.robot.pose.q

            if self.rand_level >= 5:
                side = torch.randint(0, 2, (b, 1)) * 2 - 1
            else:
                side = torch.ones((b, 1))

            ##################################################################################
            # fork pose initialize
            obb = get_actor_obb(self.fork)
            max_extent = np.max(obb.extents)/2
            max_radius = max_reach - max_extent
            min_radius = min_reach + max_extent

            assert max_radius > 0
            assert min_radius > 0
            assert max_radius > min_radius

            if self.rand_level < 1:
                # default position
                x = torch.ones((b, 1)) * 0.5
                y = side * torch.ones((b, 1)) * 0.5
                z = torch.zeros((b, 1))
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 1:
                # translation: uniform over the annulus area (sqrt for area-uniform r),
                # restricted to a forward wedge of angles [pi/8, 3pi/8]
                r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2)
                theta = torch.rand(b, 1) * np.pi/4 + np.pi/8
                x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent)
                y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent)
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 2:
                # rotation
                # follow zyx convention for euler
                # NOTE(review): torch.rand((b, 1)) assigned into euler[:, 0] (shape (b,))
                # — same broadcast concern as above; confirm batch sizes.
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                q = rotation_conversions.euler_to_quaternion(euler)
            self.fork.set_pose(Pose.create_from_pq(p=p, q=q))

            ##################################################################################
            # rack pose initialize
            obb = get_actor_obb(self.rack)
            max_extent = np.max(obb.extents)/2
            max_radius = max_reach - max_extent
            min_radius = min_reach + max_extent

            assert max_radius > 0
            assert min_radius > 0
            assert max_radius > min_radius

            if self.rand_level < 3:
                # default translation
                x = torch.ones((b, 1)) * 0.5
                y = -side * torch.ones((b, 1)) * 0.5
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 3:
                # translation: area-uniform radius, full circle of angles; x/y are
                # folded positive with abs() and placed opposite the fork via -side
                r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2)
                theta = torch.rand(b, 1) * 2 * np.pi
                x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent)
                y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent)
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 4:
                # rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                q = rotation_conversions.euler_to_quaternion(euler)

            self.rack.set_pose(Pose.create_from_pq(p=p, q=q))
            # gp, gq = self.get_goal_site_pose()
            # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq))
    def _initialize_episode(self, env_idx: torch.Tensor, options: dict):
        """Dispatch per-robot episode initialization based on self.robot_uids.

        NOTE(review): other SUPPORTED_ROBOTS (e.g. "fetch") fall through and
        return None without initializing anything — confirm this is intended.
        """
        if self.robot_uids in ["panda", "panda_wristcam"]:
            return self._initialize_episode_panda(env_idx, options)
        elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]:
            return self._initialize_episode_noahbiarm(env_idx, options)


    def get_fork_tip_pose(self):
        """Return (p, q) of the fork-tip marker composed with the fork's world pose.

        NOTE(review): self.fork_tip.pose is the marker actor's own (world) pose, set
        at build time, but it is used here as a fork-local offset — this assumes the
        marker is never re-posed after _load_scene; verify.
        """
        fork_p, fork_q = self.fork.pose.p, self.fork.pose.q
        relative_p = self.fork_tip.pose.p
        relative_q = self.fork_tip.pose.q
        new_tip_p = fork_p + rotate_vector(fork_q, relative_p)
        new_tip_q = quaternion_multiply(fork_q, relative_q)
        return new_tip_p, new_tip_q

    def get_fork_tail_pose(self):
        """Return (p, q) of the fork-tail marker composed with the fork's world pose.

        Same relative-offset assumption as get_fork_tip_pose().
        """
        fork_p, fork_q = self.fork.pose.p, self.fork.pose.q
        relative_p = self.fork_tail.pose.p
        relative_q = self.fork_tail.pose.q
        new_tail_p = fork_p + rotate_vector(fork_q, relative_p)
        new_tail_q = quaternion_multiply(fork_q, relative_q)
        return new_tail_p, new_tail_q

    def get_goal_site_pose(self):
        """Return the goal site's world (p, q): its build-time pose treated as a
        rack-local offset, composed with the rack's current world pose."""
        rack_p, rack_q = self.rack.pose.p, self.rack.pose.q
        relative_p = self.goal_site.pose.p
        relative_q = self.goal_site.pose.q

        new_box_p = rack_p + rotate_vector(rack_q, relative_p)
        new_box_q = quaternion_multiply(rack_q, relative_q)

        return new_box_p, new_box_q

    def is_inside(self, p1, p2, extents2):
        """
        Check if each coordinate in the batch p1 is inside the corresponding axis-aligned
        box defined by center p2 and full extents extents2 using PyTorch.

        Args:
            p1 (torch.Tensor): Batch of points, shape (batch_size, dims).
            p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims).
            extents2 (torch.Tensor): Batch of FULL box extents (edge lengths) along each
                axis, shape (batch_size, dims). A point counts as inside when it is within
                extents2/2 of the center on every axis (the division happens below).

        Returns:
            torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box.
        """
        # Compute the absolute difference between each point and its box center
        diff = torch.abs(p1 - p2)

        # Check if the difference is within the half extents along each dimension
        inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1)

        return inside

    def evaluate(self):
        """Success: fork center inside a 2x-scaled final-site box AND not grasped."""
        # final_extents is doubled, then halved again inside is_inside, so the
        # effective tolerance box equals the full final_extents around the site.
        goal_extents = torch.from_numpy(self.final_extents.copy())*2.0
        is_fork_placed = (
            self.is_inside(self.fork.pose.p, self.final_site.pose.p, goal_extents)
        )

        not_grasping = torch.logical_not(self.agent.is_grasping(self.fork))

        return {
            "success": is_fork_placed & not_grasping
        }

    def _get_obs_extra(self, info: Dict):
        """Extra observations: TCP pose always; fork pose only in state obs modes."""
        obs = dict(
            tcp_pose=self.agent.tcp.pose.raw_pose,
        )
        if self.obs_mode_struct.use_state:
            obs.update(
                obj_pose=self.fork.pose.raw_pose,
            )
        return obs

    def compute_dense_reward(self, obs: Any, action: Array, info: Dict):
        """Dense reward: orientation term + height-to-rack term + small reaching term.

        Maximum (on success) is 3.0; see compute_normalized_dense_reward.
        """
        # rotation reward as cosine similarity between direction vectors:
        # the fork's local (1,0,0) axis rotated into the world, dotted with the
        # goal orientation (0,1,0) or (0,-1,0).
        # NOTE(review): the original comments said "plate" — this block appears to be
        # adapted from a plate task; confirm (1,0,0) is the right fork axis.
        qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q)
        vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32)
        goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32)
        rot_vec = (qmats @ vec).view(-1, 3)
        # abs since (0,-1,0) is also valid, values in [0,1]
        rot_rew = (rot_vec @ goal_vec).view(-1).abs()
        reward = rot_rew

        # position reward using common maniskill distance reward pattern
        # giving reward in [0,1] for moving center of mass toward the rack
        z_dist = torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2])
        reward += 1 - torch.tanh(5 * z_dist)

        # small reward to motivate initial reaching
        # initially, we want to reach and grip the fork
        to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p
        to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1)
        reaching_rew = 1 - torch.tanh(5 * to_grip_dist)
        # reaching reward granted if gripping the fork
        reaching_rew[self.agent.is_grasping(self.fork)] = 1
        # weight reaching reward less
        reaching_rew = reaching_rew / 5
        reward += reaching_rew

        # success overrides everything with the maximum reward
        reward[info["success"]] = 3
        return reward

    def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict):
        """Dense reward scaled into [0, 1] by the success reward (3.0)."""
        max_reward = 3.0
        return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward
def quaternion_multiply(q, r):
    """Hamilton product of two quaternion batches.

    Both arguments are (B, 4) tensors in [w, x, y, z] order; the result is the
    (B, 4) batch of products q * r.
    """
    aw, ax, ay, az = q.unbind(-1)
    bw, bx, by, bz = r.unbind(-1)
    out_w = aw * bw - ax * bx - ay * by - az * bz
    out_x = aw * bx + ax * bw + ay * bz - az * by
    out_y = aw * by - ax * bz + ay * bw + az * bx
    out_z = aw * bz + ax * by - ay * bx + az * bw
    return torch.stack((out_w, out_x, out_y, out_z), dim=-1)

def rotate_vector(q, v):
    """Rotate a batch of 3-vectors by a batch of quaternions.

    q: (B, 4) tensor in [w, x, y, z] order.
    v: (B, 3) tensor of vectors.
    Returns the rotated (B, 3) tensor, computed as q * [0, v] * conj(q).
    """
    # Embed each vector as a pure quaternion (zero scalar part).
    pad = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype)
    pure = torch.cat([pad, v], dim=-1)  # shape (B, 4)

    # Quaternion conjugate: negate the vector part.
    conj = q.clone()
    conj[:, 1:] = -conj[:, 1:]

    # Sandwich product, then drop the scalar component.
    rotated = quaternion_multiply(quaternion_multiply(q, pure), conj)
    return rotated[:, 1:]
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + self.fork_tip = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 0, 0, 0], + name="fork_tip", + body_type="kinematic", + add_collision=False, + 
initial_pose=sapien.Pose(p=[0, self.fork_extents[1]/2, 0]), + ) + self.fork_tail = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 1, 0, 0], + name="fork_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.fork_extents[1]/3, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + self.final_site = actors.build_box( + self.scene, + half_sizes=[self.fork_extents[0]/2 , self.fork_extents[1]/2, self.fork_extents[2]/2], + color=[0, 1, 0, 0.5], + name="final_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.final_extents = self.fork_extents + + # init bowl + self.bowl = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_bowl.obj"), + scale=(1, 1, 1), + color=np.array([0, 0, 255, 255], dtype=np.float32) / 255, + name="bowl", + body_type="dynamic", + add_collision=True, + initial_pose=sapien.Pose(p=[0, 0, 0], q=[1, 0, 0, 0]), + ) + self.bowl_extents = get_actor_obb(self.bowl).extents + + # init plate + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + 
color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate_extents = get_actor_obb(self.plate).extents + + # init knife + self.knife = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_knife.obj", + scale=(1,1,1), + color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + name="knife", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.knife_extents = get_actor_obb(self.knife).extents + + # # init mug + # self.mug = actors.build_nonconvex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_mug.obj", + # scale=(1,1,1), + # color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + # name="mug", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0, 0, 0]), + # ) + # self.mug_extents = get_actor_obb(self.mug).extents + + # init goalsite (eg. plate site) + self.plate_half_sizes = [self.plate_extents[2] , self.plate_extents[1]/2, self.plate_extents[0]/2] + self.plate_site = actors.build_box( + self.scene, + half_sizes=self.plate_half_sizes, + color=np.array([238, 230, 18, 0], dtype=np.float32) / 255, + name="plate_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.plate_half_sizes[0]*0.5, 0, self.plate_half_sizes[2]]), + ) + self.plate_extents = np.array(self.plate_half_sizes) * 2 + + # init hole site + self.hole_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[1, 0, 0, 0], + name="hole_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.hole_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + # init bowl site + self.bowl_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/4 , 
    def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict):
        """Episode reset for the noahbiarm robots.

        Places the rack, fork and final site (randomized per self.rand_level),
        then statically arranges the distractor items: knife in the rack's hole
        slot, plate at the plate site, bowl at the bowl site.
        """
        with torch.device(self.device):
            b = len(env_idx)
            self.table_scene.initialize(env_idx)

            # @sajjad: need to get the reach parames of robot dynamically
            # get arm max reach
            max_reach = 0.3
            min_reach = 0.1
            # get arm base positions
            robot_p = self.agent.robot.pose.p
            robot_p[:, 2] = 0
            robot_p[:, 0] = -0.615
            robot_q = self.agent.robot.pose.q

            # side (+1/-1) picks the table side; randomized only at level 5.
            if self.rand_level >= 5:
                side = torch.randint(0, 2, (b, 1)) * 2 - 1
            else:
                side = torch.ones((b, 1))

            #################################################################################
            # rack pose initialize
            # @sajjad: need to get the reach parames of robot dynamically
            # get arm max reach
            # NOTE(review): max_reach < min_reach here (0.28 vs 0.32) — values look
            # swapped; the derived radii are unused in this method (dead code).
            max_reach = 0.28
            min_reach = 0.32

            obb = get_actor_obb(self.rack)
            max_extent = np.max(obb.extents)/2
            max_radius = max_reach - max_extent
            min_radius = min_reach + max_extent

            if self.rand_level < 3:
                # default translation
                x = torch.ones((b, 1)) * 0.30
                y = -side * torch.ones((b, 1)) * 0.2
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 3:
                # default translation plus uniform x/y jitter
                x = torch.ones((b, 1)) * 0.35
                x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05)
                x += x_rand
                y = -side * torch.ones((b, 1)) * 0.2
                y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1)
                y += y_rand
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 4:
                # rotation
                # NOTE(review): (b, 1) assigned into euler[:, 0] (shape (b,)) — torch
                # rejects this broadcast for b > 1; confirm intended batch sizes.
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6)
                q = rotation_conversions.euler_to_quaternion(euler)

            self.rack.set_pose(Pose.create_from_pq(p=p, q=q))
            # gp, gq = self.get_goal_site_pose()
            # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq))

            ##################################################################################
            # fork pose initialize
            obb = get_actor_obb(self.fork)
            max_extent = np.max(obb.extents)/2
            max_radius = max_reach - max_extent
            min_radius = min_reach + max_extent

            if self.rand_level < 1:
                # default position: directly above the rack's goal slot
                p, gq = self.get_goal_site_pose()
                p[:, 2] = p[:, 2] + self.fork_extents[2] * 3.5

                # default rotation: fork upright
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2
                euler[:, 2] = -np.pi/2
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 1:
                # default position with small x/y jitter
                x_rand = torch.empty(b, 1).uniform_(-0.01, 0.01)
                y_rand = torch.empty(b, 1).uniform_(-0.03, 0.03)
                p, gq = self.get_goal_site_pose()
                p[:, 0] = p[:, 0] + x_rand
                p[:, 1] = p[:, 1] + y_rand
                p[:, 2] = p[:, 2] + self.fork_extents[2] * 3.5

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2
                euler[:, 2] = -np.pi/2
                q = rotation_conversions.euler_to_quaternion(euler)


            if self.rand_level >= 2:
                # rotation
                # follow zyx convention for euler
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6)
                # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                q = rotation_conversions.euler_to_quaternion(euler)
            self.fork.set_pose(Pose.create_from_pq(p=p, q=q))


            ##################################################################################
            # final site initialize
            if self.rand_level < 1:
                # default position
                x = torch.ones((b, 1)) * 0.05
                y = -1 * side * torch.ones((b, 1)) * 0.5
                z = torch.zeros((b, 1)) + self.final_extents[2]/2
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 1:
                # default position with x/y jitter
                x = torch.ones((b, 1)) * 0.05
                x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05)
                x += x_rand
                y = -1 * side * torch.ones((b, 1)) * 0.5
                y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05)
                y += y_rand
                z = torch.zeros((b, 1))
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 2:
                # rotation
                # follow zyx convention for euler
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6)
                # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                q = rotation_conversions.euler_to_quaternion(euler)

            self.final_site.set_pose(Pose.create_from_pq(p=p, q=q))

            #################################################################################
            # init knife pose
            # init to inside hole
            p, gq = self.get_site_pose(self.hole_site)
            p[:, 2] = p[:, 2] + self.knife_extents[2] * 5
            p[:, 1] = p[:, 1] - self.knife_extents[2] * 1.5

            # default rotation
            euler = torch.zeros((b, 3), dtype=torch.float32)
            euler[:, 0] = np.pi/2
            euler[:, 2] = -np.pi/2
            q = rotation_conversions.euler_to_quaternion(euler)

            self.knife.set_pose(Pose.create_from_pq(p=p, q=q))


            # #################################################################################
            # TODO add mug
            # # init mug pose
            # p, gq = self.get_site_pose(self.mug_site)

            # # default rot
            # euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2
            # euler[:, 0] = np.pi/4
            # euler[:, 1] = 0
            # euler[:, 2] = np.pi
            # q = rotation_conversions.euler_to_quaternion(euler)

            # self.mug.set_pose(Pose.create_from_pq(p=p, q=q))

            #################################################################################
            # init plate pose
            # init to plate site, tilted to match the rack's roll angle
            # NOTE(review): rotation_conversions.quaternion_to_euler is assumed to
            # exist in this project's geometry utils — confirm the function name.
            p, gq = self.get_site_pose(self.plate_site)
            euler_rack = rotation_conversions.quaternion_to_euler(gq)
            p[:, 2] = p[:, 2] - self.plate_extents[0]/10
            # p[:, 2] = p[:, 2] - self.plate_extents[0]/10

            # default rotation
            euler = torch.zeros((b, 3), dtype=torch.float32)
            euler[:, 0] = euler_rack[:, 0]
            euler[:, 1] = -np.pi/2 - np.pi/36
            q = rotation_conversions.euler_to_quaternion(euler)

            self.plate.set_pose(Pose.create_from_pq(p=p, q=q))

            #################################################################################
            # init bowl pose
            # NOTE(review): bowl_extents here is the site-derived value that
            # _load_scene overwrote, not the bowl mesh's OBB extents.
            p, gq = self.get_site_pose(self.bowl_site)
            p[:, 2] = p[:, 2] - self.bowl_extents[2]*0.4
            # p[:, 1] = p[:, 1] - self.bowl_extents[2]

            # default rotation
            euler = torch.zeros((b, 3), dtype=torch.float32)
            # euler[:, 0] = np.pi/2
            # euler[:, 2] = -np.pi/2
            q = rotation_conversions.euler_to_quaternion(euler)

            self.bowl.set_pose(Pose.create_from_pq(p=p, q=q))



    def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict):
        """Episode reset for the panda robots: fork and rack sampled inside an
        annulus around the robot base, randomized per self.rand_level."""
        with torch.device(self.device):
            b = len(env_idx)
            self.table_scene.initialize(env_idx)

            # @sajjad: need to get the reach parames of robot dynamically
            # get arm max reach
            max_reach = 0.8
            min_reach = 0.2
            # get arm base positions
            robot_p = self.agent.robot.pose.p
            robot_p[:, 2] = 0
            robot_q = self.agent.robot.pose.q

            if self.rand_level >= 5:
                side = torch.randint(0, 2, (b, 1)) * 2 - 1
            else:
                side = torch.ones((b, 1))

            ##################################################################################
            # fork pose initialize
            obb = get_actor_obb(self.fork)
            max_extent = np.max(obb.extents)/2
            max_radius = max_reach - max_extent
            min_radius = min_reach + max_extent

            assert max_radius > 0
            assert min_radius > 0
            assert max_radius > min_radius

            if self.rand_level < 1:
                # default position
                x = torch.ones((b, 1)) * 0.5
                y = side * torch.ones((b, 1)) * 0.5
                z = torch.zeros((b, 1))
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 1:
                # translation: area-uniform radius (sqrt trick) within a forward
                # wedge of angles [pi/8, 3pi/8]
                r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2)
                theta = torch.rand(b, 1) * np.pi/4 + np.pi/8
                x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent)
                y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent)
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 2:
                # rotation
                # follow zyx convention for euler
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                q = rotation_conversions.euler_to_quaternion(euler)
            self.fork.set_pose(Pose.create_from_pq(p=p, q=q))

            ##################################################################################
            # rack pose initialize
            obb = get_actor_obb(self.rack)
            max_extent = np.max(obb.extents)/2
            max_radius = max_reach - max_extent
            min_radius = min_reach + max_extent

            assert max_radius > 0
            assert min_radius > 0
            assert max_radius > min_radius

            if self.rand_level < 3:
                # default translation
                x = torch.ones((b, 1)) * 0.5
                y = -side * torch.ones((b, 1)) * 0.5
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 3:
                # translation: area-uniform radius over the full circle; folded
                # positive and placed opposite the fork via -side
                r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2)
                theta = torch.rand(b, 1) * 2 * np.pi
                x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent)
                y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent)
                z = torch.zeros_like(x)
                xyz = torch.cat((x, y, z), dim=1)
                p = robot_p + xyz

                # default rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                q = rotation_conversions.euler_to_quaternion(euler)

            if self.rand_level >= 4:
                # rotation
                euler = torch.zeros((b, 3), dtype=torch.float32)
                euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
                q = rotation_conversions.euler_to_quaternion(euler)

            self.rack.set_pose(Pose.create_from_pq(p=p, q=q))
            # gp, gq = self.get_goal_site_pose()
            # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq))
    def _initialize_episode(self, env_idx: torch.Tensor, options: dict):
        """Dispatch per-robot episode initialization based on self.robot_uids.

        NOTE(review): other SUPPORTED_ROBOTS (e.g. "fetch") fall through and
        return None without initializing anything — confirm this is intended.
        """
        if self.robot_uids in ["panda", "panda_wristcam"]:
            return self._initialize_episode_panda(env_idx, options)
        elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]:
            return self._initialize_episode_noahbiarm(env_idx, options)


    def get_fork_tip_pose(self):
        """Return (p, q) of the fork-tip marker composed with the fork's world pose.

        NOTE(review): self.fork_tip.pose is the marker actor's own (world) pose, set
        at build time, but it is used here as a fork-local offset — assumes the marker
        is never re-posed after _load_scene; verify.
        """
        fork_p, fork_q = self.fork.pose.p, self.fork.pose.q
        relative_p = self.fork_tip.pose.p
        relative_q = self.fork_tip.pose.q
        new_tip_p = fork_p + rotate_vector(fork_q, relative_p)
        new_tip_q = quaternion_multiply(fork_q, relative_q)
        return new_tip_p, new_tip_q

    def get_fork_tail_pose(self):
        """Return (p, q) of the fork-tail marker composed with the fork's world pose.

        Same relative-offset assumption as get_fork_tip_pose().
        """
        fork_p, fork_q = self.fork.pose.p, self.fork.pose.q
        relative_p = self.fork_tail.pose.p
        relative_q = self.fork_tail.pose.q
        new_tail_p = fork_p + rotate_vector(fork_q, relative_p)
        new_tail_q = quaternion_multiply(fork_q, relative_q)
        return new_tail_p, new_tail_q

    def get_goal_site_pose(self):
        """Return the goal site's world (p, q): its build-time pose treated as a
        rack-local offset, composed with the rack's current world pose."""
        rack_p, rack_q = self.rack.pose.p, self.rack.pose.q
        relative_p = self.goal_site.pose.p
        relative_q = self.goal_site.pose.q

        new_box_p = rack_p + rotate_vector(rack_q, relative_p)
        new_box_q = quaternion_multiply(rack_q, relative_q)

        return new_box_p, new_box_q

    def get_site_pose(self, site):
        """Generalization of get_goal_site_pose: compose any site's build-time
        pose (treated as rack-local) with the rack's current world pose."""
        rack_p, rack_q = self.rack.pose.p, self.rack.pose.q
        relative_p = site.pose.p
        relative_q = site.pose.q
        new_box_p = rack_p + rotate_vector(rack_q, relative_p)
        new_box_q = quaternion_multiply(rack_q, relative_q)
        return new_box_p, new_box_q

    def is_inside(self, p1, p2, extents2):
        """
        Check if each coordinate in the batch p1 is inside the corresponding axis-aligned
        box defined by center p2 and full extents extents2 using PyTorch.

        Args:
            p1 (torch.Tensor): Batch of points, shape (batch_size, dims).
            p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims).
            extents2 (torch.Tensor): Batch of FULL box extents (edge lengths) along each
                axis, shape (batch_size, dims). A point counts as inside when it is within
                extents2/2 of the center on every axis (the division happens below).

        Returns:
            torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box.
        """
        # Compute the absolute difference between each point and its box center
        diff = torch.abs(p1 - p2)

        # Check if the difference is within the half extents along each dimension
        inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1)

        return inside
+ """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + + goal_extents = torch.from_numpy(self.final_extents.copy())*2.0 + is_fork_placed = ( + self.is_inside(self.fork.pose.p, self.final_site.pose.p, goal_extents) + ) + + not_grasping = torch.logical_not(self.agent.is_grasping(self.fork)) + + return { + "success": is_fork_placed & not_grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.fork.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward 
granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack.py new file mode 100644 index 0000000000000000000000000000000000000000..2d91b3d3fc5bda7a90c924a8798ef34f1a6deffc --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack.py @@ -0,0 +1,522 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + + +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. 
+ q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PlaceForkOnRack-v1", max_episode_steps=1000) +class PlaceForkOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a fork vertically on a dish rack. + + **Randomizations:** + - lvl 0: fixed (R,t) for fork and rack + - lvl 1: + variable t for fork + - lvl 2: + variable R for fork + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The fork is placed vertically on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + self.fork_tip = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + 
color=[1, 0, 0, 0], + name="fork_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, self.fork_extents[1]/2, 0]), + ) + self.fork_tail = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 1, 0, 0], + name="fork_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.fork_extents[1]/2, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + 
if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in 
["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.8 + min_reach = 0.2 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + 
assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.5 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if 
self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + + def get_fork_tip_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tip.pose.p + relative_q = self.fork_tip.pose.q + new_tip_p = fork_p + rotate_vector(fork_q, relative_p) + new_tip_q = quaternion_multiply(fork_q, relative_q) + return new_tip_p, new_tip_q + + def get_fork_tail_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tail.pose.p + relative_q = self.fork_tail.pose.q + new_tail_p = fork_p + rotate_vector(fork_q, relative_p) + new_tail_q = quaternion_multiply(fork_q, relative_q) + return new_tail_p, new_tail_q + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + 
+ new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + euler = rotation_conversions.quaternion_to_euler(self.fork.pose.q) + + is_fork_vertical = ( + (torch.abs(torch.abs(euler[:, 1])) - np.pi/2) <= np.pi/4 + ) + + goal_extents = torch.from_numpy(self.goal_extents) + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_fork_tip_pose() + tail_p, _ = self.get_fork_tail_pose() + is_fork_placed = ( + self.is_inside(tip_p, goal_p, goal_extents) | + self.is_inside(tail_p, goal_p, goal_extents)) + + not_grasping = torch.logical_not(self.agent.is_grasping(self.fork)) + + is_fork_static = self.fork.is_static() + + return { + "success": is_fork_vertical & is_fork_placed & is_fork_static & not_grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.fork.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of 
mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack_v2.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..891485b549d57712443dfdb193bb457056052350 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack_v2.py @@ -0,0 +1,691 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien 
+import torch +import torch.random + + +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb +import os + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PlaceForkOnRack-v2", max_episode_steps=500) +class PlaceForkOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a fork vertically on a dish rack. 
+ + **Randomizations:** + - lvl 0: fixed (R,t) for fork and rack + - lvl 1: + variable t for fork + - lvl 2: + variable R for fork + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The fork is placed vertically on the dish rack. + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc", "noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.fork = 
actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([255, 0, 0, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + self.fork_tip = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 0, 0, 0], + name="fork_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, self.fork_extents[1]/2, 0]), + ) + self.fork_tail = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 1, 0, 0], + name="fork_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.fork_extents[1]/2, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + # init bowl + self.bowl = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_bowl.obj"), + scale=(1, 1, 1), + color=np.array([0, 0, 255, 255], dtype=np.float32) / 255, + name="bowl", + body_type="dynamic", + add_collision=True, + initial_pose=sapien.Pose(p=[0, 0, 0], q=[1, 0, 0, 0]), + ) + self.bowl_extents = get_actor_obb(self.bowl).extents + + # 
init plate + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate_extents = get_actor_obb(self.plate).extents + + # init knife + self.knife = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_knife.obj", + scale=(1,1,1), + color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + name="knife", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.knife_extents = get_actor_obb(self.knife).extents + + # # init mug + # self.mug = actors.build_nonconvex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_mug.obj", + # scale=(1,1,1), + # color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + # name="mug", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0, 0, 0]), + # ) + # self.mug_extents = get_actor_obb(self.mug).extents + + # init goalsite (eg. 
plate site) + self.plate_half_sizes = [self.plate_extents[2] , self.plate_extents[1]/2, self.plate_extents[0]/2] + self.plate_site = actors.build_box( + self.scene, + half_sizes=self.plate_half_sizes, + color=np.array([238, 230, 18, 0], dtype=np.float32) / 255, + name="plate_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.plate_half_sizes[0]*0.5, 0, self.plate_half_sizes[2]]), + ) + self.plate_extents = np.array(self.plate_half_sizes) * 2 + + # init hole site + self.hole_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[1, 0, 0, 0], + name="hole_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.hole_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + # init bowl site + self.bowl_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/4 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 0, 1, 0], + name="bowl_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[+self.rack_extents[0]*9.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.bowl_extents = np.array([self.rack_extents[0]/4 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + # init mug site + self.mug_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/10 , self.rack_extents[1]/2*0.7, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="mug_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[+self.rack_extents[0]*2.85/20/2, 0, self.rack_extents[2]/2]), + ) + self.mug_extents = np.array([self.rack_extents[0]/5, self.rack_extents[1]*0.7, self.rack_extents[2]]) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + 
self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose 
initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + + ################################################################################# + # init knife pose + # init 
to inside hole + p, gq = self.get_site_pose(self.hole_site) + p[:, 2] = p[:, 2] + self.knife_extents[2] * 5 + p[:, 1] = p[:, 1] - self.knife_extents[2] * 1.5 + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + euler[:, 2] = -np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.knife.set_pose(Pose.create_from_pq(p=p, q=q)) + + + # ################################################################################# + # TODO add mug + # # init mug pose + # p, gq = self.get_site_pose(self.mug_site) + + # # default rot + # euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + # euler[:, 0] = np.pi/4 + # euler[:, 1] = 0 + # euler[:, 2] = np.pi + # q = rotation_conversions.euler_to_quaternion(euler) + + # self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################# + # init plate pose + # init to plate site + p, gq = self.get_site_pose(self.plate_site) + euler_rack = rotation_conversions.quaternion_to_euler(gq) + p[:, 2] = p[:, 2] - self.plate_extents[0]/10 + # p[:, 2] = p[:, 2] - self.plate_extents[0]/10 + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = euler_rack[:, 0] + euler[:, 1] = -np.pi/2 - np.pi/36 + q = rotation_conversions.euler_to_quaternion(euler) + + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################# + # init bowl pose + p, gq = self.get_site_pose(self.bowl_site) + p[:, 2] = p[:, 2] - self.bowl_extents[2]*0.4 + # p[:, 1] = p[:, 1] - self.bowl_extents[2] + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + # euler[:, 0] = np.pi/2 + # euler[:, 2] = -np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.bowl.set_pose(Pose.create_from_pq(p=p, q=q)) + + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with 
torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.8 + min_reach = 0.2 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + 
self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.5 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc","noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + + def get_fork_tip_pose(self): + 
fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tip.pose.p + relative_q = self.fork_tip.pose.q + new_tip_p = fork_p + rotate_vector(fork_q, relative_p) + new_tip_q = quaternion_multiply(fork_q, relative_q) + return new_tip_p, new_tip_q + + def get_fork_tail_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tail.pose.p + relative_q = self.fork_tail.pose.q + new_tail_p = fork_p + rotate_vector(fork_q, relative_p) + new_tail_q = quaternion_multiply(fork_q, relative_q) + return new_tail_p, new_tail_q + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def get_site_pose(self, site): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = site.pose.p + relative_q = site.pose.q + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and full extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of full extents along each axis (halved internally), shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. 
+ """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + euler = rotation_conversions.quaternion_to_euler(self.fork.pose.q) + + is_fork_vertical = ( + torch.abs(torch.abs(euler[:, 1]) - np.pi/2) <= np.pi/4 + ) + + goal_extents = torch.from_numpy(self.goal_extents) + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_fork_tip_pose() + tail_p, _ = self.get_fork_tail_pose() + is_fork_placed = ( + self.is_inside(tip_p, goal_p, goal_extents) | + self.is_inside(tail_p, goal_p, goal_extents)) + + not_grasping = torch.logical_not(self.agent.is_grasping(self.fork)) + + is_fork_static = self.fork.is_static() + + return { + "success": is_fork_vertical & is_fork_placed & is_fork_static & not_grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.fork.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = 
torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack_v3.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..45103c2e5921791799d91e14014b6eb0b0534565 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack_v3.py @@ -0,0 +1,523 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + + +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array +from 
mani_skill.examples.motionplanning.noahbiarm.utils import get_3d_bbox, get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PlaceForkOnRack-v3", max_episode_steps=500) +class PlaceForkOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a fork vertically on a dish rack. + + **Randomizations:** + - lvl 0: fixed (R,t) for fork and rack + - lvl 1: + variable t for fork + - lvl 2: + variable R for fork + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The fork is placed vertically on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([255, 0, 0, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + self.fork_tip = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 
0, 0, 0], + name="fork_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, self.fork_extents[1]/2, 0]), + ) + self.fork_tail = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 1, 0, 0], + name="fork_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.fork_extents[1]/2, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if 
self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in 
["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.8 + min_reach = 0.2 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + 
assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.5 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if 
self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + + def get_fork_tip_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tip.pose.p + relative_q = self.fork_tip.pose.q + new_tip_p = fork_p + rotate_vector(fork_q, relative_p) + new_tip_q = quaternion_multiply(fork_q, relative_q) + return new_tip_p, new_tip_q + + def get_fork_tail_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tail.pose.p + relative_q = self.fork_tail.pose.q + new_tail_p = fork_p + rotate_vector(fork_q, relative_p) + new_tail_q = quaternion_multiply(fork_q, relative_q) + return new_tail_p, new_tail_q + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + 
+ new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and full extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of full extents along each axis (halved internally), shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + euler = rotation_conversions.quaternion_to_euler(self.fork.pose.q) + + is_fork_vertical = ( + torch.abs(torch.abs(euler[:, 1]) - np.pi/2) <= np.pi/4 + ) + + goal_extents = torch.from_numpy(self.goal_extents) + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_fork_tip_pose() + tail_p, _ = self.get_fork_tail_pose() + is_fork_placed = ( + self.is_inside(tip_p, goal_p, goal_extents) | + self.is_inside(tail_p, goal_p, goal_extents)) + + not_grasping = torch.logical_not(self.agent.is_grasping(self.fork)) + + is_fork_static = self.fork.is_static() + + return { + "success": is_fork_vertical & is_fork_placed & is_fork_static & not_grasping + } + + def _get_obs_extra(self, info: Dict): + fork_bb = get_3d_bbox(self.fork) + rack_bb = get_3d_bbox(self.rack) + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + obj_pose=self.fork.pose.raw_pose, + rack_pose=self.rack.pose.raw_pose, + obj_bb=fork_bb, + rack_bb=rack_bb, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # 
rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack_v4.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..cd8f0940cb76755808f52507af3aef18a68c6af2 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/fork_on_rack_v4.py @@ 
-0,0 +1,619 @@ +from typing import Any, Dict, Union + +import os +import numpy as np +import sapien +import torch +import torch.random + + +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. 
+ q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PlaceForkOnRack-v4", max_episode_steps=500) +class PlaceForkOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a fork vertically on a dish rack. + + **Randomizations:** + - lvl 0: fixed (R,t) for fork and rack + - lvl 1: + variable t for fork + - lvl 2: + variable R for fork + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The fork is placed vertically on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([255, 0, 0, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + 
self.fork_tip = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 0, 0, 0], + name="fork_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, self.fork_extents[1]/2, 0]), + ) + self.fork_tail = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 1, 0, 0], + name="fork_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.fork_extents[1]/2, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + # init bowl + self.bowl = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_bowl.obj"), + scale=(1, 1, 1), + color=np.array([0, 0, 255, 255], dtype=np.float32) / 255, + name="bowl", + body_type="dynamic", + add_collision=True, + initial_pose=sapien.Pose(p=[0, 0, 0], q=[1, 0, 0, 0]), + ) + self.bowl_extents = get_actor_obb(self.bowl).extents + + # init plate + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + 
) + self.plate_extents = get_actor_obb(self.plate).extents + + # init knife + self.knife = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_knife.obj", + scale=(1,1,1), + color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + name="knife", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.knife_extents = get_actor_obb(self.knife).extents + + # # init mug + # self.mug = actors.build_nonconvex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_mug.obj", + # scale=(1,1,1), + # color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + # name="mug", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0, 0, 0]), + # ) + # self.mug_extents = get_actor_obb(self.mug).extents + + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 
1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 
1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + # ################################################################################## + # init knife + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.75 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + self.knife.set_pose(Pose.create_from_pq(p=p, q=q)) + + # ################################################################################## + # init bowl + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.40 + z = torch.zeros((b, 1)) + self.bowl_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + self.bowl.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################# + # plate pose initialization + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 
0.60 + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + + + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.8 + min_reach = 0.2 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = 
rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.5 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + + 
def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + + def get_fork_tip_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tip.pose.p + relative_q = self.fork_tip.pose.q + new_tip_p = fork_p + rotate_vector(fork_q, relative_p) + new_tip_q = quaternion_multiply(fork_q, relative_q) + return new_tip_p, new_tip_q + + def get_fork_tail_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tail.pose.p + relative_q = self.fork_tail.pose.q + new_tail_p = fork_p + rotate_vector(fork_q, relative_p) + new_tail_q = quaternion_multiply(fork_q, relative_q) + return new_tail_p, new_tail_q + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. 
+ """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + euler = rotation_conversions.quaternion_to_euler(self.fork.pose.q) + + is_fork_vertical = ( + (torch.abs(torch.abs(euler[:, 1])) - np.pi/2) <= np.pi/4 + ) + + goal_extents = torch.from_numpy(self.goal_extents) + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_fork_tip_pose() + tail_p, _ = self.get_fork_tail_pose() + is_fork_placed = ( + self.is_inside(tip_p, goal_p, goal_extents) | + self.is_inside(tail_p, goal_p, goal_extents)) + + not_grasping = torch.logical_not(self.agent.is_grasping(self.fork)) + + is_fork_static = self.fork.is_static() + + return { + "success": is_fork_vertical & is_fork_placed & is_fork_static & not_grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.fork.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = 
torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/grasp_cup_v0.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/grasp_cup_v0.py new file mode 100644 index 0000000000000000000000000000000000000000..5ca236ab338b0e22e889210305906859791c7400 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/grasp_cup_v0.py @@ -0,0 +1,482 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + +from mani_skill import PACKAGE_ASSET_DIR +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from 
mani_skill.examples.motionplanning.panda.utils import get_actor_obb +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + +@register_env("GraspCup-v0", max_episode_steps=50) +class GraspCupEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a mug up-right on the rack + + ***Randomizations:** + - lvl 0: fixed (R,t) for mug and rack + - lvl 1: + variable t for mug + - lvl 2: + variable R for mug + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The mug is placed up right on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.5, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[1.2, 0, 1.4], target=[0.0, 0.0, 0.25]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + # PANDA CAMERA + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25], [-1, 0, 0]) + elif self.robot_uids == "piper": + # PIPER CAMERA + # pose = look_at([1, -1, 1], [0.0, 0.0, 0.25],[-1,0,0]) + pose = look_at([1, 0, 1], [0.0, 0.0, 0.25], [-1, 0, 0]) + + elif self.robot_uids in ["noahbiarm_r", "noahbiarm_rc","noahbiarm_rcw"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + + # import pdb; pdb.set_trace() + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[0, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # init mug + self.mug = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=str(PACKAGE_ASSET_DIR / "plates/kalas_mug.obj"), + 
scale=(1,1,1), + color=np.array([225, 0, 225, 255], dtype=np.float32) / 255, + name="mug", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.mug_extents = get_actor_obb(self.mug).extents + self.mug_tip = actors.build_cube( + self.scene, + half_size=max(self.mug_extents)/40, + color=[1, 0, 0, 1], + name="mug_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, 0, -self.mug_extents[2]]), + ) + + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + + # init goal site + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/10 , self.rack_extents[1]/2*0.7, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[+self.rack_extents[0]*2.85/20/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/5, self.rack_extents[1]*0.7, self.rack_extents[2]]) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + 
################################################################################## + # mug pose initialize + max_reach = 0.3 + min_reach = 0.1 + max_extent = np.max(self.mug_extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.25 + y = -1 * side * torch.ones((b, 1)) * 0.55 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.25 + x_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.55 + y_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.25 + y = side * torch.ones((b, 1)) * 0.55 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotations + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default 
translation + x = torch.ones((b, 1)) * 0.25 + x_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + x += x_rand + y = side * torch.ones((b, 1)) * 0.55 + y_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # mug pose initialize + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.25 + y = -1 * side * torch.ones((b, 1)) * 0.55 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + euler[:, 1] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.25 + x_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.55 + y_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + y += y_rand + z = torch.zeros((b, 1)) 
+ xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + euler[:, 1] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + euler[:, 1] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.25 + y = side * torch.ones((b, 1)) * 0.55 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotations + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.25 + x_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + x += x_rand + y = side * torch.ones((b, 1)) * 0.55 + y_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + def get_mug_tip_pose(self): + mug_p, mug_q = self.mug.pose.p, self.mug.pose.q + 
relative_p = self.mug_tip.pose.p + relative_q = self.mug_tip.pose.q + new_tip_p = mug_p + rotate_vector(mug_q, relative_p) + new_tip_q = quaternion_multiply(mug_q, relative_q) + return new_tip_p, new_tip_q + + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. 
+ """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2/2, dim=1) + + return inside + + + def evaluate(self): + # check rotation + euler = rotation_conversions.quaternion_to_euler(self.mug.pose.q) + is_mug_up = (torch.abs(euler[:, 1] - np.pi) < np.pi/20) & (torch.abs(euler[:, 2] - np.pi) = 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.25 + y = -1 * side * torch.ones((b, 1)) * 0.55 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.25 + x_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.55 + y_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + + if 
self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.25 + y = side * torch.ones((b, 1)) * 0.55 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotations + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.25 + x_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + x += x_rand + y = side * torch.ones((b, 1)) * 0.55 + y_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - 
max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 
3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + def get_fork_tip_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tip.pose.p + relative_q = self.fork_tip.pose.q + new_tip_p = fork_p + rotate_vector(fork_q, relative_p) + new_tip_q = quaternion_multiply(fork_q, relative_q) + return new_tip_p, new_tip_q + + def get_fork_tail_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tail.pose.p + relative_q = self.fork_tail.pose.q + new_tail_p = fork_p + rotate_vector(fork_q, relative_p) + new_tail_q = quaternion_multiply(fork_q, relative_q) + return new_tail_p, new_tail_q + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q 
= self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + euler = rotation_conversions.quaternion_to_euler(self.fork.pose.q) + + is_fork_vertical = ( + (torch.abs(torch.abs(euler[:, 1])) - np.pi/2) <= np.pi/4 + ) + + goal_extents = torch.from_numpy(self.goal_extents) + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_fork_tip_pose() + tail_p, _ = self.get_fork_tail_pose() + is_fork_placed = ( + self.is_inside(tip_p, goal_p, goal_extents) | + self.is_inside(tail_p, goal_p, goal_extents)) + + grasping = self.agent.is_grasping(self.fork) + not_grasping = torch.logical_not(grasping) + + is_fork_static = self.fork.is_static() + + return { + "success": grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + goal_pose=self.fork.pose.raw_pose, + goal_pos=self.fork.pose.p, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), 
rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/grasp_plate_v0.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/grasp_plate_v0.py new file mode 100644 index 0000000000000000000000000000000000000000..4eefd004190b9f6729dee610fa4d780d0bde2313 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/grasp_plate_v0.py @@ -0,0 +1,469 @@ +from typing import Any, Dict, Union +import os +import numpy as np +import sapien +import torch +import 
torch.random +from transforms3d.euler import euler2quat,quat2euler + +from mani_skill import PACKAGE_ASSET_DIR +from mani_skill.agents.robots import Fetch, Panda, Piper, PandaWristCam, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array +from mani_skill.examples.motionplanning.noahbiarm.utils import get_3d_bbox, get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. 
+ q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("GraspPlate-v0", max_episode_steps=500) +class GraspPlateEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a plate vertically on a dish rack. + + **Randomizations:** + - The plate's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat on the table. + + **Success Conditions:** + - The plate is placed vertically on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "piper", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, Piper, PandaWristCam, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + plate_radius = 0.1 + plate_thickness = 0.01 + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.5, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[1.2, 0, 1.4], target=[0.0, 0.0, 0.25]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + # PANDA CAMERA + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25], [-1, 0, 0]) + elif self.robot_uids == "piper": + # PIPER CAMERA + # pose = look_at([1, -1, 1], [0.0, 0.0, 0.25],[-1,0,0]) + pose = look_at([1, 0, 1], [0.0, 0.0, 0.25], [-1, 0, 0]) + + elif self.robot_uids in ["noahbiarm_r", "noahbiarm_rc","noahbiarm_rcw"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + + # import pdb; pdb.set_trace() + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[0, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + elif self.robot_uids in ["piper"]: + return sapien.Pose(p=[-0.35, 0, 0]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, 
options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate_extents = get_actor_obb(self.plate).extents + + + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=str(PACKAGE_ASSET_DIR / "plates/stamling_rack.obj"), + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + + # init goalsite + self.goal_half_sizes = [self.plate_extents[2] , self.plate_extents[1]/2, self.plate_extents[0]/2] + self.goal_site = actors.build_box( + self.scene, + half_sizes=self.goal_half_sizes, + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.goal_half_sizes[0]*0.5, 0, self.goal_half_sizes[2]]), + ) + self.goal_extents = np.array(self.goal_half_sizes) * 2 + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam", "piper"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = 
self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################# + # plate pose initialization + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.00 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.00 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.2 + x_rand = 
torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + torch.empty(b, 1).uniform_(-np.pi/9, np.pi/9) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.25 + y = -1 * side * torch.ones((b, 1)) * 0.55 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.25 + x_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.55 + y_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default 
rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.25 + y = side * torch.ones((b, 1)) * 0.55 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotations + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.25 + x_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + x += x_rand + y = side * torch.ones((b, 1)) * 0.55 + y_rand = torch.empty(b, 1).uniform_(-0.15, 0.15) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + 
relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + #plate is vertical + q = self.plate.pose.q + qmat = rotation_conversions.quaternion_to_matrix(q) + euler = rotation_conversions.matrix_to_euler_angles(qmat, "XYZ") + is_plate_vertical = ( + torch.abs(torch.abs(euler[:, 1])- np.pi/2) < np.pi/8 + or torch.abs(torch.abs(euler[:, 0])- np.pi/2) < np.pi/8 + ) + + # plate is placed + goal_extents = torch.from_numpy(self.goal_extents.copy()) * 0.5 + goal_p, _ = self.get_goal_site_pose() + is_plate_placed = ( + self.is_inside(self.plate.pose.p, goal_p , goal_extents) + ) + + # not grasping + grasping = self.agent.is_grasping(self.plate) + not_grasping = torch.logical_not(grasping) + + # plate is static + is_plate_static = self.plate.is_static() + + + return { + "success": grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + goal_pose=self.plate.pose.raw_pose, + goal_pos=self.plate.pose.p, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: 
Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.plate.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.plate.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the plate + to_grip_vec = self.plate.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the plate + reaching_rew[self.agent.is_grasping(self.plate)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack.py new file mode 100644 index 0000000000000000000000000000000000000000..0add5fef36d10dc5505d13e65bb8d34358daabcb --- /dev/null +++ 
b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack.py @@ -0,0 +1,520 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + + +from mani_skill.agents.robots import Fetch, Panda, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. 
+ q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PlaceKnifeOnRack-v1", max_episode_steps=500) +class PlaceKnifeOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a knife vertically on a dish rack. + + **Randomizations:** + - lvl 0: fixed (R,t) for knife and rack + - lvl 1: + variable t for knife + - lvl 2: + variable R for knife + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The fork is placed vertically on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc","noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_knife.obj", + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + self.fork_tip = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 0, 0, 0], + name="fork_tip", + 
body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, self.fork_extents[1]/2, 0]), + ) + self.fork_tail = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 1, 0, 0], + name="fork_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.fork_extents[1]/2, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + 
else: + side = torch.ones((b, 1)) + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if 
self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.8 + min_reach = 0.2 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = 
torch.ones((b, 1)) + + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.ones((b, 1), dtype=torch.float32) * np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.ones((b, 1), dtype=torch.float32) * np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert 
max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.5 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + + def get_fork_tip_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tip.pose.p + relative_q = self.fork_tip.pose.q + new_tip_p = fork_p + rotate_vector(fork_q, relative_p) + new_tip_q = quaternion_multiply(fork_q, relative_q) + return new_tip_p, new_tip_q + + def get_fork_tail_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tail.pose.p + relative_q = self.fork_tail.pose.q + new_tail_p = fork_p + rotate_vector(fork_q, relative_p) + new_tail_q = quaternion_multiply(fork_q, relative_q) + return new_tail_p, new_tail_q + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + 
rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + euler = rotation_conversions.quaternion_to_euler(self.fork.pose.q) + + is_fork_vertical = ( + (torch.abs(torch.abs(euler[:, 1])) - np.pi/2) <= np.pi/4 + ) + + goal_extents = torch.from_numpy(self.goal_extents) + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_fork_tip_pose() + tail_p, _ = self.get_fork_tail_pose() + is_fork_placed = ( + self.is_inside(tip_p, goal_p, goal_extents) | + self.is_inside(tail_p, goal_p, goal_extents)) + + not_grasping = torch.logical_not(self.agent.is_grasping(self.fork)) + + is_fork_static = self.fork.is_static() + + return { + "success": is_fork_vertical & is_fork_placed & not_grasping & is_fork_static + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.fork.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, 
(1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack_v2.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..f3e6d885408dc684cac5756e7be9fd5c6f686a33 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack_v2.py @@ -0,0 +1,675 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import 
torch.random


from mani_skill.agents.robots import Fetch, Panda, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW
from mani_skill.envs.sapien_env import BaseEnv
from mani_skill.sensors.camera import CameraConfig
from mani_skill.utils.building import actors
from mani_skill.utils.geometry import rotation_conversions
from mani_skill.utils.registration import register_env
from mani_skill.utils.sapien_utils import look_at
from mani_skill.utils.scene_builder.table import TableSceneBuilder
from mani_skill.utils.structs.pose import Pose
from mani_skill.utils.structs.types import Array

from mani_skill.examples.motionplanning.panda.utils import get_actor_obb
import os

def quaternion_multiply(q, r):
    """
    Multiply two batches of quaternions (Hamilton product).

    Args:
        q: tensor of shape (B, 4) in the format [w, x, y, z]
        r: tensor of shape (B, 4) in the format [w, x, y, z]

    Returns:
        Tensor of shape (B, 4): the product q * r in [w, x, y, z] format.
    """
    w1, x1, y1, z1 = q.unbind(-1)
    w2, x2, y2, z2 = r.unbind(-1)
    # standard Hamilton product, written out component by component
    return torch.stack([
        w1*w2 - x1*x2 - y1*y2 - z1*z2,
        w1*x2 + x1*w2 + y1*z2 - z1*y2,
        w1*y2 - x1*z2 + y1*w2 + z1*x2,
        w1*z2 + x1*y2 - y1*x2 + z1*w2,
    ], dim=-1)

def rotate_vector(q, v):
    """
    Rotate vector v by quaternion q via v' = q * (0, v) * conj(q).

    Assumes q is a unit quaternion, so the conjugate equals the inverse.
    v must be batched as (B, 3): the code indexes v.shape[0] and concatenates
    along dim=-1, so a bare (3,) vector is NOT supported despite the original
    docstring's broadcast claim.

    Args:
        q: tensor of shape (B, 4) in [w, x, y, z] format
        v: tensor of shape (B, 3)

    Returns:
        Rotated vector of shape (B, 3).
    """
    # Convert v into a pure quaternion with zero scalar part
    zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype)
    v_quat = torch.cat([zeros, v], dim=-1)  # shape (B, 4)

    # Compute conjugate of q (negate the vector part)
    q_conj = q.clone()
    q_conj[:, 1:] = -q_conj[:, 1:]

    # Rotate: v_rot = q * v_quat * q_conj
    v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj)
    return v_rot[:, 1:]  # return only the vector part


@register_env("PlaceKnifeOnRack-v2", max_episode_steps=500)
class PlaceKnifeOnRackEnv(BaseEnv):
    """
    **Task Description:**
    A task where the objective is to place a knife vertically on a dish rack.
+ + **Randomizations:** + - lvl 0: fixed (R,t) for knife and rack + - lvl 1: + variable t for knife + - lvl 2: + variable R for knife + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The fork is placed vertically on the dish rack. + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.knife = actors.build_convex_from_mesh( + 
self.scene, + mesh_path="./assets/plates/kalas_knife.obj", + scale=(1,1,1), + color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + name="knife", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.knife_extents = get_actor_obb(self.knife).extents + self.knife_tip = actors.build_cube( + self.scene, + half_size=max(self.knife_extents)/40, + color=[1, 0, 0, 0], + name="knife_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, self.knife_extents[1]/2, 0]), + ) + self.knife_tail = actors.build_cube( + self.scene, + half_size=max(self.knife_extents)/40, + color=[1, 1, 0, 0], + name="knife_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.knife_extents[1]/2, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([255, 0, 0, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + + # init bowl + self.bowl = actors.build_nonconvex_from_mesh( + self.scene, + 
mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_bowl.obj"), + scale=(1, 1, 1), + color=np.array([0, 0, 255, 255], dtype=np.float32) / 255, + name="bowl", + body_type="dynamic", + add_collision=True, + initial_pose=sapien.Pose(p=[0, 0, 0], q=[1, 0, 0, 0]), + ) + self.bowl_extents = get_actor_obb(self.bowl).extents + + # init plate + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate_extents = get_actor_obb(self.plate).extents + + # init goalsite (eg. plate site) + self.plate_half_sizes = [self.plate_extents[2] , self.plate_extents[1]/2, self.plate_extents[0]/2] + self.plate_site = actors.build_box( + self.scene, + half_sizes=self.plate_half_sizes, + color=np.array([238, 230, 18, 0], dtype=np.float32) / 255, + name="plate_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.plate_half_sizes[0]*0.5, 0, self.plate_half_sizes[2]]), + ) + self.plate_extents = np.array(self.plate_half_sizes) * 2 + + # init hole site + self.hole_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[1, 0, 0, 0], + name="hole_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.hole_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + # init bowl site + self.bowl_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/4 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 0, 1, 0], + name="bowl_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[+self.rack_extents[0]*9.5/14/2, 0, 
self.rack_extents[2]/2]), + ) + self.bowl_extents = np.array([self.rack_extents[0]/4 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + # init mug site + self.mug_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/10 , self.rack_extents[1]/2*0.7, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="mug_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[+self.rack_extents[0]*2.85/20/2, 0, self.rack_extents[2]/2]), + ) + self.mug_extents = np.array([self.rack_extents[0]/5, self.rack_extents[1]*0.7, self.rack_extents[2]]) + + + + # # init mug + # self.mug = actors.build_nonconvex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_mug.obj", + # scale=(1,1,1), + # color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + # name="mug", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0, 0, 0]), + # ) + # self.mug_extents = get_actor_obb(self.mug).extents + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # knife pose initialize + if self.rand_level < 1: + # default position + 
x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.knife.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 
1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + + # ################################################################################# + # TODO add mug + # # init mug pose + # p, gq = self.get_site_pose(self.mug_site) + + # # default rot + # euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + # euler[:, 0] = np.pi/4 + # euler[:, 1] = 0 + # euler[:, 2] = np.pi + # q = rotation_conversions.euler_to_quaternion(euler) + + # self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################# + # init plate pose + # init to plate site + p, gq = self.get_site_pose(self.plate_site) + euler_rack = rotation_conversions.quaternion_to_euler(gq) + p[:, 2] = p[:, 2] - self.plate_extents[0]/10 + # p[:, 2] = p[:, 2] - self.plate_extents[0]/10 + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = euler_rack[:, 0] + euler[:, 1] = -np.pi/2 - np.pi/36 + q = rotation_conversions.euler_to_quaternion(euler) + + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + + 
################################################################################# + # init bowl pose + p, gq = self.get_site_pose(self.bowl_site) + p[:, 2] = p[:, 2] - self.bowl_extents[2]*0.4 + # p[:, 1] = p[:, 1] - self.bowl_extents[2] + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + # euler[:, 0] = np.pi/2 + # euler[:, 2] = -np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.bowl.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################# + # init fork pose + # init to inside hole + p, gq = self.get_site_pose(self.hole_site) + p[:, 2] = p[:, 2] + self.fork_extents[2] * 3.5 + p[:, 1] = p[:, 1] + self.fork_extents[2] + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + euler[:, 2] = -np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.8 + min_reach = 0.2 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + 
p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.ones((b, 1), dtype=torch.float32) * np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.ones((b, 1), dtype=torch.float32) * np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.5 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = 
torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + + def get_knife_tip_pose(self): + knife_p, knife_q = self.knife.pose.p, self.knife.pose.q + relative_p = self.knife_tip.pose.p + relative_q = self.knife_tip.pose.q + new_tip_p = knife_p + rotate_vector(knife_q, relative_p) + new_tip_q = quaternion_multiply(knife_q, relative_q) + return new_tip_p, new_tip_q + + def get_knife_tail_pose(self): + knife_p, knife_q = self.knife.pose.p, self.knife.pose.q + relative_p = self.knife_tail.pose.p + relative_q = self.knife_tail.pose.q + new_tail_p = knife_p + rotate_vector(knife_q, relative_p) + new_tail_q = quaternion_multiply(knife_q, relative_q) + return new_tail_p, new_tail_q + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def get_site_pose(self, site): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = site.pose.p + relative_q = site.pose.q + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding 
box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + euler = rotation_conversions.quaternion_to_euler(self.knife.pose.q) + + is_knife_vertical = ( + (torch.abs(torch.abs(euler[:, 1])) - np.pi/2) <= np.pi/4 + ) + + goal_extents = torch.from_numpy(self.goal_extents) + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_knife_tip_pose() + tail_p, _ = self.get_knife_tail_pose() + is_knife_placed = ( + self.is_inside(tip_p, goal_p, goal_extents) | + self.is_inside(tail_p, goal_p, goal_extents)) + + not_grasping = torch.logical_not(self.agent.is_grasping(self.knife)) + + is_knife_static = self.knife.is_static() + + return { + "success": is_knife_vertical & is_knife_placed & not_grasping & is_knife_static + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.fork.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, 
dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack_v3.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..8468691baeff49bd352c004ade530378b418df87 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack_v3.py @@ -0,0 +1,506 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + + +from mani_skill.agents.robots import Fetch, Panda, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from 
mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array +from mani_skill.examples.motionplanning.noahbiarm.utils import get_3d_bbox, get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PlaceKnifeOnRack-v3", max_episode_steps=500) +class PlaceKnifeOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a knife vertically on a dish rack. 
+ + **Randomizations:** + - lvl 0: fixed (R,t) for knife and rack + - lvl 1: + variable t for knife + - lvl 2: + variable R for knife + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The fork is placed vertically on the dish rack. + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc","noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_knife.obj", + scale=(1,1,1), + color=np.array([0, 
255, 0, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + self.fork_tip = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 0, 0, 0], + name="fork_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, self.fork_extents[1]/2, 0]), + ) + self.fork_tail = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 1, 0, 0], + name="fork_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.fork_extents[1]/2, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach 
parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # knife pose initialize + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + 
z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.8 + min_reach = 0.2 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + + ################################################################################## + # fork pose 
initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.ones((b, 1), dtype=torch.float32) * np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.ones((b, 1), dtype=torch.float32) * np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 3: + # default translation + 
x = torch.ones((b, 1)) * 0.5 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + + def get_fork_tip_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tip.pose.p + relative_q = self.fork_tip.pose.q + new_tip_p = fork_p + rotate_vector(fork_q, relative_p) + new_tip_q = quaternion_multiply(fork_q, relative_q) + return new_tip_p, new_tip_q + + def get_fork_tail_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tail.pose.p + relative_q = self.fork_tail.pose.q + new_tail_p = fork_p + rotate_vector(fork_q, relative_p) + new_tail_q = quaternion_multiply(fork_q, relative_q) + return new_tail_p, new_tail_q + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def 
is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + euler = rotation_conversions.quaternion_to_euler(self.fork.pose.q) + + is_fork_vertical = ( + (torch.abs(torch.abs(euler[:, 1])) - np.pi/2) <= np.pi/4 + ) + + goal_extents = torch.from_numpy(self.goal_extents) + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_fork_tip_pose() + tail_p, _ = self.get_fork_tail_pose() + is_fork_placed = ( + self.is_inside(tip_p, goal_p, goal_extents) | + self.is_inside(tail_p, goal_p, goal_extents)) + + not_grasping = torch.logical_not(self.agent.is_grasping(self.fork)) + + is_fork_static = self.fork.is_static() + + return { + "success": is_fork_vertical & is_fork_placed & not_grasping & is_fork_static + } + + def _get_obs_extra(self, info: Dict): + fork_bb = get_3d_bbox(self.fork) + rack_bb = get_3d_bbox(self.rack) + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + obj_pose=self.fork.pose.raw_pose, + rack_pose=self.rack.pose.raw_pose, + obj_bb=fork_bb, + rack_bb=rack_bb, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation 
+ # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack_v4.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..23c82932328fd3465c7cd462f74a11c3c871a108 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/knife_on_rack_v4.py @@ -0,0 +1,591 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random +import os + + +from 
mani_skill.agents.robots import Fetch, Panda, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PlaceKnifeOnRack-v4", max_episode_steps=500) +class PlaceKnifeOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a knife vertically on a dish rack. 
+ + **Randomizations:** + - lvl 0: fixed (R,t) for knife and rack + - lvl 1: + variable t for knife + - lvl 2: + variable R for knife + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The fork is placed vertically on the dish rack. + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc","noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + # init fork + self.knife = actors.build_convex_from_mesh( + 
self.scene, + mesh_path="./assets/plates/kalas_knife.obj", + scale=(1,1,1), + color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + name="knife", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.knife_extents = get_actor_obb(self.knife).extents + self.knife_tip = actors.build_cube( + self.scene, + half_size=max(self.knife_extents)/40, + color=[1, 0, 0, 0], + name="knife_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, self.knife_extents[1]/2, 0]), + ) + self.knife_tail = actors.build_cube( + self.scene, + half_size=max(self.knife_extents)/40, + color=[1, 1, 0, 0], + name="knife_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.knife_extents[1]/2, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([255, 0, 0, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + + # init bowl + self.bowl = actors.build_nonconvex_from_mesh( + self.scene, + 
mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_bowl.obj"), + scale=(1, 1, 1), + color=np.array([0, 0, 255, 255], dtype=np.float32) / 255, + name="bowl", + body_type="dynamic", + add_collision=True, + initial_pose=sapien.Pose(p=[0, 0, 0], q=[1, 0, 0, 0]), + ) + self.bowl_extents = get_actor_obb(self.bowl).extents + + # init plate + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate_extents = get_actor_obb(self.plate).extents + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # knife pose initialize + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.05 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] 
= np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.05 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.1, 0.1) + y += y_rand + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + torch.empty(b, 1).uniform_(-np.pi/6, np.pi/6) + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.knife.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.35 + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler 
= torch.zeros((b, 3), dtype=torch.float32) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = euler[:, 0] + np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + # ################################################################################## + # init fork + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.75 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + # ################################################################################## + # init bowl + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.40 + z = torch.zeros((b, 1)) + self.bowl_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + self.bowl.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################# + # plate pose initialization + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.60 + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = 
torch.zeros((b, 3), dtype=torch.float32)
+            q = rotation_conversions.euler_to_quaternion(euler)
+            self.plate.set_pose(Pose.create_from_pq(p=p, q=q))
+
+
+
+    def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict):
+        with torch.device(self.device):
+            b = len(env_idx)
+            self.table_scene.initialize(env_idx)
+
+            # @sajjad: need to get the reach parames of robot dynamically
+            # get arm max reach
+            max_reach = 0.8
+            min_reach = 0.2
+            # get arm base positions
+            robot_p = self.agent.robot.pose.p
+            robot_q = self.agent.robot.pose.q
+
+            if self.rand_level >= 5:
+                side = torch.randint(0, 2, (b, 1)) * 2 - 1
+            else:
+                side = torch.ones((b, 1))
+
+
+            ##################################################################################
+            # fork pose initialize
+            obb = get_actor_obb(self.fork)
+            max_extent = np.max(obb.extents)/2
+            max_radius = max_reach - max_extent
+            min_radius = min_reach + max_extent
+
+            assert max_radius > 0
+            assert min_radius > 0
+            assert max_radius > min_radius
+
+            if self.rand_level < 1:
+                # default position
+                x = torch.ones((b, 1)) * 0.5
+                y = side * torch.ones((b, 1)) * 0.5
+                z = torch.zeros((b, 1))
+                xyz = torch.cat((x, y, z), dim=1)
+                p = robot_p + xyz
+
+                # default rotation
+                # BUGFIX: RHS must be 1-D (shape (b,)); a (b, 1) tensor cannot
+                # be broadcast into the euler[:, 0] slice.
+                euler = torch.zeros((b, 3), dtype=torch.float32)
+                euler[:, 0] = torch.ones(b, dtype=torch.float32) * np.pi/2
+                q = rotation_conversions.euler_to_quaternion(euler)
+
+            if self.rand_level >= 1:
+                # translation
+                r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2)
+                theta = torch.rand(b, 1) * np.pi/4 + np.pi/8
+                x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent)
+                y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent)
+                z = torch.zeros_like(x)
+                xyz = torch.cat((x, y, z), dim=1)
+                p = robot_p + xyz
+
+                # default rotation
+                # BUGFIX: 1-D RHS for the (b,)-shaped slice (see above).
+                euler = torch.zeros((b, 3), dtype=torch.float32)
+                euler[:, 0] = torch.ones(b, dtype=torch.float32) * np.pi/2
+                q = rotation_conversions.euler_to_quaternion(euler)
+
+            if self.rand_level >= 2:
+                # rotation
+                # follow zyx convention for euler
+                euler = torch.zeros((b, 3), dtype=torch.float32)
+                # BUGFIX: sample a (b,)-shaped tensor, not (b, 1).
+                euler[:, 0] = torch.rand(b, dtype=torch.float32) * np.pi * 2
+                # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2
+                q = rotation_conversions.euler_to_quaternion(euler)
+            self.fork.set_pose(Pose.create_from_pq(p=p, q=q))
+
+            ##################################################################################
+            # rack pose initialize
+            obb = get_actor_obb(self.rack)
+            max_extent = np.max(obb.extents)/2
+            max_radius = max_reach - max_extent
+            min_radius = min_reach + max_extent
+
+            assert max_radius > 0
+            assert min_radius > 0
+            assert max_radius > min_radius
+
+            if self.rand_level < 3:
+                # default translation
+                x = torch.ones((b, 1)) * 0.5
+                y = -side * torch.ones((b, 1)) * 0.5
+                z = torch.zeros_like(x)
+                xyz = torch.cat((x, y, z), dim=1)
+                p = robot_p + xyz
+
+                # default rotation
+                euler = torch.zeros((b, 3), dtype=torch.float32)
+                q = rotation_conversions.euler_to_quaternion(euler)
+
+            if self.rand_level >= 3:
+                # translation
+                r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2)
+                theta = torch.rand(b, 1) * 2 * np.pi
+                x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent)
+                y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent)
+                z = torch.zeros_like(x)
+                xyz = torch.cat((x, y, z), dim=1)
+                p = robot_p + xyz
+
+                # default rotation
+                euler = torch.zeros((b, 3), dtype=torch.float32)
+                q = rotation_conversions.euler_to_quaternion(euler)
+
+            if self.rand_level >= 4:
+                # rotation
+                euler = torch.zeros((b, 3), dtype=torch.float32)
+                # BUGFIX: sample a (b,)-shaped tensor, not (b, 1).
+                euler[:, 0] = torch.rand(b, dtype=torch.float32) * np.pi * 2
+                q = rotation_conversions.euler_to_quaternion(euler)
+
+            self.rack.set_pose(Pose.create_from_pq(p=p, q=q))
+
+
+    def get_knife_tip_pose(self):
+        knife_p, knife_q = self.knife.pose.p, self.knife.pose.q
+        relative_p = self.knife_tip.pose.p
+        relative_q = 
self.knife_tip.pose.q
+        # Compose the tip offset with the knife's world pose.
+        # NOTE(review): this reads the tip actor's own pose as a knife-local
+        # offset — assumes that actor's pose is expressed relative to the
+        # knife; confirm where knife_tip is positioned.
+        new_tip_p = knife_p + rotate_vector(knife_q, relative_p)
+        new_tip_q = quaternion_multiply(knife_q, relative_q)
+        return new_tip_p, new_tip_q
+
+    def get_knife_tail_pose(self):
+        # World pose of the knife tail marker (same composition as the tip).
+        knife_p, knife_q = self.knife.pose.p, self.knife.pose.q
+        relative_p = self.knife_tail.pose.p
+        relative_q = self.knife_tail.pose.q
+        new_tail_p = knife_p + rotate_vector(knife_q, relative_p)
+        new_tail_q = quaternion_multiply(knife_q, relative_q)
+        return new_tail_p, new_tail_q
+
+    def get_goal_site_pose(self):
+        # World pose of the goal site, composed from the rack's world pose
+        # and the goal site's rack-relative offset.
+        rack_p, rack_q = self.rack.pose.p, self.rack.pose.q
+        relative_p = self.goal_site.pose.p
+        relative_q = self.goal_site.pose.q
+
+        new_box_p = rack_p + rotate_vector(rack_q, relative_p)
+        new_box_q = quaternion_multiply(rack_q, relative_q)
+
+        return new_box_p, new_box_q
+
+    def is_inside(self, p1, p2, extents2):
+        """
+        Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2
+        and full extents extents2 using PyTorch (the extents are halved internally).
+
+        Args:
+            p1 (torch.Tensor): Batch of points, shape (batch_size, dims).
+            p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims).
+            extents2 (torch.Tensor): Full extents along each axis (halved inside), shape (batch_size, dims).
+
+        Returns:
+            torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box.
+        """
+        # Compute the absolute difference between each point and its box center
+        diff = torch.abs(p1 - p2)
+
+        # Check if the difference is within the half extents along each dimension
+        inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1)
+
+        return inside
+
+    def evaluate(self):
+        euler = rotation_conversions.quaternion_to_euler(self.knife.pose.q)
+
+        # Knife counts as vertical when its pitch is within pi/4 of +/- pi/2.
+        # BUGFIX: the original wrote (abs(abs(euler)) - pi/2) <= pi/4, which
+        # reduces to abs(euler) <= 3*pi/4 and wrongly accepts a flat knife.
+        is_knife_vertical = (
+            torch.abs(torch.abs(euler[:, 1]) - np.pi/2) <= np.pi/4
+        )
+
+        goal_extents = torch.from_numpy(self.goal_extents)
+        goal_p, _ = self.get_goal_site_pose()
+        tip_p, _ = self.get_knife_tip_pose()
+        tail_p, _ = self.get_knife_tail_pose()
+        is_knife_placed = (
+            self.is_inside(tip_p, goal_p, goal_extents) |
+            self.is_inside(tail_p, goal_p, goal_extents))
+
+        not_grasping = torch.logical_not(self.agent.is_grasping(self.knife))
+
+        is_knife_static = self.knife.is_static()
+
+        return {
+            "success": is_knife_vertical & is_knife_placed & not_grasping & is_knife_static
+        }
+
+    def _get_obs_extra(self, info: Dict):
+        obs = dict(
+            tcp_pose=self.agent.tcp.pose.raw_pose,
+        )
+        if self.obs_mode_struct.use_state:
+            obs.update(
+                obj_pose=self.fork.pose.raw_pose,
+            )
+        return obs
+
+    def compute_dense_reward(self, obs: Any, action: Array, info: Dict):
+        # rotation reward as cosine similarity between plate direction vectors
+        # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation
+        # dot product with its goal orientation: (0,1,0) or (0,-1,0)
+        qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q)
+        vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32)
+        goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32)
+        rot_vec = (qmats @ vec).view(-1, 3)
+        # abs since (0,-1,0) is also valid, values in [0,1]
+        rot_rew = (rot_vec @ goal_vec).view(-1).abs()
+        reward = rot_rew
+
+        # position reward using common maniskill distance reward pattern
+        # giving reward in [0,1] for moving center of mass toward the rack
+        z_dist = 
torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/lift_peg_upright.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/lift_peg_upright.py new file mode 100644 index 0000000000000000000000000000000000000000..9901dc37ba3d9c9085ff3b641471bb97bd18fa1c --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/lift_peg_upright.py @@ -0,0 +1,144 @@ +from typing import Any, Dict, Union +import os +import numpy as np +import sapien +import torch +import torch.random +from transforms3d.euler import euler2quat + +from mani_skill.agents.robots import Fetch, Panda, Piper +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + + 
+@register_env("LiftPegUpright-v1", max_episode_steps=50) +class LiftPegUprightEnv(BaseEnv): + """ + **Task Description:** + A simple task where the objective is to move a peg laying on the table to any upright position on the table + + **Randomizations:** + - the peg's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat along it's length on the table + + **Success Conditions:** + - the absolute value of the peg's y euler angle is within 0.08 of $\pi$/2 and the z position of the peg is within 0.005 of its half-length (0.12). + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/LiftPegUpright-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "piper"] + agent: Union[Panda, Fetch, Piper] + + peg_half_width = 0.025 + peg_half_length = 0.12 + + def __init__(self, *args, robot_uids="panda", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([0.6, 0.7, 0.6], [0.0, 0.0, 0.35]) + return CameraConfig("render_camera", pose, 512, 512, 1, 0.01, 100) + + def _load_agent(self, options: dict): + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # the peg that we want to manipulate + self.peg = actors.build_twocolor_peg( + self.scene, + length=self.peg_half_length, + width=self.peg_half_width, + color_1=np.array([176, 14, 14, 255]) / 255, + color_2=np.array([12, 42, 160, 255]) / 255, + name="peg", + body_type="dynamic", + 
initial_pose=sapien.Pose(p=[0, 0, 0.1]), + ) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + xyz = torch.zeros((b, 3)) + xyz[..., :2] = torch.rand((b, 2)) * 0.2 - 0.1 + xyz[..., 2] = self.peg_half_width + q = euler2quat(np.pi / 2, 0, 0) + + obj_pose = Pose.create_from_pq(p=xyz, q=q) + self.peg.set_pose(obj_pose) + + def evaluate(self): + q = self.peg.pose.q + qmat = rotation_conversions.quaternion_to_matrix(q) + euler = rotation_conversions.matrix_to_euler_angles(qmat, "XYZ") + is_peg_upright = ( + torch.abs(torch.abs(euler[:, 2]) - np.pi / 2) < 0.08 + ) # 0.08 radians of difference permitted + close_to_table = torch.abs(self.peg.pose.p[:, 2] - self.peg_half_length) < 0.005 + return { + "success": is_peg_upright & close_to_table, + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.peg.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between peg direction vectors + # peg center of mass to end of peg, (1,0,0), rotated by peg pose rotation + # dot product with its goal orientation: (0,0,1) or (0,0,-1) + qmats = rotation_conversions.quaternion_to_matrix(self.peg.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device) + goal_vec = torch.tensor([0, 0, 1.0], device=self.device) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,0,-1) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward half length above table + z_dist = torch.abs(self.peg.pose.p[:, 2] - self.peg_half_length) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching 
+ # initially, we want to reach and grip peg + to_grip_vec = self.peg.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping block + reaching_rew[self.agent.is_grasping(self.peg)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/mug_from_coffee_machine.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/mug_from_coffee_machine.py new file mode 100644 index 0000000000000000000000000000000000000000..7c6f06bbbff543df460eeb3ff95186792193c778 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/mug_from_coffee_machine.py @@ -0,0 +1,440 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. 
+ q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + +@register_env("PickMugFromCoffeeMachine-v1", max_episode_steps=500) +class PickMugFromCoffeeMachineEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to pick a mug from the coffee machien (CM) + + **Randomizations:** + - lvl 0: fixed (R,t) for mug and CM + - lvl 1: + variable t for mug + - lvl 2: + variable R for mug + - lvl 3: + variable t for CM + - lvl 4: + variable R for CM + - lvl 5: + variable side of table + + **Success Conditions:** + - The mug is placed on the table + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level 
<=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + pose = look_at([-0.6, -1.7, 1.0], [0.1, 0.5, -0.5],[0,0,1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + pose = look_at([-0.6, -1.7, 1.0], [0.0, 0.0, 0.25],[0,0,1]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # init mug + self.mug = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_mug.obj", + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="mug", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.mug_extents = get_actor_obb(self.mug).extents + self.mug_tip = actors.build_cube( + self.scene, + half_size=max(self.mug_extents)/40, + color=[1, 0, 0, 1], + name="mug_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, 0, -self.mug_extents[2]]), + ) + + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/coffee_machine/coffee_machine.obj", + 
scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + + # init goal site + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.mug_extents[0]/2 , self.mug_extents[1]/2, self.mug_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]/20, self.mug_extents[1], -self.rack_extents[2] * 0.57]), + ) + self.goal_extents = np.array([self.mug_extents[0] , self.mug_extents[1], self.mug_extents[2]]) + + + # init final site site + self.final_site = actors.build_box( + self.scene, + half_sizes=[self.mug_extents[0]/2 , self.mug_extents[1]/2, self.mug_extents[2]/2], + color=[0, 1, 0, 0.4], + name="final_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-0.615, -0.4, self.mug_extents[2]]), + ) + self.final_extents = np.array([self.mug_extents[0] , self.mug_extents[1], self.mug_extents[2]]) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) * -1 + + + ################################################################################## + # final site initialization + 
################################################################################## + # final site initialize + if self.rand_level < 1: + # default position + x = torch.zeros((b, 1)) + y = side * torch.ones((b, 1)) * 0.4 + z = torch.zeros((b, 1)) + self.mug_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + if self.rand_level >=1: + # translation + x = torch.zeros((b, 1)) + x_noise = (torch.rand((b, 1))*2-1)*0.1 + x += x_noise + y = side * torch.ones((b, 1)) * 0.4 + y_noise = (torch.rand((b, 1))*2-1)*0.2 + y+=y_noise + z = torch.zeros((b, 1)) + self.mug_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + self.final_site.set_pose(Pose.create_from_pq(p=p)) + + + ################################################################################## + # CM pose initialize + if self.rand_level < 3: + # translation + x = torch.zeros(b, 1) - 0.05 + y = torch.zeros(b, 1) - 0.15 + z = torch.zeros(b, 1) + xyz = torch.cat((x, y, z), dim=1) + p = xyz + + # rotation + euler = torch.zeros((b, 3)) + euler[:, 0] = np.pi/2 + euler[:, 1] = 0 + euler[:, 2] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + raise NotImplementedError + # # translation + # x = torch.zeros(b, 1) - 0.05 + # x_noise = (torch.rand((b, 1))*2-1)*0.00 + # x += x_noise + # y = torch.zeros(b, 1) - 0.45 + # y_noise = (torch.rand((b, 1))*2-1)*0.05 + # y += y_noise + # z = torch.zeros(b, 1) + # xyz = torch.cat((x, y, z), dim=1) + # p = xyz + + # # rotation + # euler = torch.zeros((b, 3)) + # euler[:, 0] = np.pi/2 + # euler[:, 1] = 0 + # euler[:, 2] = np.pi/2 + # q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # mug pose initialize + p, _ = self.get_goal_site_pose() + p[:, 2] = p[:, 2] - self.goal_extents[2]/2 + + # rotation + # follow zyx convention for euler + euler = torch.zeros(b, 3) + 
euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.7 + min_reach = 0.3 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # rack pose initialize + if self.rand_level < 3: + # translation + x = torch.zeros(b, 1) - 0.2 + y = side * (torch.ones(b, 1)* 0.3 - 1) + z = torch.zeros(b, 1) + xyz = torch.cat((x, y, z), dim=1) + p = xyz + + # rotation + euler = torch.zeros((b, 3)) + euler[:, 0] = (1-side)*np.pi/2 + euler[:, 1] = 0 + euler[:, 2] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + # x 0 to -0.3 + # y -1 to -0.7 + # z = 0 + x = torch.rand(b, 1) * 0.3 - 0.3 + y = side * (torch.rand(b, 1) * 0.3 - 1) + z = torch.zeros(b, 1) + xyz = torch.cat((x, y, z), dim=1) + p = xyz + + # rotation Z, Y, X + euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + euler[:, 0] = (1-side)*np.pi/2 + euler[:, 1] = 0 + euler[:, 2] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # mug pose initialize + p, _ = self.get_goal_site_pose() + + # rotation + # follow zyx convention for euler + euler = torch.zeros(b, 3) + euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + def get_mug_tip_pose(self): + mug_p, mug_q 
= self.mug.pose.p, self.mug.pose.q + relative_p = self.mug_tip.pose.p + relative_q = self.mug_tip.pose.q + new_tip_p = mug_p + rotate_vector(mug_q, relative_p) + new_tip_q = quaternion_multiply(mug_q, relative_q) + return new_tip_p, new_tip_q + + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. 
+ """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2/2, dim=1) + + return inside + + + def evaluate(self): + # check rotation + euler = rotation_conversions.quaternion_to_euler(self.mug.pose.q) + is_mug_up = (torch.abs(torch.abs(euler[:, 2]) - np.pi) <= np.pi/20) + + # check translation + final_site_p = self.final_site.pose.p + final_extents = torch.from_numpy(self.final_extents).to(final_site_p.device) + is_mug_placed = self.is_inside(self.mug.pose.p, final_site_p, final_extents) + + # check if robot released the object + not_grasping = torch.logical_not(self.agent.is_grasping(self.mug)) + # print(is_mug_placed, is_mug_up, ) + + return { + "success": is_mug_placed & is_mug_up & not_grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.mug.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.mug.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.mug.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) 
+ + # small reward to motivate initial reaching + # initially, we want to reach and grip the plate + to_grip_vec = self.mug.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the plate + reaching_rew[self.agent.is_grasping(self.mug)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/mug_on_coffee_machine.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/mug_on_coffee_machine.py new file mode 100644 index 0000000000000000000000000000000000000000..43265cc1bf262530f9ec0055ea94385c5870f1f8 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/mug_on_coffee_machine.py @@ -0,0 +1,467 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb +def quaternion_multiply(q, r): + """ + Multiply two 
batches of quaternions.
+    q, r: tensors of shape (B, 4) in the format [w, x, y, z]
+    Returns: tensor of shape (B, 4)
+    """
+    w1, x1, y1, z1 = q.unbind(-1)
+    w2, x2, y2, z2 = r.unbind(-1)
+    return torch.stack([
+        w1*w2 - x1*x2 - y1*y2 - z1*z2,
+        w1*x2 + x1*w2 + y1*z2 - z1*y2,
+        w1*y2 - x1*z2 + y1*w2 + z1*x2,
+        w1*z2 + x1*y2 - y1*x2 + z1*w2,
+    ], dim=-1)
+
+def rotate_vector(q, v):
+    """
+    Rotate vector v by quaternion q.
+    q: tensor of shape (B, 4) in [w, x, y, z] format
+    v: tensor of shape (B, 3) (or (3,) will be broadcasted)
+    Returns: rotated vector of shape (B, 3)
+    """
+    # Convert v into a quaternion with zero scalar part
+    zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype)
+    v_quat = torch.cat([zeros, v], dim=-1)  # shape (B, 4)
+
+    # Compute conjugate of q
+    q_conj = q.clone()
+    q_conj[:, 1:] = -q_conj[:, 1:]
+
+    # Rotate: v_rot = q * v_quat * q_conj
+    v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj)
+    return v_rot[:, 1:]  # return only the vector part
+
+@register_env("PlaceMugOnCoffeeMachine-v1", max_episode_steps=500)
+class PlaceMugOnCoffeeMachineEnv(BaseEnv):
+    """
+    **Task Description:**
+    A task where the objective is to place a mug on the coffee machine.
+
+    **Randomizations:**
+    - lvl 0: fixed (R,t) for mug and CM
+    - lvl 1: + variable t for mug
+    - lvl 2: + variable R for mug
+    - lvl 3: + variable t for CM
+    - lvl 4: + variable R for CM
+    - lvl 5: + variable side of table
+
+    **Success Conditions:**
+    - The mug is placed on the coffee machine.
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + pose = look_at([-0.6, -1.7, 1.0], [0.1, 0.5, -0.5],[0,0,1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + pose = look_at([-0.6, -1.7, 1.0], [0.0, 0.0, 0.25],[0,0,1]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # init mug + self.mug = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_mug.obj", + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="mug", + 
body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.mug_extents = get_actor_obb(self.mug).extents + self.mug_tip = actors.build_cube( + self.scene, + half_size=max(self.mug_extents)/40, + color=[1, 0, 0, 1], + name="mug_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, 0, -self.mug_extents[2]]), + ) + + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/coffee_machine/coffee_machine.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + + # init goal site + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.mug_extents[0]/2 , self.mug_extents[1]/2, self.mug_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]/20, self.mug_extents[1], -self.rack_extents[2] * 0.57]), + ) + self.goal_extents = np.array([self.mug_extents[0] , self.mug_extents[1], self.mug_extents[2]]) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) * -1 + + ################################################################################## + # 
mug pose initialize + max_reach = 0.3 + min_reach = 0.1 + max_extent = np.max(self.mug_extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 1: + # default position + x = torch.zeros((b, 1)) + y = side * torch.ones((b, 1)) * 0.4 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rot + euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + euler[:, 0] = np.pi/4 + euler[:, 1] = 0 + euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >=1: + # translation + x = torch.zeros((b, 1)) + x_noise = (torch.rand((b, 1))*2-1)*0.1 + x += x_noise + y = side * torch.ones((b, 1)) * 0.4 + y_noise = (torch.rand((b, 1))*2-1)*0.2 + y+=y_noise + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rot + euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + euler[:, 0] = np.pi/4 + euler[:, 1] = 0 + euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # CM pose initialize + if self.rand_level < 3: + # translation + x = torch.zeros(b, 1) - 0.05 + y = torch.zeros(b, 1) - 0.15 + z = torch.zeros(b, 1) + xyz = torch.cat((x, y, z), dim=1) + p = xyz + + # rotation + euler = torch.zeros((b, 3)) + euler[:, 0] = np.pi/2 + euler[:, 1] = 0 + euler[:, 2] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + raise NotImplementedError + # # translation + # x = torch.zeros(b, 1) - 0.05 + # x_noise = (torch.rand((b, 1))*2-1)*0.00 + # x += x_noise + # y = torch.zeros(b, 1) - 0.45 + # y_noise = (torch.rand((b, 1))*2-1)*0.05 + # y += y_noise + # z = torch.zeros(b, 1) + # xyz = torch.cat((x, y, z), dim=1) + # p = xyz + + # # rotation + # euler = torch.zeros((b, 3)) + # euler[:, 0] = np.pi/2 + # euler[:, 1] = 
0 + # euler[:, 2] = np.pi/2 + # q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.7 + min_reach = 0.4 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # mug pose initialize + max_extent = np.max(self.mug_extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + euler[:, 0] = 0 + euler[:, 1] = 0 + euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # rotation + # follow zyx convention for euler + euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + euler[:, 0] = 0 + euler[:, 1] = 0 + euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + 
self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + + if self.rand_level < 3: + # translation + x = torch.zeros(b, 1) - 0.2 + y = side * (torch.ones(b, 1)* 0.3 - 1) + z = torch.zeros(b, 1) + xyz = torch.cat((x, y, z), dim=1) + p = xyz + + # rotation + euler = torch.zeros((b, 3)) + euler[:, 0] = (1-side)*np.pi/2 + euler[:, 1] = 0 + euler[:, 2] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + # x 0 to -0.2 + # y -1 to -0.7 + # z = 0 + x = torch.rand(b, 1) * 0.2 - 0.2 + y = side * (torch.rand(b, 1) * 0.3 - 1) + z = torch.zeros(b, 1) + xyz = torch.cat((x, y, z), dim=1) + p = xyz + + # rotation Z, Y, X + euler = torch.zeros((b, 3)) + euler[:, 0] = (1-side)*np.pi/2 + euler[:, 1] = 0 + euler[:, 2] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + def get_mug_tip_pose(self): + mug_p, mug_q = self.mug.pose.p, self.mug.pose.q + relative_p = self.mug_tip.pose.p + relative_q = self.mug_tip.pose.q + new_tip_p = mug_p + rotate_vector(mug_q, relative_p) + new_tip_q = quaternion_multiply(mug_q, relative_q) + return new_tip_p, new_tip_q + + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). 
+ extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2/2, dim=1) + + return inside + + + def evaluate(self): + # check rotation + euler = rotation_conversions.quaternion_to_euler(self.mug.pose.q) + is_mug_up = (torch.abs(torch.abs(euler[:, 2]) - np.pi) <= np.pi/20) + + # check translation + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_mug_tip_pose() + goal_extents = torch.from_numpy(self.goal_extents).to(goal_p.device) + is_mug_placed = self.is_inside(self.mug.pose.p, goal_p, goal_extents) + + # check if robot released the object + not_grasping = torch.logical_not(self.agent.is_grasping(self.mug)) + + return { + "success": is_mug_placed & is_mug_up & not_grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.mug.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.mug.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance 
reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.mug.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the plate + to_grip_vec = self.mug.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the plate + reaching_rew[self.agent.is_grasping(self.mug)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/mug_on_rack.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/mug_on_rack.py new file mode 100644 index 0000000000000000000000000000000000000000..ea503dfa734f80a0cfb6f792dd80ade63635811d --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/mug_on_rack.py @@ -0,0 +1,472 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from 
mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. + q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + +@register_env("PlaceMugOnRack-v1", max_episode_steps=50) +class PlaceMugOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a mug up-right on the rack + + ***Randomizations:** + - lvl 0: fixed (R,t) for mug and rack + - lvl 1: + variable t for mug + - lvl 2: + variable R for mug + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The mug is placed up right on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # init mug + self.mug = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_mug.obj", + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="mug", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.mug_extents = get_actor_obb(self.mug).extents + self.mug_tip = actors.build_cube( + self.scene, + half_size=max(self.mug_extents)/40, + color=[1, 0, 0, 1], + name="mug_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, 0, 
-self.mug_extents[2]]), + ) + + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + + # init goal site + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/10 , self.rack_extents[1]/2*0.7, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[+self.rack_extents[0]*2.85/20/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/5, self.rack_extents[1]*0.7, self.rack_extents[2]]) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # mug pose initialize + max_reach = 0.3 + min_reach = 0.1 + max_extent = np.max(self.mug_extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.1 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, 
z), dim=1) + p = robot_p + xyz + + # default rot + euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + euler[:, 0] = np.pi/4 + euler[:, 1] = 0 + euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >=1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # rotation + # follow zyx convention for euler + euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + euler[:, 0] = 0 + euler[:, 1] = 0 + euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.12 + min_reach = 0.15 + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.13 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/6 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, 
y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi/10 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################## + # mug pose initialize + max_reach = 0.8 + min_reach = 0.4 + max_extent = np.max(self.mug_extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rot + euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + euler[:, 0] = 0 + euler[:, 1] = 0 + euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >=1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) 
+ xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # rotation + # follow zyx convention for euler + euler = torch.randint(low=1, high=4, size=(b, 3)) * np.pi/2 + euler[:, 0] = 0 + euler[:, 1] = 0 + euler[:, 2] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + self.mug.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + max_reach = 0.8 + min_reach = 0.2 + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.5 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + + def get_mug_tip_pose(self): + mug_p, mug_q = self.mug.pose.p, self.mug.pose.q + relative_p = self.mug_tip.pose.p + relative_q = self.mug_tip.pose.q + 
new_tip_p = mug_p + rotate_vector(mug_q, relative_p) + new_tip_q = quaternion_multiply(mug_q, relative_q) + return new_tip_p, new_tip_q + + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2/2, dim=1) + + return inside + + + def evaluate(self): + # check rotation + euler = rotation_conversions.quaternion_to_euler(self.mug.pose.q) + is_mug_up = (torch.abs(euler[:, 1] - np.pi) < np.pi/20) & (torch.abs(euler[:, 2] - np.pi) ActorBuilder: + raise NotImplementedError() + + def _load_agent(self, options: dict): + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + + def _load_scene(self, options: dict): + self.scene_builder = TableSceneBuilder( + self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.scene_builder.build() + + # sample some clutter configurations + eps_idxs = self._batched_episode_rng.randint(0, len(self._episodes)) + + self.selectable_target_objects: List[List[Actor]] = [] + """for each sub-scene, a list of objects that can be selected as 
targets""" + all_objects = [] + + for i, eps_idx in enumerate(eps_idxs): + self.selectable_target_objects.append([]) + episode = self._episodes[eps_idx] + for actor_config in episode["actors"]: + builder = self._load_model(actor_config["model_id"]) + init_pose = actor_config["pose"] + builder.initial_pose = sapien.Pose(p=init_pose[:3], q=init_pose[3:]) + builder.set_scene_idxs([i]) + obj = builder.build(name=f"set_{i}_{actor_config['model_id']}") + all_objects.append(obj) + if actor_config["rep_pts"] is not None: + # rep_pts is representative points, representing visible points + # we only permit selecting target objects that are visible + self.selectable_target_objects[-1].append(obj) + + self.all_objects = Actor.merge(all_objects, name="all_objects") + + self.goal_site = actors.build_sphere( + self.scene, + radius=0.01, + color=[0, 1, 0, 1], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(), + ) + self._hidden_objects.append(self.goal_site) + + self._sample_target_objects() + + def _sample_target_objects(self): + # note this samples new target objects for every sub-scene + target_objects = [] + for i in range(self.num_envs): + selected_obj_idxs = torch.randint(low=0, high=99999, size=(self.num_envs,)) + selected_obj_idxs[i] = selected_obj_idxs[i] % len( + self.selectable_target_objects[-1] + ) + target_objects.append( + self.selectable_target_objects[-1][selected_obj_idxs[i]] + ) + self.target_object = Actor.merge(target_objects, name="target_object") + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.scene_builder.initialize(env_idx) + goal_pos = torch.rand(size=(b, 3)) * torch.tensor( + [0.3, 0.5, 0.1] + ) + torch.tensor([-0.15, -0.25, 0.35]) + self.goal_pos = goal_pos + self.goal_site.set_pose(Pose.create_from_pq(self.goal_pos)) + + # reset objects to original poses + if b == self.num_envs: + # if all envs reset + 
self.all_objects.pose = self.all_objects.initial_pose + else: + # if only some envs reset, we unfortunately still have to do some mask wrangling + mask = torch.isin(self.all_objects._scene_idxs, env_idx) + self.all_objects.pose = self.all_objects.initial_pose[mask] + + def evaluate(self): + return { + "success": torch.zeros(self.num_envs, device=self.device, dtype=bool), + "fail": torch.zeros(self.num_envs, device=self.device, dtype=bool), + } + + def _get_obs_extra(self, info: Dict): + + return dict() + + +@register_env( + "PickClutterYCB-v1", + asset_download_ids=["ycb", "pick_clutter_ycb_configs"], + max_episode_steps=100, +) +class PickClutterYCBEnv(PickClutterEnv): + DEFAULT_EPISODE_JSON = f"{ASSET_DIR}/tasks/pick_clutter/ycb_train_5k.json.gz" + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PickClutterYCB-v1_rt.mp4" + + def _load_model(self, model_id): + builder = actors.get_actor_builder(self.scene, id=f"ycb:{model_id}") + return builder diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pick_cube.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pick_cube.py new file mode 100644 index 0000000000000000000000000000000000000000..bfc3f2eb0c2cba3f1275425c3e5db5e408c6140e --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pick_cube.py @@ -0,0 +1,160 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch + +import mani_skill.envs.utils.randomization as randomization +from mani_skill.agents.robots import Fetch, Panda, XArm6Robotiq +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils import sapien_utils +from mani_skill.utils.building import actors +from mani_skill.utils.registration import register_env +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from 
mani_skill.utils.structs.pose import Pose + + +@register_env("PickCube-v1", max_episode_steps=50) +class PickCubeEnv(BaseEnv): + """ + **Task Description:** + A simple task where the objective is to grasp a red cube and move it to a target goal position. + + **Randomizations:** + - the cube's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat on the table + - the cube's z-axis rotation is randomized to a random angle + - the target goal position (marked by a green sphere) of the cube has its xy position randomized in the region [0.1, 0.1] x [-0.1, -0.1] and z randomized in [0, 0.3] + + **Success Conditions:** + - the cube position is within `goal_thresh` (default 0.025m) euclidean distance of the goal position + - the robot is static (q velocity < 0.2) + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PickCube-v1_rt.mp4" + SUPPORTED_ROBOTS = [ + "panda", + "fetch", + "xarm6_robotiq", + ] + agent: Union[Panda, Fetch, XArm6Robotiq] + cube_half_size = 0.02 + goal_thresh = 0.025 + + def __init__(self, *args, robot_uids="panda", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = sapien_utils.look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = sapien_utils.look_at([0.6, 0.7, 0.6], [0.0, 0.0, 0.35]) + return CameraConfig("render_camera", pose, 512, 512, 1, 0.01, 100) + + def _load_agent(self, options: dict): + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + self.cube = 
actors.build_cube( + self.scene, + half_size=self.cube_half_size, + color=[1, 0, 0, 1], + name="cube", + initial_pose=sapien.Pose(p=[0, 0, self.cube_half_size]), + ) + self.goal_site = actors.build_sphere( + self.scene, + radius=self.goal_thresh, + color=[0, 1, 0, 1], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(), + ) + self._hidden_objects.append(self.goal_site) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + xyz = torch.zeros((b, 3)) + xyz[:, :2] = torch.rand((b, 2)) * 0.2 - 0.1 + xyz[:, 2] = self.cube_half_size + qs = randomization.random_quaternions(b, lock_x=True, lock_y=True) + self.cube.set_pose(Pose.create_from_pq(xyz, qs)) + + goal_xyz = torch.zeros((b, 3)) + goal_xyz[:, :2] = torch.rand((b, 2)) * 0.2 - 0.1 + goal_xyz[:, 2] = torch.rand((b)) * 0.3 + xyz[:, 2] + self.goal_site.set_pose(Pose.create_from_pq(goal_xyz)) + + def _get_obs_extra(self, info: Dict): + # in reality some people hack is_grasped into observations by checking if the gripper can close fully or not + obs = dict( + is_grasped=info["is_grasped"], + tcp_pose=self.agent.tcp.pose.raw_pose, + goal_pos=self.goal_site.pose.p, + ) + if "state" in self.obs_mode: + obs.update( + obj_pose=self.cube.pose.raw_pose, + tcp_to_obj_pos=self.cube.pose.p - self.agent.tcp.pose.p, + obj_to_goal_pos=self.goal_site.pose.p - self.cube.pose.p, + ) + return obs + + def evaluate(self): + is_obj_placed = ( + torch.linalg.norm(self.goal_site.pose.p - self.cube.pose.p, axis=1) + <= self.goal_thresh + ) + is_grasped = self.agent.is_grasping(self.cube) + is_robot_static = self.agent.is_static(0.2) + return { + "success": is_obj_placed & is_robot_static, + "is_obj_placed": is_obj_placed, + "is_robot_static": is_robot_static, + "is_grasped": is_grasped, + } + + def compute_dense_reward(self, obs: Any, action: torch.Tensor, info: Dict): + tcp_to_obj_dist = 
torch.linalg.norm( + self.cube.pose.p - self.agent.tcp.pose.p, axis=1 + ) + reaching_reward = 1 - torch.tanh(5 * tcp_to_obj_dist) + reward = reaching_reward + + is_grasped = info["is_grasped"] + reward += is_grasped + + obj_to_goal_dist = torch.linalg.norm( + self.goal_site.pose.p - self.cube.pose.p, axis=1 + ) + place_reward = 1 - torch.tanh(5 * obj_to_goal_dist) + reward += place_reward * is_grasped + + qvel_without_gripper = self.agent.robot.get_qvel() + if self.robot_uids == "xarm6_robotiq": + qvel_without_gripper = qvel_without_gripper[..., :-6] + elif self.robot_uids == "panda": + qvel_without_gripper = qvel_without_gripper[..., :-2] + static_reward = 1 - torch.tanh( + 5 * torch.linalg.norm(qvel_without_gripper, axis=1) + ) + reward += static_reward * info["is_obj_placed"] + + reward[info["success"]] = 5 + return reward + + def compute_normalized_dense_reward( + self, obs: Any, action: torch.Tensor, info: Dict + ): + return self.compute_dense_reward(obs=obs, action=action, info=info) / 5 diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pick_single_ycb.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pick_single_ycb.py new file mode 100644 index 0000000000000000000000000000000000000000..4f619c994ee378b1388b5161c6428120e93d0d60 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pick_single_ycb.py @@ -0,0 +1,259 @@ +from typing import Any, Dict, List, Union + +import numpy as np +import sapien +import torch + +from mani_skill import ASSET_DIR +from mani_skill.agents.robots.fetch.fetch import Fetch +from mani_skill.agents.robots.panda.panda import Panda +from mani_skill.agents.robots.panda.panda_wristcam import PandaWristCam +from mani_skill.agents.robots.xmate3.xmate3 import Xmate3Robotiq +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.envs.utils.randomization.pose import random_quaternions +from mani_skill.sensors.camera 
import CameraConfig +from mani_skill.utils import common, sapien_utils +from mani_skill.utils.building import actors +from mani_skill.utils.io_utils import load_json +from mani_skill.utils.registration import register_env +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.actor import Actor +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import GPUMemoryConfig, SimConfig + +WARNED_ONCE = False + + +@register_env("PickSingleYCB-v1", max_episode_steps=50, asset_download_ids=["ycb"]) +class PickSingleYCBEnv(BaseEnv): + """ + **Task Description:** + Pick up a random object sampled from the [YCB dataset](https://www.ycbbenchmarks.com/) and move it to a random goal position + + **Randomizations:** + - the object's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat on the table + - the object's z-axis rotation is randomized + - the object geometry is randomized by randomly sampling any YCB object. (during reconfiguration) + + **Success Conditions:** + - the object position is within goal_thresh (default 0.025) euclidean distance of the goal position + - the robot is static (q velocity < 0.2) + + **Goal Specification:** + - 3D goal position (also visualized in human renders) + + **Additional Notes** + - On GPU simulation, in order to collect data from every possible object in the YCB database we recommend using at least 128 parallel environments or more, otherwise you will need to reconfigure in order to sample new objects. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PickSingleYCB-v1_rt.mp4" + + SUPPORTED_ROBOTS = ["panda", "panda_wristcam", "fetch"] + agent: Union[Panda, PandaWristCam, Fetch] + goal_thresh = 0.025 + + def __init__( + self, + *args, + robot_uids="panda_wristcam", + robot_init_qpos_noise=0.02, + num_envs=1, + reconfiguration_freq=None, + **kwargs, + ): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.model_id = None + self.all_model_ids = np.array( + [ + k + for k in load_json( + ASSET_DIR / "assets/mani_skill2_ycb/info_pick_v0.json" + ).keys() + if k + not in [ + "022_windex_bottle", + "028_skillet_lid", + "029_plate", + "059_chain", + ] # NOTE (arth): ignore these non-graspable/hard to grasp ycb objects + ] + ) + if reconfiguration_freq is None: + if num_envs == 1: + reconfiguration_freq = 1 + else: + reconfiguration_freq = 0 + super().__init__( + *args, + robot_uids=robot_uids, + reconfiguration_freq=reconfiguration_freq, + num_envs=num_envs, + **kwargs, + ) + + @property + def _default_sensor_configs(self): + pose = sapien_utils.look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = sapien_utils.look_at([0.6, 0.7, 0.6], [0.0, 0.0, 0.35]) + return CameraConfig("render_camera", pose, 512, 512, 1, 0.01, 100) + + def _load_agent(self, options: dict): + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + + def _load_scene(self, options: dict): + global WARNED_ONCE + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # randomize the list of all possible models in the YCB dataset + # then sub-scene i will load model model_ids[i % number_of_ycb_objects] + model_ids = self._batched_episode_rng.choice(self.all_model_ids, replace=True) + if ( + self.num_envs > 1 + and 
self.num_envs < len(self.all_model_ids) + and self.reconfiguration_freq <= 0 + and not WARNED_ONCE + ): + WARNED_ONCE = True + print( + """There are less parallel environments than total available models to sample. + Not all models will be used during interaction even after resets unless you call env.reset(options=dict(reconfigure=True)) + or set reconfiguration_freq to be >= 1.""" + ) + + self._objs: List[Actor] = [] + self.obj_heights = [] + for i, model_id in enumerate(model_ids): + # TODO: before official release we will finalize a metadata dataclass that these build functions should return. + builder = actors.get_actor_builder( + self.scene, + id=f"ycb:{model_id}", + ) + builder.initial_pose = sapien.Pose(p=[0, 0, 0]) + builder.set_scene_idxs([i]) + self._objs.append(builder.build(name=f"{model_id}-{i}")) + self.remove_from_state_dict_registry(self._objs[-1]) + self.obj = Actor.merge(self._objs, name="ycb_object") + self.add_to_state_dict_registry(self.obj) + + self.goal_site = actors.build_sphere( + self.scene, + radius=self.goal_thresh, + color=[0, 1, 0, 1], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(), + ) + self._hidden_objects.append(self.goal_site) + + def _after_reconfigure(self, options: dict): + self.object_zs = [] + for obj in self._objs: + collision_mesh = obj.get_first_collision_mesh() + # this value is used to set object pose so the bottom is at z=0 + self.object_zs.append(-collision_mesh.bounding_box.bounds[0, 2]) + self.object_zs = common.to_tensor(self.object_zs, device=self.device) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + xyz = torch.zeros((b, 3)) + xyz[:, :2] = torch.rand((b, 2)) * 0.2 - 0.1 + xyz[:, 2] = self.object_zs[env_idx] + qs = random_quaternions(b, lock_x=True, lock_y=True) + self.obj.set_pose(Pose.create_from_pq(p=xyz, q=qs)) + + goal_xyz = torch.zeros((b, 
3)) + goal_xyz[:, :2] = torch.rand((b, 2)) * 0.2 - 0.1 + goal_xyz[:, 2] = torch.rand((b)) * 0.3 + xyz[:, 2] + self.goal_site.set_pose(Pose.create_from_pq(goal_xyz)) + + # Initialize robot arm to a higher position above the table than the default typically used for other table top tasks + if self.robot_uids == "panda" or self.robot_uids == "panda_wristcam": + # fmt: off + qpos = np.array( + [0.0, 0, 0, -np.pi * 2 / 3, 0, np.pi * 2 / 3, np.pi / 4, 0.04, 0.04] + ) + # fmt: on + qpos[:-2] += self._episode_rng.normal( + 0, self.robot_init_qpos_noise, len(qpos) - 2 + ) + self.agent.reset(qpos) + self.agent.robot.set_root_pose(sapien.Pose([-0.615, 0, 0])) + elif self.robot_uids == "xmate3_robotiq": + qpos = np.array([0, 0.6, 0, 1.3, 0, 1.3, -1.57, 0, 0]) + qpos[:-2] += self._episode_rng.normal( + 0, self.robot_init_qpos_noise, len(qpos) - 2 + ) + self.agent.reset(qpos) + self.agent.robot.set_root_pose(sapien.Pose([-0.562, 0, 0])) + else: + raise NotImplementedError(self.robot_uids) + + def evaluate(self): + obj_to_goal_pos = self.goal_site.pose.p - self.obj.pose.p + is_obj_placed = torch.linalg.norm(obj_to_goal_pos, axis=1) <= self.goal_thresh + is_grasped = self.agent.is_grasping(self.obj) + is_robot_static = self.agent.is_static(0.2) + return dict( + is_grasped=is_grasped, + obj_to_goal_pos=obj_to_goal_pos, + is_obj_placed=is_obj_placed, + is_robot_static=is_robot_static, + is_grasping=self.agent.is_grasping(self.obj), + success=torch.logical_and(is_obj_placed, is_robot_static), + ) + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + goal_pos=self.goal_site.pose.p, + is_grasped=info["is_grasped"], + ) + if "state" in self.obs_mode: + obs.update( + tcp_to_goal_pos=self.goal_site.pose.p - self.agent.tcp.pose.p, + obj_pose=self.obj.pose.raw_pose, + tcp_to_obj_pos=self.obj.pose.p - self.agent.tcp.pose.p, + obj_to_goal_pos=self.goal_site.pose.p - self.obj.pose.p, + ) + return obs + + def compute_dense_reward(self, obs: Any, 
action: torch.Tensor, info: Dict): + tcp_to_obj_dist = torch.linalg.norm( + self.obj.pose.p - self.agent.tcp.pose.p, axis=1 + ) + reaching_reward = 1 - torch.tanh(5 * tcp_to_obj_dist) + reward = reaching_reward + + is_grasped = info["is_grasped"] + reward += is_grasped + + obj_to_goal_dist = torch.linalg.norm( + self.goal_site.pose.p - self.obj.pose.p, axis=1 + ) + place_reward = 1 - torch.tanh(5 * obj_to_goal_dist) + reward += place_reward * is_grasped + + reward += info["is_obj_placed"] * is_grasped + + static_reward = 1 - torch.tanh( + 5 * torch.linalg.norm(self.agent.robot.get_qvel()[..., :-2], axis=1) + ) + reward += static_reward * info["is_obj_placed"] * is_grasped + + reward[info["success"]] = 6 + return reward + + def compute_normalized_dense_reward( + self, obs: Any, action: torch.Tensor, info: Dict + ): + return self.compute_dense_reward(obs=obs, action=action, info=info) / 6 diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/place_sphere.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/place_sphere.py new file mode 100644 index 0000000000000000000000000000000000000000..e0a37e1bc735605da604a9345793a1f6c9fa8451 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/place_sphere.py @@ -0,0 +1,258 @@ +from typing import Any, Dict, Union + +import gymnasium as gym +import matplotlib.pyplot as plt +import numpy as np +import sapien +import torch +import torch.random +from transforms3d.euler import euler2quat + +from mani_skill.agents.robots import Fetch, Panda +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.envs.utils import randomization +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils import common, sapien_utils +from mani_skill.utils.building import actors +from mani_skill.utils.registration import register_env +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from 
mani_skill.utils.structs import Pose +from mani_skill.utils.structs.types import Array, GPUMemoryConfig, SimConfig + + +@register_env("PlaceSphere-v1", max_episode_steps=50) +class PlaceSphereEnv(BaseEnv): + """ + **Task Description:** + Place the sphere into the shallow bin. + + **Randomizations:** + - The position of the bin and the sphere are randomized: The bin is initialized in [0, 0.1] x [-0.1, 0.1], + and the sphere is initialized in [-0.1, -0.05] x [-0.1, 0.1] + + **Success Conditions:** + - The sphere is placed on the top of the bin. The robot remains static and the gripper is not closed at the end state. + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceSphere-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch"] + + # Specify some supported robot types + agent: Union[Panda, Fetch] + + # set some commonly used values + radius = 0.02 # radius of the sphere + inner_side_half_len = 0.02 # side length of the bin's inner square + short_side_half_size = 0.0025 # length of the shortest edge of the block + block_half_size = [ + short_side_half_size, + 2 * short_side_half_size + inner_side_half_len, + 2 * short_side_half_size + inner_side_half_len, + ] # The bottom block of the bin, which is larger: The list represents the half length of the block along the [x, y, z] axis respectively. + edge_block_half_size = [ + short_side_half_size, + 2 * short_side_half_size + inner_side_half_len, + 2 * short_side_half_size, + ] # The edge block of the bin, which is smaller. 
The representations are similar to the above one + + def __init__(self, *args, robot_uids="panda", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sim_config(self): + return SimConfig( + gpu_memory_config=GPUMemoryConfig( + found_lost_pairs_capacity=2**25, max_rigid_patch_count=2**18 + ) + ) + + @property + def _default_sensor_configs(self): + pose = sapien_utils.look_at(eye=[0.3, 0, 0.2], target=[-0.1, 0, 0]) + return [ + CameraConfig( + "base_camera", + pose=pose, + width=128, + height=128, + fov=np.pi / 2, + near=0.01, + far=100, + ) + ] + + @property + def _default_human_render_camera_configs(self): + pose = sapien_utils.look_at([0.6, -0.2, 0.2], [0.0, 0.0, 0.2]) + return CameraConfig( + "render_camera", pose=pose, width=512, height=512, fov=1, near=0.01, far=100 + ) + + def _build_bin(self, radius): + builder = self.scene.create_actor_builder() + + # init the locations of the basic blocks + dx = self.block_half_size[1] - self.block_half_size[0] + dy = self.block_half_size[1] - self.block_half_size[0] + dz = self.edge_block_half_size[2] + self.block_half_size[0] + + # build the bin bottom and edge blocks + poses = [ + sapien.Pose([0, 0, 0]), + sapien.Pose([-dx, 0, dz]), + sapien.Pose([dx, 0, dz]), + sapien.Pose([0, -dy, dz]), + sapien.Pose([0, dy, dz]), + ] + half_sizes = [ + [self.block_half_size[1], self.block_half_size[2], self.block_half_size[0]], + self.edge_block_half_size, + self.edge_block_half_size, + [ + self.edge_block_half_size[1], + self.edge_block_half_size[0], + self.edge_block_half_size[2], + ], + [ + self.edge_block_half_size[1], + self.edge_block_half_size[0], + self.edge_block_half_size[2], + ], + ] + for pose, half_size in zip(poses, half_sizes): + builder.add_box_collision(pose, half_size) + builder.add_box_visual(pose, half_size) + + # build the kinematic bin + return builder.build_kinematic(name="bin") + + 
def _load_agent(self, options: dict): + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + + def _load_scene(self, options: dict): + # load the table + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # load the sphere + self.obj = actors.build_sphere( + self.scene, + radius=self.radius, + color=np.array([12, 42, 160, 255]) / 255, + name="sphere", + body_type="dynamic", + ) + + # load the bin + self.bin = self._build_bin(self.radius) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + # init the table scene + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # init the sphere in the first 1/4 zone along the x-axis (so that it doesn't collide the bin) + xyz = torch.zeros((b, 3)) + xyz[..., 0] = (torch.rand((b, 1)) * 0.05 - 0.1)[ + ..., 0 + ] # first 1/4 zone of x ([-0.1, -0.05]) + xyz[..., 1] = (torch.rand((b, 1)) * 0.2 - 0.1)[ + ..., 0 + ] # spanning all possible ys + xyz[..., 2] = self.radius # on the table + q = [1, 0, 0, 0] + obj_pose = Pose.create_from_pq(p=xyz, q=q) + self.obj.set_pose(obj_pose) + + # init the bin in the last 1/2 zone along the x-axis (so that it doesn't collide the sphere) + pos = torch.zeros((b, 3)) + pos[:, 0] = ( + torch.rand((b, 1))[..., 0] * 0.1 + ) # the last 1/2 zone of x ([0, 0.1]) + pos[:, 1] = ( + torch.rand((b, 1))[..., 0] * 0.2 - 0.1 + ) # spanning all possible ys + pos[:, 2] = self.block_half_size[0] # on the table + q = [1, 0, 0, 0] + bin_pose = Pose.create_from_pq(p=pos, q=q) + self.bin.set_pose(bin_pose) + + def evaluate(self): + pos_obj = self.obj.pose.p + pos_bin = self.bin.pose.p + offset = pos_obj - pos_bin + xy_flag = torch.linalg.norm(offset[..., :2], axis=1) <= 0.005 + z_flag = ( + torch.abs(offset[..., 2] - self.radius - self.block_half_size[0]) <= 0.005 + ) + is_obj_on_bin = torch.logical_and(xy_flag, z_flag) + is_obj_static = 
self.obj.is_static(lin_thresh=1e-2, ang_thresh=0.5) + is_obj_grasped = self.agent.is_grasping(self.obj) + success = is_obj_on_bin & is_obj_static & (~is_obj_grasped) + return { + "is_obj_grasped": is_obj_grasped, + "is_obj_on_bin": is_obj_on_bin, + "is_obj_static": is_obj_static, + "success": success, + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + is_grasped=info["is_obj_grasped"], + tcp_pose=self.agent.tcp.pose.raw_pose, + bin_pos=self.bin.pose.p, + ) + if "state" in self.obs_mode: + obs.update( + obj_pose=self.obj.pose.raw_pose, + tcp_to_obj_pos=self.obj.pose.p - self.agent.tcp.pose.p, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: torch.Tensor, info: Dict): + # reaching reward + tcp_pose = self.agent.tcp.pose.p + obj_pos = self.obj.pose.p + obj_to_tcp_dist = torch.linalg.norm(tcp_pose - obj_pos, axis=1) + reward = 2 * (1 - torch.tanh(5 * obj_to_tcp_dist)) + + # grasp and place reward + obj_pos = self.obj.pose.p + self.bin.pose.p + bin_top_pos = self.bin.pose.p.clone() + bin_top_pos[:, 2] = bin_top_pos[:, 2] + self.block_half_size[0] + self.radius + obj_to_bin_top_dist = torch.linalg.norm(bin_top_pos - obj_pos, axis=1) + place_reward = 1 - torch.tanh(5.0 * obj_to_bin_top_dist) + reward[info["is_obj_grasped"]] = (4 + place_reward)[info["is_obj_grasped"]] + + # ungrasp and static reward + gripper_width = (self.agent.robot.get_qlimits()[0, -1, 1] * 2).to(self.device) + is_obj_grasped = info["is_obj_grasped"] + ungrasp_reward = ( + torch.sum(self.agent.robot.get_qpos()[:, -2:], axis=1) / gripper_width + ) + ungrasp_reward[ + ~is_obj_grasped + ] = 16.0 # give ungrasp a bigger reward, so that it exceeds the robot static reward and the gripper can close + v = torch.linalg.norm(self.obj.linear_velocity, axis=1) + av = torch.linalg.norm(self.obj.angular_velocity, axis=1) + static_reward = 1 - torch.tanh(v * 10 + av) + robot_static_reward = self.agent.is_static( + 0.2 + ) # keep the robot static at the end state, since the sphere may 
spin when being placed on top + reward[info["is_obj_on_bin"]] = ( + 6 + (ungrasp_reward + static_reward + robot_static_reward) / 3.0 + )[info["is_obj_on_bin"]] + + # success reward + reward[info["success"]] = 13 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + # this should be equal to compute_dense_reward / max possible reward + max_reward = 13.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/plate_on_rack.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/plate_on_rack.py new file mode 100644 index 0000000000000000000000000000000000000000..ac02edaa74e43b2f62a01648930f56248fff0efa --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/plate_on_rack.py @@ -0,0 +1,454 @@ +from typing import Any, Dict, Union +import os +import numpy as np +import sapien +import torch +import torch.random +from transforms3d.euler import euler2quat,quat2euler + +from mani_skill.agents.robots import Fetch, Panda, Piper, PandaWristCam, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. 
+ q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PlacePlateOnRack-v1", max_episode_steps=500) +class PlacePlateOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a plate vertically on a dish rack. + + **Randomizations:** + - The plate's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat on the table. + + **Success Conditions:** + - The plate is placed vertically on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "piper", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, Piper, PandaWristCam, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + plate_radius = 0.1 + plate_thickness = 0.01 + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + # PANDA CAMERA + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25], [-1, 0, 0]) + elif self.robot_uids == "piper": + # PIPER CAMERA + # pose = look_at([1, -1, 1], [0.0, 0.0, 0.25],[-1,0,0]) + pose = look_at([1, 0, 1], [0.0, 0.0, 0.25], [-1, 0, 0]) + + elif self.robot_uids in ["noahbiarm_r", "noahbiarm_rc","noahbiarm_rcw"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + + # import pdb; pdb.set_trace() + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + elif self.robot_uids in ["piper"]: + return sapien.Pose(p=[-0.35, 0, 0]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, 
options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate_extents = get_actor_obb(self.plate).extents + + + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + + # init goalsite + self.goal_half_sizes = [self.plate_extents[2] , self.plate_extents[1]/2, self.plate_extents[0]/2] + self.goal_site = actors.build_box( + self.scene, + half_sizes=self.goal_half_sizes, + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.goal_half_sizes[0]*0.5, 0, self.goal_half_sizes[2]]), + ) + self.goal_extents = np.array(self.goal_half_sizes) * 2 + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam", "piper"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q 
+ + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################# + # plate pose initialization + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.00 - 0.03 + y = -1 * side * torch.ones((b, 1)) * 0.5 + 0.03 + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.00 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 
1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + torch.empty(b, 1).uniform_(-np.pi/9, np.pi/9) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm max reach + max_reach = 0.855 + min_reach = 0.010 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + + + ################################################################################## + # plate pose initialize + obb = get_actor_obb(self.plate) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # rotation + # (1,0,0,0) since plate is round, flate on table + q = torch.tensor([1, 0, 0, 0], 
dtype=torch.float32).repeat(b, 1) + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + q = self.plate.pose.q + qmat = rotation_conversions.quaternion_to_matrix(q) + euler = rotation_conversions.matrix_to_euler_angles(qmat, "XYZ") + #print("plate init rot:", euler) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) * np.pi + euler[:, 2] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + #batch to q + q = torch.zeros((b, 4), dtype=torch.float32) + for i in range(b): + q[i] = torch.tensor(euler2quat(*euler[i], axes='sxyz'), dtype=torch.float32) + #q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + #Goal goes with rack + goal_euler = torch.zeros((b, 3), dtype=torch.float32) + goal_euler[:, 2] = euler[:, 2] + np.pi/2 + goal_euler[:, 0] = -np.pi/2 + goal_q = torch.zeros((b, 4), dtype=torch.float32) + for i in range(b): + goal_q[i] = torch.tensor(euler2quat(*goal_euler[i], axes='sxyz'), dtype=torch.float32) + z_goal = torch.full((b, 1), 0.103, dtype=torch.float32) + p_goal = robot_p + torch.cat((x, y, z_goal), dim=1) + self.goal_site.set_pose(Pose.create_from_pq(p=p_goal, q=goal_q)) + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = 
self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + #plate is vertical + q = self.plate.pose.q + qmat = rotation_conversions.quaternion_to_matrix(q) + euler = rotation_conversions.matrix_to_euler_angles(qmat, "XYZ") + is_plate_vertical = ( + torch.abs(torch.abs(euler[:, 1])- np.pi/2) < np.pi/8 + or torch.abs(torch.abs(euler[:, 0])- np.pi/2) < np.pi/8 + ) + + # plate is placed + goal_extents = torch.from_numpy(self.goal_extents.copy()) * 0.5 + goal_p, _ = self.get_goal_site_pose() + is_plate_placed = ( + self.is_inside(self.plate.pose.p, goal_p , goal_extents) + ) + + # not grasping + not_grasping = torch.logical_not(self.agent.is_grasping(self.plate)) + + # plate is static + is_plate_static = self.plate.is_static() + + + return { + "success": is_plate_vertical & is_plate_placed & not_grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.plate.pose.raw_pose, + ) + return obs + + 
def compute_dense_reward(self, obs: Any, action: Array, info: Dict):
    """Dense reward in [0, 3]: orientation + height alignment + reaching.

    - Orientation: |cos| between the plate's rotated x-axis and the goal
      direction (0, 1, 0); abs() because (0, -1, 0) is equally valid.
    - Placement: tanh-shaped reward for closing the vertical gap between
      the plate and the rack centers.
    - Reaching: small (1/5-weighted) tanh reward for moving the TCP to the
      plate; granted in full while the plate is grasped.
    - On success the reward is overridden with the maximum value 3.
    """
    # Orientation term: plate x-axis rotated into world frame, projected
    # onto the goal direction.
    plate_rot = rotation_conversions.quaternion_to_matrix(self.plate.pose.q)
    x_axis = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32)
    target_dir = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32)
    plate_dir = (plate_rot @ x_axis).view(-1, 3)
    orientation_rew = (plate_dir @ target_dir).view(-1).abs()

    # Placement term: standard ManiSkill 1 - tanh(5 * distance) shaping on
    # the vertical offset between plate and rack centers of mass.
    height_gap = torch.abs(self.plate.pose.p[:, 2] - self.rack.pose.p[:, 2])
    placement_rew = 1 - torch.tanh(5 * height_gap)

    # Reaching term: encourage the TCP to approach (and then hold) the plate.
    tcp_to_plate = self.plate.pose.p - self.agent.tcp.pose.p
    tcp_dist = torch.linalg.norm(tcp_to_plate, dim=1)
    reaching_rew = 1 - torch.tanh(5 * tcp_dist)
    reaching_rew[self.agent.is_grasping(self.plate)] = 1

    reward = orientation_rew + placement_rew + reaching_rew / 5
    reward[info["success"]] = 3
    return reward

def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict):
    """Dense reward scaled into [0, 1] by its maximum value of 3."""
    max_reward = 3.0
    return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward
def quaternion_multiply(q, r):
    """
    Hamilton product of two batches of quaternions.

    Args:
        q, r: tensors of shape (B, 4) in [w, x, y, z] order.

    Returns:
        Tensor of shape (B, 4): the batched product q * r.
    """
    w1, x1, y1, z1 = q.unbind(-1)
    w2, x2, y2, z2 = r.unbind(-1)
    return torch.stack([
        w1*w2 - x1*x2 - y1*y2 - z1*z2,
        w1*x2 + x1*w2 + y1*z2 - z1*y2,
        w1*y2 - x1*z2 + y1*w2 + z1*x2,
        w1*z2 + x1*y2 - y1*x2 + z1*w2,
    ], dim=-1)


def rotate_vector(q, v):
    """
    Rotate vector v by quaternion q via v' = q * (0, v) * conj(q).

    Args:
        q: tensor of shape (B, 4) in [w, x, y, z] order.
        v: tensor of shape (B, 3), or (3,) which is broadcast over the batch.

    Returns:
        Rotated vectors of shape (B, 3).
    """
    # BUG FIX: the docstring promised (3,) inputs would broadcast, but
    # torch.cat of a (B, 1) zeros column with a 1-D v raises; expand explicitly.
    if v.dim() == 1:
        v = v.unsqueeze(0).expand(q.shape[0], -1)

    # Embed v as a pure quaternion (0, vx, vy, vz).
    zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype)
    v_quat = torch.cat([zeros, v], dim=-1)  # shape (B, 4)

    # Conjugate: negate the vector part.
    q_conj = q.clone()
    q_conj[:, 1:] = -q_conj[:, 1:]

    # Sandwich product, then drop the scalar part.
    v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj)
    return v_rot[:, 1:]
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "piper", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, Piper, PandaWristCam, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + plate_radius = 0.1 + plate_thickness = 0.01 + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + # PANDA CAMERA + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25], [-1, 0, 0]) + elif self.robot_uids == "piper": + # PIPER CAMERA + # pose = look_at([1, -1, 1], [0.0, 0.0, 0.25],[-1,0,0]) + pose = look_at([1, 0, 1], [0.0, 0.0, 0.25], [-1, 0, 0]) + + elif self.robot_uids in ["noahbiarm_r", "noahbiarm_rc","noahbiarm_rcw"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + + # import pdb; pdb.set_trace() + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + elif self.robot_uids in ["piper"]: + return sapien.Pose(p=[-0.35, 0, 0]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, 
options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate_extents = get_actor_obb(self.plate).extents + + + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + + # init goalsite + self.goal_half_sizes = [self.plate_extents[2] , self.plate_extents[1]/2, self.plate_extents[0]/2] + self.goal_site = actors.build_box( + self.scene, + half_sizes=self.goal_half_sizes, + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.goal_half_sizes[0]*0.5, 0, self.goal_half_sizes[2]]), + ) + self.goal_extents = np.array(self.goal_half_sizes) * 2 + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam", "piper"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q 
+ + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################# + # plate pose initialization + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.00 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.00 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 
0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + torch.empty(b, 1).uniform_(-np.pi/9, np.pi/9) + if self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm max reach + max_reach = 0.855 + min_reach = 0.010 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + + + ################################################################################## + # plate pose initialize + obb = get_actor_obb(self.plate) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # rotation + # (1,0,0,0) since plate is round, flate on table + q = torch.tensor([1, 0, 0, 0], dtype=torch.float32).repeat(b, 1) + 
self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + q = self.plate.pose.q + qmat = rotation_conversions.quaternion_to_matrix(q) + euler = rotation_conversions.matrix_to_euler_angles(qmat, "XYZ") + #print("plate init rot:", euler) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) * np.pi + euler[:, 2] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + #batch to q + q = torch.zeros((b, 4), dtype=torch.float32) + for i in range(b): + q[i] = torch.tensor(euler2quat(*euler[i], axes='sxyz'), dtype=torch.float32) + #q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + #Goal goes with rack + goal_euler = torch.zeros((b, 3), dtype=torch.float32) + goal_euler[:, 2] = euler[:, 2] + np.pi/2 + goal_euler[:, 0] = -np.pi/2 + goal_q = torch.zeros((b, 4), dtype=torch.float32) + for i in range(b): + goal_q[i] = torch.tensor(euler2quat(*goal_euler[i], axes='sxyz'), dtype=torch.float32) + z_goal = torch.full((b, 1), 0.103, dtype=torch.float32) + p_goal = robot_p + torch.cat((x, y, z_goal), dim=1) + self.goal_site.set_pose(Pose.create_from_pq(p=p_goal, q=goal_q)) + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = 
self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + #plate is vertical + q = self.plate.pose.q + qmat = rotation_conversions.quaternion_to_matrix(q) + euler = rotation_conversions.matrix_to_euler_angles(qmat, "XYZ") + is_plate_vertical = ( + torch.abs(torch.abs(euler[:, 1])- np.pi/2) < np.pi/8 + or torch.abs(torch.abs(euler[:, 0])- np.pi/2) < np.pi/8 + ) + + # plate is placed + goal_extents = torch.from_numpy(self.goal_extents.copy()) * 0.5 + goal_p, _ = self.get_goal_site_pose() + is_plate_placed = ( + self.is_inside(self.plate.pose.p, goal_p , goal_extents) + ) + + # not grasping + not_grasping = torch.logical_not(self.agent.is_grasping(self.plate)) + + # plate is static + is_plate_static = self.plate.is_static() + + + return { + "success": is_plate_vertical & is_plate_placed & not_grasping + } + + def _get_obs_extra(self, info: Dict): + obj_bb = get_3d_bbox(self.plate) + rack_bb = get_3d_bbox(self.rack) + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + obj_pose=self.plate.pose.raw_pose, + rack_pose=self.rack.pose.raw_pose, + 
obj_bb=obj_bb, + rack_bb=rack_bb, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.plate.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.plate.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the plate + to_grip_vec = self.plate.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the plate + reaching_rew[self.agent.is_grasping(self.plate)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/plate_on_rack_v4.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/plate_on_rack_v4.py new file mode 100644 index 
def quaternion_multiply(q, r):
    """
    Hamilton product of two batches of quaternions.

    Args:
        q, r: tensors of shape (B, 4) in [w, x, y, z] order.

    Returns:
        Tensor of shape (B, 4): the batched product q * r.
    """
    w1, x1, y1, z1 = q.unbind(-1)
    w2, x2, y2, z2 = r.unbind(-1)
    return torch.stack([
        w1*w2 - x1*x2 - y1*y2 - z1*z2,
        w1*x2 + x1*w2 + y1*z2 - z1*y2,
        w1*y2 - x1*z2 + y1*w2 + z1*x2,
        w1*z2 + x1*y2 - y1*x2 + z1*w2,
    ], dim=-1)


def rotate_vector(q, v):
    """
    Rotate vector v by quaternion q via v' = q * (0, v) * conj(q).

    Args:
        q: tensor of shape (B, 4) in [w, x, y, z] order.
        v: tensor of shape (B, 3), or (3,) which is broadcast over the batch.

    Returns:
        Rotated vectors of shape (B, 3).
    """
    # BUG FIX: the docstring promised (3,) inputs would broadcast, but
    # torch.cat of a (B, 1) zeros column with a 1-D v raises; expand explicitly.
    if v.dim() == 1:
        v = v.unsqueeze(0).expand(q.shape[0], -1)

    # Embed v as a pure quaternion (0, vx, vy, vz).
    zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype)
    v_quat = torch.cat([zeros, v], dim=-1)  # shape (B, 4)

    # Conjugate: negate the vector part.
    q_conj = q.clone()
    q_conj[:, 1:] = -q_conj[:, 1:]

    # Sandwich product, then drop the scalar part.
    v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj)
    return v_rot[:, 1:]
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "piper", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"] + agent: Union[Panda, Fetch, Piper, PandaWristCam, NoahBiArmR, NoahBiArmRC, NoahBiArmRCW] + + plate_radius = 0.1 + plate_thickness = 0.01 + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + # PANDA CAMERA + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25], [-1, 0, 0]) + elif self.robot_uids == "piper": + # PIPER CAMERA + # pose = look_at([1, -1, 1], [0.0, 0.0, 0.25],[-1,0,0]) + pose = look_at([1, 0, 1], [0.0, 0.0, 0.25], [-1, 0, 0]) + + elif self.robot_uids in ["noahbiarm_r", "noahbiarm_rc","noahbiarm_rcw"]: + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + + # import pdb; pdb.set_trace() + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r", "noahbiarm_rc", "noahbiarm_rcw"]: + return sapien.Pose(p=[-1, 0, -0.7]) + elif self.robot_uids in ["piper"]: + return sapien.Pose(p=[-0.35, 0, 0]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, 
options: dict): + self.table_scene = TableSceneBuilder( + env=self, + robot_init_qpos_noise=self.robot_init_qpos_noise, + keyframe="vertical_grasp" + ) + self.table_scene.build() + + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + + # init bowl + self.bowl = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_bowl.obj"), + scale=(1, 1, 1), + color=np.array([0, 0, 255, 255], dtype=np.float32) / 255, + name="bowl", + body_type="dynamic", + add_collision=True, + initial_pose=sapien.Pose(p=[0, 0, 0], q=[1, 0, 0, 0]), + ) + self.bowl_extents = get_actor_obb(self.bowl).extents + + # init plate + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate_extents = get_actor_obb(self.plate).extents + + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_fork.obj", + scale=(1,1,1), + color=np.array([255, 0, 0, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + + # init knife + self.knife = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_knife.obj", + scale=(1,1,1), + color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + name="knife", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.knife_extents = get_actor_obb(self.knife).extents + + # # 
init mug + # self.mug = actors.build_nonconvex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_mug.obj", + # scale=(1,1,1), + # color=np.array([0, 255, 0, 255], dtype=np.float32) / 255, + # name="mug", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0, 0, 0]), + # ) + # self.mug_extents = get_actor_obb(self.mug).extents + + # init goalsite + self.goal_half_sizes = [self.plate_extents[2] , self.plate_extents[1]/2, self.plate_extents[0]/2] + self.goal_site = actors.build_box( + self.scene, + half_sizes=self.goal_half_sizes, + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.goal_half_sizes[0]*0.5, 0, self.goal_half_sizes[2]]), + ) + self.goal_extents = np.array(self.goal_half_sizes) * 2 + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam", "piper"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r","noahbiarm_rc", "noahbiarm_rcw"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + ################################################################################# + # plate pose initialization + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.00 + y = -1 * side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = 
rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # default position + x = torch.ones((b, 1)) * 0.00 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -1 * side * torch.ones((b, 1)) * 0.5 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros((b, 1)) + self.plate_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.2 + y = -side * torch.ones((b, 1)) * 0.2 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # default translation + x = torch.ones((b, 1)) * 0.2 + x_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + x += x_rand + y = -side * torch.ones((b, 1)) * 0.2 + y_rand = torch.empty(b, 1).uniform_(-0.05, 0.05) + y += y_rand + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi + torch.empty(b, 1).uniform_(-np.pi/9, np.pi/9) + if 
self.robot_uids in ["noahbiarm_rc", "noahbiarm_rcw"]: + euler[:, 0] = np.pi + torch.empty(b, 1).uniform_(-np.pi/18, np.pi/18) + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + # ################################################################################## + # init fork + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.45 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + # ################################################################################## + # init knife + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.75 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + self.knife.set_pose(Pose.create_from_pq(p=p, q=q)) + + # ################################################################################## + # init bowl + # default position + x = torch.ones((b, 1)) * 0.30 + y = -1 * side * torch.ones((b, 1)) * 0.60 + z = torch.zeros((b, 1)) + self.bowl_extents[2]/2 + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + self.bowl.set_pose(Pose.create_from_pq(p=p, q=q)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm max reach + max_reach = 0.855 + min_reach = 0.010 + # get arm base positions + robot_p = 
self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + + + ################################################################################## + # plate pose initialize + obb = get_actor_obb(self.plate) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # rotation + # (1,0,0,0) since plate is round, flate on table + q = torch.tensor([1, 0, 0, 0], dtype=torch.float32).repeat(b, 1) + self.plate.set_pose(Pose.create_from_pq(p=p, q=q)) + q = self.plate.pose.q + qmat = rotation_conversions.quaternion_to_matrix(q) + euler = rotation_conversions.matrix_to_euler_angles(qmat, "XYZ") + #print("plate init rot:", euler) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) * np.pi + euler[:, 2] = 
torch.rand((b,), dtype=torch.float32) * np.pi * 2 + #batch to q + q = torch.zeros((b, 4), dtype=torch.float32) + for i in range(b): + q[i] = torch.tensor(euler2quat(*euler[i], axes='sxyz'), dtype=torch.float32) + #q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + #Goal goes with rack + goal_euler = torch.zeros((b, 3), dtype=torch.float32) + goal_euler[:, 2] = euler[:, 2] + np.pi/2 + goal_euler[:, 0] = -np.pi/2 + goal_q = torch.zeros((b, 4), dtype=torch.float32) + for i in range(b): + goal_q[i] = torch.tensor(euler2quat(*goal_euler[i], axes='sxyz'), dtype=torch.float32) + z_goal = torch.full((b, 1), 0.103, dtype=torch.float32) + p_goal = robot_p + torch.cat((x, y, z_goal), dim=1) + self.goal_site.set_pose(Pose.create_from_pq(p=p_goal, q=goal_q)) + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). + p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box.
+ """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + #plate is vertical + q = self.plate.pose.q + qmat = rotation_conversions.quaternion_to_matrix(q) + euler = rotation_conversions.matrix_to_euler_angles(qmat, "XYZ") + is_plate_vertical = ( + torch.abs(torch.abs(euler[:, 1])- np.pi/2) < np.pi/8 + or torch.abs(torch.abs(euler[:, 0])- np.pi/2) < np.pi/8 + ) + + # plate is placed + goal_extents = torch.from_numpy(self.goal_extents.copy()) * 0.5 + goal_p, _ = self.get_goal_site_pose() + is_plate_placed = ( + self.is_inside(self.plate.pose.p, goal_p , goal_extents) + ) + + # not grasping + not_grasping = torch.logical_not(self.agent.is_grasping(self.plate)) + + # plate is static + is_plate_static = self.plate.is_static() + + + return { + "success": is_plate_vertical & is_plate_placed & not_grasping + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.plate.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.plate.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern 
+ # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.plate.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the plate + to_grip_vec = self.plate.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the plate + reaching_rew[self.agent.is_grasping(self.plate)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/plug_charger.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/plug_charger.py new file mode 100644 index 0000000000000000000000000000000000000000..e82bc95359563f87029009ca1f36ff684a664430 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/plug_charger.py @@ -0,0 +1,286 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +from transforms3d.euler import euler2quat + +from mani_skill.agents.robots import PandaWristCam +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.envs.utils import randomization +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils import common, sapien_utils +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types 
import SimConfig + + +@register_env("PlugCharger-v1", max_episode_steps=200) +class PlugChargerEnv(BaseEnv): + """ + **Task Description:** + The robot must pick up one of the misplaced shapes on the board/kit and insert it into the correct empty slot. + + **Randomizations:** + - The charger position is randomized on the XY plane on top of the table. The rotation is also randomized + - The receptacle position is randomized on the XY plane and the rotation is also randomized. Note that the human render camera has its pose + fixed relative to the receptacle. + + **Success Conditions:** + - The charger is inserted into the receptacle + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlugCharger-v1_rt.mp4" + + _base_size = [2e-2, 1.5e-2, 1.2e-2] # charger base half size + _peg_size = [8e-3, 0.75e-3, 3.2e-3] # charger peg half size + _peg_gap = 7e-3 # charger peg gap + _clearance = 5e-4 # single side clearance + _receptacle_size = [1e-2, 5e-2, 5e-2] # receptacle half size + + SUPPORTED_ROBOTS = ["panda_wristcam"] + agent: Union[PandaWristCam] + + def __init__( + self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, **kwargs + ): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sim_config(self): + return SimConfig() + + @property + def _default_sensor_configs(self): + pose = sapien_utils.look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [ + CameraConfig("base_camera", pose=pose, width=128, height=128, fov=np.pi / 2) + ] + + @property + def _default_human_render_camera_configs(self): + pose = sapien_utils.look_at([0.3, 0.4, 0.1], [0, 0, 0]) + return [ + CameraConfig( + "render_camera", + pose=pose, + width=512, + height=512, + fov=1, + mount=self.receptacle, + ) + ] + + def _build_charger(self, peg_size, base_size, gap): + builder = self.scene.create_actor_builder() + + # peg + mat = 
sapien.render.RenderMaterial() + mat.set_base_color([1, 1, 1, 1]) + mat.metallic = 1.0 + mat.roughness = 0.0 + mat.specular = 1.0 + builder.add_box_collision(sapien.Pose([peg_size[0], gap, 0]), peg_size) + builder.add_box_visual( + sapien.Pose([peg_size[0], gap, 0]), peg_size, material=mat + ) + builder.add_box_collision(sapien.Pose([peg_size[0], -gap, 0]), peg_size) + builder.add_box_visual( + sapien.Pose([peg_size[0], -gap, 0]), peg_size, material=mat + ) + + # base + mat = sapien.render.RenderMaterial() + mat.set_base_color([1, 1, 1, 1]) + mat.metallic = 0.0 + mat.roughness = 0.1 + builder.add_box_collision(sapien.Pose([-base_size[0], 0, 0]), base_size) + builder.add_box_visual( + sapien.Pose([-base_size[0], 0, 0]), base_size, material=mat + ) + builder.initial_pose = sapien.Pose(p=[0, 0, self._base_size[2]]) + return builder.build(name="charger") + + def _build_receptacle(self, peg_size, receptacle_size, gap): + builder = self.scene.create_actor_builder() + + sy = 0.5 * (receptacle_size[1] - peg_size[1] - gap) + sz = 0.5 * (receptacle_size[2] - peg_size[2]) + dx = -receptacle_size[0] + dy = peg_size[1] + gap + sy + dz = peg_size[2] + sz + + mat = sapien.render.RenderMaterial() + mat.set_base_color([1, 1, 1, 1]) + mat.metallic = 0.0 + mat.roughness = 0.1 + + poses = [ + sapien.Pose([dx, 0, dz]), + sapien.Pose([dx, 0, -dz]), + sapien.Pose([dx, dy, 0]), + sapien.Pose([dx, -dy, 0]), + ] + half_sizes = [ + [receptacle_size[0], receptacle_size[1], sz], + [receptacle_size[0], receptacle_size[1], sz], + [receptacle_size[0], sy, receptacle_size[2]], + [receptacle_size[0], sy, receptacle_size[2]], + ] + for pose, half_size in zip(poses, half_sizes): + builder.add_box_collision(pose, half_size) + builder.add_box_visual(pose, half_size, material=mat) + + # Fill the gap + pose = sapien.Pose([-receptacle_size[0], 0, 0]) + half_size = [receptacle_size[0], gap - peg_size[1], peg_size[2]] + builder.add_box_collision(pose, half_size) + builder.add_box_visual(pose, half_size, 
material=mat) + + # Add dummy visual for hole + mat = sapien.render.RenderMaterial() + mat.set_base_color(sapien_utils.hex2rgba("#DBB539")) + mat.metallic = 1.0 + mat.roughness = 0.0 + mat.specular = 1.0 + pose = sapien.Pose([-receptacle_size[0], -(gap * 0.5 + peg_size[1]), 0]) + half_size = [receptacle_size[0], peg_size[1], peg_size[2]] + builder.add_box_visual(pose, half_size, material=mat) + pose = sapien.Pose([-receptacle_size[0], gap * 0.5 + peg_size[1], 0]) + builder.add_box_visual(pose, half_size, material=mat) + builder.initial_pose = sapien.Pose(p=[0, 0, 0.1]) + return builder.build_kinematic(name="receptacle") + + def _load_agent(self, options: dict): + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + + def _load_scene(self, options: dict): + self.scene_builder = TableSceneBuilder( + self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.scene_builder.build() + self.charger = self._build_charger( + self._peg_size, + self._base_size, + self._peg_gap, + ) + self.receptacle = self._build_receptacle( + [ + self._peg_size[0], + self._peg_size[1] + self._clearance, + self._peg_size[2] + self._clearance, + ], + self._receptacle_size, + self._peg_gap, + ) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.scene_builder.initialize(env_idx) + + # Initialize agent + qpos = torch.tensor( + [ + 0.0, + np.pi / 8, + 0, + -np.pi * 5 / 8, + 0, + np.pi * 3 / 4, + np.pi / 4, + 0.04, + 0.04, + ] + ) + qpos = ( + torch.normal( + 0, self.robot_init_qpos_noise, (b, len(qpos)), device=self.device + ) + + qpos + ) + qpos[:, -2:] = 0.04 + self.agent.robot.set_qpos(qpos) + self.agent.robot.set_pose(sapien.Pose([-0.615, 0, 0])) + + # Initialize charger + xy = randomization.uniform( + [-0.1, -0.2], [-0.01 - self._peg_size[0] * 2, 0.2], size=(b, 2) + ) + pos = torch.zeros((b, 3)) + pos[:, :2] = xy + pos[:, 2] = self._base_size[2] + ori = randomization.random_quaternions( + n=b, 
lock_x=True, lock_y=True, bounds=(-torch.pi / 3, torch.pi / 3) + ) + self.charger.set_pose(Pose.create_from_pq(pos, ori)) + + # Initialize receptacle + xy = randomization.uniform([0.01, -0.1], [0.1, 0.1], size=(b, 2)) + pos = torch.zeros((b, 3)) + pos[:, :2] = xy + pos[:, 2] = 0.1 + ori = randomization.random_quaternions( + n=b, + lock_x=True, + lock_y=True, + bounds=(torch.pi - torch.pi / 8, torch.pi + torch.pi / 8), + ) + self.receptacle.set_pose(Pose.create_from_pq(pos, ori)) + + self.goal_pose = self.receptacle.pose * ( + sapien.Pose(q=euler2quat(0, 0, np.pi)) + ) + + @property + def charger_base_pose(self): + return self.charger.pose * (sapien.Pose([-self._base_size[0], 0, 0])) + + def _compute_distance(self): + obj_pose = self.charger.pose + obj_to_goal_pos = self.goal_pose.p - obj_pose.p + obj_to_goal_dist = torch.linalg.norm(obj_to_goal_pos, axis=1) + + obj_to_goal_quat = rotation_conversions.quaternion_multiply( + rotation_conversions.quaternion_invert(self.goal_pose.q), obj_pose.q + ) + obj_to_goal_axis = rotation_conversions.quaternion_to_axis_angle( + obj_to_goal_quat + ) + obj_to_goal_angle = torch.linalg.norm(obj_to_goal_axis, axis=1) + obj_to_goal_angle = torch.min( + obj_to_goal_angle, torch.pi * 2 - obj_to_goal_angle + ) + + return obj_to_goal_dist, obj_to_goal_angle + + def evaluate(self): + obj_to_goal_dist, obj_to_goal_angle = self._compute_distance() + success = (obj_to_goal_dist <= 5e-3) & (obj_to_goal_angle <= 0.2) + return dict( + obj_to_goal_dist=obj_to_goal_dist, + obj_to_goal_angle=obj_to_goal_angle, + success=success, + ) + + def _get_obs_extra(self, info: Dict): + obs = dict(tcp_pose=self.agent.tcp.pose.raw_pose) + if self.obs_mode_struct.use_state: + obs.update( + charger_pose=self.charger.pose.raw_pose, + receptacle_pose=self.receptacle.pose.raw_pose, + goal_pose=self.goal_pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: torch.Tensor, info: Dict): + return torch.zeros(self.num_envs, 
device=self.device) + + def compute_normalized_dense_reward( + self, obs: Any, action: torch.Tensor, info: Dict + ): + max_reward = 1.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/poke_cube.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/poke_cube.py new file mode 100644 index 0000000000000000000000000000000000000000..233ed02312a6ce630e04db5eeb1373fb843e35ea --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/poke_cube.py @@ -0,0 +1,230 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +from transforms3d.euler import euler2quat + +from mani_skill.agents.robots import Fetch, Panda +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.envs.utils import randomization +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils import sapien_utils +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose + + +@register_env("PokeCube-v1", max_episode_steps=50) +class PokeCubeEnv(BaseEnv): + """ + **Task Description:** + A simple task where the objective is to poke a red cube with a peg and push it to a target goal position. + + **Randomizations:** + - the peg's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat along it's length on the table + - the cube's x-coordinate is fixed to peg's x-coordinate + peg half-length (0.12) + 0.1 and y-coordinate is randomized in range [-0.1, 0.1]. 
It is placed flat on the table + - the cube's z-axis rotation is randomized in range [-$\pi$/ 6, $\pi$ / 6] + - the target goal region is marked by a red/white circular target. The position of the target is fixed to be the cube xy position + [0.05 + goal_radius, 0] + + **Success Conditions:** + - the cube's xy position is within goal_radius (default 0.05) of the target's xy position by euclidean distance + - the robot is static + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PokeCube-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch"] + agent: Union[Panda, Fetch] + + cube_half_size = 0.02 + peg_half_width = 0.025 + peg_half_length = 0.12 + goal_radius = 0.05 + + def __init__(self, *args, robot_uids="panda", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = sapien_utils.look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = sapien_utils.look_at([0.6, 0.7, 0.6], [0.2, 0.2, 0.35]) + return CameraConfig("render_camera", pose, 512, 512, 1, 0.01, 100) + + def _load_agent(self, options: dict): + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + self.cube = actors.build_cube( + self.scene, + half_size=self.cube_half_size, + color=[1, 0, 0, 1], + name="cube", + body_type="dynamic", + initial_pose=sapien.Pose(p=[1, 0, self.cube_half_size]), + ) + + self.peg = actors.build_twocolor_peg( + self.scene, + length=self.peg_half_length, + width=self.peg_half_width, + color_1=np.array([12, 42, 160, 255]) / 255, + color_2=np.array([12, 42, 160, 
255]) / 255, + name="peg", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, self.peg_half_width]), + ) + + self.goal_region = actors.build_red_white_target( + self.scene, + radius=self.goal_radius, + thickness=1e-5, + name="goal_region", + add_collision=False, + body_type="kinematic", + initial_pose=sapien.Pose(), + ) + + self.peg_head_offsets = Pose.create_from_pq( + p=[self.peg_half_length, 0, 0], device=self.device + ) + + @property + def peg_head_pos(self): + return self.peg.pose.p + self.peg_head_offsets.p + + @property + def peg_head_pose(self): + return self.peg.pose * self.peg_head_offsets + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + # initialize the peg + peg_xyz = torch.zeros((b, 3)) + peg_xyz = torch.rand((b, 3)) * 0.2 - 0.1 + peg_xyz[..., 2] = self.peg_half_width + peg_q = [1, 0, 0, 0] + peg_pose = Pose.create_from_pq(p=peg_xyz, q=peg_q) + self.peg.set_pose(peg_pose) + # initialize the cube + cube_xyz = torch.zeros((b, 3)) + cube_xyz = torch.rand((b, 3)) * 0.2 - 0.1 + cube_xyz[..., 0] = peg_xyz[..., 0] + self.peg_half_length + 0.1 + cube_xyz[..., 2] = self.cube_half_size + cube_q = randomization.random_quaternions( + b, + lock_x=True, + lock_y=True, + lock_z=False, + bounds=(-np.pi / 6, np.pi / 6), + ) + cube_pose = Pose.create_from_pq(p=cube_xyz, q=cube_q) + self.cube.set_pose(cube_pose) + # initialize the goal region + goal_region_xyz = cube_xyz + torch.tensor([0.05 + self.goal_radius, 0, 0]) + goal_region_xyz[..., 2] = 1e-3 + goal_region_q = euler2quat(0, np.pi / 2, 0) + goal_region_pose = Pose.create_from_pq(p=goal_region_xyz, q=goal_region_q) + self.goal_region.set_pose(goal_region_pose) + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + + if self.obs_mode_struct.use_state: + obs.update( + cube_pose=self.cube.pose.raw_pose, + peg_pose=self.peg.pose.raw_pose, + 
goal_pos=self.goal_region.pose.p, + tcp_to_peg_pos=self.peg.pose.p - self.agent.tcp.pose.p, + peg_to_cube_pos=self.cube.pose.p - self.peg.pose.p, + cube_to_goal_pos=self.goal_region.pose.p - self.cube.pose.p, + peghead_to_cube_pos=self.peg_head_pos - self.cube.pose.p, + ) + return obs + + def evaluate(self): + is_cube_placed = ( + torch.linalg.norm( + self.cube.pose.p[..., :2] - self.goal_region.pose.p[..., :2], axis=1 + ) + < self.goal_radius + ) + peg_q = self.peg_head_pose.q + peg_qmat = rotation_conversions.quaternion_to_matrix(peg_q) + peg_euler = rotation_conversions.matrix_to_euler_angles(peg_qmat, "XYZ") + cube_q = self.cube.pose.q + cube_qmat = rotation_conversions.quaternion_to_matrix(cube_q) + cube_euler = rotation_conversions.matrix_to_euler_angles(cube_qmat, "XYZ") + angle_diff = torch.abs(peg_euler[:, 2] - cube_euler[:, 2]) + is_peg_cube_aligned = angle_diff < 0.05 + + head_to_cube_dist = torch.linalg.norm( + self.peg_head_pos[..., :2] - self.cube.pose.p[..., :2], axis=1 + ) + is_peg_cube_close = head_to_cube_dist <= self.cube_half_size + 0.005 + + is_peg_cube_fit = torch.logical_and(is_peg_cube_aligned, is_peg_cube_close) + is_peg_grasped = self.agent.is_grasping(self.peg) + is_robot_static = self.agent.is_static(0.2) + return { + "success": is_cube_placed & is_robot_static, + "is_cube_placed": is_cube_placed, + "is_peg_cube_fit": is_peg_cube_fit, + "is_peg_grasped": is_peg_grasped, + "angle_diff": angle_diff, + "head_to_cube_dist": head_to_cube_dist, + } + + def compute_dense_reward(self, obs: Any, action: torch.Tensor, info: Dict): + # reach peg + tcp_pos = self.agent.tcp.pose.p + tgt_tcp_pose = self.peg.pose + tcp_to_peg_dist = torch.linalg.norm(tcp_pos - tgt_tcp_pose.p, axis=1) + reached = tcp_to_peg_dist < 0.01 + reaching_reward = 2 * (1 - torch.tanh(5.0 * tcp_to_peg_dist)) + reward = reaching_reward + + # peg to cube + angle_diff = info["angle_diff"] + align_reward = 1 - torch.tanh(5.0 * angle_diff) + head_to_cube_dist = info["head_to_cube_dist"] +
close_reward = 1 - torch.tanh(5.0 * head_to_cube_dist) + is_peg_grasped = info["is_peg_grasped"] * reached + reward[is_peg_grasped] = (4 + close_reward + align_reward)[is_peg_grasped] + + # cube to goal + cube_to_goal_dist = torch.linalg.norm( + self.goal_region.pose.p - self.cube.pose.p, axis=1 + ) + place_reward = 1 - torch.tanh(5 * cube_to_goal_dist) + is_peg_cube_fit = info["is_peg_cube_fit"] * is_peg_grasped + reward[is_peg_cube_fit] = (7 + place_reward)[is_peg_cube_fit] + + static_reward = 1 - torch.tanh( + 5 * torch.linalg.norm(self.agent.robot.get_qvel()[..., :-2], axis=1) + ) + reward[info["is_cube_placed"]] += static_reward[info["is_cube_placed"]] + + reward[info["success"]] = 10 + return reward + + def compute_normalized_dense_reward( + self, obs: Any, action: torch.Tensor, info: Dict + ): + max_reward = 10.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pull_cube.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pull_cube.py new file mode 100644 index 0000000000000000000000000000000000000000..40f5d2797064c978636aaf479bafb22dd61bf461 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pull_cube.py @@ -0,0 +1,151 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random +from transforms3d.euler import euler2quat + +from mani_skill.agents.robots import Fetch, Panda +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + + 
+@register_env("PullCube-v1", max_episode_steps=50) +class PullCubeEnv(BaseEnv): + """ + **Task Description:** + A simple task where the objective is to pull a cube onto a target. + + **Randomizations:** + - the cube's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. + - the target goal region is marked by a red and white target. The position of the target is fixed to be the cube's xy position - [0.1 + goal_radius, 0] + + **Success Conditions:** + - the cube's xy position is within goal_radius (default 0.1) of the target's xy position by euclidean distance. + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PullCube-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch"] + agent: Union[Panda, Fetch] + goal_radius = 0.1 + cube_half_size = 0.02 + + def __init__(self, *args, robot_uids="panda", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([0.6, 0.7, 0.6], [0.0, 0.0, 0.35]) + return CameraConfig("render_camera", pose, 512, 512, 1, 0.01, 100) + + def _load_agent(self, options: dict): + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # create cube + self.obj = actors.build_cube( + self.scene, + half_size=self.cube_half_size, + color=np.array([12, 42, 160, 255]) / 255, + name="cube", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, self.cube_half_size]), + ) + + # create target + self.goal_region = 
actors.build_red_white_target( + self.scene, + radius=self.goal_radius, + thickness=1e-5, + name="goal_region", + add_collision=False, + body_type="kinematic", + ) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + xyz = torch.zeros((b, 3)) + xyz[..., :2] = torch.rand((b, 2)) * 0.2 - 0.1 + xyz[..., 2] = self.cube_half_size + q = [1, 0, 0, 0] + + obj_pose = Pose.create_from_pq(p=xyz, q=q) + self.obj.set_pose(obj_pose) + + target_region_xyz = xyz - torch.tensor([0.1 + self.goal_radius, 0, 0]) + + target_region_xyz[..., 2] = 1e-3 + self.goal_region.set_pose( + Pose.create_from_pq( + p=target_region_xyz, + q=euler2quat(0, np.pi / 2, 0), + ) + ) + + def evaluate(self): + is_obj_placed = ( + torch.linalg.norm( + self.obj.pose.p[..., :2] - self.goal_region.pose.p[..., :2], axis=1 + ) + < self.goal_radius + ) + + return { + "success": is_obj_placed, + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + goal_pos=self.goal_region.pose.p, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.obj.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # grippers should close and pull from behind the cube, not grip it + # distance to backside of cube (+ 2*0.005) sufficiently encourages this + tcp_pull_pos = self.obj.pose.p + torch.tensor( + [self.cube_half_size + 2 * 0.005, 0, 0], device=self.device + ) + tcp_to_pull_pose = tcp_pull_pos - self.agent.tcp.pose.p + tcp_to_pull_pose_dist = torch.linalg.norm(tcp_to_pull_pose, axis=1) + reaching_reward = 1 - torch.tanh(5 * tcp_to_pull_pose_dist) + reward = reaching_reward + + reached = tcp_to_pull_pose_dist < 0.01 + obj_to_goal_dist = torch.linalg.norm( + self.obj.pose.p[..., :2] - self.goal_region.pose.p[..., :2], axis=1 + ) + place_reward = 1 - torch.tanh(5 * obj_to_goal_dist) + reward += place_reward * 
reached + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pull_cube_tool.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pull_cube_tool.py new file mode 100644 index 0000000000000000000000000000000000000000..567b91964f3c1b236765d4e65c9be9d17aecb0e5 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/pull_cube_tool.py @@ -0,0 +1,282 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch + +from mani_skill.agents.robots import Fetch, Panda +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.envs.utils import randomization +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils import sapien_utils +from mani_skill.utils.building import actors +from mani_skill.utils.registration import register_env +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs import Pose +from mani_skill.utils.structs.types import GPUMemoryConfig, SimConfig + + +@register_env("PullCubeTool-v1", max_episode_steps=100) +class PullCubeToolEnv(BaseEnv): + """ + **Task Description** + Given an L-shaped tool that is within the reach of the robot, leverage the + tool to pull a cube that is out of it's reach + + **Randomizations** + - The cube's position (x,y) is randomized on top of a table in the region "". 
It is placed flat on the table + - The target goal region is the region on top of the table marked by "" + + **Success Conditions** + - The cube's xy position is within the goal region of the arm's base (marked by reachability) + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PullCubeTool-v1_rt.mp4" + + SUPPORTED_ROBOTS = ["panda", "fetch"] + SUPPORTED_REWARD_MODES = ("normalized_dense", "dense", "sparse", "none") + agent: Union[Panda, Fetch] + + goal_radius = 0.3 + cube_half_size = 0.02 + handle_length = 0.2 + hook_length = 0.05 + width = 0.05 + height = 0.05 + cube_size = 0.02 + arm_reach = 0.35 + + def __init__(self, *args, robot_uids="panda", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sim_config(self): + return SimConfig( + gpu_memory_config=GPUMemoryConfig( + found_lost_pairs_capacity=2**25, max_rigid_patch_count=2**18 + ) + ) + + @property + def _default_sensor_configs(self): + pose = sapien_utils.look_at(eye=[0.3, 0, 0.5], target=[-0.1, 0, 0.1]) + return [ + CameraConfig( + "base_camera", + pose=pose, + width=128, + height=128, + fov=np.pi / 2, + near=0.01, + far=100, + ) + ] + + @property + def _default_human_render_camera_configs(self): + pose = sapien_utils.look_at([0.6, 0.7, 0.6], [0.0, 0.0, 0.35]) + return [ + CameraConfig( + "render_camera", + pose=pose, + width=512, + height=512, + fov=1, + near=0.01, + far=100, + ) + ] + + def _build_l_shaped_tool(self, handle_length, hook_length, width, height): + builder = self.scene.create_actor_builder() + + mat = sapien.render.RenderMaterial() + mat.set_base_color([1, 0, 0, 1]) + mat.metallic = 1.0 + mat.roughness = 0.0 + mat.specular = 1.0 + + builder.add_box_collision( + sapien.Pose([handle_length / 2, 0, 0]), + [handle_length / 2, width / 2, height / 2], + density=500, + ) + builder.add_box_visual( + 
sapien.Pose([handle_length / 2, 0, 0]), + [handle_length / 2, width / 2, height / 2], + material=mat, + ) + + builder.add_box_collision( + sapien.Pose([handle_length - hook_length / 2, width, 0]), + [hook_length / 2, width, height / 2], + ) + builder.add_box_visual( + sapien.Pose([handle_length - hook_length / 2, width, 0]), + [hook_length / 2, width, height / 2], + material=mat, + ) + + return builder.build(name="l_shape_tool") + + def _load_scene(self, options: dict): + self.scene_builder = TableSceneBuilder( + self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.scene_builder.build() + + self.cube = actors.build_cube( + self.scene, + half_size=self.cube_half_size, + color=np.array([12, 42, 160, 255]) / 255, + name="cube", + body_type="dynamic", + ) + + self.l_shape_tool = self._build_l_shaped_tool( + handle_length=self.handle_length, + hook_length=self.hook_length, + width=self.width, + height=self.height, + ) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.scene_builder.initialize(env_idx) + + tool_xyz = torch.zeros((b, 3), device=self.device) + tool_xyz[..., :2] = -torch.rand((b, 2), device=self.device) * 0.2 - 0.1 + tool_xyz[..., 2] = self.height / 2 + tool_q = torch.tensor([1, 0, 0, 0], device=self.device).expand(b, 4) + + tool_pose = Pose.create_from_pq(p=tool_xyz, q=tool_q) + self.l_shape_tool.set_pose(tool_pose) + + cube_xyz = torch.zeros((b, 3), device=self.device) + cube_xyz[..., 0] = ( + self.arm_reach + + torch.rand(b, device=self.device) * (self.handle_length) + - 0.3 + ) + cube_xyz[..., 1] = torch.rand(b, device=self.device) * 0.3 - 0.25 + cube_xyz[..., 2] = self.cube_size / 2 + 0.015 + + cube_q = randomization.random_quaternions( + b, + lock_x=True, + lock_y=True, + lock_z=False, + bounds=(-np.pi / 6, np.pi / 6), + device=self.device, + ) + + cube_pose = Pose.create_from_pq(p=cube_xyz, q=cube_q) + self.cube.set_pose(cube_pose) + + def 
_get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + + if self.obs_mode_struct.use_state: + obs.update( + cube_pose=self.cube.pose.raw_pose, + tool_pose=self.l_shape_tool.pose.raw_pose, + ) + + return obs + + def evaluate(self): + cube_pos = self.cube.pose.p + + robot_base_pos = self.agent.robot.get_links()[0].pose.p + + cube_to_base_dist = torch.linalg.norm( + cube_pos[:, :2] - robot_base_pos[:, :2], dim=1 + ) + + # Success condition - cube is pulled close enough + cube_pulled_close = cube_to_base_dist < 0.6 + + workspace_center = robot_base_pos.clone() + workspace_center[:, 0] += self.arm_reach * 0.1 + cube_to_workspace_dist = torch.linalg.norm(cube_pos - workspace_center, dim=1) + progress = 1 - torch.tanh(3.0 * cube_to_workspace_dist) + + return { + "success": cube_pulled_close, + "success_once": cube_pulled_close, + "success_at_end": cube_pulled_close, + "cube_progress": progress.mean(), + "cube_distance": cube_to_workspace_dist.mean(), + "reward": self.compute_normalized_dense_reward( + None, None, {"success": cube_pulled_close} + ), + } + + def compute_dense_reward(self, obs: Any, action: torch.Tensor, info: Dict): + + tcp_pos = self.agent.tcp.pose.p + cube_pos = self.cube.pose.p + tool_pos = self.l_shape_tool.pose.p + robot_base_pos = self.agent.robot.get_links()[0].pose.p + + # Stage 1: Reach and grasp tool + tool_grasp_pos = tool_pos + torch.tensor([0.02, 0, 0], device=self.device) + tcp_to_tool_dist = torch.linalg.norm(tcp_pos - tool_grasp_pos, dim=1) + reaching_reward = 2.0 * (1 - torch.tanh(5.0 * tcp_to_tool_dist)) + + # Add specific grasping reward + is_grasping = self.agent.is_grasping(self.l_shape_tool, max_angle=20) + grasping_reward = 2.0 * is_grasping + + # Stage 2: Position tool behind cube + ideal_hook_pos = cube_pos + torch.tensor( + [-(self.hook_length + self.cube_half_size), -0.067, 0], device=self.device + ) + tool_positioning_dist = torch.linalg.norm(tool_pos - ideal_hook_pos, dim=1) + 
positioning_reward = 1.5 * (1 - torch.tanh(3.0 * tool_positioning_dist)) + tool_positioned = tool_positioning_dist < 0.05 + + # Stage 3: Pull cube to workspace + workspace_target = robot_base_pos + torch.tensor( + [0.05, 0, 0], device=self.device + ) + cube_to_workspace_dist = torch.linalg.norm(cube_pos - workspace_target, dim=1) + initial_dist = torch.linalg.norm( + torch.tensor( + [self.arm_reach + 0.1, 0, self.cube_size / 2], device=self.device + ) + - workspace_target, + dim=1, + ) + pulling_progress = (initial_dist - cube_to_workspace_dist) / initial_dist + pulling_reward = 3.0 * pulling_progress * tool_positioned + + # Combine rewards with staging and grasping dependency + reward = reaching_reward + grasping_reward + reward += positioning_reward * is_grasping + reward += pulling_reward * is_grasping + + # Penalties + cube_pushed_away = cube_pos[:, 0] > (self.arm_reach + 0.15) + reward[cube_pushed_away] -= 2.0 + + # Success bonus + if "success" in info: + reward[info["success"]] += 5.0 + + return reward + + def compute_normalized_dense_reward( + self, obs: Any, action: torch.Tensor, info: Dict + ): + """ + Normalizes the dense reward by the maximum possible reward (success bonus) + """ + max_reward = 5.0 # Maximum possible reward from success bonus + dense_reward = self.compute_dense_reward(obs=obs, action=action, info=info) + return dense_reward / max_reward diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/roll_ball.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/roll_ball.py new file mode 100644 index 0000000000000000000000000000000000000000..f3387cdac55dddb62c75c2d61e1cba7020332102 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/roll_ball.py @@ -0,0 +1,181 @@ +from typing import Any, Dict + +import numpy as np +import sapien +import torch +from transforms3d.euler import euler2quat + +import mani_skill.envs.utils.randomization as 
randomization +from mani_skill.agents.robots import Fetch, Panda +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils import sapien_utils +from mani_skill.utils.building import actors +from mani_skill.utils.registration import register_env +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array, GPUMemoryConfig, SimConfig + + +@register_env("RollBall-v1", max_episode_steps=80) +class RollBallEnv(BaseEnv): + """ + **Task Description:** + A simple task where the objective is to push and roll a ball to a goal region at the other end of the table + + **Randomizations:** + - The ball's xy position is randomized on top of a table in the region [0.2, 0.5] x [-0.4, 0.7]. It is placed flat on the table + - The target goal region is marked by a red/white circular target. The position of the target is randomized on top of a table in the region [-0.4, -0.7] x [0.2, -0.9] + + **Success Conditions:** + - The ball's xy position is within goal_radius (default 0.1) of the target's xy position by euclidean distance. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/RollBall-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda"] + + agent: Panda + + goal_radius: float = 0.1 # radius of the goal region + ball_radius: float = 0.035 # radius of the ball + reached_status: torch.Tensor + + def __init__(self, *args, robot_uids="panda", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sim_config(self): + return SimConfig( + gpu_memory_config=GPUMemoryConfig( + found_lost_pairs_capacity=2**25, max_rigid_patch_count=2**18 + ) + ) + + @property + def _default_sensor_configs(self): + pose = sapien_utils.look_at(eye=[-0.1, 0.9, 0.3], target=[0.0, 0.0, 0.0]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = sapien_utils.look_at([-0.6, 1.3, 0.8], [0.0, 0.13, 0.0]) + return CameraConfig("render_camera", pose, 512, 512, 1, 0.01, 100) + + def _load_agent(self, options: dict): + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + self.ball = actors.build_sphere( + self.scene, + radius=self.ball_radius, + color=[0, 0.2, 0.8, 1], + name="ball", + initial_pose=sapien.Pose(p=[0, 0, 0.1]), + ) + + self.goal_region = actors.build_red_white_target( + self.scene, + radius=self.goal_radius, + thickness=1e-5, + name="goal_region", + add_collision=False, + body_type="kinematic", + initial_pose=sapien.Pose(p=[0, 0, 0.1]), + ) + self.reached_status = torch.zeros(self.num_envs, dtype=torch.float32) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + self.reached_status = self.reached_status.to(self.device) + with torch.device(self.device): 
+ b = len(env_idx) + self.table_scene.initialize(env_idx) + + robot_pose = Pose.create_from_pq( + p=[-0.1, 1.0, 0], q=[0.7071, 0, 0, -0.7072] + ) + self.agent.robot.set_pose(robot_pose) + + xyz = torch.zeros((b, 3)) + xyz[..., 0] = (torch.rand((b)) * 2 - 1) * 0.3 - 0.1 + xyz[..., 1] = torch.rand((b)) * 0.2 + 0.5 + xyz[..., 2] = self.ball_radius + q = [1, 0, 0, 0] + + obj_pose = Pose.create_from_pq(p=xyz, q=q) + self.ball.set_pose(obj_pose) + + xyz_goal = torch.zeros((b, 3)) + xyz_goal[..., 0] = (torch.rand((b)) * 2 - 1) * 0.3 - 0.1 + xyz_goal[..., 1] = torch.rand((b)) * 0.2 - 1.0 + self.goal_radius + xyz_goal[..., 2] = 1e-3 + self.goal_region.set_pose( + Pose.create_from_pq( + p=xyz_goal, + q=euler2quat(0, np.pi / 2, 0), + ) + ) + self.reached_status[env_idx] = 0.0 + + def evaluate(self): + + is_obj_placed = ( + torch.linalg.norm( + self.ball.pose.p[..., :2] - self.goal_region.pose.p[..., :2], axis=1 + ) + < self.goal_radius + ) + + return { + "success": is_obj_placed, + } + + def _get_obs_extra(self, info: Dict): + + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + goal_pos=self.goal_region.pose.p, + ball_pose=self.ball.pose.raw_pose, + ball_vel=self.ball.linear_velocity, + tcp_to_ball_pos=self.ball.pose.p - self.agent.tcp.pose.p, + ball_to_goal_pos=self.goal_region.pose.p - self.ball.pose.p, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + unit_vec = self.ball.pose.p - self.goal_region.pose.p + unit_vec = unit_vec / torch.linalg.norm(unit_vec, axis=1, keepdim=True) + tcp_hit_pose = Pose.create_from_pq( + p=self.ball.pose.p + unit_vec * (self.ball_radius + 0.05), + ) + tcp_to_hit_pose = tcp_hit_pose.p - self.agent.tcp.pose.p + tcp_to_hit_pose_dist = torch.linalg.norm(tcp_to_hit_pose, axis=1) + self.reached_status[tcp_to_hit_pose_dist < 0.04] = 1.0 + reaching_reward = 1 - torch.tanh(2 * tcp_to_hit_pose_dist) + + obj_to_goal_dist = torch.linalg.norm( + 
self.ball.pose.p[..., :2] - self.goal_region.pose.p[..., :2], axis=1 + ) + + reached_reward = 1 - torch.tanh(obj_to_goal_dist) + + reward = ( + 20 * reached_reward * self.reached_status + + reaching_reward * (1 - self.reached_status) + + self.reached_status + ) + + reward[info["success"]] = 30.0 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 30.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/spoon_on_rack.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/spoon_on_rack.py new file mode 100644 index 0000000000000000000000000000000000000000..7f5fe98385d4192596243a99362b86adaf3eb4d2 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/spoon_on_rack.py @@ -0,0 +1,498 @@ +from typing import Any, Dict, Union + +import numpy as np +import sapien +import torch +import torch.random + + +from mani_skill.agents.robots import Fetch, Panda, PandaWristCam, NoahBiArm, NoahBiArmR +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb + +def quaternion_multiply(q, r): + """ + Multiply two batches of quaternions. 
+ q, r: tensors of shape (B, 4) in the format [w, x, y, z] + Returns: tensor of shape (B, 4) + """ + w1, x1, y1, z1 = q.unbind(-1) + w2, x2, y2, z2 = r.unbind(-1) + return torch.stack([ + w1*w2 - x1*x2 - y1*y2 - z1*z2, + w1*x2 + x1*w2 + y1*z2 - z1*y2, + w1*y2 - x1*z2 + y1*w2 + z1*x2, + w1*z2 + x1*y2 - y1*x2 + z1*w2, + ], dim=-1) + +def rotate_vector(q, v): + """ + Rotate vector v by quaternion q. + q: tensor of shape (B, 4) in [w, x, y, z] format + v: tensor of shape (B, 3) (or (3,) will be broadcasted) + Returns: rotated vector of shape (B, 3) + """ + # Convert v into a quaternion with zero scalar part + zeros = torch.zeros(v.shape[0], 1, device=v.device, dtype=v.dtype) + v_quat = torch.cat([zeros, v], dim=-1) # shape (B, 4) + + # Compute conjugate of q + q_conj = q.clone() + q_conj[:, 1:] = -q_conj[:, 1:] + + # Rotate: v_rot = q * v_quat * q_conj + v_rot = quaternion_multiply(quaternion_multiply(q, v_quat), q_conj) + return v_rot[:, 1:] # return only the vector part + + +@register_env("PlaceSpoonOnRack-v1", max_episode_steps=50) +class PlaceSpoonOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a spoon vertically on a dish rack. + + ***Randomizations:** + - lvl 0: fixed (R,t) for spoon and rack + - lvl 1: + variable t for spoon + - lvl 2: + variable R for spoon + - lvl 3: + variable t for rack + - lvl 4: + variable R for rack + - lvl 5: + variable side of table + + **Success Conditions:** + - The spoon is placed vertically on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlaceForkOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "panda_wristcam", "noahbiarm", "noahbiarm_r"] + agent: Union[Panda, Fetch, PandaWristCam, NoahBiArm, NoahBiArmR] + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, rand_level=0, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.rand_level = rand_level + assert 0 <= rand_level <=5 + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25],[-1,0,0]) + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _init_robot_pose(self): + if self.robot_uids in ["panda", "panda_wristcam"]: + return sapien.Pose(p=[-0.615, 0, 0]) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return sapien.Pose(p=[-1, 0, -0.7]) + + def _load_agent(self, options: dict): + super()._load_agent(options, self._init_robot_pose()) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + # init fork + self.fork = actors.build_convex_from_mesh( + self.scene, + mesh_path="./assets/plates/kalas_spoon.obj", + scale=(1,1,1), + color=np.array([255, 255, 255, 255], dtype=np.float32) / 255, + name="fork", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.fork_extents = get_actor_obb(self.fork).extents + self.fork_tip = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 0, 0, 0], + name="fork_tip", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, 
self.fork_extents[1]/2, 0]), + ) + self.fork_tail = actors.build_cube( + self.scene, + half_size=max(self.fork_extents)/40, + color=[1, 1, 0, 0], + name="fork_tail", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[0, -1*self.fork_extents[1]/2, 0]), + ) + # init rack + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path="./assets/plates/stamling_rack.obj", + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.rack_extents = get_actor_obb(self.rack).extents + self.goal_site = actors.build_box( + self.scene, + half_sizes=[self.rack_extents[0]/2/7 , self.rack_extents[1]/2*0.9, self.rack_extents[2]/2], + color=[0, 1, 0, 0], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(p=[-self.rack_extents[0]*11.5/14/2, 0, self.rack_extents[2]/2]), + ) + self.goal_extents = np.array([self.rack_extents[0]/7 , self.rack_extents[1]*0.9, self.rack_extents[2]]) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + if self.robot_uids in ["panda", "panda_wristcam"]: + return self._initialize_episode_panda(env_idx, options) + elif self.robot_uids in ["noahbiarm", "noahbiarm_r"]: + return self._initialize_episode_noahbiarm(env_idx, options) + + def _initialize_episode_noahbiarm(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.3 + min_reach = 0.1 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_p[:, 2] = 0 + robot_p[:, 0] = -0.615 + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + 
################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 1: + # default position + x = torch.ones((b, 1)) * 0.1 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/6 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.28 + min_reach = 0.32 + + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.3 + y = -side * torch.ones((b, 1)) * 0.5 + z = 
torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/6 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi/10 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + # gp, gq = self.get_goal_site_pose() + # self.goal_site.set_pose(Pose.create_from_pq(p=gp, q=gq)) + + def _initialize_episode_panda(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # @sajjad: need to get the reach parames of robot dynamically + # get arm max reach + max_reach = 0.8 + min_reach = 0.2 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + + if self.rand_level >= 5: + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + else: + side = torch.ones((b, 1)) + + + ################################################################################## + # fork pose initialize + obb = get_actor_obb(self.fork) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 1: + # default 
position + x = torch.ones((b, 1)) * 0.5 + y = side * torch.ones((b, 1)) * 0.5 + z = torch.zeros((b, 1)) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.ones((b, 1), dtype=torch.float32) * np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 1: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * np.pi/4 + np.pi/8 + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.ones((b, 1), dtype=torch.float32) * np.pi/2 + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 2: + # rotation + # follow zyx convention for euler + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + # euler[:, 1] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + self.fork.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # rack pose initialize + obb = get_actor_obb(self.rack) + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + if self.rand_level < 3: + # default translation + x = torch.ones((b, 1)) * 0.5 + y = -side * torch.ones((b, 1)) * 0.5 + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + 
+ if self.rand_level >= 3: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = -1 * side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + # default rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + q = rotation_conversions.euler_to_quaternion(euler) + + if self.rand_level >= 4: + # rotation + euler = torch.zeros((b, 3), dtype=torch.float32) + euler[:, 0] = torch.rand((b, 1), dtype=torch.float32) * np.pi * 2 + q = rotation_conversions.euler_to_quaternion(euler) + + self.rack.set_pose(Pose.create_from_pq(p=p, q=q)) + + + def get_fork_tip_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tip.pose.p + relative_q = self.fork_tip.pose.q + new_tip_p = fork_p + rotate_vector(fork_q, relative_p) + new_tip_q = quaternion_multiply(fork_q, relative_q) + return new_tip_p, new_tip_q + + def get_fork_tail_pose(self): + fork_p, fork_q = self.fork.pose.p, self.fork.pose.q + relative_p = self.fork_tail.pose.p + relative_q = self.fork_tail.pose.q + new_tail_p = fork_p + rotate_vector(fork_q, relative_p) + new_tail_q = quaternion_multiply(fork_q, relative_q) + return new_tail_p, new_tail_q + + def get_goal_site_pose(self): + rack_p, rack_q = self.rack.pose.p, self.rack.pose.q + relative_p = self.goal_site.pose.p + relative_q = self.goal_site.pose.q + + new_box_p = rack_p + rotate_vector(rack_q, relative_p) + new_box_q = quaternion_multiply(rack_q, relative_q) + + return new_box_p, new_box_q + + def is_inside(self, p1, p2, extents2): + """ + Check if each coordinate in the batch p1 is inside the corresponding box defined by center p2 + and half extents extents2 using PyTorch. + + Args: + p1 (torch.Tensor): Batch of points, shape (batch_size, dims). 
+ p2 (torch.Tensor): Batch of box centers, shape (batch_size, dims). + extents2 (torch.Tensor): Batch of half extents along each axis, shape (batch_size, dims). + + Returns: + torch.Tensor: Boolean tensor of shape (batch_size,) indicating if each point is inside its box. + """ + # Compute the absolute difference between each point and its box center + diff = torch.abs(p1 - p2) + + # Check if the difference is within the half extents along each dimension + inside = torch.all(diff <= extents2.to(diff.device)/2, dim=1) + + return inside + + def evaluate(self): + euler = rotation_conversions.quaternion_to_euler(self.fork.pose.q) + + is_fork_vertical = ( + (torch.abs(torch.abs(euler[:, 1])) - np.pi/2) <= np.pi/4 + ) + + goal_extents = torch.from_numpy(self.goal_extents) + goal_p, _ = self.get_goal_site_pose() + tip_p, _ = self.get_fork_tip_pose() + tail_p, _ = self.get_fork_tail_pose() + is_fork_placed = ( + self.is_inside(tip_p, goal_p, goal_extents) | + self.is_inside(tail_p, goal_p, goal_extents)) + + return { + "success": is_fork_vertical & is_fork_placed, + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.fork.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.fork.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill 
distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.fork.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the fork + to_grip_vec = self.fork.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the fork + reaching_rew[self.agent.is_grasping(self.fork)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/stack_bowl.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/stack_bowl.py new file mode 100644 index 0000000000000000000000000000000000000000..2d4df0fea6365f139ff931a3a4b6f2d92f78ec70 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/stack_bowl.py @@ -0,0 +1,215 @@ +from typing import Any, Dict, Union +import os +import numpy as np +import sapien +import torch +import torch.random +from transforms3d.euler import euler2quat + +from mani_skill.agents.robots import Fetch, Panda, Piper +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import 
Pose +from mani_skill.utils.structs.types import Array +from mani_skill.examples.motionplanning.panda.utils import get_actor_obb + + +@register_env("StackBowl-v1", max_episode_steps=50) +class StackBowlEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to stack a bowl on the other. + + **Randomizations:** + - Both bowls' xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat on the table. + + **Success Conditions:** + - The bowk is placed on the other bowl. + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/StackBowl-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "piper"] + agent: Union[Panda, Fetch, Piper] + + # plate_radius = 0.1 + # plate_thickness = 0.01 + + def __init__(self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 256, 256, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids == "panda": + # PANDA CAMERA + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25], [-1, 0, 0]) + elif self.robot_uids == "piper": + # PIPER CAMERA + # pose = look_at([1, -1, 1], [0.0, 0.0, 0.25],[-1,0,0]) + pose = look_at([1, 0, 1], [0.0, 0.0, 0.25], [-1, 0, 0]) + # import pdb; pdb.set_trace() + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _load_agent(self, options: dict): + # PANDA AGENT + if self.robot_uids == "panda": + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + # PIPER AGENT + elif self.robot_uids == "piper": + super()._load_agent(options, sapien.Pose(p=[-0.35, 0, 0])) + 
+ def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + self.bowl = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_bowl.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="bowl", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0.],q=[1,0,0,0]), + ) + self.bowl2 = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_bowl.obj"), + scale=(1,1,1), + color=np.array([174, 205, 245, 255], dtype=np.float32) / 255, + name="bowl2", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0],q=[1,0,0,0]), + ) + + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # get arm max reach + max_reach = 0.855 + min_reach = 0.020 + # get arm base positions + robot_p = self.agent.robot.pose.p + robot_q = self.agent.robot.pose.q + ################################################################################## + # bowl pose initialize + obb = get_actor_obb(self.bowl) + + self.tgt_extents = obb.extents + max_extent = np.max(obb.extents)/2 + max_radius = max_reach - max_extent + min_radius = min_reach + max_extent + + assert max_radius > 0 + assert min_radius > 0 + assert max_radius > min_radius + + side = torch.randint(0, 2, (b, 1)) * 2 - 1 + + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + bowl_p = p + + # rotation + # (1,0,0,0) since plate is round, flate on 
table + q = torch.tensor([1, 0, 0, 0], dtype=torch.float32).repeat(b, 1) + self.bowl.set_pose(Pose.create_from_pq(p=p, q=q)) + + ################################################################################## + # bowl2 pose initialize + while True: + # translation + r = torch.sqrt(torch.rand(b, 1) * (max_radius**2 - min_radius**2) + min_radius**2) + theta = torch.rand(b, 1) * 2 * np.pi + x = torch.clamp(torch.abs(r * torch.cos(theta)), min = max_extent) + y = side * torch.clamp(torch.abs(r * torch.sin(theta)), min = max_extent) + z = torch.zeros_like(x) + xyz = torch.cat((x, y, z), dim=1) + p = robot_p + xyz + + if torch.linalg.norm(p - bowl_p, dim=1) > torch.from_numpy(get_actor_obb(self.bowl).extents)[0]: + break + + # rotation + # (1,0,0,0) since plate is round, flate on table + q = torch.tensor([1, 0, 0, 0], dtype=torch.float32).repeat(b, 1) + self.bowl2.set_pose(Pose.create_from_pq(p=p, q=q)) + + + def evaluate(self): + + threshold = self.tgt_extents[0] *.2 #10% threshold + + on_bowl2 = (torch.abs(self.bowl.pose.p[:, 0] - self.bowl2.pose.p[:, 0]) < threshold + and torch.abs(self.bowl.pose.p[:, 1] - self.bowl2.pose.p[:, 1]) < threshold + and self.bowl.pose.p[:, 2] - self.bowl2.pose.p[:, 2] >0) + return { + "success": on_bowl2, + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.bowl.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.bowl.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ 
vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.bowl.pose.p[:, 2] - self.bowl2.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the plate + to_grip_vec = self.bowl.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the plate + reaching_rew[self.agent.is_grasping(self.bowl)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/stack_mug_on_rack.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/stack_mug_on_rack.py new file mode 100644 index 0000000000000000000000000000000000000000..514f20ece28b651a9bf3a0241598d33ed47b7948 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/stack_mug_on_rack.py @@ -0,0 +1,292 @@ +from typing import Any, Dict, Union +import os +import numpy as np +import sapien +import torch +import torch.random +from transforms3d.euler import euler2quat + +from mani_skill.agents.robots import Fetch, Panda, Piper +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from 
mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + + +@register_env("StackMugOnRack-v1", max_episode_steps=50) +class StackMugOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to place a plate vertically on a dish rack. + + **Randomizations:** + - The plate's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat on the table. + + **Success Conditions:** + - The plate is placed vertically on the dish rack. + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "piper"] + agent: Union[Panda, Fetch, Piper] + + plate_radius = 0.1 + plate_thickness = 0.01 + + def __init__(self, *args, robot_uids="panda", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids == "panda": + # PANDA CAMERA + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25], [-1, 0, 0]) + elif self.robot_uids == "piper": + # PIPER CAMERA + # pose = look_at([1, -1, 1], [0.0, 0.0, 0.25],[-1,0,0]) + pose = look_at([1, 0, 1], [0.0, 0.0, 0.25], [-1, 0, 0]) + # import pdb; pdb.set_trace() + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _load_agent(self, options: dict): + # PANDA AGENT + if self.robot_uids == "panda": + 
super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + # PIPER AGENT + elif self.robot_uids == "piper": + super()._load_agent(options, sapien.Pose(p=[-0.35, 0, 0])) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # the plate that we want to manipulate + # self.plate = actors.build_plate( + # self.scene, + # radius=self.plate_radius, + # thickness=self.plate_thickness, + # color=np.array([176, 14, 14, 255], dtype=np.float32) / 255, + # name="plate", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[-0.25, -0.25, 0]), + # ) + self.mug = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_mug.obj"), + scale=(1,1,1), + color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + name="mug", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, -0.2, 0]), + ) + self.mug2 = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_mug.obj"), + scale=(1,1,1), + color=np.array([174, 205, 245, 255], dtype=np.float32) / 255, + name="mug2", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0.1, -0.2, 0]), + ) + # self.mug3 = actors.build_nonconvex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_mug.obj", + # scale=(1,1,1), + # color=np.array([255, 255, 255, 255], dtype=np.float32) / 255, + # name="mug3", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0, -0.2, 0], q=[0, 0, 0, 0]), + # ) + self.plate = actors.build_convex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([174, 205, 245, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[1, 1, 0]), + ) + # self.spoon = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_spoon.obj", 
+ # scale=(1,1,1), + # color=np.array([174, 205, 245, 255], dtype=np.float32) / 255, + # name="spoon", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0.10, -0.25, 0.1]), + # ) + # self.fork = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_fork.obj", + # scale=(1,1,1), + # color=np.array([255, 255, 255, 255], dtype=np.float32) / 255, + # name="fork", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0.10, -0.20, 0.1]), + # ) + # self.knife = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_knife.obj", + # scale=(1,1,1), + # color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + # name="knife", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0.10, -0.15, 0.1]), + # ) + # self.bowl = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_bowl.obj", + # scale=(1,1,1), + # color=np.array([255, 180, 178, 255], dtype=np.float32) / 255, + # name="bowl", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[-0.25, -0.3, 0.1]), + # ) + # self.kalasbin = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_bin.obj", + # scale=(1,1,1), + # color=np.array([221, 222, 216, 255], dtype=np.float32) / 255, + # name="kalasbin", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[-0.25, 0.4, 0.1]), + # ) + # self.boxa = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/racks/sockerbit_box.obj", + # scale=(1,1,1), + # color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + # name="boxa", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0.25, 0.45, 0.1]), + # ) + # self.boxb = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/racks/uppsnofsad_box.obj", + # scale=(1,1,1), + # color=np.array([10, 10, 10, 255], dtype=np.float32) / 255, + # name="boxb", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[-0.25, 0.5, 0.1]), + # ) + self.rack = 
actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/stamling_rack.obj"), + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[-0.20, 0.1, 0.1]), + ) + + # the dish rack + # self.rack = actors.build_rack( + # self.scene, + # size=np.array([0.2, 0.1, 0.05], dtype=np.float32), + # color=np.array([12, 42, 160, 255], dtype=np.float32) / 255, + # name="rack", + # body_type="static", + # initial_pose=sapien.Pose(p=[0.2, 0, 0.1]), + # ) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + xyz = torch.zeros((b, 3), dtype=torch.float32) + xyz[...,0] = torch.rand((b, 1), dtype=torch.float32) * 0.2 - 0.1 + xyz[...,1] = torch.rand((b, 1), dtype=torch.float32) * -0.2 + 0.1 + xyz[..., 2] = 0 + q = euler2quat(0, 0, 0).astype(np.float32) + + obj_pose = Pose.create_from_pq(p=xyz, q=q) + self.mug.set_pose(obj_pose) + + xyz = torch.zeros((b, 3), dtype=torch.float32) + xyz[...,0] = torch.rand((b, 1), dtype=torch.float32) * 0.2 - 0.1 + xyz[...,1] = torch.rand((b, 1), dtype=torch.float32) * -0.2 + 0.1 + xyz[..., 2] = 0 + q = euler2quat(0, 0, 0).astype(np.float32) + + obj_pose = Pose.create_from_pq(p=xyz, q=q) + self.mug2.set_pose(obj_pose) + self.rack.set_pose( + Pose.create_from_pq( + p=torch.tensor([[-0.20, 0.1, 0.1]], dtype=torch.float32), + q=torch.tensor(euler2quat(0, 0, 0), dtype=torch.float32))) + #print(self.mug.pose) + #print(self.mug2.pose) + + def evaluate(self): + # mug_p = self.mug.pose.p + # mug2_p = self.mug2.pose.p + # rack_p = self.rack.pose.p + # print(mug2_p) + # print(mug_p) + # print(rack_p) + mug_stacked = torch.abs(self.mug.pose.p[:, 2] - self.mug.pose.p[:, 2]) < 0.05 and torch.abs(self.mug.pose.p[:, 0] - self.mug2.pose.p[:, 0]) < 0.9 and torch.abs(self.mug.pose.p[:, 1] - self.mug2.pose.p[:, 1]) < 0.01 + 
close_to_rack = torch.abs(self.mug.pose.p[:, 0] - self.rack.pose.p[:, 0]) < 0.45 and torch.abs(self.mug.pose.p[:, 1] - self.rack.pose.p[:, 1]) < 0.35 + return { + "success": close_to_rack and mug_stacked, + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.plate.pose.raw_pose, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.plate.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.plate.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the plate + to_grip_vec = self.plate.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the plate + reaching_rew[self.agent.is_grasping(self.plate)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, 
action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/stack_plate_on_rack.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/stack_plate_on_rack.py new file mode 100644 index 0000000000000000000000000000000000000000..16beeea9db7e4f01fb7b803e688c17240a747fb6 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/stack_plate_on_rack.py @@ -0,0 +1,303 @@ +from typing import Any, Dict, Union +import os +import numpy as np +import sapien +import torch +import torch.random +from transforms3d.euler import euler2quat + +from mani_skill.agents.robots import Fetch, Panda, Piper +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils.building import actors +from mani_skill.utils.geometry import rotation_conversions +from mani_skill.utils.registration import register_env +from mani_skill.utils.sapien_utils import look_at +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import Array + +from mani_skill import PACKAGE_ASSET_DIR + +@register_env("StackPlateOnRack-v1", max_episode_steps=50) +class StackPlateOnRackEnv(BaseEnv): + """ + **Task Description:** + A task where the objective is to stack two plates vertically on a dish rack. + + **Randomizations:** + - The plate's xy position is randomized on top of a table in the region [0.1, 0.1] x [-0.1, -0.1]. It is placed flat on the table. + + **Success Conditions:** + - The plate is stackedd vertically on the dish rack. 
+ """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/PlacePlateOnRack-v1_rt.mp4" + SUPPORTED_ROBOTS = ["panda", "fetch", "piper"] + agent: Union[Panda, Fetch, Piper] + + plate_radius = 0.1 + plate_thickness = 0.01 + + def __init__(self, *args, robot_uids="panda", robot_init_qpos_noise=0.02, **kwargs): + self.robot_init_qpos_noise = robot_init_qpos_noise + self.mesh_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../")) + + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sensor_configs(self): + pose = look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + if self.robot_uids == "panda": + # PANDA CAMERA + pose = look_at([1.2, 0, 1.4], [0.0, 0.0, 0.25], [-1, 0, 0]) + elif self.robot_uids == "piper": + # PIPER CAMERA + # pose = look_at([1, -1, 1], [0.0, 0.0, 0.25],[-1,0,0]) + pose = look_at([1, 0, 1], [0.0, 0.0, 0.25], [-1, 0, 0]) + # import pdb; pdb.set_trace() + return CameraConfig("render_camera", pose, 1024, 1024, 1, 0.01, 100) + + def _load_agent(self, options: dict): + # PANDA AGENT + if self.robot_uids == "panda": + super()._load_agent(options, sapien.Pose(p=[-0.615, 0, 0])) + # PIPER AGENT + elif self.robot_uids == "piper": + super()._load_agent(options, sapien.Pose(p=[-0.35, 0, 0])) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + + # the plate that we want to manipulate + # self.plate = actors.build_plate( + # self.scene, + # radius=self.plate_radius, + # thickness=self.plate_thickness, + # color=np.array([176, 14, 14, 255], dtype=np.float32) / 255, + # name="plate", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[-0.25, -0.25, 0]), + # ) + # self.mug = 
actors.build_nonconvex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_mug.obj", + # scale=(1,1,1), + # color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + # name="mug", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0, -0.2, 0], q=[0, 1, 0, 0]), + # ) + self.plate = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([160, 189, 184, 255], dtype=np.float32) / 255, + name="plate", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + self.plate1 = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/kalas_plate.obj"), + scale=(1,1,1), + color=np.array([174, 205, 245, 255], dtype=np.float32) / 255, + name="plate1", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0, 0, 0]), + ) + # self.spoon = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_spoon.obj", + # scale=(1,1,1), + # color=np.array([174, 205, 245, 255], dtype=np.float32) / 255, + # name="spoon", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0.10, -0.15, 0.1]), + # ) + # self.fork = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_fork.obj", + # scale=(1,1,1), + # color=np.array([255, 255, 255, 255], dtype=np.float32) / 255, + # name="fork", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0.10, -0.10, 0.1]), + # ) + # self.knife = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_knife.obj", + # scale=(1,1,1), + # color=np.array([238, 230, 18, 255], dtype=np.float32) / 255, + # name="knife", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0.10, -0.15, 0.1]), + # ) + # self.bowl = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_bowl.obj", + # scale=(1,1,1), + # color=np.array([255, 180, 178, 255], dtype=np.float32) / 255, + # 
name="bowl", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[-0.25, -0.3, 0.1]), + # ) + # self.kalasbin = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/plates/kalas_bin.obj", + # scale=(1,1,1), + # color=np.array([221, 222, 216, 255], dtype=np.float32) / 255, + # name="kalasbin", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[-0.25, 0.4, 0.1]), + # ) + # self.boxa = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/racks/sockerbit_box.obj", + # scale=(1,1,1), + # color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + # name="boxa", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[0.25, 0.45, 0.1]), + # ) + # self.boxb = actors.build_convex_from_mesh( + # self.scene, + # mesh_path="./assets/racks/uppsnofsad_box.obj", + # scale=(1,1,1), + # color=np.array([10, 10, 10, 255], dtype=np.float32) / 255, + # name="boxb", + # body_type="dynamic", + # initial_pose=sapien.Pose(p=[-0.25, 0.5, 0.1]), + # ) + self.rack = actors.build_nonconvex_from_mesh( + self.scene, + mesh_path=os.path.join(self.mesh_dir, "./assets/plates/stamling_rack.obj"), + scale=(1,1,1), + color=np.array([216, 215, 211, 255], dtype=np.float32) / 255, + name="rack", + body_type="dynamic", + initial_pose=sapien.Pose(p=[0.0, 0.0, 0.0]), + ) + + # the dish rack + # self.rack = actors.build_rack( + # self.scene, + # size=np.array([0.2, 0.1, 0.05], dtype=np.float32), + # color=np.array([12, 42, 160, 255], dtype=np.float32) / 255, + # name="rack", + # body_type="static", + # initial_pose=sapien.Pose(p=[0.2, 0, 0.1]), + # ) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + + # x be in range [-.2,.2], y be in range [.1,.3] + + x = torch.rand(b, 1) * 0.3 - 0.1 + y = torch.rand(b, 1) * 0.2 + 0.2 + z = torch.zeros((b, 1), dtype=torch.float32)+.02 + self.plate.set_pose( + Pose.create_from_pq( + p=torch.cat([x, y, z], 
dim=1), + q=euler2quat(0, 0, 0), + ) + ) + # x be in range [-.2,.2], y be in range [-.1,-.3] + x = torch.rand(b, 1) * 0.3 - 0.1 + y = torch.rand(b, 1) * -0.2 - 0.2 + z = torch.zeros((b, 1), dtype=torch.float32)+.02 + self.plate1.set_pose( + Pose.create_from_pq( + p=torch.cat([x, y, z], dim=1), + q=euler2quat(0, 0, 0), + ) + ) + self.rack.set_pose( + Pose.create_from_pq( + p=torch.tensor([-0.2, 0, 0], device=self.device), + q=torch.tensor([1, 0, 0, 0], device=self.device).repeat(b, 1), + ) + ) + # xyz = torch.zeros((b, 3), dtype=torch.float32) + # xyz[..., :2] = torch.rand((b, 2), dtype=torch.float32) * 1 - 0.1 + # xyz[..., 2] = self.plate_thickness / 2 + # q = euler2quat(0, 0, 0).astype(np.float32) + + # obj_pose = Pose.create_from_pq(p=xyz, q=q) + # self.plate.set_pose(obj_pose) + + def evaluate(self): + q = self.plate.pose.q + qmat = rotation_conversions.quaternion_to_matrix(q) + euler = rotation_conversions.matrix_to_euler_angles(qmat, "XYZ") + q1 = self.plate1.pose.q + qmat1 = rotation_conversions.quaternion_to_matrix(q1) + euler1 = rotation_conversions.matrix_to_euler_angles(qmat1, "XYZ") + is_plate_vertical = ( + torch.abs(torch.abs(euler[:, 2]) - np.pi / 2) < np.pi/6 and torch.abs(torch.abs(euler1[:, 2]) - np.pi / 2) < np.pi/6 + ) # 0.08 radians of difference permitted + #print(euler) + #print(euler1) + #print(is_plate_vertical) + close_to_rack = torch.abs(self.plate.pose.p[:, 1] - self.rack.pose.p[:, 1]) < 0.1 and torch.abs(self.plate.pose.p[:, 0] - self.rack.pose.p[:, 0]) < 0.2 and torch.abs(self.plate1.pose.p[:, 1] - self.rack.pose.p[:, 1]) < 0.1 and torch.abs(self.plate1.pose.p[:, 0] - self.rack.pose.p[:, 0]) < 0.2 + #print(self.plate.pose.p) + ##print(self.plate1.pose.p) + #print(self.rack.pose.p) + return { + "success": is_plate_vertical & close_to_rack, + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + tcp_pose=self.agent.tcp.pose.raw_pose, + ) + if self.obs_mode_struct.use_state: + obs.update( + obj_pose=self.plate.pose.raw_pose, + ) + 
return obs + + def compute_dense_reward(self, obs: Any, action: Array, info: Dict): + # rotation reward as cosine similarity between plate direction vectors + # plate center of mass to edge of plate, (1,0,0), rotated by plate pose rotation + # dot product with its goal orientation: (0,1,0) or (0,-1,0) + qmats = rotation_conversions.quaternion_to_matrix(self.plate.pose.q) + vec = torch.tensor([1.0, 0, 0], device=self.device, dtype=torch.float32) + goal_vec = torch.tensor([0, 1, 0], device=self.device, dtype=torch.float32) + rot_vec = (qmats @ vec).view(-1, 3) + # abs since (0,-1,0) is also valid, values in [0,1] + rot_rew = (rot_vec @ goal_vec).view(-1).abs() + reward = rot_rew + + # position reward using common maniskill distance reward pattern + # giving reward in [0,1] for moving center of mass toward the rack + z_dist = torch.abs(self.plate.pose.p[:, 2] - self.rack.pose.p[:, 2]) + reward += 1 - torch.tanh(5 * z_dist) + + # small reward to motivate initial reaching + # initially, we want to reach and grip the plate + to_grip_vec = self.plate.pose.p - self.agent.tcp.pose.p + to_grip_dist = torch.linalg.norm(to_grip_vec, axis=1) + reaching_rew = 1 - torch.tanh(5 * to_grip_dist) + # reaching reward granted if gripping the plate + reaching_rew[self.agent.is_grasping(self.plate)] = 1 + # weight reaching reward less + reaching_rew = reaching_rew / 5 + reward += reaching_rew + + reward[info["success"]] = 3 + return reward + + def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict): + max_reward = 3.0 + return self.compute_dense_reward(obs=obs, action=action, info=info) / max_reward \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/two_robot_pick_cube.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/two_robot_pick_cube.py new file mode 100644 index 0000000000000000000000000000000000000000..d97fa4973aba2d51e975407d6b4404d4238c433c --- /dev/null +++ 
b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/two_robot_pick_cube.py @@ -0,0 +1,262 @@ +from typing import Any, Dict, Tuple + +import numpy as np +import sapien +import torch + +from mani_skill.agents.multi_agent import MultiAgent +from mani_skill.agents.robots.panda import Panda +from mani_skill.envs.sapien_env import BaseEnv +from mani_skill.envs.utils import randomization +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils import sapien_utils +from mani_skill.utils.building import actors +from mani_skill.utils.registration import register_env +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import GPUMemoryConfig, SimConfig + + +@register_env("TwoRobotPickCube-v1", max_episode_steps=100) +class TwoRobotPickCube(BaseEnv): + """ + **Task Description:** + The goal is to pick up a red cube and lift it to a goal location. There are two robots in this task and the + goal location is out of reach of the left robot while the cube is out of reach of the right robot, thus the two robots must work together + to move the cube to the goal. + + **Randomizations:** + - cube has its z-axis rotation randomized + - cube has its xy positions on top of the table scene randomized such that it is in within reach of the left robot but not the right. + - the target goal position (marked by a green sphere) of the cube is randomized such that it is within reach of the right robot but not the left. 
+ + + **Success Conditions:** + - red cube is at the goal location + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/refs/heads/main/figures/environment_demos/TwoRobotPickCube-v1_rt.mp4" + + SUPPORTED_ROBOTS = [("panda_wristcam", "panda_wristcam")] + agent: MultiAgent[Tuple[Panda, Panda]] + cube_half_size = 0.02 + goal_thresh = 0.025 + + def __init__( + self, + *args, + robot_uids=("panda_wristcam", "panda_wristcam"), + robot_init_qpos_noise=0.02, + **kwargs + ): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sim_config(self): + return SimConfig( + gpu_memory_config=GPUMemoryConfig( + found_lost_pairs_capacity=2**25, + max_rigid_patch_count=2**19, + max_rigid_contact_count=2**21, + ) + ) + + @property + def _default_sensor_configs(self): + pose = sapien_utils.look_at([1.0, 0, 0.75], [0.0, 0.0, 0.25]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + pose = sapien_utils.look_at([1.4, 0.8, 0.75], [0.0, 0.1, 0.1]) + return CameraConfig("render_camera", pose, 512, 512, 1, 0.01, 100) + + def _load_agent(self, options: dict): + super()._load_agent( + options, [sapien.Pose(p=[0, -1, 0]), sapien.Pose(p=[0, 1, 0])] + ) + + def _load_scene(self, options: dict): + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + self.cube = actors.build_cube( + self.scene, + half_size=self.cube_half_size, + color=[1, 0, 0, 1], + name="cube", + initial_pose=sapien.Pose(p=[0, 0, 0.02]), + ) + self.goal_site = actors.build_sphere( + self.scene, + radius=self.goal_thresh, + color=[0, 1, 0, 1], + name="goal_site", + body_type="kinematic", + add_collision=False, + initial_pose=sapien.Pose(), + ) + self._hidden_objects.append(self.goal_site) + + def _initialize_episode(self, env_idx: torch.Tensor, options: 
dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + self.left_init_qpos = self.left_agent.robot.get_qpos() + xyz = torch.zeros((b, 3)) + xyz[:, 0] = torch.rand((b,)) * 0.1 - 0.05 + # ensure cube is spawned on the left side of the table + xyz[:, 1] = -0.15 - torch.rand((b,)) * 0.1 + 0.05 + xyz[:, 2] = self.cube_half_size + qs = randomization.random_quaternions(b, lock_x=True, lock_y=True) + self.cube.set_pose(Pose.create_from_pq(xyz, qs)) + + goal_xyz = torch.zeros((b, 3)) + goal_xyz[:, 0] = torch.rand((b,)) * 0.1 - 0.05 + goal_xyz[:, 1] = 0.15 + torch.rand((b,)) * 0.1 - 0.05 + goal_xyz[:, 2] = torch.rand((b,)) * 0.3 + xyz[:, 2] + self.goal_site.set_pose(Pose.create_from_pq(goal_xyz)) + + @property + def left_agent(self) -> Panda: + return self.agent.agents[0] + + @property + def right_agent(self) -> Panda: + return self.agent.agents[1] + + def evaluate(self): + is_obj_placed = ( + torch.linalg.norm(self.goal_site.pose.p - self.cube.pose.p, axis=1) + <= self.goal_thresh + ) + is_right_arm_static = self.right_agent.is_static(0.2) + return { + "success": torch.logical_and(is_obj_placed, is_right_arm_static), + "is_obj_placed": is_obj_placed, + "is_right_arm_static": is_right_arm_static, + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + left_arm_tcp=self.left_agent.tcp.pose.raw_pose, + right_arm_tcp=self.right_agent.tcp.pose.raw_pose, + ) + if "state" in self.obs_mode: + obs.update( + cube_pose=self.cube.pose.raw_pose, + left_arm_tcp_to_cube_pos=self.cube.pose.p - self.left_agent.tcp.pose.p, + right_arm_tcp_to_cube_pos=self.cube.pose.p + - self.right_agent.tcp.pose.p, + cube_to_goal_pos=self.goal_site.pose.p - self.cube.pose.p, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: torch.Tensor, info: Dict): + # Stage 1: Reach and push cube to be near other robot + tcp_to_obj_dist = torch.linalg.norm( + self.cube.pose.p - self.left_agent.tcp.pose.p, axis=1 + ) + reaching_reward = 1 - 
torch.tanh(5 * tcp_to_obj_dist) + + # set a sub_goal here where we want the cube to first be pushed to close to the right arm robot + # by moving cube past y = 0.05 + cube_to_other_side_reward = 1 - torch.tanh( + 5 + * ( + torch.max( + 0.05 - self.cube.pose.p[:, 1], torch.zeros_like(reaching_reward) + ) + ) + ) + reward = (reaching_reward + cube_to_other_side_reward) / 2 + + # stage 1 passes if cube is near a sub-goal + cube_at_other_side = self.cube.pose.p[:, 1] >= 0.0 + + # Stage 2: reach and grasp cube with right robot and make left robot leave space + tcp_to_obj_dist = torch.linalg.norm( + self.cube.pose.p - self.right_agent.tcp.pose.p, axis=1 + ) + reaching_reward = 1 - torch.tanh(5 * tcp_to_obj_dist) + stage_2_reward = reaching_reward + + # condition for good grasp: both fingers are at the same height and open + self.right_agent: Panda + right_tip_1_height = self.right_agent.finger1_link.pose.p[:, 2] + right_tip_2_height = self.right_agent.finger2_link.pose.p[:, 2] + tip_height_reward = 1 - torch.tanh( + 5 * torch.abs(right_tip_1_height - right_tip_2_height) + ) + tip_width_reward = 1 - torch.tanh( + 5 + * torch.abs( + torch.linalg.norm( + self.right_agent.finger1_link.pose.p + - self.right_agent.finger2_link.pose.p, + axis=1, + ) + - 0.07 + ) + ) + tip_reward = (tip_height_reward + tip_width_reward) / 2 + stage_2_reward += tip_reward + + # make left arm move as close as possible to the y=-0.2 line + left_arm_leave_reward = 1 - torch.tanh( + 5 * (self.left_agent.tcp.pose.p[:, 1] + 0.2).abs() + ) + stage_2_reward += left_arm_leave_reward + + # stage 2 passes if cube is grasped + is_grasped = self.right_agent.is_grasping(self.cube) + stage_2_reward += 2 * is_grasped + + reward[cube_at_other_side] = 2 + stage_2_reward[cube_at_other_side] + + # Stage 3: bring cube towards goal + obj_to_goal_dist = torch.linalg.norm( + self.goal_site.pose.p - self.right_agent.tcp.pose.p, axis=1 + ) + place_reward = 1 - torch.tanh(5 * obj_to_goal_dist) + stage_3_reward = 2 * 
place_reward + + # return left arm to original position + left_qpos_reward = 1 - torch.tanh( + torch.linalg.norm( + self.left_agent.robot.get_qpos() - self.left_init_qpos, axis=1 + ) + ) + stage_3_reward += left_qpos_reward + + reward[is_grasped] = 8 + stage_3_reward[is_grasped] + + # stage 3 passes if object is near goal (within 0.25m) - intermediate reward + is_obj_near = torch.logical_and(obj_to_goal_dist < 0.25, is_grasped) + # Stage 4: reuse same reward as stage 3 but stronger incentive + reward[is_obj_near] = 12 + 2 * stage_3_reward[is_obj_near] + + # stage 4 passes if object is placed + is_obj_placed = info["is_obj_placed"] + + # Stage 5: keep robot static at the goal + right_static_reward = 1 - torch.tanh( + 5 * torch.linalg.norm(self.right_agent.robot.get_qvel()[..., :-2], axis=1) + ) + left_static_reward = 1 - torch.tanh( + 5 * torch.linalg.norm(self.left_agent.robot.get_qvel()[..., :-2], axis=1) + ) + static_reward = (right_static_reward + left_static_reward) / 2 + + reward[is_obj_placed] = 19 + static_reward[is_obj_placed] + + reward[info["success"]] = 21 + + return reward + + def compute_normalized_dense_reward( + self, obs: Any, action: torch.Tensor, info: Dict + ): + return self.compute_dense_reward(obs=obs, action=action, info=info) / 21 diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/two_robot_stack_cube.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/two_robot_stack_cube.py new file mode 100644 index 0000000000000000000000000000000000000000..e0562830a46d5cda29744a4e3d6623a195131235 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/tasks/tabletop/two_robot_stack_cube.py @@ -0,0 +1,296 @@ +from typing import Any, Dict, Tuple + +import numpy as np +import sapien +import torch +from transforms3d.euler import euler2quat + +from mani_skill.agents.multi_agent import MultiAgent +from mani_skill.agents.robots.panda import Panda +from 
mani_skill.envs.sapien_env import BaseEnv +from mani_skill.envs.utils.randomization.pose import random_quaternions +from mani_skill.sensors.camera import CameraConfig +from mani_skill.utils import common, sapien_utils +from mani_skill.utils.building import actors +from mani_skill.utils.registration import register_env +from mani_skill.utils.scene_builder.table import TableSceneBuilder +from mani_skill.utils.structs.pose import Pose +from mani_skill.utils.structs.types import GPUMemoryConfig, SimConfig + + +@register_env("TwoRobotStackCube-v1", max_episode_steps=100) +class TwoRobotStackCube(BaseEnv): + """ + **Task Description:** + A collaborative task where two robot arms need to work together to stack two cubes. One robot must pick up the green cube and place it on the target region, while the other robot picks up the blue cube and stacks it on top of the green cube. + + The cubes are initially positioned such that each robot can only reach one cube - the green cube is near the right robot and the blue cube is near the left robot. This requires coordination between the robots to complete the stacking task. 
+ + **Randomizations:** + - Both cubes have random rotations around their z-axis + - The xy positions of both cubes on the table are randomized, while ensuring: + - The cubes do not collide with each other + - The green cube remains reachable by the right robot + - The blue cube remains reachable by the left robot + - The goal region is placed along the midline between the robots (y=0), with randomized x position + + **Success Conditions:** + - The blue cube is stacked on top of the green cube (within half a cube size) + - The green cube is placed on the red/white target region + - Both cubes are released by the robots (not being grasped) + + """ + + _sample_video_link = "https://github.com/haosulab/ManiSkill/raw/main/figures/environment_demos/TwoRobotStackCube-v1_rt.mp4" + SUPPORTED_ROBOTS = [("panda_wristcam", "panda_wristcam")] + agent: MultiAgent[Tuple[Panda, Panda]] + + goal_radius = 0.06 + + def __init__( + self, + *args, + robot_uids=("panda_wristcam", "panda_wristcam"), + robot_init_qpos_noise=0.02, + **kwargs + ): + self.robot_init_qpos_noise = robot_init_qpos_noise + super().__init__(*args, robot_uids=robot_uids, **kwargs) + + @property + def _default_sim_config(self): + return SimConfig( + gpu_memory_config=GPUMemoryConfig( + found_lost_pairs_capacity=2**25, + max_rigid_patch_count=2**19, + max_rigid_contact_count=2**21, + ) + ) + + @property + def _default_sensor_configs(self): + pose = sapien_utils.look_at(eye=[0.3, 0, 0.6], target=[-0.1, 0, 0.1]) + return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)] + + @property + def _default_human_render_camera_configs(self): + # pose = sapien_utils.look_at([1.4, 0.8, 0.75], [0.0, 0.1, 0.1]) # this perspective is good for demos + pose = sapien_utils.look_at(eye=[0.6, 0.2, 0.4], target=[-0.1, 0, 0.1]) + return CameraConfig("render_camera", pose, 512, 512, 1, 0.01, 100) + + def _load_agent(self, options: dict): + super()._load_agent( + options, [sapien.Pose(p=[0, -1, 0]), sapien.Pose(p=[0, 1, 
0])] + ) + + def _load_scene(self, options: dict): + self.cube_half_size = common.to_tensor([0.02] * 3, device=self.device) + self.table_scene = TableSceneBuilder( + env=self, robot_init_qpos_noise=self.robot_init_qpos_noise + ) + self.table_scene.build() + self.cubeA = actors.build_cube( + self.scene, + half_size=0.02, + color=np.array([12, 42, 160, 255]) / 255, + name="cubeA", + initial_pose=sapien.Pose(p=[1, 0, 0.02]), + ) + self.cubeB = actors.build_cube( + self.scene, + half_size=0.02, + color=[0, 1, 0, 1], + name="cubeB", + initial_pose=sapien.Pose(p=[-1, 0, 0.02]), + ) + self.goal_region = actors.build_red_white_target( + self.scene, + radius=self.goal_radius, + thickness=1e-5, + name="goal_region", + add_collision=False, + body_type="kinematic", + initial_pose=sapien.Pose(), + ) + + def _initialize_episode(self, env_idx: torch.Tensor, options: dict): + with torch.device(self.device): + b = len(env_idx) + self.table_scene.initialize(env_idx) + # the table scene initializes two robots. 
the first one self.agents[0] is on the left and the second one is on the right + + torch.zeros((b, 3)) + torch.rand((b, 2)) * 0.2 - 0.1 + cubeA_xyz = torch.zeros((b, 3)) + cubeA_xyz[:, 0] = torch.rand((b,)) * 0.1 - 0.05 + cubeA_xyz[:, 1] = -0.15 - torch.rand((b,)) * 0.1 + 0.05 + cubeB_xyz = torch.zeros((b, 3)) + cubeB_xyz[:, 0] = torch.rand((b,)) * 0.1 - 0.05 + cubeB_xyz[:, 1] = 0.15 + torch.rand((b,)) * 0.1 - 0.05 + cubeA_xyz[:, 2] = 0.02 + cubeB_xyz[:, 2] = 0.02 + + qs = random_quaternions( + b, + lock_x=True, + lock_y=True, + lock_z=False, + ) + self.cubeA.set_pose(Pose.create_from_pq(p=cubeA_xyz, q=qs)) + + qs = random_quaternions( + b, + lock_x=True, + lock_y=True, + lock_z=False, + ) + self.cubeB.set_pose(Pose.create_from_pq(p=cubeB_xyz, q=qs)) + + target_region_xyz = torch.zeros((b, 3)) + target_region_xyz[:, 0] = torch.rand((b,)) * 0.1 - 0.05 + target_region_xyz[:, 1] = -0.1 + # set a little bit above 0 so the target is sitting on the table + target_region_xyz[..., 2] = 1e-3 + self.goal_region.set_pose( + Pose.create_from_pq( + p=target_region_xyz, + q=euler2quat(0, np.pi / 2, 0), + ) + ) + + @property + def left_agent(self) -> Panda: + return self.agent.agents[0] + + @property + def right_agent(self) -> Panda: + return self.agent.agents[1] + + def evaluate(self): + pos_A = self.cubeA.pose.p + pos_B = self.cubeB.pose.p + offset = pos_A - pos_B + xy_flag = ( + torch.linalg.norm(offset[..., :2], axis=1) + <= torch.linalg.norm(self.cube_half_size[:2]) + 0.005 + ) + z_flag = torch.abs(offset[..., 2] - self.cube_half_size[..., 2] * 2) <= 0.005 + is_cubeA_on_cubeB = torch.logical_and(xy_flag, z_flag) + cubeB_to_goal_dist = torch.linalg.norm( + self.cubeB.pose.p[:, :2] - self.goal_region.pose.p[..., :2], axis=1 + ) + cubeB_placed = cubeB_to_goal_dist < self.goal_radius + is_cubeA_grasped = self.left_agent.is_grasping(self.cubeA) + is_cubeB_grasped = self.right_agent.is_grasping(self.cubeB) + success = ( + is_cubeA_on_cubeB * cubeB_placed * (~is_cubeA_grasped) * 
(~is_cubeB_grasped) + ) + return { + "is_cubeA_grasped": is_cubeA_grasped, + "is_cubeB_grasped": is_cubeB_grasped, + "is_cubeA_on_cubeB": is_cubeA_on_cubeB, + "cubeB_placed": cubeB_placed, + "success": success.bool(), + } + + def _get_obs_extra(self, info: Dict): + obs = dict( + left_arm_tcp=self.left_agent.tcp.pose.raw_pose, + right_arm_tcp=self.right_agent.tcp.pose.raw_pose, + ) + if "state" in self.obs_mode: + obs.update( + goal_region_pos=self.goal_region.pose.p, + cubeA_pose=self.cubeA.pose.raw_pose, + cubeB_pose=self.cubeB.pose.raw_pose, + left_arm_tcp_to_cubeA_pos=self.cubeA.pose.p + - self.left_agent.tcp.pose.p, + right_arm_tcp_to_cubeB_pos=self.cubeB.pose.p + - self.right_agent.tcp.pose.p, + cubeA_to_cubeB_pos=self.cubeB.pose.p - self.cubeA.pose.p, + ) + return obs + + def compute_dense_reward(self, obs: Any, action: torch.Tensor, info: Dict): + # Stage 1: Reach and grasp + # reaching reward for both robots to their respective cubes + cubeA_to_left_arm_tcp_dist = torch.linalg.norm( + self.left_agent.tcp.pose.p - self.cubeA.pose.p, axis=1 + ) + right_arm_push_pose = Pose.create_from_pq( + p=self.cubeB.pose.p + + torch.tensor([0, self.cube_half_size[0] + 0.005, 0], device=self.device) + ) + right_arm_to_push_pose_dist = torch.linalg.norm( + right_arm_push_pose.p - self.right_agent.tcp.pose.p, axis=1 + ) + reach_reward = ( + 1 + - torch.tanh(5 * cubeA_to_left_arm_tcp_dist) + + 1 + - torch.tanh(5 * right_arm_to_push_pose_dist) + ) / 2 + + # grasp reward for left robot which needs to lift cubeA up eventually + cubeA_pos = self.cubeA.pose.p + cubeB_pos = self.cubeB.pose.p + reward = (reach_reward + info["is_cubeA_grasped"]) / 2 + + # pass condition for stage 1 + place_stage_reached = info["is_cubeA_grasped"] + + # Stage 2: Place bottom cube and still hold to cube A + # place reward for bottom cube (cube B) + cubeB_to_goal_dist = torch.linalg.norm( + cubeB_pos[:, :2] - self.goal_region.pose.p[..., :2], axis=1 + ) + place_reward = 1 - torch.tanh(5 * 
cubeB_to_goal_dist) + stage_2_reward = place_reward + info["is_cubeA_grasped"] + reward[place_stage_reached] = 2 + stage_2_reward[place_stage_reached] / 2 + + # pass condition for stage 2 + cubeB_placed_and_cubeA_grasped = info["cubeB_placed"] * info["is_cubeA_grasped"] + + # Stage 3: Place top cube while moving right arm away to give left arm space + # place reward for top cube (cube A) + goal_xyz = torch.hstack( + [cubeB_pos[:, :2], (cubeB_pos[:, 2] + self.cube_half_size[2] * 2)[:, None]] + ) + cubeA_to_goal_dist = torch.linalg.norm(goal_xyz - cubeA_pos, axis=1) + place_reward = 1 - torch.tanh(5 * cubeA_to_goal_dist) + + # move right arm as close as possible to the y=0.2 line + right_arm_leave_reward = 1 - torch.tanh( + 5 * (self.right_agent.tcp.pose.p[:, 1] - 0.2).abs() + ) + stage_3_reward = place_reward * 2 + right_arm_leave_reward + reward[cubeB_placed_and_cubeA_grasped] = ( + 4 + stage_3_reward[cubeB_placed_and_cubeA_grasped] + ) + # pass condition for stage 3 + cubes_placed = info["is_cubeA_on_cubeB"] * info["cubeB_placed"] + # Stage 4: get both robots to stop grasping + gripper_width = (self.left_agent.robot.get_qlimits()[0, -1, 1] * 2).to( + self.device + ) # NOTE: hard-coded with panda + ungrasp_reward_left = ( + torch.sum(self.left_agent.robot.get_qpos()[:, -2:], axis=1) / gripper_width + ) + ungrasp_reward_left[~info["is_cubeA_grasped"]] = 1.0 + ungrasp_reward_right = ( + torch.sum(self.right_agent.robot.get_qpos()[:, -2:], axis=1) / gripper_width + ) + ungrasp_reward_right[~info["is_cubeB_grasped"]] = 1.0 + + reward[cubes_placed] = ( + 8 + (ungrasp_reward_left + ungrasp_reward_right)[cubes_placed] / 2 + ) + + reward[info["success"]] = 10 + + return reward + + def compute_normalized_dense_reward( + self, obs: Any, action: torch.Tensor, info: Dict + ): + return self.compute_dense_reward(obs=obs, action=action, info=info) / 10 diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/README.md 
b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/README.md new file mode 100644 index 0000000000000000000000000000000000000000..77bbaffd5e22574f4eddb745e5118d2c358d38ba --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/README.md @@ -0,0 +1,3 @@ +# Environment Utilities + +Contains utilities for building scenes, designing reward functions / success conditions, and more, all implemented to support both data in numpy (for CPU sim) and pytorch (for GPU sim) \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/__init__.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/__pycache__/__init__.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e5b5b6266b271fcd7f1a8c5962d37c8cd7dbca7c Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/__init__.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f4da385722ecc228c173f532fe0dadec6a9ee4f6 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/__init__.py @@ -0,0 +1,105 @@ +from dataclasses import dataclass + +from .observations import * + + +@dataclass +class CameraObsTextures: + rgb: bool + depth: bool + segmentation: bool + position: bool + normal: bool + albedo: bool + + +@dataclass +class ObservationModeStruct: + 
"""A dataclass describing what observation data is being requested by the user""" + + state_dict: bool + """whether to include state data which generally means including privileged information such as object poses""" + state: bool + """whether to include flattened state data which generally means including privileged information such as object poses""" + visual: CameraObsTextures + """textures to capture from cameras""" + + @property + def use_state(self): + """whether or not the environment should return ground truth/privileged information such as object poses""" + return self.state or self.state_dict + + +ALL_VISUAL_TEXTURES = ["rgb", "depth", "segmentation", "position", "normal", "albedo"] +"""set of all standard textures that can come from cameras""" + + +def parse_obs_mode_to_struct(obs_mode: str) -> ObservationModeStruct: + """Given user supplied observation mode, return a struct with the relevant textures that are to be captured""" + # parse obs mode into a string of possible textures + if obs_mode == "rgbd": + return ObservationModeStruct( + state_dict=False, + state=False, + visual=CameraObsTextures( + rgb=True, + depth=True, + segmentation=False, + position=False, + normal=False, + albedo=False, + ), + ) + elif obs_mode == "pointcloud": + return ObservationModeStruct( + state_dict=False, + state=False, + visual=CameraObsTextures( + rgb=True, + depth=False, + segmentation=True, + position=True, + normal=False, + albedo=False, + ), + ) + elif obs_mode == "sensor_data": + return ObservationModeStruct( + state_dict=False, + state=False, + visual=CameraObsTextures( + rgb=True, + depth=True, + segmentation=True, + position=True, + normal=False, + albedo=False, + ), + ) + else: + # Parse obs mode into individual texture types + textures = obs_mode.split("+") + if "pointcloud" in textures: + textures.remove("pointcloud") + textures.append("position") + textures.append("rgb") + textures.append("segmentation") + for texture in textures: + if texture == "state" or 
texture == "state_dict" or texture == "none": + # allows fetching privileged state data in addition to visual data. + continue + assert ( + texture in ALL_VISUAL_TEXTURES + ), f"Invalid texture type '{texture}' requested in the obs mode '{obs_mode}'. Each individual texture must be one of {ALL_VISUAL_TEXTURES}" + return ObservationModeStruct( + state_dict="state_dict" in textures, + state="state" in textures, + visual=CameraObsTextures( + rgb="rgb" in textures, + depth="depth" in textures, + segmentation="segmentation" in textures, + position="position" in textures, + normal="normal" in textures, + albedo="albedo" in textures, + ), + ) diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/__pycache__/__init__.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5fe681c7ad114f340864bc08fb8e08563cf35588 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/__pycache__/__init__.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/__pycache__/observations.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/__pycache__/observations.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..446464539794d82d23d1fbd282dce8b53244ca5e Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/__pycache__/observations.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/observations.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/observations.py new file mode 100644 index 
0000000000000000000000000000000000000000..52bfaa09a46cdbe1e43e8487182571e984a55c30 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/observations/observations.py @@ -0,0 +1,68 @@ +""" +Functions that map a observation to a particular format, e.g. mapping the raw images to rgbd or pointcloud formats +""" + +from typing import Dict + +import numpy as np +import sapien.physx as physx +import torch + +from mani_skill.render import SAPIEN_RENDER_SYSTEM +from mani_skill.sensors.base_sensor import BaseSensor, BaseSensorConfig +from mani_skill.sensors.camera import Camera +from mani_skill.utils import common + + +def sensor_data_to_pointcloud(observation: Dict, sensors: Dict[str, BaseSensor]): + """convert all camera data in sensor to pointcloud data""" + sensor_data = observation["sensor_data"] + camera_params = observation["sensor_param"] + pointcloud_obs = dict() + + for (cam_uid, images), (sensor_uid, sensor) in zip( + sensor_data.items(), sensors.items() + ): + assert cam_uid == sensor_uid + if isinstance(sensor, Camera): + cam_pcd = {} + # TODO: double check if the .clone()s are necessary + # Each pixel is (x, y, z, actor_id) in OpenGL camera space + # actor_id = 0 for the background + images: Dict[str, torch.Tensor] + position = images["position"].clone() + segmentation = images["segmentation"].clone() + position = position.float() + position[..., :3] = ( + position[..., :3] / 1000.0 + ) # convert the raw depth from millimeters to meters + + # Convert to world space + cam2world = camera_params[cam_uid]["cam2world_gl"] + xyzw = torch.cat([position, segmentation != 0], dim=-1).reshape( + position.shape[0], -1, 4 + ) @ cam2world.transpose(1, 2) + cam_pcd["xyzw"] = xyzw + + # Extra keys + if "rgb" in images: + rgb = images["rgb"][..., :3].clone() + cam_pcd["rgb"] = rgb.reshape(rgb.shape[0], -1, 3) + if "segmentation" in images: + cam_pcd["segmentation"] = segmentation.reshape( + segmentation.shape[0], -1, 1 + ) + + 
pointcloud_obs[cam_uid] = cam_pcd + for k in pointcloud_obs.keys(): + del observation["sensor_data"][k] + pointcloud_obs = common.merge_dicts(pointcloud_obs.values()) + for key, value in pointcloud_obs.items(): + pointcloud_obs[key] = torch.concat(value, axis=1) + observation["pointcloud"] = pointcloud_obs + + # if not physx.is_gpu_enabled(): + # observation["pointcloud"]["segmentation"] = ( + # observation["pointcloud"]["segmentation"].numpy().astype(np.uint16) + # ) + return observation diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__init__.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b995d1a27431cee775fd3e7df42262431e29a95e --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__init__.py @@ -0,0 +1,3 @@ +from .common import * +from .pose import * +from .samplers import * diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/__init__.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..34a15b728856966ae0ecc8ec9475231b5a20de84 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/__init__.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/batched_rng.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/batched_rng.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..db04159b6878fc47022e0e4050a49776bc972ec8 Binary files /dev/null and 
b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/batched_rng.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/common.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/common.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4f539b1cf3322f3b96538cb5e0984c31f008e475 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/common.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/pose.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/pose.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d5779c2f016900a40a8fe8eee65e1f96625795d9 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/pose.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/samplers.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/samplers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..366cfd3f63ba5ed41fc11eb7f7b60dfe40e8aa4b Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/__pycache__/samplers.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/batched_rng.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/batched_rng.py new file mode 100644 index 0000000000000000000000000000000000000000..df1ed550209686b7d3d74d7ab2bcfc5346abc40b --- /dev/null 
+++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/batched_rng.py @@ -0,0 +1,70 @@ +""" +Code implementation for a batched random number generator. The goal is to enable seeding a batched random number generator with a batch of seeds to ensure randomization +in CPU simulators and GPU simulators are the same +""" + +from typing import List, Union + +import numpy as np + +from mani_skill.utils import common + + +class BatchedRNG(np.random.RandomState): + def __init__(self, rngs: List): + self.rngs = rngs + self.batch_size = len(rngs) + + @classmethod + def from_seeds(cls, seeds: List[int], backend: str = "numpy:random_state"): + if backend == "numpy:random_state": + return cls(rngs=[np.random.RandomState(seed) for seed in seeds]) + raise ValueError(f"Unknown batched RNG backend: {backend}") + + @classmethod + def from_rngs(cls, rngs: List): + return cls(rngs=rngs) + + def __getitem__(self, idx: Union[int, List[int], np.ndarray]): + idx = common.to_numpy(idx) + if np.iterable(idx): + return BatchedRNG.from_rngs([self.rngs[i] for i in idx]) + return self.rngs[idx] + + def __setitem__( + self, + idx: Union[int, List[int], np.ndarray], + value: Union[np.random.RandomState, List[np.random.RandomState]], + ): + idx = common.to_numpy(idx) + if np.iterable(idx): + for i, new_v in zip(idx, value): + self.rngs[i] = new_v + else: + self.rngs[idx] = value + + def __getattribute__(self, item): + if item in [ + "rngs", + "__class__", + "__dict__", + "__getattribute__", + "__str__", + "__repr__", + "__getitem__", + "batch_size", + ]: + return object.__getattribute__(self, item) + if callable(getattr(self.rngs[0], item)): + + def method(*args, **kwargs): + return np.array( + [ + object.__getattribute__(rng, item)(*args, **kwargs) + for rng in self.rngs + ] + ) + + return method + else: + return np.array([getattr(rng, item) for rng in self.rngs]) diff --git 
a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/common.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/common.py new file mode 100644 index 0000000000000000000000000000000000000000..f602960ca2c68a7f1cd47cfef54f497bdf4336ee --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/common.py @@ -0,0 +1,20 @@ +from typing import Sequence, Union + +import torch + +from mani_skill.utils import common +from mani_skill.utils.structs.types import Device + + +def uniform( + low: Union[float, torch.Tensor], + high: Union[float, torch.Tensor], + size: Sequence, + device: Device = None, +): + if not isinstance(low, float): + low = common.to_tensor(low, device=device) + if not isinstance(high, float): + high = common.to_tensor(high, device=device) + dist = high - low + return torch.rand(size=size, device=device) * dist + low diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/pose.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/pose.py new file mode 100644 index 0000000000000000000000000000000000000000..69c8515840ad10c18229c3860cfa089960d11397 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/pose.py @@ -0,0 +1,34 @@ +import numpy as np +import torch +import transforms3d + +from mani_skill.utils import sapien_utils +from mani_skill.utils.geometry.rotation_conversions import ( + euler_angles_to_matrix, + matrix_to_quaternion, +) +from mani_skill.utils.structs.types import Device + + +def random_quaternions( + n: int, + device: Device = None, + lock_x: bool = False, + lock_y: bool = False, + lock_z: bool = False, + bounds=(0, np.pi * 2), +): + """ + Generates random quaternions by generating random euler angles uniformly, with each of + the X, Y, Z angles ranging from bounds[0] to bounds[1] radians. 
Can optionally + choose to fix X, Y, and/or Z euler angles to 0 via lock_x, lock_y, lock_z arguments + """ + dist = bounds[1] - bounds[0] + xyz_angles = torch.rand((n, 3), device=device) * (dist) + bounds[0] + if lock_x: + xyz_angles[:, 0] *= 0 + if lock_y: + xyz_angles[:, 1] *= 0 + if lock_z: + xyz_angles[:, 2] *= 0 + return matrix_to_quaternion(euler_angles_to_matrix(xyz_angles, convention="XYZ")) diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/samplers.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/samplers.py new file mode 100644 index 0000000000000000000000000000000000000000..0edb4e79bbcd0dc4f0ea09ba3ffdf432b3b4dc92 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/randomization/samplers.py @@ -0,0 +1,96 @@ +""" +Various sampling functions/classes for fast, vectorized sampling of e.g. object poses +""" + +from typing import List, Tuple + +import torch + +from mani_skill.utils import common +from mani_skill.utils.geometry.rotation_conversions import Device + + +class UniformPlacementSampler: + """Uniform placement sampler that lets you sequentially sample data such that the data is within given bounds and + not too close to previously sampled data. This sampler is also batched so you can use this easily for GPU simulated tasks + + Args: + bounds: ((low1, low2, ...), (high1, high2, ...)) + batch_size (int): The number of points to sample with each call to sample(...) 
+ """ + + def __init__( + self, + bounds: Tuple[List[float], List[float]], + batch_size: int, + device: Device = None, + ) -> None: + assert len(bounds) == 2 and len(bounds[0]) == len(bounds[1]) + self._bounds = common.to_tensor(bounds, device=device) + self._ranges = self._bounds[1] - self._bounds[0] + self.fixtures_radii = None + self.fixture_positions = None + self.batch_size = batch_size + + def sample(self, radius, max_trials, append=True, verbose=False): + """Sample a position. + + Args: + radius (float): collision radius. + max_trials (int): maximal trials to sample. + append (bool, optional): whether to append the new sample to fixtures. Defaults to True. + verbose (bool, optional): whether to print verbosely. Defaults to False. + + Returns: + torch.Tensor: a sampled position. + """ + if self.fixture_positions is None: + sampled_pos = ( + torch.rand((self.batch_size, self._bounds.shape[1])) * self._ranges + + self._bounds[0] + ) + else: + pass_mask = torch.zeros((self.batch_size), dtype=bool) + sampled_pos = torch.zeros((self.batch_size, self._bounds.shape[1])) + for i in range(max_trials): + pos = ( + torch.rand((self.batch_size, self._bounds.shape[1])) * self._ranges + + self._bounds[0] + ) # (B, d) + dist = torch.linalg.norm( + pos - self.fixture_positions, axis=-1 + ) # (n, B) + radii = self.fixtures_radii + radius # (n, ) + mask = torch.all(dist > radii[:, None], axis=0) # (B, ) + sampled_pos[mask] = pos[mask] + pass_mask[mask] = True + if torch.all(pass_mask): + if verbose: + print( + f"Found valid set of {self.batch_size=} samples at {i}-th trial" + ) + break + else: + if verbose: + print("Fail to find a valid sample!") + if append: + if self.fixture_positions is None: + self.fixture_positions = sampled_pos[None, ...] 
+ else: + self.fixture_positions = torch.concat( + [self.fixture_positions, sampled_pos[None, ...]] + ) + if self.fixtures_radii is None: + self.fixtures_radii = common.to_tensor(radius).reshape( + 1, + ) + else: + self.fixtures_radii = torch.concat( + [ + self.fixtures_radii, + common.to_tensor(radius).reshape( + 1, + ), + ] + ) + return sampled_pos diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/__init__.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..55e5f844b4a2b2e06ed806c0747cbda88b29d084 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/__init__.py @@ -0,0 +1 @@ +from .common import * diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/__pycache__/__init__.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..71e087a9939f57efad4cb973293b3737a1667c46 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/__pycache__/__init__.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/__pycache__/common.cpython-310.pyc b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/__pycache__/common.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d6a4b18b7388afb2fa1584530861d0a57a43598 Binary files /dev/null and b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/__pycache__/common.cpython-310.pyc differ diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/common.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/envs/utils/rewards/common.py new file mode 
def tolerance(
    x, lower=0.0, upper=0.0, margin=0.0, sigmoid="gaussian", value_at_margin=0.1
):
    # modified from https://github.com/google-deepmind/dm_control/blob/554ad2753df914372597575505249f22c255979d/dm_control/utils/rewards.py#L93
    """Returns 1 when `x` falls inside the bounds, between 0 and 1 otherwise.

    Args:
        x: A torch array. (B, 3)
        lower, upper: specifying inclusive `(lower, upper)` bounds for
            the target interval. These can be infinite if the interval is unbounded
            at one or both ends, or they can be equal to one another if the target
            value is exact.
        margin: Float. Parameter that controls how steeply the output decreases as
            `x` moves out-of-bounds.
            * If `margin == 0` then the output will be 0 for all values of `x`
              outside of `bounds`.
            * If `margin > 0` then the output will decrease sigmoidally with
              increasing distance from the nearest bound.
        sigmoid: String, choice of sigmoid type. Valid values are: 'gaussian',
            'hyperbolic', 'long_tail', 'cosine', 'linear', 'quadratic',
            'tanh_squared'.
        value_at_margin: A float between 0 and 1 specifying the output value when
            the distance from `x` to the nearest bound is equal to `margin`. Ignored
            if `margin == 0`.

    Returns:
        A torch array with values between 0.0 and 1.0.

    Raises:
        ValueError: If `bounds[0] > bounds[1]`.
        ValueError: If `margin` is negative.
        ValueError: If not 0 < `value_at_margin` < 1,
            except for `linear`, `cosine` and `quadratic` sigmoids, which allow
            `value_at_margin` == 0.
        ValueError: If `sigmoid` is of an unknown type.
    """
    if sigmoid in ("cosine", "linear", "quadratic"):
        if not 0 <= value_at_margin < 1:
            raise ValueError(
                "`value_at_margin` must be nonnegative and smaller than 1, "
                "got {}.".format(value_at_margin)
            )
    else:
        if not 0 < value_at_margin < 1:
            raise ValueError(
                "`value_at_margin` must be strictly between 0 and 1, "
                "got {}.".format(value_at_margin)
            )

    if lower > upper:
        raise ValueError("Lower bound must be <= upper bound.")

    if margin < 0:
        raise ValueError("`margin` must be non-negative.")

    in_bounds = torch.logical_and(lower <= x, x <= upper)
    one = torch.tensor(1.0)

    if margin == 0:
        return torch.where(in_bounds, one, torch.tensor(0.0))

    # Distance past the nearest bound in units of `margin`. Only the
    # out-of-bounds entries of `d` (which are positive) are used below.
    d = torch.where(x < lower, lower - x, x - upper) / margin

    if sigmoid == "gaussian":
        scale = np.sqrt(-2 * np.log(value_at_margin))
        falloff = torch.exp(-0.5 * (d * scale) ** 2)
    elif sigmoid == "hyperbolic":
        # BUGFIX: was 1 / (1 + exp(d * scale)), which does not evaluate to
        # `value_at_margin` at d == 1; dm_control defines 1 / cosh(d * scale).
        scale = np.arccosh(1 / value_at_margin)
        falloff = 1 / torch.cosh(d * scale)
    elif sigmoid == "long_tail":
        # Previously documented but unimplemented.
        scale = np.sqrt(1 / value_at_margin - 1)
        falloff = 1 / ((d * scale) ** 2 + 1)
    elif sigmoid == "cosine":
        # Previously documented but unimplemented.
        scale = np.arccos(2 * value_at_margin - 1) / np.pi
        scaled_d = d * scale
        falloff = torch.where(
            scaled_d.abs() < 1,
            (1 + torch.cos(np.pi * scaled_d)) / 2,
            torch.tensor(0.0),
        )
    elif sigmoid == "linear":
        scale = 1 - value_at_margin
        scaled_d = d * scale
        falloff = torch.where(scaled_d.abs() < 1, 1 - scaled_d, torch.tensor(0.0))
    elif sigmoid == "quadratic":
        scale = np.sqrt(1 - value_at_margin)
        scaled_d = d * scale
        falloff = torch.where(
            scaled_d.abs() < 1, 1 - scaled_d**2, torch.tensor(0.0)
        )
    elif sigmoid == "tanh_squared":
        # Previously documented but unimplemented.
        scale = np.arctanh(np.sqrt(1 - value_at_margin))
        falloff = 1 - torch.tanh(d * scale) ** 2
    else:
        raise ValueError(f"Unknown sigmoid type {sigmoid!r}.")

    return torch.where(in_bounds, one, falloff)
"""
Utilities for determining the simulation backend and devices
"""
from dataclasses import dataclass

import sapien
import torch


@dataclass
class BackendInfo:
    device: torch.device
    """the device in which to return all simulation data on"""
    sim_device: sapien.Device
    """the device on which the physics simulation is running"""
    sim_backend: str
    """the backend name of the physics simulation"""
    render_device: sapien.Device
    """the device on which the renderer is running"""
    render_backend: str
    """the backend name of the renderer"""


CPU_SIM_BACKENDS = set(["cpu", "physx_cpu"])

# User-facing aliases -> canonical backend names. Names absent from these
# mappings (e.g. "cuda:1", "pci:...") are passed through unchanged.
sim_backend_name_mapping = {
    "cpu": "physx_cpu",
    "cuda": "physx_cuda",
    "gpu": "physx_cuda",
    "physx_cpu": "physx_cpu",
    "physx_cuda": "physx_cuda",
}
render_backend_name_mapping = {
    "cpu": "sapien_cpu",
    "cuda": "sapien_cuda",
    "gpu": "sapien_cuda",
    "sapien_cpu": "sapien_cpu",
    "sapien_cuda": "sapien_cuda",
}


def parse_sim_and_render_backend(sim_backend: str, render_backend: str) -> BackendInfo:
    """Resolve user-facing backend names into devices and canonical names.

    Args:
        sim_backend: "cpu"/"cuda"/"gpu", a canonical "physx_*" name, or an
            explicit device string such as "cuda:1".
        render_backend: "cpu"/"cuda"/"gpu", a canonical "sapien_*" name, an
            explicit "cuda:N" string, or a raw device string such as "pci:..."
            (e.g. for AMD GPUs where CUDA is unavailable).

    Returns:
        BackendInfo with the torch/sapien devices and canonical backend names.

    Raises:
        ValueError: if the simulation backend cannot be interpreted.
    """
    # BUGFIX: use .get(name, name) instead of [name]. Direct indexing raised
    # KeyError for names such as "cuda:1" or "pci:...", which made the
    # explicit-device branches below unreachable.
    sim_backend = sim_backend_name_mapping.get(sim_backend, sim_backend)
    render_backend = render_backend_name_mapping.get(render_backend, render_backend)
    if sim_backend == "physx_cpu":
        device = torch.device("cpu")
        sim_device = sapien.Device("cpu")
    elif sim_backend == "physx_cuda":
        device = torch.device("cuda")
        sim_device = sapien.Device("cuda")
    elif sim_backend[:4] == "cuda":
        device = torch.device(sim_backend)
        sim_device = sapien.Device(sim_backend)
    else:
        raise ValueError(f"Invalid simulation backend: {sim_backend}")

    # TODO (stao): handle checking if system is mac, in which we must then use render_backend = "sapien_cpu"
    # determine render device
    if render_backend == "sapien_cuda":
        render_device = sapien.Device("cuda")
    elif render_backend == "sapien_cpu":
        render_device = sapien.Device("cpu")
    elif render_backend[:4] == "cuda":
        render_device = sapien.Device(render_backend)
    else:
        # handle special cases such as for AMD gpus, render_backend must be defined as pci:... instead as cuda is not available.
        render_device = sapien.Device(render_backend)
    return BackendInfo(
        device=device,
        sim_device=sim_device,
        # BUGFIX: `sim_backend` is already canonical here; re-indexing the
        # mapping raised KeyError for explicit device strings like "cuda:1".
        sim_backend=sim_backend,
        render_device=render_device,
        render_backend=render_backend,
    )
def setup( + self, + env_id: str, + policy_cls: Type[BasePolicy], + render_mode="cameras", + env_kwargs=None, + ): + """Setup environment and policy.""" + self.env_id = env_id + self.env_kwargs = {} if env_kwargs is None else env_kwargs + + obs_mode = policy_cls.get_obs_mode(env_id) + control_mode = policy_cls.get_control_mode(env_id) + + self.env: BaseEnv = gym.make( + self.env_id, + obs_mode=obs_mode, + control_mode=control_mode, + render_mode=render_mode, + **self.env_kwargs + ) + self.policy = policy_cls( + self.env_id, self.env.observation_space, self.env.action_space + ) + self.result = dict() + + def evaluate_episode(self, reset_kwargs, render=False): + """Evaluate a single episode.""" + env = self.env + policy = self.policy + + obs, _ = env.reset(**reset_kwargs) + policy.reset(obs) + # NOTE(jigu): Use for-loop rather than while-loop + # in case time limit is not correctly set. + for _ in range(self.MAX_EPISODE_STEPS): + action = policy.act(obs) + # NOTE(jigu): render after action in case action is needed to visualize + if render: + env.render() + obs, reward, terminated, truncated, info = env.step(action) + if terminated or truncated: + if render: + env.render() + assert "success" in info, sorted(info.keys()) + metrics = gym_utils.extract_scalars_from_info( + info, "TimeLimit.truncated" + ) + return metrics + + def evaluate_episodes(self, episode_cfgs: List[dict], callback: Callable = None): + """Evaluate episodes according to configurations. + + Args: + episode_cfgs (List[dict]): a list of episode configurations. + The configuration should contain "reset_kwargs". + callback (Callable, optional): callback function to report progress. 
+ It accepts two arguments: + int: the number of completed episodes + dict: the results of the latest evaluated episode + """ + for i, episode_cfg in enumerate(episode_cfgs): + episode_id = episode_cfg.get("episode_id", i) + reset_kwargs = episode_cfg.get("reset_kwargs", {}) + metrics = self.evaluate_episode(reset_kwargs) + if metrics is None: + raise RuntimeError( + "Episode {}: check whether time limit is set".format(episode_id) + ) + if episode_id in self.result: + raise RuntimeError("Episode id {} is not unique.".format(episode_id)) + self.result[episode_id] = metrics + + if callback is not None: + callback(i + 1, metrics) + + def close(self): + self.env.close() + + def generate_dummy_config(self, env_id, num_episodes: int): + """Generate dummy configuration.""" + env_info = dict(env_id=env_id) + episodes = [dict(episode_id=i) for i in range(num_episodes)] + return dict(env_info=env_info, episodes=episodes) + + def merge_result(self): + merged_result = common.merge_dicts(self.result.values()) + merged_metrics = {k: np.mean(v) for k, v in merged_result.items()} + return merged_metrics + + def export_to_csv(self, path): + """Average results and export to a csv file.""" + import csv + + import tabulate + + merged_metrics = self.merge_result() + headers = ["env_id"] + list(merged_metrics.keys()) + data = [[self.env_id] + list(merged_metrics.values())] + print(tabulate(data, headers=headers, tablefmt="psql", floatfmt=".4f")) + + with open(path, "w") as f: + csv_writer = csv.writer(f) + csv_writer.writerow(headers) + csv_writer.writerows(data) + print("The evaluation result is saved to {}.".format(path)) + + def submit(self): + raise NotImplementedError + + def error(self, *args, **kwargs): + raise NotImplementedError diff --git a/project/ManiSkill3/src/maniskill3_environment/mani_skill/evaluation/openpi/__init__.py b/project/ManiSkill3/src/maniskill3_environment/mani_skill/evaluation/openpi/__init__.py new file mode 100644 index 
import mani_skill.envs
import gymnasium as gym
import numpy as np
import matplotlib.pyplot as plt
from typing import Dict, Any
from openpi_client import image_tools
from openpi_client.runtime import environment as _environment
from typing_extensions import override


class CustomEnvironment(_environment.Environment):
    """An environment for the custom task in ManiSkill."""

    def get_prompt(self):
        """Return the language prompt matching `self.env_id`.

        Raises:
            ValueError: if no prompt is defined for the environment id.
        """
        exact = {
            "PlaceMugOnCoffeeMachine-v1": "place mug on coffee machine",
            "PickMugFromCoffeeMachine-v1": "pick mug from coffee machine",
            "F3Scene-v0": "place mug on coffee machine",
            "PlaceBPFKOnRack-v1": "place it on rack",
        }
        if self.env_id in exact:
            return exact[self.env_id]
        by_substring = {
            "PlaceBowlOnRack": "place bowl on rack",
            "PlacePlateOnRack": "place plate on rack",
            "PlaceForkOnRack": "place fork on rack",
            "PlaceKnifeOnRack": "place knife on rack",
        }
        for fragment, prompt in by_substring.items():
            if fragment in self.env_id:
                return prompt
        # BUGFIX: previously fell through and silently returned None.
        raise ValueError(f"No prompt defined for env_id {self.env_id!r}")

    def __init__(self, env_id: str = "PlaceMugOnCoffeeMachine-v1",
                 obs_mode: str = "rgb",
                 render_mode: str = "rgb_array",
                 control_mode: str = "pd_joint_pos",
                 robot_uids: str = "noahbiarm_rc",
                 rand_level: int = 0,
                 seed: int = 0,
                 ) -> None:
        # NOTE(review): seeds the *global* numpy RNG as well as the private
        # stream — presumably intentional for env determinism; confirm.
        np.random.seed(seed)
        self.env_id = env_id
        self._rng = np.random.default_rng(seed)
        self._gym = gym.make(
            env_id,
            obs_mode=obs_mode,
            control_mode=control_mode,
            render_mode=render_mode,
            rand_level=rand_level,
            robot_uids=robot_uids,
        )
        self._last_obs = None
        self._done = False
        self._episode_reward = 0.0
        self._last_action = np.zeros(8, dtype=np.float32)  # zeros until the first real action
        # Action history tracking (consumed by _plot_actions).
        self._action_history = []
        self._timesteps = []
        self._steps = 0
        # BUGFIX: was `self._successfull` — a typo that is never read, so
        # is_episode_successfull() raised AttributeError before the first reset().
        self._success = False

    @override
    def reset(self) -> None:
        """Reset the wrapped env and clear all episode bookkeeping."""
        gym_obs, _ = self._gym.reset()  # seed=int(self._rng.integers(2**32 - 1))
        self._last_obs = self._convert_observation(gym_obs)
        self._done = False
        self._episode_reward = 0.0
        # Reset action history on environment reset
        self._action_history = []
        self._timesteps = []
        self._steps = 0
        self._success = False

    @override
    def is_episode_complete(self) -> bool:
        return self._done

    @override
    def is_episode_successfull(self) -> bool:
        # NOTE: the misspelled method name is part of the runtime interface.
        return self._success

    @override
    def get_observation(self) -> Dict:
        if self._last_obs is None:
            raise RuntimeError("Observation is not set. Call reset() first.")
        return self._last_obs

    @override
    def apply_action(self, action: Dict) -> None:
        """Step the env with `action["actions"]` and cache the converted obs."""
        gym_action = action["actions"]
        self._last_action = gym_action.copy()  # Store the real action

        # Record action and timestep
        self._action_history.append(gym_action.copy())
        self._timesteps.append(len(self._timesteps))

        gym_obs, reward, terminated, truncated, info = self._gym.step(gym_action)
        self._last_obs = self._convert_observation(gym_obs)

        self._done = (terminated or truncated)
        # Track the best reward seen so far in the episode.
        self._episode_reward = max(self._episode_reward, reward)
        self._steps += 1

        self._success = info["success"]

        if self._gym.render_mode == "human":
            self._gym.render()

    def _convert_observation(self, gym_obs: Dict) -> Dict:
        """Convert a ManiSkill observation dict into the openpi policy format."""
        if "rgbd" in self._gym.obs_mode or "rgb" in self._gym.obs_mode:

            def to_chw_224(rgb):
                # [H, W, 3] -> uint8, padded/resized to [3, 224, 224]
                img = image_tools.convert_to_uint8(
                    image_tools.resize_with_pad(rgb, 224, 224)
                )
                return np.transpose(img, (2, 0, 1))

            sensors = gym_obs["sensor_data"]
            base_img = to_chw_224(sensors["base_camera"]["rgb"].cpu().numpy().squeeze(0))
            hand_img = to_chw_224(sensors["hand_camera"]["rgb"].cpu().numpy().squeeze(0))
            head_img = to_chw_224(sensors["head_camera"]["rgb"].cpu().numpy().squeeze(0))

            state = gym_obs["agent"]["qpos"].cpu().numpy().squeeze(0)  # [9]
            # (removed dead locals `padded_action`/`dummy_action`, which were
            # computed but never used)
            obs = {
                "observation/state": state,
                "observation/base_camera": base_img,
                "observation/hand_camera": hand_img,
                "observation/head_camera": head_img,
                "prompt": self.get_prompt(),
            }
            return obs
        else:
            raise NotImplementedError(f"Observation mode {self._gym.obs_mode} not supported yet.")

    def _plot_actions(self) -> None:
        """Plot actions vs time after episode completion."""
        if not self._action_history:
            print("No actions to plot.")
            return

        # Convert action history to numpy array for easier handling
        actions = np.array(self._action_history)  # Shape: [timesteps, action_dim]
        timesteps = np.array(self._timesteps)

        # Create plot
        plt.figure(figsize=(10, 6))
        for i in range(actions.shape[1]):  # Plot each action dimension
            plt.plot(timesteps, actions[:, i], label=f'Action {i+1}')

        plt.xlabel('Timestep')
        plt.ylabel('Action Value')
        plt.title('Actions vs Time')
        plt.legend()
        plt.grid(True)
        plt.show()
import dataclasses
import logging
import pathlib
import tyro

import mani_skill.evaluation.openpi.env as _env
import mani_skill.evaluation.openpi.saver as _saver

from openpi_client import action_chunk_broker
from openpi_client import websocket_client_policy as _websocket_client_policy
from openpi_client.runtime import runtime as _runtime
from openpi_client.runtime.agents import policy_agent as _policy_agent


@dataclasses.dataclass
class Args:
    """CLI arguments for rolling out a remote openpi policy in ManiSkill."""

    env_id: str
    robot_uids: str
    rand_level: int

    # Overwritten in main() from env_id/robot_uids.
    out_dir: pathlib.Path = pathlib.Path("videos/policies/robot_uids/pi0_robot_uids/env_id")
    render_mode: str = "rgb_array"
    obs_mode: str = "rgb"
    control_mode: str = "pd_joint_pos"

    host: str = "127.0.0.1"
    port: int = 8001

    seed: int = 0
    action_horizon: int = 50
    display: bool = False
    num_episodes: int = 1


def main(args: Args) -> None:
    """Main function"""
    # update outdir using env_id
    args.out_dir = pathlib.Path(f"videos/policies/{args.robot_uids}/pi0_{args.robot_uids}/{args.env_id}")

    # create mani env and simulate
    runtime = _runtime.Runtime(
        environment=_env.CustomEnvironment(
            obs_mode=args.obs_mode,
            control_mode=args.control_mode,
            seed=args.seed,
            env_id=args.env_id,
            rand_level=args.rand_level,
            robot_uids=args.robot_uids,
        ),
        agent=_policy_agent.PolicyAgent(
            policy=action_chunk_broker.ActionChunkBroker(
                policy=_websocket_client_policy.WebsocketClientPolicy(
                    host=args.host,
                    port=args.port,
                ),
                action_horizon=args.action_horizon,
            )
        ),
        subscribers=[
            _saver.VideoSaver(args.out_dir),
        ],
        max_hz=30,  # Adjust based on task requirements
        num_episodes=args.num_episodes,
    )

    # BUGFIX: the return value of run() was assigned to `stats` and then
    # immediately overwritten; stats() is the intended source of the summary.
    runtime.run()
    stats = runtime.stats()
    print(stats)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO, force=True)
    tyro.cli(main)


# --- mani_skill/evaluation/openpi/saver.py ---
import imageio
import numpy as np
from openpi_client.runtime import subscriber as _subscriber
from typing_extensions import override


class VideoSaver(_subscriber.Subscriber):
    """Saves episode data: records base-camera frames, writes one mp4 per episode."""

    def __init__(self, out_dir: pathlib.Path, subsample: int = 1) -> None:
        out_dir.mkdir(parents=True, exist_ok=True)
        self._out_dir = out_dir
        self._images: list[np.ndarray] = []
        self._subsample = subsample  # keep every `subsample`-th frame

    @override
    def on_episode_start(self) -> None:
        self._images = []

    @override
    def on_step(self, observation: dict, action: dict) -> None:
        im = observation["observation/base_camera"]  # [C, H, W] - base camera RGB from ManiSkill
        im = np.transpose(im, (1, 2, 0))  # [H, W, C]
        self._images.append(im)

    @override
    def on_episode_end(self) -> None:
        # Pick the next free out_<idx>.mp4 so reruns never overwrite old videos.
        existing = list(self._out_dir.glob("out_[0-9]*.mp4"))
        next_idx = max([int(p.stem.split("_")[1]) for p in existing], default=-1) + 1
        out_path = self._out_dir / f"out_{next_idx}.mp4"

        logging.info(f"Saving video to {out_path}")
        imageio.mimwrite(
            out_path,
            [np.asarray(x) for x in self._images[:: self._subsample]],
            fps=50 // max(1, self._subsample),
        )
import os
import sys

from tqdm import tqdm

from mani_skill.evaluation.evaluator import BaseEvaluator
from mani_skill.utils.io_utils import dump_json, load_json, write_txt
from mani_skill.utils.wrappers import RecordEpisode


class Evaluator(BaseEvaluator):
    """Local evaluation."""

    def __init__(self, output_dir: str, record_dir=None):
        if os.path.exists(output_dir):
            print(f"{output_dir} already exists.")
        os.makedirs(output_dir, exist_ok=True)
        self.output_dir = output_dir

        self.record_dir = record_dir

    def setup(self, *args, **kwargs):
        """Setup env/policy; optionally wrap the env to record episodes."""
        super().setup(*args, **kwargs)
        if self.record_dir is not None:
            self.env = RecordEpisode(self.env, self.record_dir, clean_on_close=False)

    def submit(self):
        """Dump per-episode and averaged metrics as JSON into output_dir."""
        # Export per-episode results
        json_path = os.path.join(self.output_dir, "episode_results.json")
        dump_json(json_path, self.result)
        print("The per-episode evaluation result is saved to {}.".format(json_path))

        # Export average result
        json_path = os.path.join(self.output_dir, "average_metrics.json")
        merged_metrics = self.merge_result()
        self.merged_metrics = merged_metrics
        dump_json(json_path, merged_metrics)
        print("The averaged evaluation result is saved to {}.".format(json_path))

    def error(self, *args):
        """Append an error entry to the error log."""
        write_txt(os.path.join(self.output_dir, "error.log"), args)


class TqdmCallback:
    """Progress-bar callback for BaseEvaluator.evaluate_episodes."""

    def __init__(self, n: int):
        self.n = n
        self.pbar = tqdm(total=n)

    def __call__(self, i, metrics):
        self.pbar.update()


def parse_args():
    """Parse command-line arguments for local evaluation."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-e", "--env-id", type=str, required=True, help="Environment ID"
    )
    parser.add_argument(
        "-o",
        "--output-dir",
        type=str,
        required=True,
        help="Directory to save evaluation results.",
    )
    parser.add_argument(
        "--config-file",
        type=str,
        help="Path to the config file. If None, use the dummy config.",
    )
    # For debug only
    parser.add_argument("-n", "--num-episodes", type=int, help="Number of episodes.")
    parser.add_argument(
        "--use-random-policy",
        action="store_true",
        help="Whether to use a random policy.",
    )
    parser.add_argument(
        "--record-dir",
        type=str,
        help="Directory to record videos and trajectories. If it is '@', use the output directory.",
    )

    args = parser.parse_args()
    return args


def main():
    args = parse_args()

    if args.record_dir == "@":
        args.record_dir = args.output_dir
    evaluator = Evaluator(args.output_dir, record_dir=args.record_dir)

    # ---------------------------------------------------------------------------- #
    # Load evaluation configuration
    # ---------------------------------------------------------------------------- #
    try:
        if args.config_file is not None:
            config = load_json(args.config_file)
            config_env_id = config["env_info"]["env_id"]
            assert config_env_id == args.env_id, (config_env_id, args.env_id)
        else:  # For debug
            config = evaluator.generate_dummy_config(args.env_id, args.num_episodes)
    # BUGFIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        exc_info = sys.exc_info()
        print("Fail to load evaluation configuration.", exc_info[:-1])
        evaluator.error("Fail to load evaluation configuration.", str(exc_info[0]))
        sys.exit(1)

    # ---------------------------------------------------------------------------- #
    # Import user policy
    # ---------------------------------------------------------------------------- #
    if args.use_random_policy:
        from mani_skill.evaluation.solution import RandomPolicy

        UserPolicy = RandomPolicy
    else:
        try:
            from user_solution import UserPolicy
        # BUGFIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        except Exception:
            exc_info = sys.exc_info()
            print("Fail to import UserPolicy", exc_info[:-1])
            evaluator.error("Fail to import UserPolicy", str(exc_info[0]))
            sys.exit(2)

    # ---------------------------------------------------------------------------- #
    # Main
    # ---------------------------------------------------------------------------- #
    env_kwargs = config["env_info"].get("env_kwargs")
    evaluator.setup(
        args.env_id, UserPolicy, render_mode="cameras", env_kwargs=env_kwargs
    )

    episodes = config["episodes"]
    if args.num_episodes is not None:
        episodes = episodes[: args.num_episodes]
    cb = TqdmCallback(len(episodes))
    evaluator.evaluate_episodes(episodes, callback=cb)

    evaluator.submit()
    evaluator.close()


if __name__ == "__main__":
    main()
Define the action space.""" + raise NotImplementedError + + +class RandomPolicy(BasePolicy): + def act(self, observations): + return self.action_space.sample() + + @classmethod + def get_obs_mode(cls, env_id: str) -> str: + return "rgbd" + + @classmethod + def get_control_mode(cls, env_id: str) -> str: + return None # use default one diff --git a/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/README.md b/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/README.md new file mode 100644 index 0000000000000000000000000000000000000000..90ecbcf2022ef613b071686b532b972b1b06a635 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/README.md @@ -0,0 +1,9 @@ +# Data Generation Scripts + +The code/scripts in this folder are used to generate all the demonstration datasets for the ManiSkill Benchmark as well as generate the standard state-based and vision-based demonstration datasets for imitation learning baselines. + +The tasks that have demonstrations are documented here with the "has demonstrations" tag: https://maniskill.readthedocs.io/en/latest/tasks/index.html + +- `replay_for_il_baselines.sh`: This script is used to generate the state-based and vision-based demonstration datasets for imitation learning baselines using the demonstration datasets uploaded to the ManiSkill HuggingFace dataset: https://huggingface.co/datasets/haosulab/ManiSkill_Demonstrations +- `rl.sh`: This script is used for using reinforcement learning to learn a policy from dense rewards to then rollout success demonstrations for different controller modes +- `motionplanning.sh`: This script is used for generating the motion planning demonstrations for various tasks that have predefined motion planning solutions \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/learning_from_demos.sh 
#!/bin/bash
# Generate motion-planning demonstrations for every (agent, robot, task) combination.

# --- Configurable variables ---
backend="cpu"
obs_mode="state_dict"
traj_name="trajectory"
num_sample=1
num_traj=100
cuda=0
nproc=1
rand_level=4

# Tasks to process; uncomment entries to enable them.
tasks=(
    # "PickForkFromRack-v1"
    # "PlaceSpoonOnRack-v1"
    # "PlaceMugOnRack-v1"
    # "PlaceMugOnCoffeeMachine-v1"
    # "PickMugFromCoffeeMachine-v1"
    # "PlaceBowlOnRack-v1"
    # "PlacePlateOnRack-v1"
    # "PlaceForkOnRack-v1"
    # "PlaceKnifeOnRack-v1"
    # "PlaceBowlOnRack-v2"
    # "PlacePlateOnRack-v2"
    # "PlaceForkOnRack-v2"
    # "PlaceKnifeOnRack-v2"
    # "PlaceBowlOnRack-v3"
    # "PlacePlateOnRack-v3"
    # "PlaceForkOnRack-v3"
    # "PlaceKnifeOnRack-v3"
    # "PlaceBowlOnRack-v4"
    # "PlacePlateOnRack-v4"
    # "PlaceForkOnRack-v4"
    # "PlaceKnifeOnRack-v4"
    # "GraspFork-v0"
    # "GraspBowl-v0"
    # "GraspPlate-v0"
    "GraspCup-v0"
)

# Agents with motion-planning solvers.
agents=(
    # "noahbiarm"
    "panda"
)

# Robot variants to run for each agent.
robot_uids=(
    # "noahbiarm_r"
    # "noahbiarm_rc"
    # "noahbiarm_rcw"
    "panda_wristcam"
)

for agent in "${agents[@]}"; do
    for robot in "${robot_uids[@]}"; do
        for env_id in "${tasks[@]}"; do
            echo "Processing tasks for agent: $agent (backend=$backend, obs_mode=$obs_mode, rand_level=$rand_level, traj_name=$traj_name)"
            echo "Robot: $robot Task: $env_id"

            # Generate a sample video for visual inspection.
            CUDA_VISIBLE_DEVICES="${cuda}" python -m mani_skill.examples.motionplanning."${agent}".run \
                --rand_level="$rand_level" \
                --env-id="$env_id" \
                --traj-name=sample.rl."$rand_level" \
                -n="$num_sample" \
                -b="$backend" \
                -o="$obs_mode" \
                --record-dir=data/"${robot}" \
                --robot_uids="$robot" \
                --num-procs=1 \
                --save-video \
                --only-count-success

            mv data/"${robot}"/"$env_id"/motionplanning/0.mp4 \
                data/"${robot}"/"$env_id"/motionplanning/sample.rl."$rand_level".mp4

            # # Generate training data (disabled; enable to produce $num_traj
            # # trajectories with $nproc workers).
            # CUDA_VISIBLE_DEVICES="${cuda}" python -m mani_skill.examples.motionplanning."${agent}".run \
            #     --rand_level="$rand_level" \
            #     --env-id "$env_id" \
            #     --traj-name="$traj_name".rl."$rand_level" \
            #     --only-count-success \
            #     -n "$num_traj" \
            #     -b="$backend" \
            #     -o="$obs_mode" \
            #     --record-dir=data/"${robot}" \
            #     --robot_uids="$robot" \
            #     --num-procs="$nproc"
        done
    done
done
PlacePlateOnRack-v1 PlaceMugOnRack-v1 PlaceBowlOnRack-v1 StackMugOnRack-v1 StackBowl-v1 PlaceForkOnRack-v1 "StackPlateOnRack-v1": solveStackPlateOnRack, +# --only-count-success +for env_id in PlaceBowlOnRack-v1; +do + CUDA_VISIBLE_DEVICES=0 python -m mani_skill.examples.motionplanning.agilex.run --env-id $env_id \ + --traj-name="trajectory" --save-video -n 1 \ + --shader="rt-fast" --vis --only-count-success # generate sample videos + mv demos/$env_id/motionplanning/0.mp4 demos/$env_id/motionplanning/sample.mp4 + CUDA_VISIBLE_DEVICES=0 python -m mani_skill.examples.motionplanning.agilex.run --env-id $env_id --traj-name="trajectory" -n 2000 --num-procs 10 --only-count-success +done diff --git a/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/parquet.py b/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/parquet.py new file mode 100644 index 0000000000000000000000000000000000000000..8d48649c4431901b76bd07ea11a4cafe2cda3ceb --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/parquet.py @@ -0,0 +1,27 @@ +import pandas as pd +import os +import matplotlib.pyplot as plt +from PIL import Image +import io +import numpy as np + +fp = "data/lerobot/noahbiarm/pd_joint_pos/PlacePlateOnRack-v1/data/chunk-000" +for filename in os.listdir(fp): + if filename.endswith('.parquet'): + print(filename) + df = pd.read_parquet(fp + "/" + filename) + + ## checks if all the actions and obs stats are the same !!!!! 
import copy
import json
import os
import shutil
from dataclasses import dataclass
from pathlib import Path
from typing import Optional

import gymnasium as gym
import h5py
import numpy as np
import tyro
from tqdm import tqdm

import mani_skill.envs
from mani_skill.trajectory import utils
from mani_skill.utils.visualization.misc import images_to_video


@dataclass
class Args:
    # Directory with one subfolder per RL run; each run must contain
    # final_ckpt.pt and test_videos/trajectory.{h5,json}.
    runs_path: str
    # Output root for the processed demo dataset.
    out_dir: str
    # When True (default), only print statistics and write nothing to disk.
    dry_run: Optional[bool] = True


def _collect_run_paths(runs_path: str) -> dict:
    """Return {env_name: {"checkpoint", "trajectory", "metadata"}} for every
    run folder that has both a final checkpoint and a recorded trajectory."""
    env_paths = {}
    for env_name in os.listdir(runs_path):
        env_dir = Path(runs_path) / env_name
        if not env_dir.is_dir():
            continue
        ckpt_path = env_dir / "final_ckpt.pt"
        traj_path = env_dir / "test_videos" / "trajectory.h5"
        traj_metadata_path = env_dir / "test_videos" / "trajectory.json"
        if not (ckpt_path.exists() and traj_path.exists()):
            print(
                f"Skipping {env_name} because checkpoint or trajectory file does not exist"
            )
            continue
        env_paths[env_name] = {
            "checkpoint": str(ckpt_path),
            "trajectory": str(traj_path),
            "metadata": str(traj_metadata_path),
        }
    return env_paths


def main():
    """Filter RL evaluation rollouts down to successful, success-truncated
    demonstrations, write them (plus metadata, a sample video, and the policy
    checkpoint) under out_dir, and warn about envs with high failure rates."""
    args = tyro.cli(Args)

    if args.dry_run:
        print("Dry run, skipping actual processing")

    env_paths = _collect_run_paths(args.runs_path)

    high_fail_rate_envs = []  # (env_name, fail_rate) pairs with >= 5% failures
    for env_name, env_path in env_paths.items():
        print(f"Processing {env_name}")
        metadata_path = env_path["metadata"]
        with open(metadata_path, "r") as f:
            metadata = json.load(f)
        env_id = metadata["env_info"]["env_id"]
        new_metadata = copy.deepcopy(metadata)
        new_metadata["episodes"] = []
        control_mode = new_metadata["env_info"]["env_kwargs"]["control_mode"]
        sim_backend = new_metadata["env_info"]["env_kwargs"]["sim_backend"]
        traj_filename = f"{env_id}/rl/trajectory.none.{control_mode}.{sim_backend}"
        out_trajectory_path = os.path.join(args.out_dir, f"{traj_filename}.h5")

        out_file = None
        if not args.dry_run:
            os.makedirs(os.path.dirname(out_trajectory_path), exist_ok=True)
            out_file = h5py.File(out_trajectory_path, "w")

        failed_count = 0
        truncated_count = 0
        avg_episode_length = 0
        original_episode_count = len(metadata["episodes"])
        first_success_indexes = []
        recorded_sample_video = False

        # FIX: the source HDF5 file was previously opened and never closed
        # (resource leak across envs); use a context manager.
        with h5py.File(env_path["trajectory"], "r") as file:
            for episode in tqdm(metadata["episodes"]):
                traj_id = f"traj_{episode['episode_id']}"
                traj = file[traj_id]
                success = np.array(traj["success"])
                if not success.any():
                    # Episode never succeeded; drop it entirely.
                    failed_count += 1
                    continue
                # Truncate the episode at its last success step.
                success_indexes = success.nonzero()[0]
                last_success_index = int(success_indexes[-1])
                first_success_index = int(success_indexes[0])
                first_success_indexes.append(first_success_index)
                if last_success_index != len(success) - 1:
                    truncated_count += 1
                avg_episode_length += last_success_index + 1

                def recursive_copy_and_slice(key, dst, src, add_last_frame=False):
                    """Copy `src` (group/dataset of the source traj) into `dst`,
                    sliced to the truncated episode length.

                    FIX: the original parameter names had source/target swapped
                    relative to their roles; `dst` is written to, `src` read.
                    `last_success_index` is captured from the enclosing loop.
                    """
                    if key == "obs" or key == "rewards":
                        return  # drop observations and rewards from the demo file
                    # env_states keeps one extra trailing frame — the state
                    # *after* the last successful action (bool adds 0/1 to the
                    # slice length). "obs" is handled by the early return above.
                    if not add_last_frame and key == "env_states":
                        add_last_frame = True
                    if isinstance(src, h5py.Dataset):
                        dst.create_dataset(
                            key,
                            data=src[: last_success_index + 1 + add_last_frame],
                        )
                    elif isinstance(src, h5py.Group):
                        dst.create_group(key, track_order=True)
                        for k in src.keys():
                            recursive_copy_and_slice(
                                k, dst[key], src[k], add_last_frame=add_last_frame
                            )

                if not args.dry_run:
                    recursive_copy_and_slice(traj_id, out_file, traj)
                new_episode = copy.deepcopy(episode)
                new_episode["success"] = True
                new_episode["elapsed_steps"] = last_success_index + 1
                new_metadata["episodes"].append(new_episode)

                if not args.dry_run and not recorded_sample_video:
                    # Render one sample video by replaying the stored env
                    # states on a CPU-backed env with a ray-tracing shader.
                    recorded_sample_video = True
                    env_kwargs = copy.deepcopy(new_metadata["env_info"]["env_kwargs"])
                    env_kwargs["num_envs"] = 1
                    env_kwargs["sim_backend"] = "physx_cpu"
                    env_kwargs["human_render_camera_configs"] = {
                        "shader_pack": "rt-med"
                    }
                    env = gym.make(env_id, **env_kwargs)
                    env.reset(
                        seed=episode["episode_seed"], **new_episode["reset_kwargs"]
                    )
                    imgs = []
                    env_states = utils.dict_to_list_of_dicts(
                        out_file[traj_id]["env_states"]
                    )
                    for step in range(new_episode["elapsed_steps"]):
                        env.set_state_dict(env_states[step])
                        imgs.append(env.render_rgb_array().cpu().numpy()[0])
                    env.close()
                    images_to_video(
                        imgs,
                        output_dir=os.path.join(args.out_dir, env_id, "rl"),
                        video_name=f"sample_{control_mode}",
                        fps=30,
                    )

        final_episode_count = len(new_metadata["episodes"])
        # FIX: guard against every episode failing — previously this raised
        # ZeroDivisionError, and np.mean([]) produced NaN with a warning.
        if final_episode_count > 0:
            avg_episode_length /= final_episode_count
        avg_steps_to_first_success = (
            float(np.mean(first_success_indexes))
            if first_success_indexes
            else float("nan")
        )
        print(
            f"{env_id}: Failed: {failed_count}/{original_episode_count}, Truncated: {truncated_count}/{original_episode_count}, Final Episodes: {final_episode_count}, Avg Episode Length: {avg_episode_length}, Avg Steps to First Success: {avg_steps_to_first_success}"
        )
        # FIX: also guard the fail-rate check against an empty metadata file.
        if original_episode_count and failed_count / original_episode_count >= 0.05:
            high_fail_rate_envs.append(
                (env_name, failed_count / original_episode_count)
            )

        new_metadata["source_type"] = "rl"
        new_metadata[
            "source_desc"
        ] = "Demonstrations generated by rolling out a PPO dense reward trained policy"
        if not args.dry_run:
            with open(
                os.path.join(args.out_dir, f"{traj_filename}.json"), "w"
            ) as f:
                json.dump(new_metadata, f, indent=2)
            print(f"Saved to {os.path.join(args.out_dir, f'{traj_filename}.json')}")
            out_file.close()

            # Copy the policy checkpoint next to the demos. NOTE(review): kept
            # inside the dry-run guard so a dry run stays fully side-effect-free.
            checkpoint_out_path = os.path.join(
                args.out_dir, f"{env_id}/rl/ppo_{control_mode}_ckpt.pt"
            )
            os.makedirs(os.path.dirname(checkpoint_out_path), exist_ok=True)
            shutil.copy(env_path["checkpoint"], checkpoint_out_path)

    for env_name, fail_rate in high_fail_rate_envs:
        print(
            f"Warning: {env_name} has {fail_rate*100:0.1f} >= 5% failed episodes. Need a better policy."
        )


if __name__ == "__main__":
    main()
+ +# We do not upload the replayed demonstrations here as they can be extremely large due to image data +# being saved. Uploaded demonstrations typically only keep environment state data which is much smaller. + + +### State-based demonstration replay ### + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path ~/.maniskill/demos/PushCube-v1/motionplanning/trajectory.h5 \ + --use-first-env-state -c pd_ee_delta_pos -o state \ + --save-traj --num-envs 10 -b physx_cpu + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path ~/.maniskill/demos/PickCube-v1/motionplanning/trajectory.h5 \ + --use-first-env-state -c pd_ee_delta_pos -o state \ + --save-traj --num-envs 10 -b physx_cpu + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path ~/.maniskill/demos/StackCube-v1/motionplanning/trajectory.h5 \ + --use-first-env-state -c pd_ee_delta_pos -o state \ + --save-traj --num-envs 10 -b physx_cpu + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path ~/.maniskill/demos/PegInsertionSide-v1/motionplanning/trajectory.h5 \ + --use-first-env-state -c pd_ee_delta_pose -o state \ + --save-traj --num-envs 10 -b physx_cpu + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path ~/.maniskill/demos/PushT-v1/rl/trajectory.none.pd_ee_delta_pose.physx_cuda.h5 \ + --use-env-states -c pd_ee_delta_pose -o state \ + --save-traj --num-envs 1024 -b physx_cuda + +### RGB-based demonstration replay ### + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path ~/.maniskill/demos/PushCube-v1/motionplanning/trajectory.h5 \ + --use-first-env-state -c pd_ee_delta_pos -o rgb \ + --save-traj --num-envs 10 -b physx_cpu + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path ~/.maniskill/demos/PickCube-v1/motionplanning/trajectory.h5 \ + --use-first-env-state -c pd_ee_delta_pos -o rgb \ + --save-traj --num-envs 10 -b physx_cpu + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path 
~/.maniskill/demos/StackCube-v1/motionplanning/trajectory.h5 \ + --use-first-env-state -c pd_ee_delta_pos -o rgb \ + --save-traj --num-envs 10 -b physx_cpu + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path ~/.maniskill/demos/PegInsertionSide-v1/motionplanning/trajectory.h5 \ + --use-first-env-state -c pd_ee_delta_pose -o rgb \ + --save-traj --num-envs 10 -b physx_cpu + + +python -m mani_skill.trajectory.replay_trajectory \ + --traj-path ~/.maniskill/demos/PushT-v1/rl/trajectory.none.pd_ee_delta_pos.physx_cuda.h5 \ + --use-env-states -c pd_ee_delta_pos -o rgb \ + --save-traj --num-envs 256 -b physx_cuda \ No newline at end of file diff --git a/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/replay_trajectory.sh b/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/replay_trajectory.sh new file mode 100644 index 0000000000000000000000000000000000000000..546aa8a103b769f69b5b387a22c25af415db4e11 --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/replay_trajectory.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# List of tasks +tasks=( + # "PlaceForkOnRack-v1" + # "PlaceSpoonOnRack-v1" + # "PlaceKnifeOnRack-v1" + # "PlaceMugOnRack-v1" + "PlaceMugOnCoffeeMachine-v1" + # "PickMugFromCoffeeMachine-v1" +) + +# List of agents +agents=( + # "panda" + "noahbiarm" +) + +cntrl_modes=( + "pd_ee_delta_pose" +) + +# Loop over each agent +for agent in "${agents[@]}" +do + # loop over controll modes for current agent + for cntrl_mode in "${cntrl_modes[@]}" + do + # Loop over each task for the current agent + for env_id in "${tasks[@]}" + do + echo "Processing task: $env_id - agent: $agent - mode: $cntrl_mode" + + python -m mani_skill.trajectory.replay_trajectory \ + --traj-path=data/"${agent}"/"${env_id}"/motionplanning/trajectory.h5 \ + -b="cpu" \ + -o="rgbd" \ + -c="${cntrl_mode}" \ + --save-traj \ + --max_retry=1 \ + --use_first_env_state \ + --num_envs=1 + done + done +done \ No newline at end of 
# State-based RL/PPO used to learn a policy to then rollout success demonstrations for different controller modes
# Weights for the trained models and pre-generated demos are on our hugging face dataset: https://huggingface.co/datasets/haosulab/ManiSkill_Demonstrations

# to use these commands you need to install torchrl and tensordict. If cudagraphs does not work you can remove that flag
# then go to the examples/baselines/ppo folder and run the commands there
# Then run the following commands to preprocess the demos
# python scripts/data_generation/process_rl_trajectories.py --runs_path examples/baselines/ppo/runs/data_generation/ --out-dir ~/.maniskill/demos/

# Pattern per task below: train PPO once per controller mode, then re-run in
# --evaluate mode from the saved final checkpoint with --save-trajectory to
# record the demonstration rollouts.

### PushCube-v1 ###
for control_mode in "pd_joint_delta_pos" "pd_ee_delta_pos" "pd_ee_delta_pose"; do
    python ppo_fast.py --env_id="PushCube-v1" \
        --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=5_000_000 --eval_freq=100 \
        --save-model --cudagraphs --exp-name="data_generation/PushCube-v1-ppo-${control_mode}" --control-mode ${control_mode}

    python ppo_fast.py --env_id="PushCube-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/PushCube-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=50 --no-capture-video --save-trajectory
done


### PickCube-v1 ###
for control_mode in "pd_joint_delta_pos" "pd_ee_delta_pos" "pd_ee_delta_pose"; do
    python ppo_fast.py --env_id="PickCube-v1" \
        --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=5_000_000 --eval_freq=100 \
        --save-model --cudagraphs --exp-name="data_generation/PickCube-v1-ppo-${control_mode}" --control-mode ${control_mode}

    python ppo_fast.py --env_id="PickCube-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/PickCube-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=50 --no-capture-video --save-trajectory
done

### StackCube-v1 ###
# Harder task: longer rollouts (num-steps=16) and 10x the training budget.
for control_mode in "pd_joint_delta_pos" "pd_ee_delta_pos" "pd_ee_delta_pose"; do
    python ppo_fast.py --env_id="StackCube-v1" \
        --num_envs=4096 --num-steps=16 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=50_000_000 \
        --save-model --cudagraphs --exp-name="data_generation/StackCube-v1-ppo-${control_mode}" --control-mode ${control_mode}

    python ppo_fast.py --env_id="StackCube-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/StackCube-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=50 --no-capture-video --save-trajectory
done

### PushT-v1 ###
for control_mode in "pd_joint_delta_pos" "pd_ee_delta_pos" "pd_ee_delta_pose"; do
    python ppo_fast.py --env_id="PushT-v1" \
        --num_envs=4096 --num-steps=16 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=25_000_000 --num-eval-steps=100 --gamma=0.99 \
        --save-model --cudagraphs --exp-name="data_generation/PushT-v1-ppo-${control_mode}" --control-mode ${control_mode}

    python ppo_fast.py --env_id="PushT-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/PushT-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=100 --no-capture-video --save-trajectory
done

### RollBall-v1 ###
for control_mode in "pd_joint_delta_pos" "pd_ee_delta_pos" "pd_ee_delta_pose"; do
    python ppo_fast.py --env_id="RollBall-v1" \
        --num_envs=4096 --num-steps=16 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=20_000_000 --num-eval-steps=80 --gamma=0.95 \
        --save-model --cudagraphs --exp-name="data_generation/RollBall-v1-ppo-${control_mode}" --control-mode ${control_mode}

    python ppo_fast.py --env_id="RollBall-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/RollBall-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=80 --no-capture-video --save-trajectory
done

### PokeCube-v1 ###
for control_mode in "pd_joint_delta_pos" "pd_ee_delta_pos" "pd_ee_delta_pose"; do
    python ppo_fast.py --env_id="PokeCube-v1" \
        --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=20_000_000 --eval_freq=100 \
        --save-model --cudagraphs --exp-name="data_generation/PokeCube-v1-ppo-${control_mode}" --control-mode ${control_mode}

    python ppo_fast.py --env_id="PokeCube-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/PokeCube-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=50 --no-capture-video --save-trajectory
done

### PullCube-v1 ###
for control_mode in "pd_joint_delta_pos" "pd_ee_delta_pos" "pd_ee_delta_pose"; do
    python ppo_fast.py --env_id="PullCube-v1" \
        --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=5_000_000 --eval_freq=100 \
        --save-model --cudagraphs --exp-name="data_generation/PullCube-v1-ppo-${control_mode}" --control-mode ${control_mode}
    python ppo_fast.py --env_id="PullCube-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/PullCube-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=50 --no-capture-video --save-trajectory
done

### LiftPegUpright-v1 ###
# Only two controller modes here (no pd_ee_delta_pos).
for control_mode in "pd_joint_delta_pos" "pd_ee_delta_pose"; do
    python ppo_fast.py --env_id="LiftPegUpright-v1" \
        --num_envs=4096 --num-steps=4 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=8_000_000 --eval_freq=100 \
        --save-model --cudagraphs --exp-name="data_generation/LiftPegUpright-v1-ppo-${control_mode}" --control-mode ${control_mode}

    python ppo_fast.py --env_id="LiftPegUpright-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/LiftPegUpright-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=50 --no-capture-video --save-trajectory
done

### AnymalC-Reach-v1 ###
# Locomotion task: single (joint-space) controller, longer episodes.
python ppo_fast.py --env_id="AnymalC-Reach-v1" \
    --num_envs=4096 --num-steps=16 --update_epochs=8 --num_minibatches=32 \
    --total_timesteps=10_000_000 --num-eval-steps=200 \
    --gamma=0.99 --gae_lambda=0.95 \
    --save-model --cudagraphs --exp-name="data_generation/AnymalC-Reach-v1-ppo-pd_joint_delta_pos"

python ppo_fast.py --env_id="AnymalC-Reach-v1" --evaluate \
    --checkpoint=runs/data_generation/AnymalC-Reach-v1-ppo-pd_joint_delta_pos/final_ckpt.pt \
    --num_eval_envs=1024 --num-eval-steps=200 --no-capture-video --save-trajectory

### AnymalC-Spin-v1 ###
python ppo_fast.py --env_id="AnymalC-Spin-v1" \
    --num_envs=4096 --num-steps=16 --update_epochs=8 --num_minibatches=32 \
    --total_timesteps=10_000_000 --num-eval-steps=200 \
    --gamma=0.99 --gae_lambda=0.95 \
    --save-model --cudagraphs --exp-name="data_generation/AnymalC-Spin-v1-ppo-pd_joint_delta_pos"

# task has no success so no demos for now

### PegInsertionSide-v1 ###
# Train only — no evaluation/trajectory rollout step here.
for control_mode in "pd_ee_delta_pose"; do
    python ppo_fast.py --env_id="PegInsertionSide-v1" \
        --num_envs=1024 --num-steps=100 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=100_000_000 --num-eval-steps=100 --gamma=0.97 --gae_lambda=0.95 \
        --save-model --cudagraphs --exp-name="data_generation/PegInsertionSide-v1-ppo-${control_mode}" --control-mode ${control_mode}
done

### TwoRobotPickCube-v1 ###
for control_mode in "pd_joint_delta_pos"; do
    python ppo_fast.py --env_id="TwoRobotPickCube-v1" \
        --num_envs=1024 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=35_000_000 --num-steps=100 --num-eval-steps=100 \
        --save-model --cudagraphs --exp-name="data_generation/TwoRobotPickCube-v1-ppo-${control_mode}" --control-mode ${control_mode}

    python ppo_fast.py --env_id="TwoRobotPickCube-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/TwoRobotPickCube-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=100 --no-capture-video --save-trajectory
done

### TwoRobotStackCube-v1 ###
for control_mode in "pd_joint_delta_pos"; do
    python ppo_fast.py --env_id="TwoRobotStackCube-v1" \
        --num_envs=1024 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=50_000_000 --num-steps=100 --num-eval-steps=100 \
        --save-model --cudagraphs --exp-name="data_generation/TwoRobotStackCube-v1-ppo-${control_mode}" --control-mode ${control_mode}

    python ppo_fast.py --env_id="TwoRobotStackCube-v1" --evaluate --control-mode ${control_mode} \
        --checkpoint=runs/data_generation/TwoRobotStackCube-v1-ppo-${control_mode}/final_ckpt.pt \
        --num_eval_envs=1024 --num-eval-steps=100 --no-capture-video --save-trajectory
done

### UnitreeG1PlaceAppleInBowl-v1 ###
# num-steps=32 can be optimized down probably
for control_mode in "pd_joint_delta_pos"; do
    python ppo_fast.py --env_id="UnitreeG1PlaceAppleInBowl-v1" \
        --num_envs=1024 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=50_000_000 --num-steps=32 --num-eval-steps=100 \
        --save-model --cudagraphs --exp-name="data_generation/UnitreeG1PlaceAppleInBowl-v1-ppo-${control_mode}" --control-mode ${control_mode}
done

### UnitreeG1TransportBox-v1 ###
for control_mode in "pd_joint_delta_pos"; do
    python ppo_fast.py --env_id="UnitreeG1TransportBox-v1" \
        --num_envs=1024 --update_epochs=8 --num_minibatches=32 \
        --total_timesteps=50_000_000 --num-steps=32 --num-eval-steps=100 \
        --save-model --cudagraphs --exp-name="data_generation/UnitreeG1TransportBox-v1-ppo-${control_mode}" --control-mode ${control_mode}
done
a/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/to_lerobot.py b/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/to_lerobot.py new file mode 100644 index 0000000000000000000000000000000000000000..9872e19951bcab0de073beea473a005103fcc72c --- /dev/null +++ b/project/ManiSkill3/src/maniskill3_environment/scripts/data_generation/to_lerobot.py @@ -0,0 +1,456 @@ +""" +Convert ManiSkill HDF5 trajectory data into a LeRobot‑compatible dataset. + +Usage: + python to_lerobot.py \ + --hdf5-file PATH1_TO_HDF5 PATH2_TO_HDF5 ... \ + --env-id ENVIRONMENT_ID \ + --robot-uid ROBOT_UID \ + --control-mode CONTROL_MODE +""" + +import dataclasses +import shutil +import json +import h5py +import torch +import tqdm +import tyro +import copy +import numpy as np +from pathlib import Path +from PIL import Image +import re + +from lerobot.common.constants import HF_LEROBOT_HOME +from lerobot.common.datasets.lerobot_dataset import LeRobotDataset +from huggingface_hub import HfApi + + +@dataclasses.dataclass(frozen=True) +class DatasetConfig: + use_videos: bool = True + tolerance_s: float = 0.0001 + image_writer_processes: int = 10 + image_writer_threads: int = 5 + video_backend: str | None = None + + +DEFAULT_DATASET_CONFIG = DatasetConfig() + +def get_motors(robot_uid:str): + if robot_uid == "noahbiarm": + motors = [ + "Right_Joint_0", + "Right_Joint_1", + "Right_Joint_2", + "Right_Joint_3", + "Right_Joint_4", + "Right_Joint_5", + "Right_Joint_6", + "Right_Joint_Gripper_Up", + "Right_Joint_Gripper_Down", + ] + + elif robot_uid == "noahbiarm_rc": + motors = [ + "chest_rot", + "Right_Joint_0", + "Right_Joint_1", + "Right_Joint_2", + "Right_Joint_3", + "Right_Joint_4", + "Right_Joint_5", + "Right_Joint_6", + "Right_Joint_Gripper_Up", + "Right_Joint_Gripper_Down", + ] + + return motors + +def get_actions(robot_uid:str, control_mode:str): + if robot_uid == "noahbiarm": + if control_mode == "pd_joint_pos": + acts = [ + "Right_Joint_0", + "Right_Joint_1", 
+ "Right_Joint_2", + "Right_Joint_3", + "Right_Joint_4", + "Right_Joint_5", + "Right_Joint_6", + "Right_Joint_Gripper", + ] + elif control_mode == "pd_ee_delta_pose": + acts = [ + "Dx", + "Dy", + "Dz", + "Droll", + "Dpitch", + "Dyaw", + "Right_Joint_Gripper", + ] + elif robot_uid == "noahbiarm_rc": + if control_mode == "pd_joint_pos": + acts = [ + "chest_rot", + "Right_Joint_0", + "Right_Joint_1", + "Right_Joint_2", + "Right_Joint_3", + "Right_Joint_4", + "Right_Joint_5", + "Right_Joint_6", + "Right_Joint_Gripper", + ] + + return acts + +def get_cameras(robot_uid: str): + if robot_uid in ["noahbiarm", "noahbiarm_rc"]: + cameras = [ + "base_camera", + "hand_camera", + "head_camera" + ] + + return cameras + +def get_dim_qpos(robot_uid:str): + return len(get_motors(robot_uid)) + +def get_dim_act(robot_uid:str, control_mode:str): + return len(get_actions(robot_uid, control_mode)) + +def create_empty_dataset( + env_id: str, + robot_uid: str, + control_mode: str, + mode: str, + dataset_config: DatasetConfig, +) -> LeRobotDataset: + + + D_QPOS = get_dim_qpos(robot_uid=robot_uid) + D_ACT = get_dim_act(robot_uid=robot_uid, control_mode=control_mode) + motors = get_motors(robot_uid=robot_uid) + acts = get_actions(robot_uid=robot_uid, control_mode=control_mode) + cameras = get_cameras(robot_uid=robot_uid) + + features = { + "observation.state": { + "dtype": "float32", + "shape": (D_QPOS,), + "names": [motors], + }, + "action": { + "dtype": "float32", + "shape": (D_ACT,), + "names": [acts], + }, + "observation.tcp.pose": { + "dtype": "float32", + "shape": (7,), + "names": [["x", "y", "z", "qx", "qy", "qz", "qw"]], + }, + "observation.obj.pose": { + "dtype": "float32", + "shape": (7,), + "names": [["x", "y", "z", "qx", "qy", "qz", "qw"]], + }, + "observation.obj.vertices": { + "dtype": "float32", + "shape": (8,3), + "names": ["vertices xyz"], + }, + "observation.obj.rotation": { + "dtype": "float32", + "shape": (3,3), + "names": ["rotation matrix"], + }, + 
"observation.obj.translation": { + "dtype": "float32", + "shape": (3,), + "names": ["translation"], + }, + "observation.obj.extents": { + "dtype": "float32", + "shape": (3,), + "names": ["extents"], + }, + "observation.rack.pose": { + "dtype": "float32", + "shape": (7,), + "names": [["x", "y", "z", "qx", "qy", "qz", "qw"]], + }, + "observation.rack.vertices": { + "dtype": "float32", + "shape": (8,3), + "names": ["vertices xyz"], + }, + "observation.rack.rotation": { + "dtype": "float32", + "shape": (3,3), + "names": ["rotation matrix"], + }, + "observation.rack.translation": { + "dtype": "float32", + "shape": (3,), + "names": ["translation"], + }, + "observation.rack.extents": { + "dtype": "float32", + "shape": (3,), + "names": ["extents"], + }, + } + + for cam in cameras: + features[f"observation.images.{cam}"] = { + "dtype": mode, + "shape": (3, 256, 256), # Matches 128x128 RGB images (CHW format) + "names": ["channels", "height", "width"], + } + features[f"observation.depth.{cam}"] = { + "dtype": mode, + "shape": (1, 256, 256), # Matches 128x128 RGB images (CHW format) + "names": ["channels", "height", "width"], + } + features[f"observation.segmentation.{cam}"] = { + "dtype": mode, + "shape": (1, 256, 256), # Matches 128x128 RGB images (CHW format) + "names": ["channels", "height", "width"], + } + features[f"sensors.{cam}.extrinsic_cv"] = { + "dtype": "float32", + "shape": (3, 4), + "names": ["extrinsic_cv"], + } + features[f"sensors.{cam}.intrinsic_cv"] = { + "dtype": "float32", + "shape": (3, 3), + "names": ["intrinsic_cv"], + } + features[f"sensors.{cam}.cam2world_gl"] = { + "dtype": "float32", + "shape": (4, 4), + "names": ["cam2world_gl"], + } + + + # check if data path exists + DATA_PATH = Path(f"data/lerobot/{robot_uid}/{control_mode}/{env_id}") + if (DATA_PATH).exists(): + shutil.rmtree(DATA_PATH) + + return LeRobotDataset.create( + repo_id=env_id, + root=DATA_PATH, + fps=30, # Assuming 50Hz; + robot_type=robot_uid, + features=features, + 
use_videos=dataset_config.use_videos, + tolerance_s=dataset_config.tolerance_s, + image_writer_processes=dataset_config.image_writer_processes, + image_writer_threads=dataset_config.image_writer_threads, + video_backend=dataset_config.video_backend, + ) + +def load_maniskill_data_episode(f: h5py.File, traj_key: str, robot_uid: str) -> tuple[dict[str, np.ndarray], np.ndarray, np.ndarray]: + traj = f[traj_key] + obs = traj["obs"] + cameras = get_cameras(robot_uid=robot_uid) + # remove last env img & qpos since it has no action paired with it + imgs_per_cam = { + cam: obs["sensor_data"][cam]["rgb"][:-1] + for cam in cameras + } + depth_per_cam = { + cam: obs["sensor_data"][cam]["depth"][:-1] + for cam in cameras + } + segms_per_cam = { + cam: obs["sensor_data"][cam]["segmentation"][:-1] + for cam in cameras + } + extrinsic_per_cam = { + cam: obs["sensor_param"][cam]["extrinsic_cv"][:-1] + for cam in cameras + } + intrinsic_per_cam = { + cam: obs["sensor_param"][cam]["intrinsic_cv"][:-1] + for cam in cameras + } + cam2world_per_cam = { + cam: obs["sensor_param"][cam]["cam2world_gl"][:-1] + for cam in cameras + } + state = obs["agent"]["qpos"][:-1] + actions = traj["actions"] + tcp_pose = obs["extra"]["tcp_pose"][:-1] + obj_pose = obs["extra"]["obj_pose"][:-1] + obj_vertices = obs["extra"]["obj_bb"]["vertices_world"][:-1] + obj_rot = obs["extra"]["obj_bb"]["rotation"][:-1] + obj_trans = obs["extra"]["obj_bb"]["translation"][:-1] + obj_extents = obs["extra"]["obj_bb"]["extents"][:-1] + rack_pose = obs["extra"]["rack_pose"][:-1] + rack_vertices = obs["extra"]["rack_bb"]["vertices_world"][:-1] + rack_rot = obs["extra"]["rack_bb"]["rotation"][:-1] + rack_trans = obs["extra"]["rack_bb"]["translation"][:-1] + rack_extents = obs["extra"]["rack_bb"]["extents"][:-1] + + return imgs_per_cam, actions, state, \ + depth_per_cam, segms_per_cam, \ + tcp_pose, obj_pose, rack_pose, \ + obj_vertices, obj_rot, obj_trans, obj_extents, \ + rack_vertices, rack_rot, rack_trans, rack_extents, 
\ + extrinsic_per_cam, intrinsic_per_cam, cam2world_per_cam + +def get_task_from_hdf5(hdf5_file_name: Path) -> str: + # get env name from hdf5 file + name = hdf5_file_name.parts[-3] + # 1. remove the version suffix like "-v1", "-v2", etc. + base = re.sub(r'-v\d+$', '', name) + # 2. insert a space before each capital letter (unless it's the first char) + with_spaces = re.sub(r'(? LeRobotDataset: + + with h5py.File(hdf5_file, "r") as f: + # Load JSON metadata to get episode info + json_path = hdf5_file.with_suffix(".json") + if not json_path.exists(): + raise FileNotFoundError(f"JSON metadata file not found at {json_path}") + with open(json_path, "r") as json_file: + json_data = json.load(json_file) + episodes = json_data["episodes"] + + total_frames = 0 + for ep_idx, ep in enumerate(episodes): + cur_episode += 1 + if cur_episode < starting_episode: continue + traj_key = f"traj_{ep['episode_id']}" + if traj_key not in f: + print(f"Warning: {traj_key} not found in HDF5 file, skipping") + continue + + imgs_per_cam, actions, state, depth_per_cam, segms_per_cam,\ + tcp_pose, obj_pose, rack_pose, \ + obj_vertices, obj_rot, obj_trans, obj_extents, \ + rack_vertices, rack_rot, rack_trans, rack_extents, \ + extrinsic_per_cam, intrinsic_per_cam, cam2world_per_cam = load_maniskill_data_episode(f, traj_key, robot_uid) + num_frames = actions.shape[0] + total_frames += num_frames + + print(f"Processing Episode {cur_episode} ({hdf5_file.parts[-3]} - {traj_key}) with {num_frames} frames") + for i in tqdm.tqdm(range(num_frames), desc=f"Episode {ep_idx}"): + frame = { + "observation.state": torch.from_numpy(state[i]), + "action": torch.from_numpy(actions[i]), + "task": get_task_from_hdf5(hdf5_file), + "observation.tcp.pose": torch.from_numpy(tcp_pose[i]), + "observation.obj.pose": torch.from_numpy(obj_pose[i]), + "observation.rack.pose": torch.from_numpy(rack_pose[i]), + "observation.obj.vertices": torch.from_numpy(obj_vertices[i]), + "observation.obj.rotation": 
def populate_dataset(
    dataset: LeRobotDataset,
    hdf5_file: Path,
    env_id: str,
    robot_uid: str,
    starting_episode: int,
    cur_episode: int,
) -> tuple[LeRobotDataset, int]:
    """Append every episode of one ManiSkill HDF5 file to `dataset`.

    NOTE(review): the original signature was garbled in the source; it is
    reconstructed from the call site in `port_maniskill` — confirm parameter
    order against the upstream file.

    Args:
        dataset: Target LeRobot dataset (mutated in place).
        hdf5_file: Trajectory file; a sibling ``.json`` metadata file must exist.
        env_id: Environment id (kept for interface parity; unused here).
        robot_uid: Robot id, selects the camera set.
        starting_episode: Episodes with a global index below this are skipped
            (already present in a previously saved dataset).
        cur_episode: Global episode counter carried across files.

    Returns:
        (dataset, updated cur_episode)

    Raises:
        FileNotFoundError: If the JSON metadata sidecar is missing.
    """
    with h5py.File(hdf5_file, "r") as f:
        # Load JSON metadata to get episode info.
        json_path = hdf5_file.with_suffix(".json")
        if not json_path.exists():
            raise FileNotFoundError(f"JSON metadata file not found at {json_path}")
        with open(json_path, "r") as json_file:
            json_data = json.load(json_file)
        episodes = json_data["episodes"]

        total_frames = 0
        for ep_idx, ep in enumerate(episodes):
            cur_episode += 1
            # Skip episodes already saved in a previous run (resume support).
            if cur_episode < starting_episode:
                continue
            traj_key = f"traj_{ep['episode_id']}"
            if traj_key not in f:
                print(f"Warning: {traj_key} not found in HDF5 file, skipping")
                continue

            (imgs_per_cam, actions, state, depth_per_cam, segms_per_cam,
             tcp_pose, obj_pose, rack_pose,
             obj_vertices, obj_rot, obj_trans, obj_extents,
             rack_vertices, rack_rot, rack_trans, rack_extents,
             extrinsic_per_cam, intrinsic_per_cam, cam2world_per_cam) = \
                load_maniskill_data_episode(f, traj_key, robot_uid)
            num_frames = actions.shape[0]
            total_frames += num_frames

            # Loop-invariant: the task string depends only on the file path.
            task = get_task_from_hdf5(hdf5_file)

            print(f"Processing Episode {cur_episode} ({hdf5_file.parts[-3]} - {traj_key}) with {num_frames} frames")
            for i in tqdm.tqdm(range(num_frames), desc=f"Episode {ep_idx}"):
                frame = {
                    "observation.state": torch.from_numpy(state[i]),
                    "action": torch.from_numpy(actions[i]),
                    "task": task,
                    "observation.tcp.pose": torch.from_numpy(tcp_pose[i]),
                    "observation.obj.pose": torch.from_numpy(obj_pose[i]),
                    "observation.rack.pose": torch.from_numpy(rack_pose[i]),
                    "observation.obj.vertices": torch.from_numpy(obj_vertices[i]),
                    "observation.obj.rotation": torch.from_numpy(obj_rot[i]),
                    "observation.obj.translation": torch.from_numpy(obj_trans[i]),
                    "observation.obj.extents": torch.from_numpy(obj_extents[i]),
                    "observation.rack.vertices": torch.from_numpy(rack_vertices[i]),
                    "observation.rack.rotation": torch.from_numpy(rack_rot[i]),
                    "observation.rack.translation": torch.from_numpy(rack_trans[i]),
                    "observation.rack.extents": torch.from_numpy(rack_extents[i]),
                }
                for camera, img_array in imgs_per_cam.items():
                    frame[f"observation.images.{camera}"] = Image.fromarray(
                        np.array(img_array[i], dtype=np.uint8))
                # NOTE(review): depth is cast to uint8 here, which truncates
                # any depth value above 255 — confirm the intended encoding.
                for camera, depth_array in depth_per_cam.items():
                    frame[f"observation.depth.{camera}"] = Image.fromarray(
                        np.squeeze(np.array(depth_array[i], dtype=np.uint8)))
                for camera, segms_array in segms_per_cam.items():
                    frame[f"observation.segmentation.{camera}"] = Image.fromarray(
                        np.squeeze(np.array(segms_array[i], dtype=np.uint8)))
                for camera, extrinsic in extrinsic_per_cam.items():
                    frame[f"sensors.{camera}.extrinsic_cv"] = torch.from_numpy(extrinsic[i])
                for camera, intrinsic in intrinsic_per_cam.items():
                    frame[f"sensors.{camera}.intrinsic_cv"] = torch.from_numpy(intrinsic[i])
                for camera, cam2world in cam2world_per_cam.items():
                    frame[f"sensors.{camera}.cam2world_gl"] = torch.from_numpy(cam2world[i])

                dataset.add_frame(frame)
            dataset.save_episode()

    return dataset, cur_episode
def port_maniskill(
    hdf5_files: list[Path],
    env_id: str,
    robot_uid: str,
    control_mode: str,
    mode: str = "image",
    dataset_config: DatasetConfig = DEFAULT_DATASET_CONFIG,
    overwrite: bool = False,
    encode_videos: bool = False,
    push_to_hub: bool = False,
):
    """Convert ManiSkill HDF5 trajectory files into a LeRobot dataset.

    Resumes an existing dataset at ``data/lerobot/<robot>/<mode>/<env>`` unless
    `overwrite` is set, in which case a fresh dataset is created.

    Args:
        hdf5_files: Trajectory files to ingest; all must exist.
        env_id: Environment id used in the dataset path.
        robot_uid: Robot id used in the dataset path and camera selection.
        control_mode: Control mode used in the dataset path.
        mode: Feature storage mode passed to `create_empty_dataset`.
        dataset_config: Writer/backend configuration.
        overwrite: Recreate the dataset instead of appending.
        encode_videos: Kept for interface parity; unused here.
        push_to_hub: Also upload the dataset to the Hugging Face Hub.

    Raises:
        ValueError: If any input file does not exist.
    """
    # Validate inputs up front so we fail before touching the dataset.
    for f in hdf5_files:
        if not f.exists():
            raise ValueError(f"HDF5 file {f} does not exist")

    # Create an empty dataset, or load from a previous state to resume.
    data_path = Path(f"data/lerobot/{robot_uid}/{control_mode}/{env_id}")
    if data_path.exists() and not overwrite:
        dataset = LeRobotDataset(repo_id=data_path.name, root=data_path)
    else:
        dataset = create_empty_dataset(
            env_id=env_id,
            robot_uid=robot_uid,
            control_mode=control_mode,
            mode=mode,
            dataset_config=dataset_config,
        )

    # Loop over all h5 files and append their episodes; `cur_episode` is the
    # global episode counter, `starting_episode` lets us skip already-saved ones.
    cur_episode = -1
    starting_episode = dataset.meta.total_episodes
    for hdf5_file in hdf5_files:
        dataset, cur_episode = populate_dataset(
            dataset=dataset,
            hdf5_file=hdf5_file,
            env_id=env_id,
            robot_uid=robot_uid,
            starting_episode=starting_episode,
            cur_episode=cur_episode,
        )

    print(f"Dataset saved to {data_path}")

    # Optionally push to the Hugging Face Hub.
    if push_to_hub:
        repo_id = f"savoji/{data_path.name}"  # single source for both calls
        HfApi().create_repo(
            repo_id=repo_id,
            repo_type="dataset",
            private=False,   # or True if you want a private dataset
            exist_ok=True,   # won't error if it already exists
        )
        dataset.push_to_hub(
            repo_id=repo_id,
            private=False,   # or True, as desired
            commit_message="initial upload",
        )
#!/usr/bin/env bash
# Convert motion-planning trajectories to LeRobot datasets for every
# (robot, task, control-mode) combination.
# Fix: the script had no shebang and no failure handling; abort on the
# first failed conversion instead of silently continuing.
set -euo pipefail

# Define control modes
control_modes=(
    "pd_joint_pos"
)
rand_level=4

# List of tasks to be performed
tasks=(
    # "PickForkFromRack-v1"
    # "PlaceSpoonOnRack-v1"
    # "PlaceMugOnRack-v1"
    # "PlaceMugOnCoffeeMachine-v1"
    # "PickMugFromCoffeeMachine-v1"
    # "PlaceBowlOnRack-v1"
    # "PlacePlateOnRack-v1"
    # "PlaceForkOnRack-v1"
    # "PlaceKnifeOnRack-v1"
    # "PlaceBowlOnRack-v2"
    # "PlacePlateOnRack-v2"
    # "PlaceForkOnRack-v2"
    # "PlaceKnifeOnRack-v2"
    "PlaceBowlOnRack-v3"
)

# List of robot versions
robot_uids=(
    # "noahbiarm_r"
    "noahbiarm_rc"
    # "noahbiarm_rcw"
)

# Run the conversion for each combination of robot, task, and control mode.
for robot in "${robot_uids[@]}"; do
    for env_id in "${tasks[@]}"; do
        for control_mode in "${control_modes[@]}"; do
            python scripts/data_generation/to_lerobot.py \
                --hdf5-files=data/"${robot}"/"${env_id}"/motionplanning/trajectory.rl."${rand_level}".h5 \
                --env-id="${env_id}"-RL"${rand_level}" \
                --robot-uid="${robot}" \
                --control-mode="${control_mode}" \
                --overwrite
        done
    done
done
/home/kyber/charles/project/ManiSkill3/src/maniskill2_benchmark/msx-envs/src/msx_envs/assets/object/box_01/box_01.gltf + diff --git a/project/grasp_box/perception_function.py b/project/grasp_box/perception_function.py new file mode 100644 index 0000000000000000000000000000000000000000..d2da3a8ca8c634d617df37f4e48307f2bc2f08ff --- /dev/null +++ b/project/grasp_box/perception_function.py @@ -0,0 +1,278 @@ +import sys +sys.path.append("/home/kyber/charles/project/grasp_box") +from submodules.SAM6D.pose_estimator import SAM6DPoseEstimator +import numpy as np +import argparse +import json +import yaml +import cv2 +import matplotlib.pyplot as plt +import copy +from tqdm import tqdm +import glob +import pickle +import open3d as o3d +import time +import trimesh +import os +from pathlib import Path +from utilis import render_cad_mask, find_matched_points,count_lines_passing_points, get_connected_vertices, intersection_in_xyz_axis + + + + +class Perception: + def __init__(self,intrinsic, extrinsic): + self.intrinsic_matrix = intrinsic + self.pose_estimator = SAM6DPoseEstimator( + config=None, + self.intrinsic_matrix, + "/home/kyber/charles/project/grasp_box/submodules/SAM6D/config/base.yaml", + True + ) + self.extrinsic = extrinsic + + def binary_search_scale(self, rgb,depth, mask, cad_name,debug=False,scale_min=[0.1, 0.15,0.15], scale_max=[0.3,0.18,0.5], threshold=15): + + h, w, _ = rgb.shape + + [low_x, low_y, low_z]=scale_min + [high_x, high_y, high_z]=scale_max + + if set(np.unique(mask) ).issubset({0, 255}): + mask = (mask // 255).astype(np.uint8) + + while low_x<=high_x and low_y <= high_y and low_z <= high_z: + + mid_x = (low_x+high_x)/2 + mid_y = (low_y+high_y)/2 + mid_z = (low_z+high_z)/2 + self.pose_estimator.K = self.intrinsic_matrix + pose_scores, pred_rot, pred_trans,color_vis,_ = self.pose_estimator.inference(rgb.copy(), mask.copy(), depth.copy(), cad_name, scale= [mid_x, mid_y,mid_z]) + + pose = np.eye(4) + pose[:3,3] = pred_trans + pose[:3,:3] = pred_rot + + 
mesh_c = self.pose_estimator.cad_cache['tmp']['mesh'] + + mask_r= render_cad_mask(pose, mesh_c, self.intrinsic_matrix, w, h) + if debug: + self.vis_3d(rgb, depth, [pose],self.intrinsic_matrix,mesh_c) + breakpoint() + # find nearset vertices, and project get length between vertices + half_extents = mesh_c.extents / 2.0 + signs = np.array([[x, y, z] for x in [-1, 1] + for y in [-1, 1] + for z in [-1, 1]]) + + vertices = signs * half_extents + transformed_points = (pose @ np.hstack((vertices, np.ones((vertices.shape[0], 1)))).T).T[:, :3] + projected_points = (self.intrinsic_matrix @ transformed_points.T).T + projected_points[:, :2] /= projected_points[:, 2:3] + + condition = (projected_points[:, 0] < w) & (projected_points[:, 1] < h) + filtered_points = projected_points[condition] + filtered_indices = np.where(condition)[0] + + min_idx_in_filtered = np.argmin(np.linalg.norm(filtered_points)) + original_index = filtered_indices[min_idx_in_filtered] + nearest_index = original_index + projected_points = projected_points[...,:2].astype(int) + + + # if two vertices are matched, use this vertex as starting point and find intersection value, then get box extents directly + gt_contours, _ = cv2.findContours(mask.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + gt_cnt = max(gt_contours, key=cv2.contourArea) + gt_vertex = cv2.approxPolyDP(gt_cnt, epsilon=5, closed=True).reshape(-1,2) + + obs_contours, _ = cv2.findContours(mask_r.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + obs_cnt = max(obs_contours, key=cv2.contourArea) + obs_vertex = cv2.approxPolyDP(obs_cnt, epsilon=5, closed=True).reshape(-1,2) + vis = rgb.copy() + for u, v in gt_vertex: + cv2.circle(vis, tuple([u,v]), 5, (255, 0, 0), -1) + # cv2.imwrite('a.png', vis) + pair_match = find_matched_points(projected_points, gt_vertex, threshold=50) + + # early stopping, if key point matched, stop searching and return scale based on similarity + for start_pt_index in range(8): + start_pt = 
projected_points[start_pt_index] + + connected = get_connected_vertices(start_pt_index) + norm_vectors = (projected_points[connected] - start_pt)/ np.linalg.norm(projected_points[connected] - start_pt) + count = count_lines_passing_points(start_pt, norm_vectors, gt_vertex, threshold=threshold/2, rgb=rgb,vis = False) + if count == 3: + # lines start from start pt and project to xyz axis, all hits the vertex of gt mask, return the scale + arr_gt, arr_obs = intersection_in_xyz_axis(norm_vectors, start_pt, mask_r, mask, threshold = threshold//2,vis = False, save = False) + output_scale = [None] * 3 + for axis_index,norm_vector in enumerate(norm_vectors): + axis = np.nonzero(signs[connected[axis_index]] - signs[start_pt_index])[0][0] + length_gt = arr_gt[axis_index] + end_pt_obs = projected_points[connected[axis_index]] + + if end_pt_obs[0] not in range(0,rgb.shape[1]) or end_pt_obs[1] not in range(0,rgb.shape[0]): + # out of boundary. get pixel distance directly from start and end point. add threshold for tolerance + length_obs = np.linalg.norm(end_pt_obs - start_pt).astype(np.uint32) + threshold + else: + length_obs = arr_obs[axis_index] + + output_scale[axis] = round(length_gt * mesh_c.extents[axis]/length_obs,2) + + return output_scale + + if len(pair_match) == 0: + start_pt_index = nearest_index + + else: + pair_match = pair_match[0] + start_pt_index = pair_match[0] + + start_pt = projected_points[start_pt_index] + connected = get_connected_vertices(start_pt_index) + norm_vectors = (projected_points[connected] - start_pt)/ np.linalg.norm(projected_points[connected] - start_pt) + arr_gt, arr_obs = intersection_in_xyz_axis(norm_vectors, start_pt, mask_r, mask,threshold,vis = False, save = False) + + if abs(arr_gt-arr_obs)[0] <= 20 and abs(arr_gt-arr_obs)[1] <= 20 and abs(arr_gt-arr_obs)[2] <= 20: + break + + if abs(high_x-low_x) < 0.1 and abs(high_y-low_y) < 1 and abs(high_z-low_z) < 0.1: + break + + if (arr_obs[0] - arr_gt[0]) > 0: + high_x = mid_x + elif 
(arr_obs[0] - arr_gt[0]) < 0: + low_x = mid_x + + if (arr_obs[1] - arr_gt[1]) > 0: + high_y = mid_y + elif (arr_obs[1] - arr_gt[1]) < 0: + low_y = mid_y + + if (arr_obs[2] - arr_gt[2]) > 0: + high_z = mid_z + elif (arr_obs[2] - arr_gt[2]) < 0: + low_z = mid_z + + final_scale = self.pose_estimator.cad_cache['tmp']['mesh'].extents + return final_scale + + def vis_3d(self, rgb_img, depth_img, pose_list,intrinsic,mesh, mask = None): + + vis = o3d.visualization.Visualizer() + vis.create_window() + + if mask is not None: + rgb_img = rgb_img * mask[:,:,None] + depth_img = depth_img * mask + rgb = o3d.geometry.Image(rgb_img) # for uinit16 use + depth = o3d.geometry.Image((depth_img).astype(np.uint16)) + # mask = (mask > 0).astype(np.uint8) + + rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth(rgb, depth, depth_scale=1000.0) + + width = rgb_img.shape[1] + height = rgb_img.shape[0] + cx = int(intrinsic[0,2]) + cy = int(intrinsic[1,2]) + fx = int(intrinsic[0,0]) + fy = int(intrinsic[1,1]) + + intri = o3d.camera.PinholeCameraIntrinsic(width=width, height=height, fx=fx, fy=fy, cx=cx, cy=cy) + o3d_points = o3d.geometry.PointCloud.create_from_rgbd_image(rgbd, intrinsic=intri) + + + vis.add_geometry(o3d_points) + # vis.add_geometry(o3d.geometry.TriangleMesh.create_coordinate_frame(1)) + vis.update_geometry(o3d_points) + + + + + + for i,pose in enumerate(pose_list): + + # mesh = trimesh.load(self.config['cad_database'][f'{box_name[i]}'], force='mesh') + # mesh_o3d = o3d.io.read_triangle_mesh(f"/media/kyber/Data1/SAM-6D/SAM-6D/Data/Storage_hq_test/model/{box_name}/model.obj",enable_post_processing=True) + mesh_o3d = o3d.geometry.TriangleMesh() + mesh_o3d.vertices = o3d.utility.Vector3dVector(mesh.vertices) + mesh_o3d.triangles = o3d.utility.Vector3iVector(mesh.faces) + aabb = mesh_o3d.get_axis_aligned_bounding_box() + extents = aabb.get_extent() + + mesh_o3d.transform(pose) + # print('----------------------------->',extents) + mesh_o3d.paint_uniform_color([1.0, 0.0, 0.0]) + 
mesh_o3d = o3d.geometry.TriangleMesh(o3d.utility.Vector3dVector(mesh.vertices), o3d.utility.Vector3iVector(mesh.faces)) + pcd_obj = mesh_o3d.sample_points_uniformly(number_of_points=5000) + + + pcd_obj_trans = copy.deepcopy(pcd_obj).transform(pose) + pcd_obj_trans.paint_uniform_color([1.0, 0.0, 0.0]) + vis.add_geometry(pcd_obj_trans) + vis.update_geometry(pcd_obj_trans) + + vis.run() + + def run(self, depth_img, color_img, mask_img,debug = False, gt = False): + + color_vis = copy.deepcopy(color_img) + + + assert len(mask_img.shape) == 2 + + # vis the GT + # box = trimesh.load(f'/workspace/PACE/models/obj_{str(obj_id).zfill(6)}.ply', force='mesh') + # box_rescale = box.copy() + # box_rescale.vertices = box_rescale.vertices/1000 + # self.vis_3d(color_img, depth_img, [gt_pose],self.intrinsic_matrix,box_rescale) + + # use box infer module to infer the scale & pose + tmp_scores = -1 + output_pose = None + output_scores = None + output_scale = None + + self.pose_estimator.K = self.intrinsic_matrix + + + assert np.isin(mask_img, [0,1]).all() + + for run_idx in range(2): + + scale = self.binary_search_scale(color_img,depth_img, mask_img*255, self.intrinsic_matrix, 'R306',debug=False,scale_min=[0.1, 0.15,0.15], scale_max=[0.3,0.18,0.5],threshold=10) + box_ori_scale = self.pose_estimator.cad_cache['R306']['mesh'].extents + new_scale = np.round(scale/box_ori_scale,2) + pose_scores, pred_rot, pred_trans,color_vis, _ = self.pose_estimator.inference(color_img.copy(), mask_img*255, depth_img.copy(), 'R306', new_scale) + pose6d = np.eye(4) + pose6d[:3,:3] = pred_rot + pose6d[:3,3] = pred_trans + mesh = copy.deepcopy(self.pose_estimator.cad_cache['tmp']['mesh']) + print('scores:', pose_scores) + + if pose_scores is None: + continue + if pose_scores > tmp_scores: + output_pose = pose6d + output_scores = pose_scores + output_scale = mesh.extents + tmp_scores = pose_scores + + self.vis_3d(color_img, depth_img, [pose6d],self.intrinsic_matrix,mesh) + pose_word = self.extrinsic @ 
if __name__ == '__main__':
    # Smoke-test the perception pipeline on one sample RGB-D frame.
    config = '/home/kyber/charles/project/grasp_box/config/config_perception.yaml'

    color_img = cv2.imread('/home/kyber/charles/project/grasp_box/data/000000_rgb.png', cv2.IMREAD_COLOR)
    depth_img = cv2.imread('/home/kyber/charles/project/grasp_box/data/000000_depth.png', -1)  # unit mm
    # Invert the object mask, then binarize it to {0,1}.
    mask = ~cv2.imread('/home/kyber/charles/project/grasp_box/data/000000_box-colgate.png', -1)
    mask = mask.astype(bool).astype(np.uint8)

    perception = Perception(np.eye(3), np.eye(4))
    pose, scores, extents = perception.run(depth_img=depth_img, color_img=color_img, mask_img=mask)
SAM6DPoseEstimator( + config, + self.intrinsic_matrix, + "/home/kyber/charles/project/grasp_box/submodules/SAM6D/config/base.yaml", + True + ) + def load_config(self, config_path): + with open(config_path, "r") as file: + return yaml.safe_load(file) + + def binary_search_scale(self, rgb,depth, mask, K, cad_name,debug=False,scale_min=[0.1, 0.15,0.15], scale_max=[0.3,0.18,0.5], threshold=15): + + h, w, _ = rgb.shape + + [low_x, low_y, low_z]=scale_min + [high_x, high_y, high_z]=scale_max + + if set(np.unique(mask) ).issubset({0, 255}): + mask = (mask // 255).astype(np.uint8) + + while low_x<=high_x and low_y <= high_y and low_z <= high_z: + + mid_x = (low_x+high_x)/2 + mid_y = (low_y+high_y)/2 + mid_z = (low_z+high_z)/2 + self.pose_estimator.K = self.intrinsic_matrix + pose_scores, pred_rot, pred_trans,color_vis,_ = self.pose_estimator.inference(rgb.copy(), mask.copy(), depth.copy(), cad_name, scale= [mid_x, mid_y,mid_z]) + + pose = np.eye(4) + pose[:3,3] = pred_trans + pose[:3,:3] = pred_rot + + mesh_c = self.pose_estimator.cad_cache['tmp']['mesh'] + + mask_r= render_cad_mask(pose, mesh_c, K, w, h) + if debug: + self.vis_3d(rgb, depth, [pose],K,mesh_c) + breakpoint() + # find nearset vertices, and project get length between vertices + half_extents = mesh_c.extents / 2.0 + signs = np.array([[x, y, z] for x in [-1, 1] + for y in [-1, 1] + for z in [-1, 1]]) + + vertices = signs * half_extents + transformed_points = (pose @ np.hstack((vertices, np.ones((vertices.shape[0], 1)))).T).T[:, :3] + projected_points = (K @ transformed_points.T).T + projected_points[:, :2] /= projected_points[:, 2:3] + + condition = (projected_points[:, 0] < w) & (projected_points[:, 1] < h) + filtered_points = projected_points[condition] + filtered_indices = np.where(condition)[0] + + min_idx_in_filtered = np.argmin(np.linalg.norm(filtered_points)) + original_index = filtered_indices[min_idx_in_filtered] + nearest_index = original_index + projected_points = projected_points[...,:2].astype(int) 
+ + + # if two vertices are matched, use this vertex as starting point and find intersection value, then get box extents directly + gt_contours, _ = cv2.findContours(mask.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + gt_cnt = max(gt_contours, key=cv2.contourArea) + gt_vertex = cv2.approxPolyDP(gt_cnt, epsilon=5, closed=True).reshape(-1,2) + + obs_contours, _ = cv2.findContours(mask_r.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + obs_cnt = max(obs_contours, key=cv2.contourArea) + obs_vertex = cv2.approxPolyDP(obs_cnt, epsilon=5, closed=True).reshape(-1,2) + vis = rgb.copy() + for u, v in gt_vertex: + cv2.circle(vis, tuple([u,v]), 5, (255, 0, 0), -1) + # cv2.imwrite('a.png', vis) + pair_match = find_matched_points(projected_points, gt_vertex, threshold=50) + + # early stopping, if key point matched, stop searching and return scale based on similarity + for start_pt_index in range(8): + start_pt = projected_points[start_pt_index] + + connected = get_connected_vertices(start_pt_index) + norm_vectors = (projected_points[connected] - start_pt)/ np.linalg.norm(projected_points[connected] - start_pt) + count = count_lines_passing_points(start_pt, norm_vectors, gt_vertex, threshold=threshold/2, rgb=rgb,vis = False) + if count == 3: + # lines start from start pt and project to xyz axis, all hits the vertex of gt mask, return the scale + arr_gt, arr_obs = intersection_in_xyz_axis(norm_vectors, start_pt, mask_r, mask, threshold = threshold//2,vis = False, save = False) + output_scale = [None] * 3 + for axis_index,norm_vector in enumerate(norm_vectors): + axis = np.nonzero(signs[connected[axis_index]] - signs[start_pt_index])[0][0] + length_gt = arr_gt[axis_index] + end_pt_obs = projected_points[connected[axis_index]] + + if end_pt_obs[0] not in range(0,rgb.shape[1]) or end_pt_obs[1] not in range(0,rgb.shape[0]): + # out of boundary. get pixel distance directly from start and end point. 
add threshold for tolerance + length_obs = np.linalg.norm(end_pt_obs - start_pt).astype(np.uint32) + threshold + else: + length_obs = arr_obs[axis_index] + + output_scale[axis] = round(length_gt * mesh_c.extents[axis]/length_obs,2) + + return output_scale + + if len(pair_match) == 0: + start_pt_index = nearest_index + + else: + pair_match = pair_match[0] + start_pt_index = pair_match[0] + + start_pt = projected_points[start_pt_index] + connected = get_connected_vertices(start_pt_index) + norm_vectors = (projected_points[connected] - start_pt)/ np.linalg.norm(projected_points[connected] - start_pt) + arr_gt, arr_obs = intersection_in_xyz_axis(norm_vectors, start_pt, mask_r, mask,threshold,vis = False, save = False) + + if abs(arr_gt-arr_obs)[0] <= 20 and abs(arr_gt-arr_obs)[1] <= 20 and abs(arr_gt-arr_obs)[2] <= 20: + break + + if abs(high_x-low_x) < 0.1 and abs(high_y-low_y) < 1 and abs(high_z-low_z) < 0.1: + break + + if (arr_obs[0] - arr_gt[0]) > 0: + high_x = mid_x + elif (arr_obs[0] - arr_gt[0]) < 0: + low_x = mid_x + + if (arr_obs[1] - arr_gt[1]) > 0: + high_y = mid_y + elif (arr_obs[1] - arr_gt[1]) < 0: + low_y = mid_y + + if (arr_obs[2] - arr_gt[2]) > 0: + high_z = mid_z + elif (arr_obs[2] - arr_gt[2]) < 0: + low_z = mid_z + + final_scale = self.pose_estimator.cad_cache['tmp']['mesh'].extents + return final_scale + + def vis_3d(self, rgb_img, depth_img, pose_list,intrinsic,mesh, mask = None): + + vis = o3d.visualization.Visualizer() + vis.create_window() + + if mask is not None: + rgb_img = rgb_img * mask[:,:,None] + depth_img = depth_img * mask + rgb = o3d.geometry.Image(rgb_img) # for uinit16 use + depth = o3d.geometry.Image((depth_img).astype(np.uint16)) + # mask = (mask > 0).astype(np.uint8) + + rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth(rgb, depth, depth_scale=1000.0) + + width = rgb_img.shape[1] + height = rgb_img.shape[0] + cx = int(intrinsic[0,2]) + cy = int(intrinsic[1,2]) + fx = int(intrinsic[0,0]) + fy = int(intrinsic[1,1]) + + 
intri = o3d.camera.PinholeCameraIntrinsic(width=width, height=height, fx=fx, fy=fy, cx=cx, cy=cy) + o3d_points = o3d.geometry.PointCloud.create_from_rgbd_image(rgbd, intrinsic=intri) + + + vis.add_geometry(o3d_points) + # vis.add_geometry(o3d.geometry.TriangleMesh.create_coordinate_frame(1)) + vis.update_geometry(o3d_points) + + + + + + for i,pose in enumerate(pose_list): + + # mesh = trimesh.load(self.config['cad_database'][f'{box_name[i]}'], force='mesh') + # mesh_o3d = o3d.io.read_triangle_mesh(f"/media/kyber/Data1/SAM-6D/SAM-6D/Data/Storage_hq_test/model/{box_name}/model.obj",enable_post_processing=True) + mesh_o3d = o3d.geometry.TriangleMesh() + mesh_o3d.vertices = o3d.utility.Vector3dVector(mesh.vertices) + mesh_o3d.triangles = o3d.utility.Vector3iVector(mesh.faces) + aabb = mesh_o3d.get_axis_aligned_bounding_box() + extents = aabb.get_extent() + + mesh_o3d.transform(pose) + # print('----------------------------->',extents) + mesh_o3d.paint_uniform_color([1.0, 0.0, 0.0]) + mesh_o3d = o3d.geometry.TriangleMesh(o3d.utility.Vector3dVector(mesh.vertices), o3d.utility.Vector3iVector(mesh.faces)) + pcd_obj = mesh_o3d.sample_points_uniformly(number_of_points=5000) + + + pcd_obj_trans = copy.deepcopy(pcd_obj).transform(pose) + pcd_obj_trans.paint_uniform_color([1.0, 0.0, 0.0]) + vis.add_geometry(pcd_obj_trans) + vis.update_geometry(pcd_obj_trans) + + vis.run() + + def run(self, depth_img, color_img, mask_img, K = None,debug = False, gt = False): + + if K is not None: + self.intrinsic_matrix = K + + color_vis = copy.deepcopy(color_img) + + + assert len(mask_img.shape) == 2 + + # vis the GT + # box = trimesh.load(f'/workspace/PACE/models/obj_{str(obj_id).zfill(6)}.ply', force='mesh') + # box_rescale = box.copy() + # box_rescale.vertices = box_rescale.vertices/1000 + # self.vis_3d(color_img, depth_img, [gt_pose],self.intrinsic_matrix,box_rescale) + + # use box infer module to infer the scale & pose + tmp_scores = -1 + output_pose = None + output_scores = None + 
if __name__ == '__main__':
    # Smoke-test the server-side perception pipeline on one sample frame.
    config = '/home/kyber/charles/project/grasp_box/config/config_perception.yaml'
    perception = Perception(config)

    color_img = cv2.imread('/home/kyber/charles/project/grasp_box/data/000000_rgb.png', cv2.IMREAD_COLOR)
    depth_img = cv2.imread('/home/kyber/charles/project/grasp_box/data/000000_depth.png', -1)  # unit mm
    # Invert the object mask, then binarize it to {0,1}.
    mask = ~cv2.imread('/home/kyber/charles/project/grasp_box/data/000000_box-colgate.png', -1)
    mask = mask.astype(bool).astype(np.uint8)

    pose, scores, extents = perception.run(depth_img=depth_img, color_img=color_img, mask_img=mask)
-1) + mask = mask.astype(bool) + mask = mask.astype(np.uint8) + pose,scores, extents = perception.run(depth_img = depth_img, color_img = color_img, mask_img=mask) diff --git a/project/grasp_box/submodules/SAM6D/README.md b/project/grasp_box/submodules/SAM6D/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e04353dc88cbff8480861a49271c7d386724d9cb --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/README.md @@ -0,0 +1,58 @@ +# Pose Estimation Model (PEM) for SAM-6D + + + +![image](https://github.com/JiehongLin/SAM-6D/blob/main/pics/overview_pem.png) + +## Requirements +The code has been tested with +- python 3.9.6 +- pytorch 2.0.0 +- CUDA 11.3 + +Other dependencies: + +``` +sh dependencies.sh +``` + +## Data Preparation + +Please refer to [[link](https://github.com/JiehongLin/SAM-6D/tree/main/SAM-6D/Data)] for more details. + + +## Model Download +Our trained model is provided [[here](https://drive.google.com/file/d/1joW9IvwsaRJYxoUmGo68dBVg-HcFNyI7/view?usp=sharing)], and could be downloaded via the command: +``` +python download_sam6d-pem.py +``` + +## Training on MegaPose Training Set + +To train the Pose Estimation Model of SAM-6D, please prepare the training data and run the folowing command: +``` +python train.py --gpus 0,1,2,3 --model pose_estimation_model --config config/base.yaml +``` +By default, we use four GPUs of 3090ti to train the model with batchsize set as 28. + + +## Evaluation on BOP Datasets + +To evaluate the model on BOP datasets, please run the following command: +``` +python test_bop.py --gpus 0 --model pose_estimation_model --config config/base.yaml --dataset $DATASET --view 42 +``` +The string "DATASET" could be set as `lmo`, `icbin`, `itodd`, `hb`, `tless`, `tudl`, `ycbv`, or `all`. 
Before evaluation, please refer to [[link](https://github.com/JiehongLin/SAM-6D/tree/main/SAM-6D/Data)] for rendering the object templates of BOP datasets, or download our [rendered templates](https://drive.google.com/drive/folders/1fXt5Z6YDPZTJICZcywBUhu5rWnPvYAPI?usp=drive_link). Besides, the instance segmentation should be done following [[link](https://github.com/JiehongLin/SAM-6D/tree/main/SAM-6D/Instance_Segmentation_Model)]; to test on your own segmentation results, you could change the "detection_paths" in the `test_bop.py` file. + +One could also download our trained model for evaluation: +``` +python test_bop.py --gpus 0 --model pose_estimation_model --config config/base.yaml --checkpoint_path checkpoints/sam-6d-pem-base.pth --dataset $DATASET --view 42 +``` + + +## Acknowledgements +- [MegaPose](https://github.com/megapose6d/megapose6d) +- [GDRNPP](https://github.com/shanice-l/gdrnpp_bop2022) +- [GeoTransformer](https://github.com/qinzheng93/GeoTransformer) +- [Flatten Transformer](https://github.com/LeapLabTHU/FLatten-Transformer) + diff --git a/project/grasp_box/submodules/SAM6D/__pycache__/pose_estimator.cpython-310.pyc b/project/grasp_box/submodules/SAM6D/__pycache__/pose_estimator.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4bab62accbd5217461d0e1db3bebd70de931ab71 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/__pycache__/pose_estimator.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/__pycache__/pose_estimator.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/__pycache__/pose_estimator.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c4e7f939553070bf69e6defb15d75045c63f851 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/__pycache__/pose_estimator.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/config/__pycache__/config.cpython-310.pyc 
b/project/grasp_box/submodules/SAM6D/config/__pycache__/config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8dd140ead0e8a3efd77dfd6578d91cc33bc2664d Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/config/__pycache__/config.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/config/__pycache__/config.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/config/__pycache__/config.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10e220bb5839c05996d6c7e6d059e59ce90290cb Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/config/__pycache__/config.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/config/base.yaml b/project/grasp_box/submodules/SAM6D/config/base.yaml new file mode 100644 index 0000000000000000000000000000000000000000..24145ec4c66605bddaf80a7dd5e63a2222533809 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/config/base.yaml @@ -0,0 +1,104 @@ +NAME_PROJECT: SAM-6D + +optimizer: + type : Adam + lr : 0.0001 + betas: [0.5, 0.999] + eps : 0.000001 + weight_decay: 0.0 + +lr_scheduler: + type: WarmupCosineLR + max_iters: 600000 + warmup_factor: 0.001 + warmup_iters: 1000 + +model: + coarse_npoint: 196 + fine_npoint: 2048 + feature_extraction: + vit_type: vit_base + up_type: linear + embed_dim: 768 + out_dim: 256 + use_pyramid_feat: True + pretrained: True + geo_embedding: + sigma_d: 0.2 + sigma_a: 15 + angle_k: 3 + reduction_a: max + hidden_dim: 256 + coarse_point_matching: + nblock: 3 + input_dim: 256 + hidden_dim: 256 + out_dim: 256 + temp: 0.1 + sim_type: cosine + normalize_feat: True + loss_dis_thres: 0.15 + nproposal1: 6000 + nproposal2: 300 + fine_point_matching: + nblock: 3 + input_dim: 256 + hidden_dim: 256 + out_dim: 256 + pe_radius1: 0.1 + pe_radius2: 0.2 + focusing_factor: 3 + temp: 0.1 + sim_type: cosine + normalize_feat: True + loss_dis_thres: 0.15 + + + +train_dataset: + name: training_dataset + data_dir: 
../Data/MegaPose-Training-Data + img_size: 224 + n_sample_observed_point: 2048 + n_sample_model_point: 2048 + n_sample_template_point: 5000 + min_visib_fract: 0.1 + min_px_count_visib: 512 + shift_range: 0.01 + rgb_mask_flag: True + dilate_mask: True + +train_dataloader: + bs : 28 + num_workers : 24 + shuffle : True + drop_last : True + pin_memory : False + + + +test_dataset: + name: bop_test_dataset + data_dir: ../Data/BOP + template_dir: ../Data/BOP-Templates + img_size: 224 + n_sample_observed_point: 2048 + n_sample_model_point: 1024 + n_sample_template_point: 5000 + minimum_n_point: 8 + rgb_mask_flag: True + seg_filter_score: 0.25 + n_template_view: 42 + + +test_dataloader: + bs : 16 + num_workers : 16 + shuffle : False + drop_last : False + pin_memory : False + + +rd_seed: 1 +training_epoch: 15 +iters_to_print: 50 diff --git a/project/grasp_box/submodules/SAM6D/config/config.py b/project/grasp_box/submodules/SAM6D/config/config.py new file mode 100644 index 0000000000000000000000000000000000000000..7df4db7e486a10da9782c01e714b9f9280f48392 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/config/config.py @@ -0,0 +1,305 @@ +import os +import json +import tempfile +import warnings +from typing import Optional +from argparse import Namespace +import yaml +from addict import Dict + +BASE_KEY = "_base_" +RESERVED_KEYS = ["filename", "text"] + + +class ConfigDict(Dict): + r"""ConfigDict based on Dict, which use to convert the config + file into config dict + """ + def __missing__(self, name): + raise KeyError(name) + + def __getattr__(self, name): + try: + value = super(ConfigDict, self).__getattr__(name) + except KeyError: + ex = AttributeError( + f"`{self.__class__.__name__}` object has no attribute `{name}`" + ) + except Exception as e: + ex = e + else: + return value + raise ex + + +class Config(object): + r"""A facility for config and config files. + It supports common file formats as configs: python/json/yaml. 
The interface + is the same as a dict object and also allows access config values as + attributes. + + Example: + >>> cfg = Config(dict(a=1, b=dict(b1=[0, 1]))) + >>> cfg.a + 1 + >>> cfg.b + {"b1": [0, 1]} + >>> cfg.b.b1 + [0, 1] + >>> cfg = Config.fromfile("./configs/test.py") + >>> cfg.filename + "/home/gorilla_lab/code/gorilla/configs/test.py" + >>> cfg.item4 + "test" + >>> cfg + "Config [path: /home/gorilla_lab/code/gorilla/configs/test.py]: " + "{"item1": [1, 2], "item2": {"a": 0}, "item3": True, "item4": "test"}" + """ + def __init__(self, + cfg_dict: Optional[Dict] = None, + cfg_text: Optional[str] = None, + filename: Optional[str] = None): + if cfg_dict is None: + cfg_dict = dict() + elif not isinstance(cfg_dict, dict): + raise TypeError(f"cfg_dict must be a dict, " + f"but got {type(cfg_dict)}") + for key in cfg_dict: + if key in RESERVED_KEYS: + raise KeyError(f"{key} is reserved for config file") + + super(Config, self).__setattr__("_cfg_dict", ConfigDict(cfg_dict)) + super(Config, self).__setattr__("_filename", filename) + if cfg_text: + text = cfg_text + elif filename: + with open(filename, "r") as f: + text = f.read() + else: + text = "" + super(Config, self).__setattr__("_text", text) + + + @staticmethod + def _file2dict(filename: str): + filename = os.path.abspath(os.path.expanduser(filename)) + + with open(filename, 'r') as file: + obj = yaml.safe_load(file) + cfg_dict = ConfigDict(obj) + + with open(filename, "r") as f: + cfg_text = f.read() + + # here cfg_dict is still the same as content in --config file, + # and the code block below read 4 sub-config file then merge into one. 
+ if BASE_KEY in cfg_dict: + cfg_dir = os.path.dirname(filename) + base_filename = cfg_dict.pop(BASE_KEY) + base_filename = base_filename if isinstance( + base_filename, list) else [base_filename] + + cfg_dict_list = list() + cfg_text_list = list() + for f in base_filename: + _cfg_dict, _cfg_text = Config._file2dict(os.path.join(cfg_dir, f)) + cfg_dict_list.append(_cfg_dict) + cfg_text_list.append(_cfg_text) + + base_cfg_dict = dict() + for c in cfg_dict_list: + if len(base_cfg_dict.keys() & c.keys()) > 0: + # e.g. sub-config file about dataset should not overlap with + # the one about model + raise KeyError("Duplicate key is not allowed among bases") + base_cfg_dict.update(c) + + cfg_dict = Config._merge_a_into_b(cfg_dict, base_cfg_dict) + + # merge cfg_text + cfg_text_list.append(cfg_text) + cfg_text = "\n".join(cfg_text_list) + + return cfg_dict, cfg_text + + @staticmethod + def _merge_a_into_b(a, b): + r"""merge dict ``a`` into dict ``b`` (non-inplace). + Values in ``a`` will overwrite ``b``. ``b`` is copied first to avoid + in-place modifications. + Args: + a (dict): The source dict to be merged into ``b``. + b (dict): The origin dict to be fetch keys from ``a``. + Returns: + dict: The modified dict of ``b`` using ``a``. + Examples: + # Normally merge a into b. + >>> Config._merge_a_into_b( + ... dict(obj=dict(a=2)), dict(obj=dict(a=1))) + {"obj": {"a": 2}} + """ + b = b.copy() + for k, v in a.items(): + if isinstance(v, dict) and k in b: + allowed_types = dict + if not isinstance(b[k], allowed_types): + raise TypeError( + f"{k}={v} in child config cannot inherit from base " + f"because {k} is a dict in the child config but is of " + f"type {type(b[k])} in base config.") + b[k] = Config._merge_a_into_b(v, b[k]) + else: + b[k] = v + return b + + @staticmethod + def fromfile(filename: str): + r"""cfg_text is the text content read from 5 files, and cfg_dict is + a dict resolved by the text content. 
+ """ + cfg_dict, cfg_text = Config._file2dict(filename) + return Config(cfg_dict, cfg_text=cfg_text, filename=filename) + + @staticmethod + def fromstring(cfg_str, file_format): + """Generate config from config str. + Args: + cfg_str (str): Config str. + file_format (str): Config file format corresponding to the + config str. Only py/yml/yaml/json type are supported now! + Returns: + obj:`Config`: Config obj. + """ + if file_format not in [".py", ".json", ".yaml", ".yml"]: + raise IOError("Only py/yml/yaml/json type are supported now!") + if file_format != ".py" and "dict(" in cfg_str: + # check if users specify a wrong suffix for python + warnings.warn( + "Please check 'file_format', the file format may be .py") + + with tempfile.NamedTemporaryFile("w", suffix=file_format) as temp_file: + temp_file.write(cfg_str) + temp_file.flush() + cfg = Config.fromfile(temp_file.name) + return cfg + + @property + def filename(self) -> str: + return self._filename + + @property + def text(self) -> str: + return self._text + + def __repr__(self) -> str: + content = f"Config (path: {self.filename})\n" + content += json.dumps(self._cfg_dict, indent=4, ensure_ascii=False) + return content + + def __len__(self) -> int: + return len(self._cfg_dict) + + def __getattr__(self, name: str): + return getattr(self._cfg_dict, name) + + def __getitem__(self, name: str): + return self._cfg_dict.__getitem__(name) + + def __setattr__(self, name: str, value: Dict): + if isinstance(value, dict): + value = ConfigDict(value) + self._cfg_dict.__setattr__(name, value) + + def __setitem__(self, name: str, value: Dict): + if isinstance(value, dict): + value = ConfigDict(value) + self._cfg_dict.__setitem__(name, value) + + def __iter__(self): + return iter(self._cfg_dict) + + def dump(self, file: Optional[str] = None, **kwargs): + cfg_dict = self._cfg_dict.to_dict() + from gorilla.fileio import dump + if file is None: + # output the content + file_format = self.filename.split(".")[-1] + if file_format 
== "py": + return self.text + else: + return dump(cfg_dict, file_format=file_format, **kwargs) + else: + if file.endswith("py"): + with open(file, "w") as f: + f.write(self.text) + else: + dump(cfg_dict, file, **kwargs) + + def merge_from_dict(self, options: Dict): + r"""Merge list into cfg_dict. + Merge the dict parsed by MultipleKVAction into this cfg. + Examples: + >>> options = {"model.backbone.depth": 50, + ... "model.backbone.with_cp":True} + >>> cfg = Config(dict(model=dict(backbone=dict(type="ResNet")))) + >>> cfg.merge_from_dict(options) + >>> cfg_dict = super(Config, self).__getattribute__("_cfg_dict") + >>> assert cfg_dict == dict( + ... model=dict(backbone=dict(depth=50, with_cp=True))) + + # Merge list element + >>> cfg = Config(dict(pipeline=[ + ... dict(type="LoadImage"), dict(type="LoadAnnotations")])) + >>> options = dict(pipeline={"0": dict(type="SelfLoadImage")}) + Args: + options (dict): dict of configs to merge from. + """ + option_cfg_dict = {} + for full_key, v in options.items(): + if v is None: # handle the case when a parameter simultaneously appears in argparse and config file + continue + d = option_cfg_dict + key_list = full_key.split(".") + for subkey in key_list[:-1]: + d.setdefault(subkey, ConfigDict()) + d = d[subkey] + subkey = key_list[-1] + d[subkey] = v + + cfg_dict = self._cfg_dict + cfg_dict = Config._merge_a_into_b(option_cfg_dict, cfg_dict) + # NOTE: strange phenomenon + # self._cfg_dict = cfg_dict + super(Config, self).__setattr__("_cfg_dict", cfg_dict) + + +def merge_cfg_and_args(cfg: Optional[Config] = None, + args: Optional[Namespace] = None) -> Config: + r"""merge args and cfg into a Config by calling 'merge_from_dict' func + + Args: + cfg (Config, optional): Config from cfg file. + args (Namespace, optional): Argument parameters input. 
+ + Returns: + Config: Merged Config + """ + assert cfg is not None or args is not None, "'cfg' or 'args' can not be None simultaneously" + + if cfg is None: + cfg = Config() + else: + assert isinstance( + cfg, Config + ), f"'cfg' must be None or gorilla.Config, but got {type(cfg)}" + if args is None: + args = Namespace() + else: + assert isinstance( + args, Namespace + ), f"'args' must be None or argsparse.Namespace, but got {type(args)}" + + # convert namespace into dict + args_dict = vars(args) + cfg.merge_from_dict(args_dict) + return cfg \ No newline at end of file diff --git a/project/grasp_box/submodules/SAM6D/dependencies.sh b/project/grasp_box/submodules/SAM6D/dependencies.sh new file mode 100644 index 0000000000000000000000000000000000000000..d0649f36f23ce88443e5646fa486f38e591f7bc5 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/dependencies.sh @@ -0,0 +1,8 @@ + + + +pip install timm gorilla-core==0.2.7.8 trimesh==3.22.1 imgaug opencv-python gpustat==1.0.0 einops + +cd /model/pointnet2 +python setup.py install +cd .. 
diff --git a/project/grasp_box/submodules/SAM6D/download_sam6d-pem.py b/project/grasp_box/submodules/SAM6D/download_sam6d-pem.py new file mode 100644 index 0000000000000000000000000000000000000000..50f069152b8313fa8a9ab4547544c5b6ad50f286 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/download_sam6d-pem.py @@ -0,0 +1,18 @@ +import os +import os.path as osp + +def download_model(output_path): + import os + command = f"gdown --no-cookies --no-check-certificate -O '{output_path}/sam-6d-pem-base.pth' 1joW9IvwsaRJYxoUmGo68dBVg-HcFNyI7" + os.system(command) + +def download() -> None: + root_dir = os.path.dirname((os.path.abspath(__file__))) + save_dir = osp.join(root_dir, "checkpoints") + os.makedirs(save_dir, exist_ok=True) + download_model(save_dir) + +if __name__ == "__main__": + download() + + diff --git a/project/grasp_box/submodules/SAM6D/environment.yaml b/project/grasp_box/submodules/SAM6D/environment.yaml new file mode 100644 index 0000000000000000000000000000000000000000..afc5afdd9f4cc15a88c953686034e0f6c20a8831 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/environment.yaml @@ -0,0 +1,39 @@ +name: sam6d +channels: + - xformers + - conda-forge + - pytorch + - nvidia + - defaults +dependencies: + - pip + - python=3.9.6 + - pip: + - torch==2.0.0 + - torchvision==0.15.1 + - fvcore + - xformers==0.0.18 + - torchmetrics==0.10.3 + - blenderproc==2.6.1 + - opencv-python + # ISM + - omegaconf + - ruamel.yaml + - hydra-colorlog + - hydra-core + - gdown + - pandas + - imageio + - pyrender + - pytorch-lightning==1.8.1 + - pycocotools + - distinctipy + - git+https://github.com/facebookresearch/segment-anything.git # SAM + - ultralytics==8.0.135 # FastSAM + # PEM + - timm + - gorilla-core==0.2.7.8 + - trimesh==4.0.8 + - gpustat==1.0.0 + - imgaug + - einops \ No newline at end of file diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/coarse_point_matching.cpython-310.pyc 
b/project/grasp_box/submodules/SAM6D/model/__pycache__/coarse_point_matching.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53dd4856bb0662a8a045fd1f2c5c88c7b4e550fb Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/__pycache__/coarse_point_matching.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/coarse_point_matching.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/model/__pycache__/coarse_point_matching.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..901f338598c6108049788586f32eff86a755f05b Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/__pycache__/coarse_point_matching.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/feature_extraction.cpython-310.pyc b/project/grasp_box/submodules/SAM6D/model/__pycache__/feature_extraction.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f56cc261471c8b3ddeeeda562f4452c8b073435a Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/__pycache__/feature_extraction.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/feature_extraction.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/model/__pycache__/feature_extraction.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c20591de82d6fe802d206a712e1903a9f51da89 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/__pycache__/feature_extraction.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/fine_point_matching.cpython-310.pyc b/project/grasp_box/submodules/SAM6D/model/__pycache__/fine_point_matching.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa15e7751308b3043181a93eecc9003c566cd31e Binary files /dev/null and 
b/project/grasp_box/submodules/SAM6D/model/__pycache__/fine_point_matching.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/fine_point_matching.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/model/__pycache__/fine_point_matching.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..886ee15fa50d63bddccbf46ce2023b41523daff8 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/__pycache__/fine_point_matching.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/pose_estimation_model.cpython-310.pyc b/project/grasp_box/submodules/SAM6D/model/__pycache__/pose_estimation_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4391eacc2af750916be58a7459c257db3344fbfe Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/__pycache__/pose_estimation_model.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/pose_estimation_model.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/model/__pycache__/pose_estimation_model.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2a04ce311ccac89d3555bf51474d8472b4d8025c Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/__pycache__/pose_estimation_model.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/transformer.cpython-310.pyc b/project/grasp_box/submodules/SAM6D/model/__pycache__/transformer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f18ad7a2b6d32166a7a7f6cf394a27474468a0f Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/__pycache__/transformer.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/__pycache__/transformer.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/model/__pycache__/transformer.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..2740d37d2fadf552fdfd9571234ebaa08c1aacf8 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/__pycache__/transformer.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/coarse_point_matching.py b/project/grasp_box/submodules/SAM6D/model/coarse_point_matching.py new file mode 100644 index 0000000000000000000000000000000000000000..484fd7bbaae78e35b1b8d0ea4ceee62b73f94261 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/coarse_point_matching.py @@ -0,0 +1,89 @@ +import torch +import torch.nn as nn + +from transformer import GeometricTransformer +from model_utils import ( + compute_feature_similarity, + aug_pose_noise, + compute_coarse_Rt, +) +from loss_utils import compute_correspondence_loss + + + +class CoarsePointMatching(nn.Module): + def __init__(self, cfg, return_feat=False): + super(CoarsePointMatching, self).__init__() + self.cfg = cfg + self.return_feat = return_feat + self.nblock = self.cfg.nblock + + self.in_proj = nn.Linear(cfg.input_dim, cfg.hidden_dim) + self.out_proj = nn.Linear(cfg.hidden_dim, cfg.out_dim) + + self.bg_token = nn.Parameter(torch.randn(1, 1, cfg.hidden_dim) * .02) + + self.transformers = [] + for _ in range(self.nblock): + self.transformers.append(GeometricTransformer( + blocks=['self', 'cross'], + d_model = cfg.hidden_dim, + num_heads = 4, + dropout=None, + activation_fn='ReLU', + return_attention_scores=False, + )) + self.transformers = nn.ModuleList(self.transformers) + + def forward(self, p1, f1, geo1, p2, f2, geo2, radius, end_points): + B = f1.size(0) + + f1 = self.in_proj(f1) + f1 = torch.cat([self.bg_token.repeat(B,1,1), f1], dim=1) # adding bg + f2 = self.in_proj(f2) + f2 = torch.cat([self.bg_token.repeat(B,1,1), f2], dim=1) # adding bg + + atten_list = [] + for idx in range(self.nblock): + f1, f2 = self.transformers[idx](f1, geo1, f2, geo2) + + if self.training or idx==self.nblock-1: + atten_list.append(compute_feature_similarity( + 
self.out_proj(f1), + self.out_proj(f2), + self.cfg.sim_type, + self.cfg.temp, + self.cfg.normalize_feat + )) + + if self.training: + gt_R = end_points['rotation_label'] + gt_t = end_points['translation_label'] / (radius.reshape(-1, 1)+1e-6) + init_R, init_t = aug_pose_noise(gt_R, gt_t) + + end_points = compute_correspondence_loss( + end_points, atten_list, p1, p2, gt_R, gt_t, + dis_thres=self.cfg.loss_dis_thres, + loss_str='coarse' + ) + else: + + + init_R, init_t, points = compute_coarse_Rt( + end_points, + atten_list[-1], p1, p2, end_points['depth'], + radius, + end_points['mask'], + end_points['bbox'], + end_points['model'] / (radius.reshape(-1, 1, 1) + 1e-6), + self.cfg.nproposal1, self.cfg.nproposal2, + ) + + end_points['init_R'] = init_R + end_points['init_t'] = init_t + + if self.return_feat: + return end_points, self.out_proj(f1), self.out_proj(f2) + else: + return end_points, points + diff --git a/project/grasp_box/submodules/SAM6D/model/feature_extraction.py b/project/grasp_box/submodules/SAM6D/model/feature_extraction.py new file mode 100644 index 0000000000000000000000000000000000000000..5702331b86df2c00b5f896eea35ef1a92a115fac --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/feature_extraction.py @@ -0,0 +1,179 @@ +import os +import torch +import torch.nn as nn +from torch.nn import functional as F +import torch.utils.model_zoo as model_zoo +from functools import partial +import timm.models.vision_transformer +from model_utils import ( + LayerNorm2d, + interpolate_pos_embed, + get_chosen_pixel_feats, + sample_pts_feats +) + + + +class ViT(timm.models.vision_transformer.VisionTransformer): + def __init__(self, **kwargs): + super(ViT, self).__init__(**kwargs) + + def forward(self, x): + x = self.patch_embed(x) + x = self._pos_embed(x) + x = self.norm_pre(x) + + out = [] + d = len(self.blocks) + n = d // 4 + idx_nblock = [d-1, d-n-1, d-2*n-1, d-3*n-1] + + for idx, blk in enumerate(self.blocks): + x = blk(x) + if idx in idx_nblock: + 
out.append(self.norm(x)) + return out + + + +class ViT_AE(nn.Module): + def __init__(self, cfg,) -> None: + super(ViT_AE, self).__init__() + self.cfg = cfg + self.vit_type = cfg.vit_type + self.up_type = cfg.up_type + self.embed_dim = cfg.embed_dim + self.out_dim = cfg.out_dim + self.use_pyramid_feat = cfg.use_pyramid_feat + self.pretrained = cfg.pretrained + + if self.vit_type == 'vit_base': + self.vit = ViT( + patch_size=16, embed_dim=self.embed_dim, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6),) + elif self.vit_type == 'vit_large': + self.vit = ViT( + patch_size=16, embed_dim=self.embed_dim, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), ) + else: + assert False + + if self.use_pyramid_feat: + nblock = 4 + else: + nblock = 1 + + if self.up_type == 'linear': + self.output_upscaling = nn.Linear(self.embed_dim * nblock, 16 * self.out_dim, bias=True) + elif self.up_type == 'deconv': + self.output_upscaling = nn.Sequential( + nn.ConvTranspose2d(self.embed_dim * nblock, self.out_dim*2, kernel_size=2, stride=2), + LayerNorm2d(self.out_dim*2), + nn.GELU(), + nn.ConvTranspose2d(self.out_dim*2, self.out_dim, kernel_size=2, stride=2), + ) + else: + assert False + + if self.pretrained: + vit_checkpoint = os.path.join('/home/kyber/charles/project/grasp_box/weights/SAM_6D', 'mae_pretrain_'+ self.vit_type +'.pth') + checkpoint = torch.load(vit_checkpoint, map_location='cpu') + print("load pre-trained checkpoint from: %s" % vit_checkpoint) + checkpoint_model = checkpoint['model'] + state_dict = self.vit.state_dict() + for k in ['head.weight', 'head.bias']: + if k in checkpoint_model and checkpoint_model[k].shape != state_dict[k].shape: + print(f"Removing key {k} from pretrained checkpoint") + del checkpoint_model[k] + # interpolate position embedding + interpolate_pos_embed(self.vit, checkpoint_model) + msg = self.vit.load_state_dict(checkpoint_model, strict=False) + + + def 
forward(self, x): + B,_,H,W = x.size() + vit_outs = self.vit(x) + cls_tokens = vit_outs[-1][:,0,:].contiguous() + vit_outs = [l[:,1:,:].contiguous() for l in vit_outs] + + if self.use_pyramid_feat: + x = torch.cat(vit_outs, dim=2) + else: + x = vit_outs[-1] + + if self.up_type == 'linear': + x = self.output_upscaling(x).reshape(B,14,14,4,4,self.out_dim).permute(0,5,1,3,2,4).contiguous() + x = x.reshape(B,-1,56,56) + x = F.interpolate(x, (H,W), mode="bilinear", align_corners=False) + elif self.up_type == 'deconv': + x = x.transpose(1,2).reshape(B,-1,14,14) + x = self.output_upscaling(x) + x = F.interpolate(x, (H,W), mode="bilinear", align_corners=False) + return x, cls_tokens + + + + +class ViTEncoder(nn.Module): + def __init__(self, cfg, npoint=2048): + super(ViTEncoder, self).__init__() + self.npoint = npoint + self.rgb_net = ViT_AE(cfg) + + def forward(self, end_points): + rgb = end_points['rgb'] + rgb_choose = end_points['rgb_choose'] + dense_fm = self.get_img_feats(rgb, rgb_choose) + dense_pm = end_points['pts'] + assert rgb_choose.size(1) == self.npoint + + if not self.training and 'dense_po' in end_points.keys() and 'dense_fo' in end_points.keys(): + dense_po = end_points['dense_po'].clone() + dense_fo = end_points['dense_fo'].clone() + + # normalize point clouds + radius = torch.norm(dense_po, dim=2).max(1)[0] + dense_pm = dense_pm / (radius.reshape(-1, 1, 1) + 1e-6) + dense_po = dense_po / (radius.reshape(-1, 1, 1) + 1e-6) + + else: + tem1_rgb = end_points['tem1_rgb'] + tem1_choose = end_points['tem1_choose'] + tem1_pts = end_points['tem1_pts'] + tem2_rgb = end_points['tem2_rgb'] + tem2_choose = end_points['tem2_choose'] + tem2_pts = end_points['tem2_pts'] + + # normalize point clouds + dense_po = torch.cat([tem1_pts, tem2_pts], dim=1) + radius = torch.norm(dense_po, dim=2).max(1)[0] + dense_pm = dense_pm / (radius.reshape(-1, 1, 1) + 1e-6) + tem1_pts = tem1_pts / (radius.reshape(-1, 1, 1) + 1e-6) + tem2_pts = tem2_pts / (radius.reshape(-1, 1, 1) + 1e-6) + 
+ dense_po, dense_fo = self.get_obj_feats( + [tem1_rgb, tem2_rgb], + [tem1_pts, tem2_pts], + [tem1_choose, tem2_choose] + ) + + return dense_pm, dense_fm, dense_po, dense_fo, radius + + def get_img_feats(self, img, choose): + return get_chosen_pixel_feats(self.rgb_net(img)[0], choose) + + def get_obj_feats(self, tem_rgb_list, tem_pts_list, tem_choose_list, npoint=None): + if npoint is None: + npoint = self.npoint + + tem_feat_list =[] + for tem, tem_choose in zip(tem_rgb_list, tem_choose_list): + tem_feat_list.append(self.get_img_feats(tem, tem_choose)) + + tem_pts = torch.cat(tem_pts_list, dim=1) + tem_feat = torch.cat(tem_feat_list, dim=1) + + return sample_pts_feats(tem_pts, tem_feat, npoint) + + + diff --git a/project/grasp_box/submodules/SAM6D/model/fine_point_matching.py b/project/grasp_box/submodules/SAM6D/model/fine_point_matching.py new file mode 100644 index 0000000000000000000000000000000000000000..ef2935e498bf9e261d2218b961b3654ff797c7ef --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/fine_point_matching.py @@ -0,0 +1,133 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from transformer import SparseToDenseTransformer +from model_utils import compute_feature_similarity, compute_fine_Rt +from loss_utils import compute_correspondence_loss +from pointnet2_utils import QueryAndGroup +from pytorch_utils import SharedMLP, Conv1d + + +class FinePointMatching(nn.Module): + def __init__(self, cfg, return_feat=False): + super(FinePointMatching, self).__init__() + self.cfg = cfg + self.return_feat = return_feat + self.nblock = self.cfg.nblock + + self.in_proj = nn.Linear(cfg.input_dim, cfg.hidden_dim) + self.out_proj = nn.Linear(cfg.hidden_dim, cfg.out_dim) + + self.bg_token = nn.Parameter(torch.randn(1, 1, cfg.hidden_dim) * .02) + self.PE = PositionalEncoding(cfg.hidden_dim, r1=cfg.pe_radius1, r2=cfg.pe_radius2) + + self.transformers = [] + for _ in range(self.nblock): + self.transformers.append(SparseToDenseTransformer( + 
cfg.hidden_dim, + num_heads=4, + sparse_blocks=['self', 'cross'], + dropout=None, + activation_fn='ReLU', + focusing_factor=cfg.focusing_factor, + with_bg_token=True, + replace_bg_token=True + )) + self.transformers = nn.ModuleList(self.transformers) + + def forward(self, p1, f1, geo1, fps_idx1, p2, f2, geo2, fps_idx2, radius, end_points): + B = p1.size(0) + + init_R = end_points['init_R'] + init_t = end_points['init_t'] + p1_ = (p1 - init_t.unsqueeze(1)) @ init_R + + f1 = self.in_proj(f1) + self.PE(p1_) + f1 = torch.cat([self.bg_token.repeat(B,1,1), f1], dim=1) # adding bg + + f2 = self.in_proj(f2) + self.PE(p2) + f2 = torch.cat([self.bg_token.repeat(B,1,1), f2], dim=1) # adding bg + + atten_list = [] + for idx in range(self.nblock): + f1, f2 = self.transformers[idx](f1, geo1, fps_idx1, f2, geo2, fps_idx2) + + if self.training or idx==self.nblock-1: + atten_list.append(compute_feature_similarity( + self.out_proj(f1), + self.out_proj(f2), + self.cfg.sim_type, + self.cfg.temp, + self.cfg.normalize_feat + )) + + if self.training: + gt_R = end_points['rotation_label'] + gt_t = end_points['translation_label'] / (radius.reshape(-1, 1)+1e-6) + + end_points = compute_correspondence_loss( + end_points, atten_list, p1, p2, gt_R, gt_t, + dis_thres=self.cfg.loss_dis_thres, + loss_str='fine' + ) + else: + + + pred_R, pred_t, pred_pose_score, points = compute_fine_Rt( + atten_list[-1], p1, p2, radius, end_points, + end_points['model'] / (radius.reshape(-1, 1, 1) + 1e-6), + ) + + end_points['pred_R'] = pred_R + end_points['pred_t'] = pred_t * (radius.reshape(-1, 1)+1e-6) + end_points['pred_pose_score'] = pred_pose_score + + if torch.equal(pred_t, torch.tensor([0.,0.,0.], device = 'cuda').reshape(1,3)): + end_points['pred_pose_score'] = None + + if self.return_feat: + return end_points, self.out_proj(f1), self.out_proj(f2) + else: + + return end_points, points + + + +class PositionalEncoding(nn.Module): + def __init__(self, out_dim, r1=0.1, r2=0.2, nsample1=32, nsample2=64, 
use_xyz=True, bn=True): + super(PositionalEncoding, self).__init__() + self.group1 = QueryAndGroup(r1, nsample1, use_xyz=use_xyz) + self.group2 = QueryAndGroup(r2, nsample2, use_xyz=use_xyz) + input_dim = 6 if use_xyz else 3 + + self.mlp1 = SharedMLP([input_dim, 32, 64, 128], bn=bn) + self.mlp2 = SharedMLP([input_dim, 32, 64, 128], bn=bn) + self.mlp3 = Conv1d(256, out_dim, 1, activation=None, bn=None) + + def forward(self, pts1, pts2=None): + if pts2 is None: + pts2 = pts1 + + # scale1 + feat1 = self.group1( + pts1.contiguous(), pts2.contiguous(), pts1.transpose(1,2).contiguous() + ) + feat1 = self.mlp1(feat1) + feat1 = F.max_pool2d( + feat1, kernel_size=[1, feat1.size(3)] + ) + + # scale2 + feat2 = self.group2( + pts1.contiguous(), pts2.contiguous(), pts1.transpose(1,2).contiguous() + ) + feat2 = self.mlp2(feat2) + feat2 = F.max_pool2d( + feat2, kernel_size=[1, feat2.size(3)] + ) + + feat = torch.cat([feat1, feat2], dim=1).squeeze(-1) + feat = self.mlp3(feat).transpose(1,2) + return feat + diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pointnet2_modules.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pointnet2_modules.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dcadd152dd7b33d88b069cc33d540e39416858d3 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pointnet2_modules.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pointnet2_utils.cpython-310.pyc b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pointnet2_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fc53941620e24121983b5a50b1e5bb8d6bc3944a Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pointnet2_utils.cpython-310.pyc differ diff --git 
a/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pointnet2_utils.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pointnet2_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bd20d88f56df9364e2e5ca9f01907e6133a52f52 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pointnet2_utils.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pytorch_utils.cpython-310.pyc b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pytorch_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8ad60a74a46004cf43810a0e3e96d9b9c9ad5f83 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pytorch_utils.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pytorch_utils.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pytorch_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4efd855ac39d83bbba99e572999e308b7bc76907 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/__pycache__/pytorch_utils.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/ball_query.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/ball_query.h new file mode 100644 index 0000000000000000000000000000000000000000..4a65b5ac082ba2f3d835365749d3abb806193538 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/ball_query.h @@ -0,0 +1,10 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
+ +#pragma once +#include + +at::Tensor ball_query(at::Tensor new_xyz, at::Tensor xyz, const float radius, + const int nsample); diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/cuda_utils.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/cuda_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..d4c4bb4e1d93cb75a66000364581b9a686bb1ef5 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/cuda_utils.h @@ -0,0 +1,46 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#ifndef _CUDA_UTILS_H +#define _CUDA_UTILS_H + +#include +#include +#include + +#include +#include + +#include + +#define TOTAL_THREADS 512 + +inline int opt_n_threads(int work_size) { + const int pow_2 = std::log(static_cast(work_size)) / std::log(2.0); + + return max(min(1 << pow_2, TOTAL_THREADS), 1); +} + +inline dim3 opt_block_config(int x, int y) { + const int x_threads = opt_n_threads(x); + const int y_threads = + max(min(opt_n_threads(y), TOTAL_THREADS / x_threads), 1); + dim3 block_config(x_threads, y_threads, 1); + + return block_config; +} + +#define CUDA_CHECK_ERRORS() \ + do { \ + cudaError_t err = cudaGetLastError(); \ + if (cudaSuccess != err) { \ + fprintf(stderr, "CUDA kernel failed : %s\n%s at L:%d in %s\n", \ + cudaGetErrorString(err), __PRETTY_FUNCTION__, __LINE__, \ + __FILE__); \ + exit(-1); \ + } \ + } while (0) + +#endif diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/group_points.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/group_points.h new file mode 100644 index 0000000000000000000000000000000000000000..24e7cc78e4c534fce5aba1f0d2db96ca9099dff0 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/group_points.h @@ -0,0 +1,10 @@ +// 
Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#pragma once +#include + +at::Tensor group_points(at::Tensor points, at::Tensor idx); +at::Tensor group_points_grad(at::Tensor grad_out, at::Tensor idx, const int n); diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/interpolate.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/interpolate.h new file mode 100644 index 0000000000000000000000000000000000000000..2af34c6699244b870f267bdee6a8bf0d40edad0f --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/interpolate.h @@ -0,0 +1,15 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#pragma once + +#include +#include + +std::vector three_nn(at::Tensor unknowns, at::Tensor knows); +at::Tensor three_interpolate(at::Tensor points, at::Tensor idx, + at::Tensor weight); +at::Tensor three_interpolate_grad(at::Tensor grad_out, at::Tensor idx, + at::Tensor weight, const int m); diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/sampling.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/sampling.h new file mode 100644 index 0000000000000000000000000000000000000000..366ef31cf5010e6248ac0a59ae65be40fdbd3a61 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/sampling.h @@ -0,0 +1,11 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
+ +#pragma once +#include + +at::Tensor gather_points(at::Tensor points, at::Tensor idx); +at::Tensor gather_points_grad(at::Tensor grad_out, at::Tensor idx, const int n); +at::Tensor furthest_point_sampling(at::Tensor points, const int nsamples); diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/utils.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/utils.h new file mode 100644 index 0000000000000000000000000000000000000000..925f7697eb36842f8be4918ea4ca0b1d9646da83 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/include/utils.h @@ -0,0 +1,30 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#pragma once +#include +#include + +#define CHECK_CUDA(x) \ + do { \ + TORCH_CHECK(x.type().is_cuda(), #x " must be a CUDA tensor"); \ + } while (0) + +#define CHECK_CONTIGUOUS(x) \ + do { \ + TORCH_CHECK(x.is_contiguous(), #x " must be a contiguous tensor"); \ + } while (0) + +#define CHECK_IS_INT(x) \ + do { \ + TORCH_CHECK(x.scalar_type() == at::ScalarType::Int, \ + #x " must be an int tensor"); \ + } while (0) + +#define CHECK_IS_FLOAT(x) \ + do { \ + TORCH_CHECK(x.scalar_type() == at::ScalarType::Float, \ + #x " must be a float tensor"); \ + } while (0) diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query.cpp b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b9cf4f915adc45ed8a06bccdb5f60216ee849fc8 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query.cpp @@ -0,0 +1,37 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
+ +#include "ball_query.h" +#include "utils.h" + +void query_ball_point_kernel_wrapper(int b, int n, int m, float radius, + int nsample, const float *new_xyz, + const float *xyz, int *idx); + +at::Tensor ball_query(at::Tensor new_xyz, at::Tensor xyz, const float radius, + const int nsample) { + CHECK_CONTIGUOUS(new_xyz); + CHECK_CONTIGUOUS(xyz); + CHECK_IS_FLOAT(new_xyz); + CHECK_IS_FLOAT(xyz); + + if (new_xyz.type().is_cuda()) { + CHECK_CUDA(xyz); + } + + at::Tensor idx = + torch::zeros({new_xyz.size(0), new_xyz.size(1), nsample}, + at::device(new_xyz.device()).dtype(at::ScalarType::Int)); + + if (new_xyz.type().is_cuda()) { + query_ball_point_kernel_wrapper(xyz.size(0), xyz.size(1), new_xyz.size(1), + radius, nsample, new_xyz.data(), + xyz.data(), idx.data()); + } else { + TORCH_CHECK(false, "CPU not supported"); + } + + return idx; +} diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query.h new file mode 100644 index 0000000000000000000000000000000000000000..4a65b5ac082ba2f3d835365749d3abb806193538 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query.h @@ -0,0 +1,10 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#pragma once +#include + +at::Tensor ball_query(at::Tensor new_xyz, at::Tensor xyz, const float radius, + const int nsample); diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query_gpu.cu b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query_gpu.cu new file mode 100644 index 0000000000000000000000000000000000000000..cfc2eebb1b6528cda5b6a95849eefbba5237448b --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query_gpu.cu @@ -0,0 +1,59 @@ +// Copyright (c) Facebook, Inc. 
and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#include +#include +#include + +#include "cuda_utils.h" + +// input: new_xyz(b, m, 3) xyz(b, n, 3) +// output: idx(b, m, nsample) +__global__ void query_ball_point_kernel(int b, int n, int m, float radius, + int nsample, + const float *__restrict__ new_xyz, + const float *__restrict__ xyz, + int *__restrict__ idx) { + int batch_index = blockIdx.x; + xyz += batch_index * n * 3; + new_xyz += batch_index * m * 3; + idx += m * nsample * batch_index; + + int index = threadIdx.x; + int stride = blockDim.x; + + float radius2 = radius * radius; + for (int j = index; j < m; j += stride) { + float new_x = new_xyz[j * 3 + 0]; + float new_y = new_xyz[j * 3 + 1]; + float new_z = new_xyz[j * 3 + 2]; + for (int k = 0, cnt = 0; k < n && cnt < nsample; ++k) { + float x = xyz[k * 3 + 0]; + float y = xyz[k * 3 + 1]; + float z = xyz[k * 3 + 2]; + float d2 = (new_x - x) * (new_x - x) + (new_y - y) * (new_y - y) + + (new_z - z) * (new_z - z); + if (d2 < radius2) { + if (cnt == 0) { + for (int l = 0; l < nsample; ++l) { + idx[j * nsample + l] = k; + } + } + idx[j * nsample + cnt] = k; + ++cnt; + } + } + } +} + +void query_ball_point_kernel_wrapper(int b, int n, int m, float radius, + int nsample, const float *new_xyz, + const float *xyz, int *idx) { + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + query_ball_point_kernel<<>>( + b, n, m, radius, nsample, new_xyz, xyz, idx); + + CUDA_CHECK_ERRORS(); +} diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/bindings.cpp b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/bindings.cpp new file mode 100644 index 0000000000000000000000000000000000000000..9783d87adef7eec31cae2efe4fc0b293e09f6177 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/bindings.cpp @@ -0,0 +1,24 @@ +// Copyright (c) Facebook, Inc. 
and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#include "ball_query.h" +#include "group_points.h" +#include "interpolate.h" +#include "sampling.h" + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("gather_points", &gather_points); + m.def("gather_points_grad", &gather_points_grad); + m.def("furthest_point_sampling", &furthest_point_sampling); + + m.def("three_nn", &three_nn); + m.def("three_interpolate", &three_interpolate); + m.def("three_interpolate_grad", &three_interpolate_grad); + + m.def("ball_query", &ball_query); + + m.def("group_points", &group_points); + m.def("group_points_grad", &group_points_grad); +} diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points.cpp b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ab2fe1f1c1d4ce6ddf80a7c26b8bb987fa5be9de --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points.cpp @@ -0,0 +1,65 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
+ +#include "group_points.h" +#include "utils.h" + +void group_points_kernel_wrapper(int b, int c, int n, int npoints, int nsample, + const float *points, const int *idx, + float *out); + +void group_points_grad_kernel_wrapper(int b, int c, int n, int npoints, + int nsample, const float *grad_out, + const int *idx, float *grad_points); + +at::Tensor group_points(at::Tensor points, at::Tensor idx) { + CHECK_CONTIGUOUS(points); + CHECK_CONTIGUOUS(idx); + CHECK_IS_FLOAT(points); + CHECK_IS_INT(idx); + + if (points.type().is_cuda()) { + CHECK_CUDA(idx); + } + + at::Tensor output = + torch::zeros({points.size(0), points.size(1), idx.size(1), idx.size(2)}, + at::device(points.device()).dtype(at::ScalarType::Float)); + + if (points.type().is_cuda()) { + group_points_kernel_wrapper(points.size(0), points.size(1), points.size(2), + idx.size(1), idx.size(2), points.data(), + idx.data(), output.data()); + } else { + TORCH_CHECK(false, "CPU not supported"); + } + + return output; +} + +at::Tensor group_points_grad(at::Tensor grad_out, at::Tensor idx, const int n) { + CHECK_CONTIGUOUS(grad_out); + CHECK_CONTIGUOUS(idx); + CHECK_IS_FLOAT(grad_out); + CHECK_IS_INT(idx); + + if (grad_out.type().is_cuda()) { + CHECK_CUDA(idx); + } + + at::Tensor output = + torch::zeros({grad_out.size(0), grad_out.size(1), n}, + at::device(grad_out.device()).dtype(at::ScalarType::Float)); + + if (grad_out.type().is_cuda()) { + group_points_grad_kernel_wrapper( + grad_out.size(0), grad_out.size(1), n, idx.size(1), idx.size(2), + grad_out.data(), idx.data(), output.data()); + } else { + TORCH_CHECK(false, "CPU not supported"); + } + + return output; +} diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points.h new file mode 100644 index 0000000000000000000000000000000000000000..24e7cc78e4c534fce5aba1f0d2db96ca9099dff0 --- /dev/null +++ 
b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points.h @@ -0,0 +1,10 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#pragma once +#include + +at::Tensor group_points(at::Tensor points, at::Tensor idx); +at::Tensor group_points_grad(at::Tensor grad_out, at::Tensor idx, const int n); diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points_gpu.cu b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points_gpu.cu new file mode 100644 index 0000000000000000000000000000000000000000..98a3be1225a6797f1aa980d7b9910699db28aa86 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points_gpu.cu @@ -0,0 +1,80 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
+ +#include +#include + +#include "cuda_utils.h" + +// input: points(b, c, n) idx(b, npoints, nsample) +// output: out(b, c, npoints, nsample) +__global__ void group_points_kernel(int b, int c, int n, int npoints, + int nsample, + const float *__restrict__ points, + const int *__restrict__ idx, + float *__restrict__ out) { + int batch_index = blockIdx.x; + points += batch_index * n * c; + idx += batch_index * npoints * nsample; + out += batch_index * npoints * nsample * c; + + const int index = threadIdx.y * blockDim.x + threadIdx.x; + const int stride = blockDim.y * blockDim.x; + for (int i = index; i < c * npoints; i += stride) { + const int l = i / npoints; + const int j = i % npoints; + for (int k = 0; k < nsample; ++k) { + int ii = idx[j * nsample + k]; + out[(l * npoints + j) * nsample + k] = points[l * n + ii]; + } + } +} + +void group_points_kernel_wrapper(int b, int c, int n, int npoints, int nsample, + const float *points, const int *idx, + float *out) { + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + group_points_kernel<<>>( + b, c, n, npoints, nsample, points, idx, out); + + CUDA_CHECK_ERRORS(); +} + +// input: grad_out(b, c, npoints, nsample), idx(b, npoints, nsample) +// output: grad_points(b, c, n) +__global__ void group_points_grad_kernel(int b, int c, int n, int npoints, + int nsample, + const float *__restrict__ grad_out, + const int *__restrict__ idx, + float *__restrict__ grad_points) { + int batch_index = blockIdx.x; + grad_out += batch_index * npoints * nsample * c; + idx += batch_index * npoints * nsample; + grad_points += batch_index * n * c; + + const int index = threadIdx.y * blockDim.x + threadIdx.x; + const int stride = blockDim.y * blockDim.x; + for (int i = index; i < c * npoints; i += stride) { + const int l = i / npoints; + const int j = i % npoints; + for (int k = 0; k < nsample; ++k) { + int ii = idx[j * nsample + k]; + atomicAdd(grad_points + l * n + ii, + grad_out[(l * npoints + j) * nsample + k]); + } + } +} + 
+void group_points_grad_kernel_wrapper(int b, int c, int n, int npoints, + int nsample, const float *grad_out, + const int *idx, float *grad_points) { + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + group_points_grad_kernel<<>>( + b, c, n, npoints, nsample, grad_out, idx, grad_points); + + CUDA_CHECK_ERRORS(); +} diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate.cpp b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate.cpp new file mode 100644 index 0000000000000000000000000000000000000000..065ac313a718455d3999da4482aa691828ca51bf --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate.cpp @@ -0,0 +1,104 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#include "interpolate.h" +#include "utils.h" + +void three_nn_kernel_wrapper(int b, int n, int m, const float *unknown, + const float *known, float *dist2, int *idx); +void three_interpolate_kernel_wrapper(int b, int c, int m, int n, + const float *points, const int *idx, + const float *weight, float *out); +void three_interpolate_grad_kernel_wrapper(int b, int c, int n, int m, + const float *grad_out, + const int *idx, const float *weight, + float *grad_points); + +std::vector three_nn(at::Tensor unknowns, at::Tensor knows) { + CHECK_CONTIGUOUS(unknowns); + CHECK_CONTIGUOUS(knows); + CHECK_IS_FLOAT(unknowns); + CHECK_IS_FLOAT(knows); + + if (unknowns.type().is_cuda()) { + CHECK_CUDA(knows); + } + + at::Tensor idx = + torch::zeros({unknowns.size(0), unknowns.size(1), 3}, + at::device(unknowns.device()).dtype(at::ScalarType::Int)); + at::Tensor dist2 = + torch::zeros({unknowns.size(0), unknowns.size(1), 3}, + at::device(unknowns.device()).dtype(at::ScalarType::Float)); + + if (unknowns.type().is_cuda()) { + three_nn_kernel_wrapper(unknowns.size(0), 
unknowns.size(1), knows.size(1), + unknowns.data(), knows.data(), + dist2.data(), idx.data()); + } else { + TORCH_CHECK(false, "CPU not supported"); + } + + return {dist2, idx}; +} + +at::Tensor three_interpolate(at::Tensor points, at::Tensor idx, + at::Tensor weight) { + CHECK_CONTIGUOUS(points); + CHECK_CONTIGUOUS(idx); + CHECK_CONTIGUOUS(weight); + CHECK_IS_FLOAT(points); + CHECK_IS_INT(idx); + CHECK_IS_FLOAT(weight); + + if (points.type().is_cuda()) { + CHECK_CUDA(idx); + CHECK_CUDA(weight); + } + + at::Tensor output = + torch::zeros({points.size(0), points.size(1), idx.size(1)}, + at::device(points.device()).dtype(at::ScalarType::Float)); + + if (points.type().is_cuda()) { + three_interpolate_kernel_wrapper( + points.size(0), points.size(1), points.size(2), idx.size(1), + points.data(), idx.data(), weight.data(), + output.data()); + } else { + TORCH_CHECK(false, "CPU not supported"); + } + + return output; +} +at::Tensor three_interpolate_grad(at::Tensor grad_out, at::Tensor idx, + at::Tensor weight, const int m) { + CHECK_CONTIGUOUS(grad_out); + CHECK_CONTIGUOUS(idx); + CHECK_CONTIGUOUS(weight); + CHECK_IS_FLOAT(grad_out); + CHECK_IS_INT(idx); + CHECK_IS_FLOAT(weight); + + if (grad_out.type().is_cuda()) { + CHECK_CUDA(idx); + CHECK_CUDA(weight); + } + + at::Tensor output = + torch::zeros({grad_out.size(0), grad_out.size(1), m}, + at::device(grad_out.device()).dtype(at::ScalarType::Float)); + + if (grad_out.type().is_cuda()) { + three_interpolate_grad_kernel_wrapper( + grad_out.size(0), grad_out.size(1), grad_out.size(2), m, + grad_out.data(), idx.data(), weight.data(), + output.data()); + } else { + TORCH_CHECK(false, "CPU not supported"); + } + + return output; +} diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate.h new file mode 100644 index 0000000000000000000000000000000000000000..2af34c6699244b870f267bdee6a8bf0d40edad0f --- /dev/null +++ 
b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate.h @@ -0,0 +1,15 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#pragma once + +#include +#include + +std::vector three_nn(at::Tensor unknowns, at::Tensor knows); +at::Tensor three_interpolate(at::Tensor points, at::Tensor idx, + at::Tensor weight); +at::Tensor three_interpolate_grad(at::Tensor grad_out, at::Tensor idx, + at::Tensor weight, const int m); diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate_gpu.cu b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate_gpu.cu new file mode 100644 index 0000000000000000000000000000000000000000..b13dbfa38dc4014500e0e788d6d310e1a56bdd65 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate_gpu.cu @@ -0,0 +1,159 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
+ +#include +#include +#include + +#include "cuda_utils.h" + +// input: unknown(b, n, 3) known(b, m, 3) +// output: dist2(b, n, 3), idx(b, n, 3) +__global__ void three_nn_kernel(int b, int n, int m, + const float *__restrict__ unknown, + const float *__restrict__ known, + float *__restrict__ dist2, + int *__restrict__ idx) { + int batch_index = blockIdx.x; + unknown += batch_index * n * 3; + known += batch_index * m * 3; + dist2 += batch_index * n * 3; + idx += batch_index * n * 3; + + int index = threadIdx.x; + int stride = blockDim.x; + for (int j = index; j < n; j += stride) { + float ux = unknown[j * 3 + 0]; + float uy = unknown[j * 3 + 1]; + float uz = unknown[j * 3 + 2]; + + double best1 = 1e40, best2 = 1e40, best3 = 1e40; + int besti1 = 0, besti2 = 0, besti3 = 0; + for (int k = 0; k < m; ++k) { + float x = known[k * 3 + 0]; + float y = known[k * 3 + 1]; + float z = known[k * 3 + 2]; + float d = (ux - x) * (ux - x) + (uy - y) * (uy - y) + (uz - z) * (uz - z); + if (d < best1) { + best3 = best2; + besti3 = besti2; + best2 = best1; + besti2 = besti1; + best1 = d; + besti1 = k; + } else if (d < best2) { + best3 = best2; + besti3 = besti2; + best2 = d; + besti2 = k; + } else if (d < best3) { + best3 = d; + besti3 = k; + } + } + dist2[j * 3 + 0] = best1; + dist2[j * 3 + 1] = best2; + dist2[j * 3 + 2] = best3; + + idx[j * 3 + 0] = besti1; + idx[j * 3 + 1] = besti2; + idx[j * 3 + 2] = besti3; + } +} + +void three_nn_kernel_wrapper(int b, int n, int m, const float *unknown, + const float *known, float *dist2, int *idx) { + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + three_nn_kernel<<>>(b, n, m, unknown, known, + dist2, idx); + + CUDA_CHECK_ERRORS(); +} + +// input: points(b, c, m), idx(b, n, 3), weight(b, n, 3) +// output: out(b, c, n) +__global__ void three_interpolate_kernel(int b, int c, int m, int n, + const float *__restrict__ points, + const int *__restrict__ idx, + const float *__restrict__ weight, + float *__restrict__ out) { + int batch_index = 
blockIdx.x; + points += batch_index * m * c; + + idx += batch_index * n * 3; + weight += batch_index * n * 3; + + out += batch_index * n * c; + + const int index = threadIdx.y * blockDim.x + threadIdx.x; + const int stride = blockDim.y * blockDim.x; + for (int i = index; i < c * n; i += stride) { + const int l = i / n; + const int j = i % n; + float w1 = weight[j * 3 + 0]; + float w2 = weight[j * 3 + 1]; + float w3 = weight[j * 3 + 2]; + + int i1 = idx[j * 3 + 0]; + int i2 = idx[j * 3 + 1]; + int i3 = idx[j * 3 + 2]; + + out[i] = points[l * m + i1] * w1 + points[l * m + i2] * w2 + + points[l * m + i3] * w3; + } +} + +void three_interpolate_kernel_wrapper(int b, int c, int m, int n, + const float *points, const int *idx, + const float *weight, float *out) { + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + three_interpolate_kernel<<>>( + b, c, m, n, points, idx, weight, out); + + CUDA_CHECK_ERRORS(); +} + +// input: grad_out(b, c, n), idx(b, n, 3), weight(b, n, 3) +// output: grad_points(b, c, m) + +__global__ void three_interpolate_grad_kernel( + int b, int c, int n, int m, const float *__restrict__ grad_out, + const int *__restrict__ idx, const float *__restrict__ weight, + float *__restrict__ grad_points) { + int batch_index = blockIdx.x; + grad_out += batch_index * n * c; + idx += batch_index * n * 3; + weight += batch_index * n * 3; + grad_points += batch_index * m * c; + + const int index = threadIdx.y * blockDim.x + threadIdx.x; + const int stride = blockDim.y * blockDim.x; + for (int i = index; i < c * n; i += stride) { + const int l = i / n; + const int j = i % n; + float w1 = weight[j * 3 + 0]; + float w2 = weight[j * 3 + 1]; + float w3 = weight[j * 3 + 2]; + + int i1 = idx[j * 3 + 0]; + int i2 = idx[j * 3 + 1]; + int i3 = idx[j * 3 + 2]; + + atomicAdd(grad_points + l * m + i1, grad_out[i] * w1); + atomicAdd(grad_points + l * m + i2, grad_out[i] * w2); + atomicAdd(grad_points + l * m + i3, grad_out[i] * w3); + } +} + +void 
three_interpolate_grad_kernel_wrapper(int b, int c, int n, int m, + const float *grad_out, + const int *idx, const float *weight, + float *grad_points) { + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + three_interpolate_grad_kernel<<>>( + b, c, n, m, grad_out, idx, weight, grad_points); + + CUDA_CHECK_ERRORS(); +} diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/sampling.cpp b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/sampling.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0a76abfbe8c6240327b4fb29c09b05b26037d461 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/sampling.cpp @@ -0,0 +1,91 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#include "sampling.h" +#include "utils.h" + +void gather_points_kernel_wrapper(int b, int c, int n, int npoints, + const float *points, const int *idx, + float *out); +void gather_points_grad_kernel_wrapper(int b, int c, int n, int npoints, + const float *grad_out, const int *idx, + float *grad_points); + +void furthest_point_sampling_kernel_wrapper(int b, int n, int m, + const float *dataset, float *temp, + int *idxs); + +at::Tensor gather_points(at::Tensor points, at::Tensor idx) { + CHECK_CONTIGUOUS(points); + CHECK_CONTIGUOUS(idx); + CHECK_IS_FLOAT(points); + CHECK_IS_INT(idx); + + if (points.type().is_cuda()) { + CHECK_CUDA(idx); + } + + at::Tensor output = + torch::zeros({points.size(0), points.size(1), idx.size(1)}, + at::device(points.device()).dtype(at::ScalarType::Float)); + + if (points.type().is_cuda()) { + gather_points_kernel_wrapper(points.size(0), points.size(1), points.size(2), + idx.size(1), points.data(), + idx.data(), output.data()); + } else { + TORCH_CHECK(false, "CPU not supported"); + } + + return output; +} + +at::Tensor 
gather_points_grad(at::Tensor grad_out, at::Tensor idx, + const int n) { + CHECK_CONTIGUOUS(grad_out); + CHECK_CONTIGUOUS(idx); + CHECK_IS_FLOAT(grad_out); + CHECK_IS_INT(idx); + + if (grad_out.type().is_cuda()) { + CHECK_CUDA(idx); + } + + at::Tensor output = + torch::zeros({grad_out.size(0), grad_out.size(1), n}, + at::device(grad_out.device()).dtype(at::ScalarType::Float)); + + if (grad_out.type().is_cuda()) { + gather_points_grad_kernel_wrapper(grad_out.size(0), grad_out.size(1), n, + idx.size(1), grad_out.data(), + idx.data(), output.data()); + } else { + TORCH_CHECK(false, "CPU not supported"); + } + + return output; +} +at::Tensor furthest_point_sampling(at::Tensor points, const int nsamples) { + CHECK_CONTIGUOUS(points); + CHECK_IS_FLOAT(points); + + at::Tensor output = + torch::zeros({points.size(0), nsamples}, + at::device(points.device()).dtype(at::ScalarType::Int)); + + at::Tensor tmp = + torch::full({points.size(0), points.size(1)}, 1e10, + at::device(points.device()).dtype(at::ScalarType::Float)); + + if (points.type().is_cuda()) { + furthest_point_sampling_kernel_wrapper( + points.size(0), points.size(1), nsamples, points.data(), + tmp.data(), output.data()); + } else { + TORCH_CHECK(false, "CPU not supported"); + } + + return output; +} diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/sampling.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/sampling.h new file mode 100644 index 0000000000000000000000000000000000000000..366ef31cf5010e6248ac0a59ae65be40fdbd3a61 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/sampling.h @@ -0,0 +1,11 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
+ +#pragma once +#include + +at::Tensor gather_points(at::Tensor points, at::Tensor idx); +at::Tensor gather_points_grad(at::Tensor grad_out, at::Tensor idx, const int n); +at::Tensor furthest_point_sampling(at::Tensor points, const int nsamples); diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/utils.h b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/utils.h new file mode 100644 index 0000000000000000000000000000000000000000..925f7697eb36842f8be4918ea4ca0b1d9646da83 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/utils.h @@ -0,0 +1,30 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +#pragma once +#include +#include + +#define CHECK_CUDA(x) \ + do { \ + TORCH_CHECK(x.type().is_cuda(), #x " must be a CUDA tensor"); \ + } while (0) + +#define CHECK_CONTIGUOUS(x) \ + do { \ + TORCH_CHECK(x.is_contiguous(), #x " must be a contiguous tensor"); \ + } while (0) + +#define CHECK_IS_INT(x) \ + do { \ + TORCH_CHECK(x.scalar_type() == at::ScalarType::Int, \ + #x " must be an int tensor"); \ + } while (0) + +#define CHECK_IS_FLOAT(x) \ + do { \ + TORCH_CHECK(x.scalar_type() == at::ScalarType::Float, \ + #x " must be a float tensor"); \ + } while (0) diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/.ninja_log b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/.ninja_log new file mode 100644 index 0000000000000000000000000000000000000000..17dd87be28652df2457320d163b06a3957ca5c4a --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/.ninja_log @@ -0,0 +1,19 @@ +# ninja log v5 +1 9715 1770135622526467543 
/home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/interpolate_gpu.o c681e007447d7a76 +0 10384 1770135623196474281 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/ball_query_gpu.o 99662dc0c199429a +0 10741 1770135623553477871 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/group_points_gpu.o 2813b340cb9ec528 +1 10820 1770135623633478675 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/sampling_gpu.o 3f337ffeee560c4f +0 12969 1770135625782500285 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/interpolate.o 4cfef8f4a650c8fe +1 13044 1770135625857501039 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/sampling.o 75de241047224dbb +0 13108 1770135625922501693 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/ball_query.o 11c6c1e783f7535d +0 14014 1770135626829510813 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/bindings.o e4ff702c891d8fbb +0 14227 1770135627042512955 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/group_points.o a519bad3c60eb4dc +12 10243 1770144787241624884 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/group_points_gpu.o 79a9353465b7b553 +11 10253 1770144787251624985 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/ball_query_gpu.o 93e6a5b6f402418d +12 10285 1770144787283625307 
/home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/interpolate_gpu.o 72cbc1647becf53e +13 10496 1770144787493627418 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/sampling_gpu.o 1022fcce2655da33 +12 13673 1770144790671659376 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/group_points.o 8dacbed89c36e5e6 +12 13754 1770144790752660190 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/interpolate.o 5bf4ca6a6300fa03 +12 13783 1770144790781660482 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/sampling.o c01d1bdcecea78f3 +11 13827 1770144790825660924 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/bindings.o 50e513f230dfdc39 +11 16189 1770144793188684686 /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/ball_query.o 9e5ecf84e7a20e34 diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/ball_query_gpu.o b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/ball_query_gpu.o new file mode 100644 index 0000000000000000000000000000000000000000..affd9ad19b277cb9071f71968d7a18061b68fbce Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/ball_query_gpu.o differ diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/group_points_gpu.o 
b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/group_points_gpu.o new file mode 100644 index 0000000000000000000000000000000000000000..85d03b73f0b8cd170d5afe1284f8b957ec4cd30b Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/group_points_gpu.o differ diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/interpolate_gpu.o b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/interpolate_gpu.o new file mode 100644 index 0000000000000000000000000000000000000000..0095a20e91f58a3fb025a35e072b3dbc5bf86ab7 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/interpolate_gpu.o differ diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/build.ninja b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/build.ninja new file mode 100644 index 0000000000000000000000000000000000000000..03d3477c00db9023ef02e0efc0946dc9cac28b67 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/build.ninja @@ -0,0 +1,45 @@ +ninja_required_version = 1.3 +cxx = c++ +nvcc = /usr/local/cuda/bin/nvcc + +cflags = -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O2 -Wall -g -fstack-protector-strong -Wformat -Werror=format-security -g -fwrapv -O2 -fPIC -I_ext_src/include -I/home/kyber/charles/project/.venv/lib/python3.10/site-packages/torch/include -I/home/kyber/charles/project/.venv/lib/python3.10/site-packages/torch/include/torch/csrc/api/include -I/usr/local/cuda/include -I/home/kyber/charles/project/.venv/include -I/usr/include/python3.10 -c +post_cflags = -DTORCH_API_INCLUDE_EXTENSION_H -DTORCH_EXTENSION_NAME=_ext -std=c++17 +cuda_cflags = 
-I_ext_src/include -I/home/kyber/charles/project/.venv/lib/python3.10/site-packages/torch/include -I/home/kyber/charles/project/.venv/lib/python3.10/site-packages/torch/include/torch/csrc/api/include -I/usr/local/cuda/include -I/home/kyber/charles/project/.venv/include -I/usr/include/python3.10 -c +cuda_post_cflags = -D__CUDA_NO_HALF_OPERATORS__ -D__CUDA_NO_HALF_CONVERSIONS__ -D__CUDA_NO_BFLOAT16_CONVERSIONS__ -D__CUDA_NO_HALF2_OPERATORS__ --expt-relaxed-constexpr --compiler-options ''"'"'-fPIC'"'"'' -O3 -DCUDA_HAS_FP16=1 -D__CUDA_NO_HALF_OPERATORS__ -D__CUDA_NO_HALF_CONVERSIONS__ -D__CUDA_NO_HALF2_OPERATORS__ -DTORCH_API_INCLUDE_EXTENSION_H -DTORCH_EXTENSION_NAME=_ext -gencode=arch=compute_89,code=compute_89 -gencode=arch=compute_89,code=sm_89 -std=c++17 +cuda_dlink_post_cflags = +sycl_dlink_post_cflags = +ldflags = + +rule compile + command = $cxx -MMD -MF $out.d $cflags -c $in -o $out $post_cflags + depfile = $out.d + deps = gcc + +rule cuda_compile + depfile = $out.d + deps = gcc + command = $nvcc --generate-dependencies-with-compile --dependency-output $out.d $cuda_cflags -c $in -o $out $cuda_post_cflags + + + + + + + +build /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/ball_query.o: compile /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query.cpp +build /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/ball_query_gpu.o: cuda_compile /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/ball_query_gpu.cu +build /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/bindings.o: compile /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/bindings.cpp +build 
/home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/group_points.o: compile /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points.cpp +build /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/group_points_gpu.o: cuda_compile /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/group_points_gpu.cu +build /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/interpolate.o: compile /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate.cpp +build /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/interpolate_gpu.o: cuda_compile /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/interpolate_gpu.cu +build /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/sampling.o: compile /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/sampling.cpp +build /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-310/_ext_src/src/sampling_gpu.o: cuda_compile /home/kyber/charles/project/grasp_box/submodules/SAM6D/model/pointnet2/_ext_src/src/sampling_gpu.cu + + + + + + + + diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/.ninja_log b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/.ninja_log new file mode 100644 index 0000000000000000000000000000000000000000..c2cfcda73318d8c4ed524a4fbb91ab9f8b754352 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/.ninja_log @@ -0,0 +1,10 @@ +# ninja 
log v5 +0 11553 1727897164236010993 /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/ball_query_gpu.o 2590d3bd35b1ae15 +1 11556 1727897164240010993 /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/group_points_gpu.o 58f08b9c12cd45da +1 11558 1727897164244010993 /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/interpolate_gpu.o 21daa0944282dde8 +2 11715 1727897164400010993 /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/sampling_gpu.o e1edabb08c427d2 +1 13391 1727897166072010997 /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/group_points.o 6d9f7168f74bdf55 +1 13527 1727897166208010997 /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/interpolate.o a1a8af9e6c1f09ce +2 13805 1727897166488010997 /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/sampling.o 8fb93d9e28b25501 +1 13943 1727897166628010998 /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/bindings.o 1a8390a5b9495a43 +0 14823 1727897167508010999 /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/ball_query.o 8ddf03c07ee31f25 diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/ball_query_gpu.o b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/ball_query_gpu.o new file mode 100644 index 0000000000000000000000000000000000000000..11291166f92d8e14a95d19d5b305b254b14f4c1e Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/ball_query_gpu.o differ diff --git 
a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/group_points_gpu.o b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/group_points_gpu.o new file mode 100644 index 0000000000000000000000000000000000000000..f514c56b838058a2a72a5d71dd86c680bd099557 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/group_points_gpu.o differ diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/interpolate_gpu.o b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/interpolate_gpu.o new file mode 100644 index 0000000000000000000000000000000000000000..b95cdf1ff2fd825d259801b4d1692fa19b55d5a6 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/interpolate_gpu.o differ diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/build.ninja b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/build.ninja new file mode 100644 index 0000000000000000000000000000000000000000..f085cf443d50f7b1b7f54f6917895b164b69726f --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/build.ninja @@ -0,0 +1,41 @@ +ninja_required_version = 1.3 +cxx = c++ +nvcc = /usr/local/cuda/bin/nvcc + +cflags = -pthread -B /opt/conda/envs/my/compiler_compat -Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall -fPIC -O2 -isystem /opt/conda/envs/my/include -fPIC -O2 -isystem /opt/conda/envs/my/include -fPIC -I_ext_src/include -I/opt/conda/envs/my/lib/python3.8/site-packages/torch/include -I/opt/conda/envs/my/lib/python3.8/site-packages/torch/include/torch/csrc/api/include -I/opt/conda/envs/my/lib/python3.8/site-packages/torch/include/TH 
-I/opt/conda/envs/my/lib/python3.8/site-packages/torch/include/THC -I/usr/local/cuda/include -I/opt/conda/envs/my/include/python3.8 -c +post_cflags = -DTORCH_API_INCLUDE_EXTENSION_H '-DPYBIND11_COMPILER_TYPE="_gcc"' '-DPYBIND11_STDLIB="_libstdcpp"' '-DPYBIND11_BUILD_ABI="_cxxabi1011"' -DTORCH_EXTENSION_NAME=_ext -D_GLIBCXX_USE_CXX11_ABI=0 -std=c++17 +cuda_cflags = -I_ext_src/include -I/opt/conda/envs/my/lib/python3.8/site-packages/torch/include -I/opt/conda/envs/my/lib/python3.8/site-packages/torch/include/torch/csrc/api/include -I/opt/conda/envs/my/lib/python3.8/site-packages/torch/include/TH -I/opt/conda/envs/my/lib/python3.8/site-packages/torch/include/THC -I/usr/local/cuda/include -I/opt/conda/envs/my/include/python3.8 -c +cuda_post_cflags = -D__CUDA_NO_HALF_OPERATORS__ -D__CUDA_NO_HALF_CONVERSIONS__ -D__CUDA_NO_BFLOAT16_CONVERSIONS__ -D__CUDA_NO_HALF2_OPERATORS__ --expt-relaxed-constexpr --compiler-options ''"'"'-fPIC'"'"'' -O3 -DCUDA_HAS_FP16=1 -D__CUDA_NO_HALF_OPERATORS__ -D__CUDA_NO_HALF_CONVERSIONS__ -D__CUDA_NO_HALF2_OPERATORS__ -DTORCH_API_INCLUDE_EXTENSION_H '-DPYBIND11_COMPILER_TYPE="_gcc"' '-DPYBIND11_STDLIB="_libstdcpp"' '-DPYBIND11_BUILD_ABI="_cxxabi1011"' -DTORCH_EXTENSION_NAME=_ext -D_GLIBCXX_USE_CXX11_ABI=0 -gencode=arch=compute_89,code=compute_89 -gencode=arch=compute_89,code=sm_89 -std=c++17 +cuda_dlink_post_cflags = +ldflags = + +rule compile + command = $cxx -MMD -MF $out.d $cflags -c $in -o $out $post_cflags + depfile = $out.d + deps = gcc + +rule cuda_compile + depfile = $out.d + deps = gcc + command = $nvcc $cuda_cflags -c $in -o $out $cuda_post_cflags + + + + + +build /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/ball_query.o: compile /workspace/submodules/SAM_6D/model/pointnet2/_ext_src/src/ball_query.cpp +build /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/ball_query_gpu.o: cuda_compile 
/workspace/submodules/SAM_6D/model/pointnet2/_ext_src/src/ball_query_gpu.cu +build /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/bindings.o: compile /workspace/submodules/SAM_6D/model/pointnet2/_ext_src/src/bindings.cpp +build /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/group_points.o: compile /workspace/submodules/SAM_6D/model/pointnet2/_ext_src/src/group_points.cpp +build /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/group_points_gpu.o: cuda_compile /workspace/submodules/SAM_6D/model/pointnet2/_ext_src/src/group_points_gpu.cu +build /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/interpolate.o: compile /workspace/submodules/SAM_6D/model/pointnet2/_ext_src/src/interpolate.cpp +build /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/interpolate_gpu.o: cuda_compile /workspace/submodules/SAM_6D/model/pointnet2/_ext_src/src/interpolate_gpu.cu +build /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/sampling.o: compile /workspace/submodules/SAM_6D/model/pointnet2/_ext_src/src/sampling.cpp +build /workspace/submodules/SAM_6D/model/pointnet2/build/temp.linux-x86_64-cpython-38/_ext_src/src/sampling_gpu.o: cuda_compile /workspace/submodules/SAM_6D/model/pointnet2/_ext_src/src/sampling_gpu.cu + + + + + + + diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/PKG-INFO b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/PKG-INFO new file mode 100644 index 0000000000000000000000000000000000000000..43db20a212c5be493b0247599bcfd33efbebc037 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/PKG-INFO @@ -0,0 +1,3 @@ +Metadata-Version: 2.4 +Name: pointnet2 +Version: 0.0.0 diff --git 
a/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/SOURCES.txt b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/SOURCES.txt new file mode 100644 index 0000000000000000000000000000000000000000..292faea898aa05982695ed382f2937765e19737c --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/SOURCES.txt @@ -0,0 +1,14 @@ +setup.py +_ext_src/src/ball_query.cpp +_ext_src/src/ball_query_gpu.cu +_ext_src/src/bindings.cpp +_ext_src/src/group_points.cpp +_ext_src/src/group_points_gpu.cu +_ext_src/src/interpolate.cpp +_ext_src/src/interpolate_gpu.cu +_ext_src/src/sampling.cpp +_ext_src/src/sampling_gpu.cu +pointnet2.egg-info/PKG-INFO +pointnet2.egg-info/SOURCES.txt +pointnet2.egg-info/dependency_links.txt +pointnet2.egg-info/top_level.txt \ No newline at end of file diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/dependency_links.txt b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/dependency_links.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/top_level.txt b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..047cf37410a3a0fb88a8ba9fce0fa74cefa80d8f --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2.egg-info/top_level.txt @@ -0,0 +1 @@ +pointnet2 diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2_modules.py b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2_modules.py new file mode 100644 index 0000000000000000000000000000000000000000..e16e183d125414cc98998fd9ee052eaec584e01c --- /dev/null +++ 
b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2_modules.py @@ -0,0 +1,237 @@ +from __future__ import ( + division, + absolute_import, + with_statement, + print_function, + unicode_literals, +) +import torch +import torch.nn as nn +import torch.nn.functional as F +import pytorch_utils + +# from pointnet2_3090 import pointnet2_utils + +import ipdb + +if False: + # Workaround for type hints without depending on the `typing` module + from typing import * + + +class _PointnetSAModuleBase(nn.Module): + def __init__(self): + super(_PointnetSAModuleBase, self).__init__() + self.npoint = None + self.groupers = None + self.mlps = None + + def forward(self, xyz, features=None): + # type: (_PointnetSAModuleBase, torch.Tensor, torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor] + r""" + Parameters + ---------- + xyz : torch.Tensor + (B, N, 3) tensor of the xyz coordinates of the features + features : torch.Tensor + (B, N, C) tensor of the descriptors of the the features + + Returns + ------- + new_xyz : torch.Tensor + (B, npoint, 3) tensor of the new features' xyz + new_features : torch.Tensor + (B, \sum_k(mlps[k][-1]), npoint) tensor of the new_features descriptors + """ + + new_features_list = [] + + xyz_flipped = xyz.transpose(1, 2).contiguous() + new_xyz = ( + pointnet2_utils.gather_operation( + xyz_flipped, pointnet2_utils.furthest_point_sample(xyz, self.npoint) + ) + .transpose(1, 2) + .contiguous() + if self.npoint is not None + else None + ) + + for i in range(len(self.groupers)): + new_features = self.groupers[i]( + xyz, new_xyz, features + ) # (B, C, npoint, nsample) + + new_features = self.mlps[i](new_features) # (B, mlp[-1], npoint, nsample) + new_features = F.max_pool2d( + new_features, kernel_size=[1, new_features.size(3)] + ) # (B, mlp[-1], npoint, 1) + new_features = new_features.squeeze(-1) # (B, mlp[-1], npoint) + + new_features_list.append(new_features) + + return new_xyz, torch.cat(new_features_list, dim=1) + + +class 
PointnetSAModuleMSG(_PointnetSAModuleBase): + r"""Pointnet set abstrction layer with multiscale grouping + + Parameters + ---------- + npoint : int + Number of features + radii : list of float32 + list of radii to group with + nsamples : list of int32 + Number of samples in each ball query + mlps : list of list of int32 + Spec of the pointnet before the global max_pool for each scale + bn : bool + Use batchnorm + """ + + def __init__(self, npoint, radii, nsamples, mlps, bn=True, use_xyz=True): + # type: (PointnetSAModuleMSG, int, List[float], List[int], List[List[int]], bool, bool) -> None + super(PointnetSAModuleMSG, self).__init__() + + assert len(radii) == len(nsamples) == len(mlps) + + self.npoint = npoint + self.groupers = nn.ModuleList() + self.mlps = nn.ModuleList() + for i in range(len(radii)): + radius = radii[i] + nsample = nsamples[i] + self.groupers.append( + pointnet2_utils.QueryAndGroup(radius, nsample, use_xyz=use_xyz) + if npoint is not None + else pointnet2_utils.GroupAll(use_xyz) + ) + mlp_spec = mlps[i] + if use_xyz: + mlp_spec[0] += 3 + + self.mlps.append(pytorch_utils.SharedMLP(mlp_spec, bn=bn)) + + +class PointnetSAModule(PointnetSAModuleMSG): + r"""Pointnet set abstrction layer + + Parameters + ---------- + npoint : int + Number of features + radius : float + Radius of ball + nsample : int + Number of samples in the ball query + mlp : list + Spec of the pointnet before the global max_pool + bn : bool + Use batchnorm + """ + + def __init__( + self, mlp, npoint=None, radius=None, nsample=None, bn=True, use_xyz=True + ): + # type: (PointnetSAModule, List[int], int, float, int, bool, bool) -> None + super(PointnetSAModule, self).__init__( + mlps=[mlp], + npoint=npoint, + radii=[radius], + nsamples=[nsample], + bn=bn, + use_xyz=use_xyz, + ) + + +class PointnetFPModule(nn.Module): + r"""Propigates the features of one set to another + + Parameters + ---------- + mlp : list + Pointnet module parameters + bn : bool + Use batchnorm + """ + + def 
__init__(self, mlp, bn=True): + # type: (PointnetFPModule, List[int], bool) -> None + super(PointnetFPModule, self).__init__() + self.mlp = pytorch_utils.SharedMLP(mlp, bn=bn) + + def forward(self, unknown, known, unknow_feats, known_feats): + # type: (PointnetFPModule, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor) -> torch.Tensor + r""" + Parameters + ---------- + unknown : torch.Tensor + (B, n, 3) tensor of the xyz positions of the unknown features + known : torch.Tensor + (B, m, 3) tensor of the xyz positions of the known features + unknow_feats : torch.Tensor + (B, C1, n) tensor of the features to be propigated to + known_feats : torch.Tensor + (B, C2, m) tensor of features to be propigated + + Returns + ------- + new_features : torch.Tensor + (B, mlp[-1], n) tensor of the features of the unknown features + """ + + if known is not None: + dist, idx = pointnet2_utils.three_nn(unknown, known) + dist_recip = 1.0 / (dist + 1e-8) + norm = torch.sum(dist_recip, dim=2, keepdim=True) + weight = dist_recip / norm + + interpolated_feats = pointnet2_utils.three_interpolate( + known_feats, idx.detach(), weight.detach() + ) + else: + interpolated_feats = known_feats.expand( + *(known_feats.size()[0:2] + [unknown.size(1)]) + ) + + if unknow_feats is not None: + new_features = torch.cat( + [interpolated_feats, unknow_feats], dim=1 + ) # (B, C2 + C1, n) + else: + new_features = interpolated_feats + # ipdb.set_trace() + + new_features = new_features.unsqueeze(-1) + new_features = self.mlp(new_features) + + return new_features.squeeze(-1) + + +if __name__ == "__main__": + from torch.autograd import Variable + + torch.manual_seed(1) + torch.cuda.manual_seed_all(1) + xyz = Variable(torch.randn(2, 9, 3).cuda(), requires_grad=True) + xyz_feats = Variable(torch.randn(2, 9, 6).cuda(), requires_grad=True) + + test_module = PointnetSAModuleMSG( + npoint=2, radii=[5.0, 10.0], nsamples=[6, 3], mlps=[[9, 3], [9, 6]] + ) + test_module.cuda() + print(test_module(xyz, xyz_feats)) + 
+ # test_module = PointnetFPModule(mlp=[6, 6]) + # test_module.cuda() + # from torch.autograd import gradcheck + # inputs = (xyz, xyz, None, xyz_feats) + # test = gradcheck(test_module, inputs, eps=1e-6, atol=1e-4) + # print(test) + + for _ in range(1): + _, new_features = test_module(xyz, xyz_feats) + new_features.backward(torch.cuda.FloatTensor(*new_features.size()).fill_(1)) + print(new_features) + print(xyz.grad) diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2_test.py b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2_test.py new file mode 100644 index 0000000000000000000000000000000000000000..be60b28ae7ebca73669f7b8622d0d59ad088b46e --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2_test.py @@ -0,0 +1,33 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +''' Testing customized ops. 
''' + +import torch +from torch.autograd import gradcheck +import numpy as np + +import os +import sys +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(BASE_DIR) +import pointnet2_utils + +def test_interpolation_grad(): + batch_size = 1 + feat_dim = 2 + m = 4 + feats = torch.randn(batch_size, feat_dim, m, requires_grad=True).float().cuda() + + def interpolate_func(inputs): + idx = torch.from_numpy(np.array([[[0,1,2],[1,2,3]]])).int().cuda() + weight = torch.from_numpy(np.array([[[1,1,1],[2,2,2]]])).float().cuda() + interpolated_feats = pointnet2_utils.three_interpolate(inputs, idx, weight) + return interpolated_feats + + assert (gradcheck(interpolate_func, feats, atol=1e-1, rtol=1e-1)) + +if __name__=='__main__': + test_interpolation_grad() diff --git a/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2_utils.py b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4f617fe31be5e0938a62be1cbf99d729c3ee63ec --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/model/pointnet2/pointnet2_utils.py @@ -0,0 +1,426 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +''' Modified based on: https://github.com/erikwijmans/Pointnet2_PyTorch ''' +from __future__ import ( + division, + absolute_import, + with_statement, + print_function, + unicode_literals, +) +import torch +from torch.autograd import Function +import torch.nn as nn +import pytorch_utils as pt_utils +import sys + +try: + import builtins +except: + import __builtin__ as builtins + +try: + import pointnet2._ext as _ext +except ImportError: + if not getattr(builtins, "__POINTNET2_SETUP__", False): + raise ImportError( + "Could not import _ext module.\n" + "Please see the setup instructions in the README: " + "https://github.com/erikwijmans/Pointnet2_PyTorch/blob/master/README.rst" + ) + +if False: + # Workaround for type hints without depending on the `typing` module + from typing import * + + +class RandomDropout(nn.Module): + def __init__(self, p=0.5, inplace=False): + super(RandomDropout, self).__init__() + self.p = p + self.inplace = inplace + + def forward(self, X): + theta = torch.Tensor(1).uniform_(0, self.p)[0] + return pt_utils.feature_dropout_no_scaling(X, theta, self.train, self.inplace) + + +class FurthestPointSampling(Function): + @staticmethod + def forward(ctx, xyz, npoint): + # type: (Any, torch.Tensor, int) -> torch.Tensor + r""" + Uses iterative furthest point sampling to select a set of npoint features that have the largest + minimum distance + + Parameters + ---------- + xyz : torch.Tensor + (B, N, 3) tensor where N > npoint + npoint : int32 + number of features in the sampled set + + Returns + ------- + torch.Tensor + (B, npoint) tensor containing the set + """ + fps_inds = _ext.furthest_point_sampling(xyz, npoint) + ctx.mark_non_differentiable(fps_inds) + return fps_inds + + @staticmethod + def backward(xyz, a=None): + return None, None + + +furthest_point_sample = FurthestPointSampling.apply + + +class GatherOperation(Function): + @staticmethod + def forward(ctx, features, idx): + # type: (Any, torch.Tensor, torch.Tensor) -> torch.Tensor + 
r""" + + Parameters + ---------- + features : torch.Tensor + (B, C, N) tensor + + idx : torch.Tensor + (B, npoint) tensor of the features to gather + + Returns + ------- + torch.Tensor + (B, C, npoint) tensor + """ + + _, C, N = features.size() + + ctx.for_backwards = (idx, C, N) + + return _ext.gather_points(features, idx) + + @staticmethod + def backward(ctx, grad_out): + idx, C, N = ctx.for_backwards + + grad_features = _ext.gather_points_grad(grad_out.contiguous(), idx, N) + return grad_features, None + + +gather_operation = GatherOperation.apply + + +class ThreeNN(Function): + @staticmethod + def forward(ctx, unknown, known): + # type: (Any, torch.Tensor, torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor] + r""" + Find the three nearest neighbors of unknown in known + Parameters + ---------- + unknown : torch.Tensor + (B, n, 3) tensor of known features + known : torch.Tensor + (B, m, 3) tensor of unknown features + + Returns + ------- + dist : torch.Tensor + (B, n, 3) l2 distance to the three nearest neighbors + idx : torch.Tensor + (B, n, 3) index of 3 nearest neighbors + """ + dist2, idx = _ext.three_nn(unknown, known) + + return torch.sqrt(dist2), idx + + @staticmethod + def backward(ctx, a=None, b=None): + return None, None + + +three_nn = ThreeNN.apply + + +class ThreeInterpolate(Function): + @staticmethod + def forward(ctx, features, idx, weight): + # type(Any, torch.Tensor, torch.Tensor, torch.Tensor) -> Torch.Tensor + r""" + Performs weight linear interpolation on 3 features + Parameters + ---------- + features : torch.Tensor + (B, c, m) Features descriptors to be interpolated from + idx : torch.Tensor + (B, n, 3) three nearest neighbors of the target features in features + weight : torch.Tensor + (B, n, 3) weights + + Returns + ------- + torch.Tensor + (B, c, n) tensor of the interpolated features + """ + B, c, m = features.size() + n = idx.size(1) + + ctx.three_interpolate_for_backward = (idx, weight, m) + + return _ext.three_interpolate(features, 
idx, weight) + + @staticmethod + def backward(ctx, grad_out): + # type: (Any, torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor] + r""" + Parameters + ---------- + grad_out : torch.Tensor + (B, c, n) tensor with gradients of ouputs + + Returns + ------- + grad_features : torch.Tensor + (B, c, m) tensor with gradients of features + + None + + None + """ + idx, weight, m = ctx.three_interpolate_for_backward + + grad_features = _ext.three_interpolate_grad( + grad_out.contiguous(), idx, weight, m + ) + + return grad_features, None, None + + +three_interpolate = ThreeInterpolate.apply + + +class GroupingOperation(Function): + @staticmethod + def forward(ctx, features, idx): + # type: (Any, torch.Tensor, torch.Tensor) -> torch.Tensor + r""" + + Parameters + ---------- + features : torch.Tensor + (B, C, N) tensor of features to group + idx : torch.Tensor + (B, npoint, nsample) tensor containing the indicies of features to group with + + Returns + ------- + torch.Tensor + (B, C, npoint, nsample) tensor + """ + B, nfeatures, nsample = idx.size() + _, C, N = features.size() + + ctx.for_backwards = (idx, N) + + return _ext.group_points(features, idx) + + @staticmethod + def backward(ctx, grad_out): + # type: (Any, torch.tensor) -> Tuple[torch.Tensor, torch.Tensor] + r""" + + Parameters + ---------- + grad_out : torch.Tensor + (B, C, npoint, nsample) tensor of the gradients of the output from forward + + Returns + ------- + torch.Tensor + (B, C, N) gradient of the features + None + """ + idx, N = ctx.for_backwards + + grad_features = _ext.group_points_grad(grad_out.contiguous(), idx, N) + + return grad_features, None + + +grouping_operation = GroupingOperation.apply + + +class BallQuery(Function): + @staticmethod + def forward(ctx, radius, nsample, xyz, new_xyz): + # type: (Any, float, int, torch.Tensor, torch.Tensor) -> torch.Tensor + r""" + + Parameters + ---------- + radius : float + radius of the balls + nsample : int + maximum number of features in the balls 
class QueryAndGroup(nn.Module):
    r"""
    Groups with a ball query of radius

    Parameters
    ---------
    radius : float32
        Radius of ball
    nsample : int32
        Maximum number of features to gather in the ball
    use_xyz : bool
        Prepend relative xyz coordinates to the grouped features
    ret_grouped_xyz : bool
        Also return the grouped relative xyz coordinates
    normalize_xyz : bool
        Divide grouped xyz by the ball radius
    sample_uniformly : bool
        Replace duplicated neighbour indices with uniform draws from the
        unique neighbours of each ball
    ret_unique_cnt : bool
        Also return the number of unique neighbours per ball
        (only valid together with sample_uniformly)
    """

    def __init__(self, radius, nsample, use_xyz=True, ret_grouped_xyz=False, normalize_xyz=False, sample_uniformly=False, ret_unique_cnt=False):
        # type: (QueryAndGroup, float, int, bool, bool, bool, bool, bool) -> None
        super(QueryAndGroup, self).__init__()
        self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz
        self.ret_grouped_xyz = ret_grouped_xyz
        self.normalize_xyz = normalize_xyz
        self.sample_uniformly = sample_uniformly
        self.ret_unique_cnt = ret_unique_cnt
        if self.ret_unique_cnt:
            # unique counts are only computed by the uniform re-sampling path
            assert(self.sample_uniformly)

    def forward(self, xyz, new_xyz, features=None):
        # type: (QueryAndGroup, torch.Tensor, torch.Tensor, torch.Tensor) -> Tuple[torch.Tensor]
        r"""
        Parameters
        ----------
        xyz : torch.Tensor
            xyz coordinates of the features (B, N, 3)
        new_xyz : torch.Tensor
            centroids (B, npoint, 3)
        features : torch.Tensor
            Descriptors of the features (B, C, N)

        Returns
        -------
        new_features : torch.Tensor
            (B, 3 + C, npoint, nsample) tensor; grouped_xyz and/or
            unique_cnt are appended when the corresponding flags are set
        """
        idx = ball_query(self.radius, self.nsample, xyz, new_xyz)

        if self.sample_uniformly:
            # ball_query pads balls that have fewer than nsample points by
            # repeating indices; re-draw those duplicate slots uniformly from
            # the unique neighbours so the sample is unbiased.
            # NOTE(review): unique_cnt is allocated on CPU regardless of the
            # device of idx — confirm callers expect that.
            unique_cnt = torch.zeros((idx.shape[0], idx.shape[1]))
            for i_batch in range(idx.shape[0]):
                for i_region in range(idx.shape[1]):
                    unique_ind = torch.unique(idx[i_batch, i_region, :])
                    num_unique = unique_ind.shape[0]
                    unique_cnt[i_batch, i_region] = num_unique
                    sample_ind = torch.randint(0, num_unique, (self.nsample - num_unique,), dtype=torch.long)
                    all_ind = torch.cat((unique_ind, unique_ind[sample_ind]))
                    idx[i_batch, i_region, :] = all_ind

        xyz_trans = xyz.transpose(1, 2).contiguous()
        grouped_xyz = grouping_operation(xyz_trans, idx)  # (B, 3, npoint, nsample)
        # express each neighbour relative to its ball centre
        grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1)
        if self.normalize_xyz:
            grouped_xyz /= self.radius

        if features is not None:
            grouped_features = grouping_operation(features, idx)
            if self.use_xyz:
                new_features = torch.cat(
                    [grouped_xyz, grouped_features], dim=1
                )  # (B, C + 3, npoint, nsample)
            else:
                new_features = grouped_features
        else:
            assert (
                self.use_xyz
            ), "Cannot have not features and not use xyz as a feature!"
            new_features = grouped_xyz

        ret = [new_features]
        if self.ret_grouped_xyz:
            ret.append(grouped_xyz)
        if self.ret_unique_cnt:
            ret.append(unique_cnt)
        if len(ret) == 1:
            return ret[0]
        else:
            return tuple(ret)


class GroupAll(nn.Module):
    r"""
    Groups all features

    Parameters
    ---------
    use_xyz : bool
        Prepend the xyz coordinates to the grouped features
    ret_grouped_xyz : bool
        Also return the grouped xyz coordinates
    """

    def __init__(self, use_xyz=True, ret_grouped_xyz=False):
        # type: (GroupAll, bool, bool) -> None
        super(GroupAll, self).__init__()
        self.use_xyz = use_xyz
        self.ret_grouped_xyz = ret_grouped_xyz

    def forward(self, xyz, new_xyz, features=None):
        # type: (GroupAll, torch.Tensor, torch.Tensor, torch.Tensor) -> Tuple[torch.Tensor]
        r"""
        Parameters
        ----------
        xyz : torch.Tensor
            xyz coordinates of the features (B, N, 3)
        new_xyz : torch.Tensor
            Ignored
        features : torch.Tensor
            Descriptors of the features (B, C, N)

        Returns
        -------
        new_features : torch.Tensor
            (B, C + 3, 1, N) tensor
        """

        # treat the whole cloud as one group of size N
        grouped_xyz = xyz.transpose(1, 2).unsqueeze(2)
        if features is not None:
            grouped_features = features.unsqueeze(2)
            if self.use_xyz:
                new_features = torch.cat(
                    [grouped_xyz, grouped_features], dim=1
                )  # (B, 3 + C, 1, N)
            else:
                new_features = grouped_features
        else:
            new_features = grouped_xyz

        if self.ret_grouped_xyz:
            return new_features, grouped_xyz
        else:
            return new_features
# Typing names are only used inside `# type:` comments; the guard keeps
# `typing` out of the runtime dependencies.
if False:
    # Workaround for type hints without depending on the `typing` module
    from typing import *


class SharedMLP(nn.Sequential):
    r"""Stack of 1x1 Conv2d layers (weights shared across points).

    ``args`` lists channel sizes; layer ``i`` maps ``args[i]`` ->
    ``args[i + 1]`` channels.
    """

    def __init__(
        self,
        args,
        bn=False,
        activation=nn.ReLU(inplace=True),
        preact=False,
        first=False,
        name="",
    ):
        # type: (SharedMLP, List[int], bool, Any, bool, bool, AnyStr) -> None
        super(SharedMLP, self).__init__()

        for i in range(len(args) - 1):
            self.add_module(
                name + "layer{}".format(i),
                Conv2d(
                    args[i],
                    args[i + 1],
                    # skip BN and activation only on the very first layer of a
                    # pre-activation network (i.e. when first and preact and
                    # i == 0) — the input there is raw data
                    bn=(not first or not preact or (i != 0)) and bn,
                    activation=activation
                    if (not first or not preact or (i != 0))
                    else None,
                    preact=preact,
                ),
            )


class _BNBase(nn.Sequential):
    # Thin wrapper that names the norm layer and initialises it to the
    # identity transform (scale=1, shift=0).
    def __init__(self, in_size, batch_norm=None, name=""):
        super(_BNBase, self).__init__()
        self.add_module(name + "bn", batch_norm(in_size))

        nn.init.constant_(self[0].weight, 1.0)
        nn.init.constant_(self[0].bias, 0)


class BatchNorm1d(_BNBase):
    # Named BatchNorm1d with identity initialisation.
    def __init__(self, in_size, name=""):
        # type: (BatchNorm1d, int, AnyStr) -> None
        super(BatchNorm1d, self).__init__(in_size, batch_norm=nn.BatchNorm1d, name=name)


class BatchNorm2d(_BNBase):
    # Named BatchNorm2d with identity initialisation.
    def __init__(self, in_size, name=""):
        # type: (BatchNorm2d, int, AnyStr) -> None
        super(BatchNorm2d, self).__init__(in_size, batch_norm=nn.BatchNorm2d, name=name)


class BatchNorm3d(_BNBase):
    # Named BatchNorm3d with identity initialisation.
    def __init__(self, in_size, name=""):
        # type: (BatchNorm3d, int, AnyStr) -> None
        super(BatchNorm3d, self).__init__(in_size, batch_norm=nn.BatchNorm3d, name=name)
class Conv1d(_ConvBase):
    r"""1D convolution with optional batch norm and activation.

    Layout (pre- vs post-activation), weight init and bias handling are
    implemented in ``_ConvBase``; this class only fixes ``conv=nn.Conv1d``
    and the default ``norm_layer=BatchNorm1d``.
    """

    def __init__(
        self,
        in_size,
        out_size,
        kernel_size=1,
        stride=1,
        padding=0,
        dilation=1,
        activation=nn.ReLU(inplace=True),
        bn=False,
        init=nn.init.kaiming_normal_,
        bias=True,
        preact=False,
        name="",
        norm_layer=BatchNorm1d,
    ):
        # type: (Conv1d, int, int, int, int, int, int, Any, bool, Any, bool, bool, AnyStr, _BNBase) -> None
        super(Conv1d, self).__init__(
            in_size,
            out_size,
            kernel_size,
            stride,
            padding,
            dilation,
            activation,
            bn,
            init,
            conv=nn.Conv1d,
            norm_layer=norm_layer,
            bias=bias,
            preact=preact,
            name=name,
        )


class Conv2d(_ConvBase):
    r"""2D convolution with optional batch norm and activation.

    Same contract as ``Conv1d`` but with tuple-shaped kernel/stride/padding
    defaults, ``conv=nn.Conv2d`` and ``norm_layer=BatchNorm2d``.
    """

    def __init__(
        self,
        in_size,
        out_size,
        kernel_size=(1, 1),
        stride=(1, 1),
        padding=(0, 0),
        dilation=(1, 1),
        activation=nn.ReLU(inplace=True),
        bn=False,
        init=nn.init.kaiming_normal_,
        bias=True,
        preact=False,
        name="",
        norm_layer=BatchNorm2d,
    ):
        # type: (Conv2d, int, int, Tuple[int, int], Tuple[int, int], Tuple[int, int], Tuple[int, int], Any, bool, Any, bool, bool, AnyStr, _BNBase) -> None
        super(Conv2d, self).__init__(
            in_size,
            out_size,
            kernel_size,
            stride,
            padding,
            dilation,
            activation,
            bn,
            init,
            conv=nn.Conv2d,
            norm_layer=norm_layer,
            bias=bias,
            preact=preact,
            name=name,
        )
class FC(nn.Sequential):
    r"""Fully connected layer with optional batch norm and activation.

    Sub-module layout is ``bn -> activation -> fc`` when ``preact`` is True
    and ``fc -> bn -> activation`` otherwise.  The linear layer carries no
    bias when batch norm is enabled (the norm's shift makes it redundant).
    """

    def __init__(
        self,
        in_size,
        out_size,
        activation=nn.ReLU(inplace=True),
        bn=False,
        init=None,
        preact=False,
        name="",
    ):
        # type: (FC, int, int, Any, bool, Any, bool, AnyStr) -> None
        super(FC, self).__init__()

        linear = nn.Linear(in_size, out_size, bias=not bn)
        if init is not None:
            init(linear.weight)
        if not bn:
            nn.init.constant_(linear.bias, 0)

        def _append_norm_and_act(num_features):
            # register the optional norm and activation sub-modules, in order
            if bn:
                self.add_module(name + "bn", BatchNorm1d(num_features))
            if activation is not None:
                self.add_module(name + "activation", activation)

        if preact:
            _append_norm_and_act(in_size)

        self.add_module(name + "fc", linear)

        if not preact:
            _append_norm_and_act(out_size)
def set_bn_momentum_default(bn_momentum):
    """Build a callable for ``nn.Module.apply`` that sets ``momentum`` on
    every BatchNorm (1d/2d/3d) sub-module it visits."""

    def fn(m):
        if isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d)):
            m.momentum = bn_momentum

    return fn


class BNMomentumScheduler(object):
    """Schedules BatchNorm momentum of ``model`` as a function of the epoch.

    ``bn_lambda(epoch)`` yields the momentum value; ``setter`` turns that
    value into an ``apply``-compatible callable.
    """

    def __init__(self, model, bn_lambda, last_epoch=-1, setter=set_bn_momentum_default):
        if not isinstance(model, nn.Module):
            raise RuntimeError(
                "Class '{}' is not a PyTorch nn Module".format(type(model).__name__)
            )

        self.model = model
        self.setter = setter
        self.lmbd = bn_lambda

        # apply the momentum for epoch `last_epoch + 1` right away, then
        # restore last_epoch so the first explicit step() re-derives it
        self.step(last_epoch + 1)
        self.last_epoch = last_epoch

    def step(self, epoch=None):
        # default: advance by one epoch
        epoch = self.last_epoch + 1 if epoch is None else epoch
        self.last_epoch = epoch
        self.model.apply(self.setter(self.lmbd(epoch)))
# ---- setup.py (pointnet2 CUDA extension build script) ----
import os
from setuptools import setup, find_packages
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
import glob

# Collect the C++/CUDA sources and headers of the extension.
_ext_src_root = "_ext_src"
_ext_sources = glob.glob("{}/src/*.cpp".format(_ext_src_root)) + glob.glob(
    "{}/src/*.cu".format(_ext_src_root)
)
_ext_headers = glob.glob("{}/include/*".format(_ext_src_root))

setup(
    name='pointnet2',
    packages = find_packages(),
    ext_modules=[
        CUDAExtension(
            name='pointnet2._ext',
            sources=_ext_sources,
            include_dirs = [os.path.join(_ext_src_root, "include")],
            extra_compile_args={
                # "cxx": ["-O2", "-I{}".format("{}/include".format(_ext_src_root))],
                # "nvcc": ["-O2", "-I{}".format("{}/include".format(_ext_src_root))],
                "cxx": [],
                # half-precision operators disabled to avoid ambiguous
                # overloads with some CUDA toolchains
                "nvcc": ["-O3",
                    "-DCUDA_HAS_FP16=1",
                    "-D__CUDA_NO_HALF_OPERATORS__",
                    "-D__CUDA_NO_HALF_CONVERSIONS__",
                    "-D__CUDA_NO_HALF2_OPERATORS__",
                ]},)
    ],
    cmdclass={'build_ext': BuildExtension.with_options(use_ninja=True)}
)

# ---- pose_estimation_model.py ----
import torch
import torch.nn as nn


from feature_extraction import ViTEncoder
from coarse_point_matching import CoarsePointMatching
from fine_point_matching import FinePointMatching
from transformer import GeometricStructureEmbedding
from model_utils import sample_pts_feats
from vis_utils import visualize_points_3d, features_to_colors, visualize_two_sets_3d


class Net(nn.Module):
    """Coarse-to-fine point matching network.

    Extracts ViT features for the model (``*_m``) and observed (``*_o``)
    point sets, embeds their geometric structure, then runs coarse and fine
    point matching to populate ``end_points``.
    """

    def __init__(self, cfg):
        super(Net, self).__init__()
        self.cfg = cfg
        self.coarse_npoint = cfg.coarse_npoint
        self.fine_npoint = cfg.fine_npoint

        self.feature_extraction = ViTEncoder(cfg.feature_extraction, self.fine_npoint)
        self.geo_embedding = GeometricStructureEmbedding(cfg.geo_embedding)
        self.coarse_point_matching = CoarsePointMatching(cfg.coarse_point_matching)
        self.fine_point_matching = FinePointMatching(cfg.fine_point_matching)

    def forward(self, end_points):

        # dense points + per-point features for both sets; `radius` is the
        # normalisation scale returned by the feature extractor
        dense_pm, dense_fm, dense_po, dense_fo, radius = self.feature_extraction(end_points)

        # pre-compute geometric embeddings for geometric transformer.
        # A far-away dummy "background" point is prepended to each set —
        # presumably so the matcher can assign outliers to it (verify against
        # the matching modules).
        bg_point = torch.ones(dense_pm.size(0),1,3).float().to(dense_pm.device) * 100

        sparse_pm, sparse_fm, fps_idx_m = sample_pts_feats(
            dense_pm, dense_fm, self.coarse_npoint, return_index=True
        )
        geo_embedding_m = self.geo_embedding(torch.cat([bg_point, sparse_pm], dim=1))

        sparse_po, sparse_fo, fps_idx_o = sample_pts_feats(
            dense_po, dense_fo, self.coarse_npoint, return_index=True
        )
        geo_embedding_o = self.geo_embedding(torch.cat([bg_point, sparse_po], dim=1))

        # coarse matching on the FPS-sampled sparse sets
        end_points, points = self.coarse_point_matching(
            sparse_pm, sparse_fm, geo_embedding_m,
            sparse_po, sparse_fo, geo_embedding_o,
            radius, end_points,
        )

        # fine matching on the dense sets, reusing the sparse embeddings
        # through the FPS indices
        end_points, _ = self.fine_point_matching(
            dense_pm, dense_fm, geo_embedding_m, fps_idx_m,
            dense_po, dense_fo, geo_embedding_o, fps_idx_o,
            radius, end_points)

        return end_points, points
# Registries mapping config strings to layer constructors.
NORM_LAYERS = {
    'BatchNorm1d': nn.BatchNorm1d,
    'BatchNorm2d': nn.BatchNorm2d,
    'BatchNorm3d': nn.BatchNorm3d,
    'InstanceNorm1d': nn.InstanceNorm1d,
    'InstanceNorm2d': nn.InstanceNorm2d,
    'InstanceNorm3d': nn.InstanceNorm3d,
    'GroupNorm': nn.GroupNorm,
    'LayerNorm': nn.LayerNorm,
}


ACT_LAYERS = {
    'ReLU': nn.ReLU,
    'LeakyReLU': nn.LeakyReLU,
    'ELU': nn.ELU,
    'GELU': nn.GELU,
    'Sigmoid': nn.Sigmoid,
    'Softplus': nn.Softplus,
    'Tanh': nn.Tanh,
    'Identity': nn.Identity,
}


CONV_LAYERS = {
    'Linear': nn.Linear,
    'Conv1d': nn.Conv1d,
    'Conv2d': nn.Conv2d,
    'Conv3d': nn.Conv3d,
}


def _check_block_type(block):
    # only self-/cross-attention blocks are recognised
    if block not in ('self', 'cross'):
        raise ValueError('Unsupported block type "{}".'.format(block))


def parse_cfg(cfg: Union[str, Dict]) -> Tuple[str, Dict]:
    """Split a layer config into ``(layer_type, kwargs)``.

    A bare string is shorthand for ``{'type': <string>}``; dict configs are
    copied so the caller's dict is not mutated by the ``pop``.
    """
    assert isinstance(cfg, (str, Dict)), 'Illegal cfg type: {}.'.format(type(cfg))
    cfg = {'type': cfg} if isinstance(cfg, str) else cfg.copy()
    layer = cfg.pop('type')
    return layer, cfg


def build_dropout_layer(p: Optional[float], **kwargs) -> nn.Module:
    r"""Factory function for dropout layer."""
    # None or 0 -> no-op layer
    if p:
        return nn.Dropout(p=p, **kwargs)
    return nn.Identity()


def build_norm_layer(num_features, norm_cfg: Optional[Union[str, Dict]]) -> nn.Module:
    r"""Factory function for normalization layers."""
    if norm_cfg is None:
        return nn.Identity()
    layer, kwargs = parse_cfg(norm_cfg)
    assert layer in NORM_LAYERS, f'Illegal normalization: {layer}.'
    # GroupNorm and LayerNorm name their size argument differently
    feature_arg = {
        'GroupNorm': 'num_channels',
        'LayerNorm': 'normalized_shape',
    }.get(layer, 'num_features')
    kwargs[feature_arg] = num_features
    return NORM_LAYERS[layer](**kwargs)


def build_act_layer(act_cfg: Optional[Union[str, Dict]]) -> nn.Module:
    r"""Factory function for activation functions."""
    if act_cfg is None:
        return nn.Identity()
    layer, kwargs = parse_cfg(act_cfg)
    assert layer in ACT_LAYERS, f'Illegal activation: {layer}.'
    if layer == 'LeakyReLU':
        # project-wide default slope (nn.LeakyReLU's own default is 0.01)
        kwargs.setdefault('negative_slope', 0.2)
    return ACT_LAYERS[layer](**kwargs)
class AttentionLayer(nn.Module):
    """Multi-head attention followed by a linear projection, dropout and a
    residual LayerNorm (post-norm transformer sub-layer)."""

    def __init__(self, d_model, num_heads, dropout=None):
        super(AttentionLayer, self).__init__()
        self.attention = MultiHeadAttention(d_model, num_heads, dropout=dropout)
        self.linear = nn.Linear(d_model, d_model)
        self.dropout = build_dropout_layer(dropout)
        self.norm = nn.LayerNorm(d_model)

    def forward(
        self,
        input_states,
        memory_states,
        memory_weights=None,
        memory_masks=None,
        attention_factors=None,
        attention_masks=None,
    ):
        # query = input stream; key/value = memory stream
        attended, attention_scores = self.attention(
            input_states,
            memory_states,
            memory_states,
            key_weights=memory_weights,
            key_masks=memory_masks,
            attention_factors=attention_factors,
            attention_masks=attention_masks,
        )
        projected = self.dropout(self.linear(attended))
        # residual connection + post-norm
        output_states = self.norm(projected + input_states)
        return output_states, attention_scores


class AttentionOutput(nn.Module):
    """Position-wise feed-forward block: expand to 2*d_model, activation,
    squeeze back to d_model, dropout, then residual LayerNorm."""

    def __init__(self, d_model, dropout=None, activation_fn='ReLU'):
        super(AttentionOutput, self).__init__()
        self.expand = nn.Linear(d_model, d_model * 2)
        self.activation = build_act_layer(activation_fn)
        self.squeeze = nn.Linear(d_model * 2, d_model)
        self.dropout = build_dropout_layer(dropout)
        self.norm = nn.LayerNorm(d_model)

    def forward(self, input_states):
        feedforward = self.squeeze(self.activation(self.expand(input_states)))
        feedforward = self.dropout(feedforward)
        # residual connection + post-norm
        return self.norm(input_states + feedforward)
class ConditionalTransformer(nn.Module):
    """Stack of transformer layers alternating per ``blocks``: 'self' blocks
    attend each stream to itself, 'cross' blocks attend each stream to the
    other (sequentially: stream 1 sees the already-updated stream 0)."""

    def __init__(self, blocks, d_model, num_heads, dropout=None, activation_fn='ReLU', return_attention_scores=False):
        super(ConditionalTransformer, self).__init__()
        self.blocks = blocks
        for block in self.blocks:
            _check_block_type(block)
        self.layers = nn.ModuleList(
            TransformerLayer(d_model, num_heads, dropout=dropout, activation_fn=activation_fn)
            for _ in self.blocks
        )
        self.return_attention_scores = return_attention_scores

    def forward(self, feats0, feats1, masks0=None, masks1=None):
        attention_scores = []
        for layer, block in zip(self.layers, self.blocks):
            if block == 'self':
                feats0, scores0 = layer(feats0, feats0, memory_masks=masks0)
                feats1, scores1 = layer(feats1, feats1, memory_masks=masks1)
            else:
                feats0, scores0 = layer(feats0, feats1, memory_masks=masks1)
                feats1, scores1 = layer(feats1, feats0, memory_masks=masks0)
            if self.return_attention_scores:
                attention_scores.append([scores0, scores1])
        if self.return_attention_scores:
            return feats0, feats1, attention_scores
        return feats0, feats1


class SinusoidalPositionalEmbedding(nn.Module):
    """Classic sinusoidal embedding of scalar indices; the output interleaves
    (sin, cos) pairs per frequency along the last dimension."""

    def __init__(self, d_model):
        super(SinusoidalPositionalEmbedding, self).__init__()
        if d_model % 2 != 0:
            raise ValueError(f'Sinusoidal positional encoding with odd d_model: {d_model}')
        self.d_model = d_model
        freq_indices = torch.arange(0, d_model, 2).float()
        # geometric frequency ladder, 10000^(-2i/d_model)
        self.register_buffer('div_term', torch.exp(freq_indices * (-np.log(10000.0) / d_model)))

    def forward(self, emb_indices):
        r"""Sinusoidal Positional Embedding.

        Args:
            emb_indices: torch.Tensor (*)

        Returns:
            embeddings: torch.Tensor (*, D)
        """
        flat_indices = emb_indices.view(-1, 1, 1)
        omegas = flat_indices * self.div_term.view(1, -1, 1)  # (-1, d_model/2, 1)
        # stacking sin/cos on the last axis yields the interleaved layout
        # after the final view
        embeddings = torch.cat([torch.sin(omegas), torch.cos(omegas)], dim=2)  # (-1, d_model/2, 2)
        embeddings = embeddings.view(*emb_indices.shape, self.d_model)  # (*, d_model)
        # positional codes carry no gradient
        return embeddings.detach()
class RPEMultiHeadAttention(nn.Module):
    r"""Multi-head attention with pre-computed relative positional embeddings:
    positional logits (q · p) are added to content logits (q · k) before the
    softmax, as in the geometric transformer."""

    def __init__(self, d_model, num_heads, dropout=None):
        super(RPEMultiHeadAttention, self).__init__()
        if d_model % num_heads != 0:
            raise ValueError('`d_model` ({}) must be a multiple of `num_heads` ({}).'.format(d_model, num_heads))

        self.d_model = d_model
        self.num_heads = num_heads
        self.d_model_per_head = d_model // num_heads

        self.proj_q = nn.Linear(self.d_model, self.d_model)
        self.proj_k = nn.Linear(self.d_model, self.d_model)
        self.proj_v = nn.Linear(self.d_model, self.d_model)
        # projects the pairwise (B, N, M, C) positional embedding
        self.proj_p = nn.Linear(self.d_model, self.d_model)

        self.dropout = build_dropout_layer(dropout)

    def forward(self, input_q, input_k, input_v, embed_qk, key_weights=None, key_masks=None, attention_factors=None):
        r"""Scaled Dot-Product Attention with Pre-computed Relative Positional Embedding (forward)

        Args:
            input_q: torch.Tensor (B, N, C)
            input_k: torch.Tensor (B, M, C)
            input_v: torch.Tensor (B, M, C)
            embed_qk: torch.Tensor (B, N, M, C), relative positional embedding
            key_weights: torch.Tensor (B, M), soft masks for the keys
            key_masks: torch.Tensor (B, M), True if ignored, False if preserved
            attention_factors: torch.Tensor (B, N, M)

        Returns:
            hidden_states: torch.Tensor (B, N, C)
            attention_scores: torch.Tensor (B, H, N, M), post-softmax, post-dropout
        """
        q = rearrange(self.proj_q(input_q), 'b n (h c) -> b h n c', h=self.num_heads)
        k = rearrange(self.proj_k(input_k), 'b m (h c) -> b h m c', h=self.num_heads)
        v = rearrange(self.proj_v(input_v), 'b m (h c) -> b h m c', h=self.num_heads)
        p = rearrange(self.proj_p(embed_qk), 'b n m (h c) -> b h n m c', h=self.num_heads)

        # positional logits: each query against its pairwise embedding row
        attention_scores_p = torch.einsum('bhnc,bhnmc->bhnm', q, p)
        # content logits: standard dot product
        attention_scores_e = torch.einsum('bhnc,bhmc->bhnm', q, k)
        # both terms share a single 1/sqrt(d_head) scaling
        attention_scores = (attention_scores_e + attention_scores_p) / self.d_model_per_head ** 0.5
        if attention_factors is not None:
            attention_scores = attention_factors.unsqueeze(1) * attention_scores
        if key_weights is not None:
            attention_scores = attention_scores * key_weights.unsqueeze(1).unsqueeze(1)
        if key_masks is not None:
            attention_scores = attention_scores.masked_fill(key_masks.unsqueeze(1).unsqueeze(1), float('-inf'))
        attention_scores = F.softmax(attention_scores, dim=-1)
        attention_scores = self.dropout(attention_scores)

        hidden_states = torch.matmul(attention_scores, v)

        hidden_states = rearrange(hidden_states, 'b h n c -> b n (h c)')

        return hidden_states, attention_scores


class RPEAttentionLayer(nn.Module):
    r"""RPE attention followed by linear projection, dropout and a residual
    LayerNorm (post-norm sub-layer)."""

    def __init__(self, d_model, num_heads, dropout=None):
        super(RPEAttentionLayer, self).__init__()
        self.attention = RPEMultiHeadAttention(d_model, num_heads, dropout=dropout)
        self.linear = nn.Linear(d_model, d_model)
        self.dropout = build_dropout_layer(dropout)
        self.norm = nn.LayerNorm(d_model)

    def forward(
        self,
        input_states,
        memory_states,
        position_states,
        memory_weights=None,
        memory_masks=None,
        attention_factors=None,
    ):
        # query = input stream; key/value = memory stream
        hidden_states, attention_scores = self.attention(
            input_states,
            memory_states,
            memory_states,
            position_states,
            key_weights=memory_weights,
            key_masks=memory_masks,
            attention_factors=attention_factors,
        )
        hidden_states = self.linear(hidden_states)
        hidden_states = self.dropout(hidden_states)
        # residual connection + post-norm
        output_states = self.norm(hidden_states + input_states)
        return output_states, attention_scores
class GeometricTransformer(nn.Module):
    """Alternating self/cross transformer where the self-attention layers use
    relative positional (geometric) embeddings.

    'self' blocks are ``RPETransformerLayer`` (consume the per-set geometric
    embeddings); 'cross' blocks are plain ``TransformerLayer``.  With
    ``parallel=True`` both streams of a cross block attend to the other's
    pre-update features; otherwise stream 1 sees the already-updated stream 0.
    """

    def __init__(
        self,
        blocks,
        d_model,
        num_heads,
        dropout=None,
        activation_fn='ReLU',
        return_attention_scores=False,
        parallel=False,
    ):
        super(GeometricTransformer, self).__init__()
        self.blocks = blocks
        layers = []
        for block in self.blocks:
            _check_block_type(block)
            layer_cls = RPETransformerLayer if block == 'self' else TransformerLayer
            layers.append(layer_cls(d_model, num_heads, dropout=dropout, activation_fn=activation_fn))
        self.layers = nn.ModuleList(layers)
        self.return_attention_scores = return_attention_scores
        self.parallel = parallel

    def forward(self, feats0, embeddings0, feats1, embeddings1, masks0=None, masks1=None):
        collected_scores = []
        for layer, block in zip(self.layers, self.blocks):
            if block == 'self':
                feats0, scores0 = layer(feats0, feats0, embeddings0, memory_masks=masks0)
                feats1, scores1 = layer(feats1, feats1, embeddings1, memory_masks=masks1)
            elif self.parallel:
                # both streams read the other's pre-update features
                updated0, scores0 = layer(feats0, feats1, memory_masks=masks1)
                updated1, scores1 = layer(feats1, feats0, memory_masks=masks0)
                feats0, feats1 = updated0, updated1
            else:
                feats0, scores0 = layer(feats0, feats1, memory_masks=masks1)
                feats1, scores1 = layer(feats1, feats0, memory_masks=masks0)
            if self.return_attention_scores:
                collected_scores.append([scores0, scores1])
        if self.return_attention_scores:
            return feats0, feats1, collected_scores
        return feats0, feats1
class LinearAttention(nn.Module):
    """Focused linear attention with linear (not quadratic) token complexity.

    Queries/keys are made non-negative via ReLU, scaled by a learned
    Softplus temperature, sharpened by raising to `focusing_factor`, and
    re-normalized to their pre-sharpening norms. The (q k^T) v product is
    then associated in whichever order is cheaper.
    """

    def __init__(self, d_model, num_heads, focusing_factor=3):
        super(LinearAttention, self).__init__()
        if d_model % num_heads != 0:
            raise ValueError('`d_model` ({}) must be a multiple of `num_heads` ({}).'.format(d_model, num_heads))
        self.d_model = d_model
        self.num_heads = num_heads
        self.d_model_per_head = d_model // num_heads
        self.focusing_factor = focusing_factor
        self.kernel_function = nn.ReLU()

        self.proj_q = nn.Linear(self.d_model, self.d_model)
        self.proj_k = nn.Linear(self.d_model, self.d_model)
        self.proj_v = nn.Linear(self.d_model, self.d_model)
        # Learned pre-softplus temperature, broadcast over batch and tokens.
        self.scale = nn.Parameter(torch.zeros(size=(1, 1, self.d_model)))

    def forward(self, input_q, input_k, input_v):
        """Compute linear attention.

        Args:
            input_q: (batch, n_q, d_model) query features.
            input_k: (batch, n_k, d_model) key features.
            input_v: (batch, n_k, d_model) value features.

        Returns:
            (batch, n_q, d_model) attended features.
        """
        q = self.proj_q(input_q)
        k = self.proj_k(input_k)
        v = self.proj_v(input_v)
        # FIX: use the functional form instead of instantiating a fresh
        # nn.Softplus module on every forward call (identical numerics).
        scale = F.softplus(self.scale)

        q = self.kernel_function(q) + 1e-6
        k = self.kernel_function(k) + 1e-6
        q = q / scale
        k = k / scale
        # Sharpen ("focus") the feature maps, then restore the original norms.
        q_norm = q.norm(dim=-1, keepdim=True)
        k_norm = k.norm(dim=-1, keepdim=True)
        q = q ** self.focusing_factor
        k = k ** self.focusing_factor
        q = (q / q.norm(dim=-1, keepdim=True)) * q_norm
        k = (k / k.norm(dim=-1, keepdim=True)) * k_norm

        # Split heads: (b, n, h*c) -> (b*h, n, c). Native reshape/permute
        # replaces einops.rearrange so the module only depends on torch.
        b = q.shape[0]
        h = self.num_heads

        def _split_heads(x):
            return x.reshape(b, x.shape[1], h, -1).permute(0, 2, 1, 3).reshape(b * h, x.shape[1], -1)

        q, k, v = _split_heads(q), _split_heads(k), _split_heads(v)
        i, j, c, d = q.shape[-2], k.shape[-2], k.shape[-1], v.shape[-1]

        # Normalizer z_i = 1 / (q_i . sum_j k_j).
        z = 1 / (torch.einsum("b i c, b c -> b i", q, k.sum(dim=1)) + 1e-6)
        # Pick the cheaper association order for (q k^T) v.
        if i * j * (c + d) > c * d * (i + j):
            kv = torch.einsum("b j c, b j d -> b c d", k, v)
            x = torch.einsum("b i c, b c d, b i -> b i d", q, kv, z)
        else:
            qk = torch.einsum("b i c, b j c -> b i j", q, k)
            x = torch.einsum("b i j, b j d, b i -> b i d", qk, v, z)
        # Merge heads back: (b*h, n, d) -> (b, n, h*d).
        x = x.reshape(b, h, x.shape[1], -1).permute(0, 2, 1, 3).reshape(b, x.shape[1], -1)

        return x
class LinearAttentionLayer(nn.Module):
    """Residual wrapper around LinearAttention: attend, project, dropout, add & norm."""

    def __init__(self, d_model, num_heads, dropout=False, focusing_factor=3):
        super(LinearAttentionLayer, self).__init__()
        self.attention = LinearAttention(d_model, num_heads, focusing_factor=focusing_factor)
        self.linear = nn.Linear(d_model, d_model)
        self.dropout = build_dropout_layer(dropout)
        self.norm = nn.LayerNorm(d_model)

    def forward(self, input_states, memory_states):
        # Keys and values both come from `memory_states`.
        attended = self.attention(input_states, memory_states, memory_states)
        attended = self.dropout(self.linear(attended))
        return self.norm(attended + input_states)


class LinearTransformerLayer(nn.Module):
    """Linear attention followed by an AttentionOutput feed-forward block."""

    def __init__(self, d_model, num_heads, dropout=None, activation_fn='ReLU', focusing_factor=3):
        super(LinearTransformerLayer, self).__init__()
        self.attention = LinearAttentionLayer(d_model, num_heads, dropout=dropout, focusing_factor=focusing_factor)
        self.output = AttentionOutput(d_model, dropout=dropout, activation_fn=activation_fn)

    def forward(self, input_states, memory_states):
        return self.output(self.attention(input_states, memory_states))


class SparseToDenseTransformer(nn.Module):
    """Attend sparse (FPS-sampled) features across two sets, then propagate to dense points.

    A GeometricTransformer refines the sparse subsets of both feature sets;
    a LinearTransformerLayer then lets every dense point attend to the
    refined sparse features. An optional background token at index 0 is
    carried through (and optionally replaced from the sparse side).
    """

    def __init__(
        self,
        d_model,
        sparse_blocks,
        num_heads=4,
        dropout=None,
        activation_fn='ReLU',
        parallel=False,
        focusing_factor=3,
        with_bg_token=True,
        replace_bg_token=True
    ):
        super(SparseToDenseTransformer, self).__init__()
        self.with_bg_token = with_bg_token
        self.replace_bg_token = replace_bg_token

        self.sparse_layer = GeometricTransformer(
            blocks=sparse_blocks,
            d_model=d_model,
            num_heads=num_heads,
            dropout=dropout,
            activation_fn=activation_fn,
            parallel=parallel,
            return_attention_scores=False,
        )
        self.dense_layer = LinearTransformerLayer(d_model, num_heads, focusing_factor=focusing_factor)

    def forward(self, dense_feats0, embeddings0, fps_idx0, dense_feats1, embeddings1, fps_idx1, masks0=None, masks1=None):
        sparse0 = self._sample_feats(dense_feats0, fps_idx0)
        sparse1 = self._sample_feats(dense_feats1, fps_idx1)
        sparse0, sparse1 = self.sparse_layer(sparse0, embeddings0, sparse1, embeddings1, masks0, masks1)

        dense_feats0 = self._get_dense_feats(dense_feats0, sparse0)
        dense_feats1 = self._get_dense_feats(dense_feats1, sparse1)
        return dense_feats0, dense_feats1

    def _sample_feats(self, dense_feats, fps_idx):
        # Gather the FPS-selected subset; re-attach the bg token (index 0)
        # in front when it is in use.
        if self.with_bg_token:
            bg_token = dense_feats[:, 0:1, :].contiguous()
        sampled = gather_operation(dense_feats.transpose(1, 2).contiguous(), fps_idx)
        sampled = sampled.transpose(1, 2).contiguous()
        if self.with_bg_token:
            sampled = torch.cat([bg_token, sampled], dim=1)
        return sampled

    def _get_dense_feats(self, dense_feats, feats):
        # Dense points attend to the refined sparse features. When the bg
        # token is replaced, it is copied verbatim from the sparse side and
        # excluded from the dense attention.
        if self.with_bg_token and self.replace_bg_token:
            bg_token = feats[:, 0:1, :].contiguous()
            updated = self.dense_layer(
                dense_feats[:, 1:, :].contiguous(),
                feats[:, 1:, :].contiguous(),
            )
            return torch.cat([bg_token, updated], dim=1)
        return self.dense_layer(dense_feats, feats)
def visualize(rgb, pred_rot, pred_trans, model_points, K, save_path):
    """Render the predicted pose on top of `rgb` and return the overlay.

    Args:
        rgb: HxWx3 uint8 image.
        pred_rot: predicted rotation(s) passed through to `draw_detections`.
        pred_trans: predicted translation(s).
        model_points: sampled CAD model points used for the overlay.
        K: 3x3 camera intrinsics.
        save_path: kept for interface compatibility; the image is no longer
            written to disk (the save call was commented out upstream).

    Returns:
        The overlay as a numpy array (HxWx3).
    """
    img = draw_detections(rgb, pred_rot, pred_trans, model_points, K, color=(255, 0, 0))
    img = Image.fromarray(np.uint8(img))
    prediction = img

    # Side-by-side composite of the raw image and the overlay.
    # NOTE(review): `concat` is built but never returned — only the overlay
    # is returned below; confirm whether the composite was intended.
    rgb = Image.fromarray(np.uint8(rgb))
    img = np.array(img)
    concat = Image.new('RGB', (img.shape[1] + prediction.size[0], img.shape[0]))
    concat.paste(rgb, (0, 0))
    concat.paste(prediction, (img.shape[1], 0))
    return img


class SAM6DPoseEstimator:
    """SAM-6D pose estimation wrapper.

    Loads the pose-estimation network and per-object CAD templates once at
    construction, then estimates 6-DoF poses from RGB-D plus instance masks.
    """

    def __init__(
        self,
        config_path: str,
        K: np.array,
        model_cfg_path: str,
        vis: bool
    ):
        """Initialize the pose estimator with the given configuration file.

        Args:
            config_path (str): Path to the configuration YAML file. If None,
                a built-in single-object ("box") CAD database is used.
            K: 3x3 camera intrinsic matrix.
            model_cfg_path (str): Path to the model config parsed by `Config`.
            vis (bool): If True, `inference` also renders a pose overlay.
        """
        if config_path is not None:
            with open(config_path, "r") as file:
                self.config = yaml.safe_load(file)
            self.cad_database = self.config["cad_database"]
        else:
            # BUG FIX: this attribute was misspelled `cad_databset`, so every
            # later `self.cad_database` access (e.g. in `_get_template` and
            # `load_all_mesh`) raised AttributeError when config_path is None.
            self.cad_database = {"box": '/home/kyber/charles/project/ManiSkill3/src/maniskill2_benchmark/msx-envs/src/msx_envs/assets/object/box_01/box_01.gltf'}
        self.cfg = Config.fromfile(model_cfg_path)
        self.cfg.exp_name = "test"
        self.cfg.model_name = "pose_estimation_model"
        self.cfg.log_dir = "log"
        self.cfg.output_dir = "log"
        self.cfg.det_score_thresh = 0.2
        self.K = K
        self.vis = vis

        MODEL = importlib.import_module(self.cfg.model_name)
        self.model = MODEL.Net(self.cfg.model)
        self.model = self.model.cuda()
        self.model.eval()
        # NOTE(review): weight path is hard-coded; consider moving it into the
        # YAML config alongside the CAD database.
        self.model.load_state_dict(torch.load("/home/kyber/charles/project/grasp_box/weights/SAM_6D/sam-6d-pem-base.pth", weights_only=True)['model'])
        self.cad_cache = {}
        self.load_all_mesh()

    def _get_template(self, obj, tem_index=1, scale=None):
        """Load one pre-rendered template view (RGB, pixel choice, XYZ) for `obj`.

        Args:
            obj: key into the CAD database; templates live next to the CAD file.
            tem_index: index of the template view to load.
            scale: optional per-axis (x, y, z) scale applied to the XYZ map.

        Returns:
            (rgb, rgb_choose, xyz): transformed RGB crop, indices of sampled
            foreground pixels in the resized crop, and their 3D coordinates.
        """
        path = os.path.dirname(self.cad_database[obj])
        rgb_path = os.path.join(path, 'rgb_' + str(tem_index) + '.png')
        mask_path = os.path.join(path, 'mask_' + str(tem_index) + '.png')
        xyz_path = os.path.join(path, 'xyz_' + str(tem_index) + '.npy')

        rgb = load_im(rgb_path).astype(np.uint8)
        xyz = np.load(xyz_path).astype(np.float32)
        mask = load_im(mask_path).astype(np.uint8) == 255

        if scale is not None:
            # Rescale the XYZ map inside the mask's bounding box so the
            # template geometry matches a rescaled CAD model; pixels outside
            # the box are reset to the (-1, -1, -1) sentinel.
            H, W = mask.shape
            ys, xs = np.where(mask == 1)
            y1, y2 = ys.min(), ys.max()
            x1, x2 = xs.min(), xs.max()

            xyz_crop = xyz[y1:y2 + 1, x1:x2 + 1]
            xyz_flat = xyz_crop.reshape(-1, 3)
            xyz_flat[:, 0] *= scale[0]
            xyz_flat[:, 1] *= scale[1]
            xyz_flat[:, 2] *= scale[2]

            xyz_recover = np.full((H, W, 3), (-1, -1, -1), dtype=np.float32)
            xyz_recover[y1:y2 + 1, x1:x2 + 1] = xyz_flat.reshape(y2 - y1 + 1, x2 - x1 + 1, 3)
            xyz = xyz_recover

        bbox = get_bbox(mask)
        y1, y2, x1, x2 = bbox
        mask = mask[y1:y2, x1:x2]

        # Channel-order flip (RGB<->BGR) before cropping, matching the
        # convention used by `get_data`.
        rgb = rgb[:, :, ::-1][y1:y2, x1:x2, :]

        if self.cfg.test_dataset.rgb_mask_flag:
            rgb = rgb * (mask[:, :, None] > 0).astype(np.uint8)

        rgb = cv2.resize(rgb, (self.cfg.test_dataset.img_size, self.cfg.test_dataset.img_size), interpolation=cv2.INTER_LINEAR)
        rgb = rgb_transform(np.array(rgb))

        # Sample a fixed number of foreground pixels (with replacement if the
        # mask is too small).
        choose = (mask > 0).astype(np.float32).flatten().nonzero()[0]
        if len(choose) <= self.cfg.test_dataset.n_sample_template_point:
            choose_idx = np.random.choice(np.arange(len(choose)), self.cfg.test_dataset.n_sample_template_point)
        else:
            choose_idx = np.random.choice(np.arange(len(choose)), self.cfg.test_dataset.n_sample_template_point, replace=False)
        choose = choose[choose_idx]
        xyz = xyz[y1:y2, x1:x2, :].reshape((-1, 3))[choose, :]

        rgb_choose = get_resize_rgb_choose(choose, [y1, y2, x1, x2], self.cfg.test_dataset.img_size)
        return rgb, rgb_choose, xyz

    def get_templates(self, obj, scale=None):
        """Load an evenly spaced subset of template views for `obj` as GPU tensors."""
        n_template_view = self.cfg.test_dataset.n_template_view
        all_tem = []
        all_tem_choose = []
        all_tem_pts = []

        # Assumed total number of rendered views per object — TODO confirm
        # against the template generation pipeline.
        total_nView = 42
        for v in range(n_template_view):
            i = int(total_nView / n_template_view * v)
            tem, tem_choose, tem_pts = self._get_template(obj, i, scale)
            all_tem.append(torch.FloatTensor(tem).unsqueeze(0).cuda())
            all_tem_choose.append(torch.IntTensor(tem_choose).long().unsqueeze(0).cuda())
            all_tem_pts.append(torch.FloatTensor(tem_pts).unsqueeze(0).cuda())
        return all_tem, all_tem_pts, all_tem_choose
all_tem_choose.append(torch.IntTensor(tem_choose).long().unsqueeze(0).cuda()) + all_tem_pts.append(torch.FloatTensor(tem_pts).unsqueeze(0).cuda()) + return all_tem, all_tem_pts, all_tem_choose + + + def load_all_mesh(self, scale = None, rescale_obj = None): + + if rescale_obj is not None: + + mid_x, mid_y, mid_z = scale + scale_matrix = np.eye(4) + scale_matrix[0,0] = mid_x + scale_matrix[1,1] = mid_y + scale_matrix[2,2] = mid_z + mesh = self.cad_cache[rescale_obj]['mesh'] + mesh_s=copy.deepcopy(mesh) + mesh_s.apply_transform(scale_matrix) + model_points = mesh_s.sample(self.cfg.test_dataset.n_sample_model_point).astype(np.float32) + radius = np.max(np.linalg.norm(model_points, axis=1)) + all_tem, all_tem_pts, all_tem_choose = self.get_templates(rescale_obj,scale) + with torch.no_grad(): + all_tem_pts, all_tem_feat = self.model.feature_extraction.get_obj_feats(all_tem, all_tem_pts, all_tem_choose) + self.all_mesh_dict[rescale_obj] = mesh_s + self.cad_cache['tmp'] = { + 'radius': radius, + 'mesh': mesh_s, + 'model_points': model_points, + 'all_tem_pts': all_tem_pts, + 'all_tem_feat': all_tem_feat, + } + + return + + all_mesh_dict = {} + + for obj in self.cad_database: + print(obj, self.cad_database[obj]) + try: + mesh = trimesh.load(self.cad_database[obj], force='mesh') + all_mesh_dict[obj] = mesh + except: + print("missing mesh") + exit() + model_points = mesh.sample(self.cfg.test_dataset.n_sample_model_point).astype(np.float32) + radius = np.max(np.linalg.norm(model_points, axis=1)) + all_tem, all_tem_pts, all_tem_choose = self.get_templates(obj) + with torch.no_grad(): + all_tem_pts, all_tem_feat = self.model.feature_extraction.get_obj_feats(all_tem, all_tem_pts, all_tem_choose) + self.cad_cache[obj] = { + 'radius': radius, + 'mesh': mesh, + 'model_points': model_points, + 'all_tem_pts': all_tem_pts, + 'all_tem_feat': all_tem_feat, + } + self.all_mesh_dict = all_mesh_dict + print("Pose Estimator S6 load meshes:", self.all_mesh_dict.keys()) + + + def 
get_data(self, rgb, mask, depth, obj, scale): + + if scale is not None: + self.load_all_mesh(scale, rescale_obj=obj) + + rgb_ori = rgb.copy() + + if scale is not None: + obj_mesh = self.cad_cache['tmp'] + else: + obj_mesh = self.cad_cache[obj] + + depth = depth.astype(np.float64) + depth /= 1000 + whole_pts = get_point_cloud_from_depth(depth, self.K) + mask_ori = mask.copy() + mask = np.logical_and(mask > 0, depth > 0) + + bbox = get_bbox(mask) + + + + y1, y2, x1, x2 = bbox + + mask = mask[y1:y2, x1:x2] + choose = mask.astype(np.float32).flatten().nonzero()[0] + + cloud = whole_pts.copy()[y1:y2, x1:x2, :].reshape(-1, 3)[choose, :] + center = np.mean(cloud, axis=0) + tmp_cloud = cloud - center[None, :] + flag = np.linalg.norm(tmp_cloud, axis=1) < obj_mesh['radius'] * 1.2 + choose = choose[flag] + cloud = cloud[flag] + obs_pts = self.cfg.test_dataset.n_sample_observed_point + if len(choose) <= obs_pts: + try: + choose_idx = np.random.choice(np.arange(len(choose)), obs_pts) + except ValueError: + return None + else: + choose_idx = np.random.choice(np.arange(len(choose)), obs_pts, replace=False) + choose = choose[choose_idx] + cloud = cloud[choose_idx] + + + img_size = self.cfg.test_dataset.img_size + rgb = rgb.copy()[y1:y2, x1:x2, :][:,:,::-1] + if self.cfg.test_dataset.rgb_mask_flag: + rgb = rgb * (mask[:,:,None]>0).astype(np.uint8) + rgb = cv2.resize(rgb, (img_size, img_size), interpolation=cv2.INTER_LINEAR) + rgb = rgb_transform(np.array(rgb)) + rgb_choose = get_resize_rgb_choose(choose, [y1, y2, x1, x2], img_size) + + n_try = 1 + + ret_dict = { + 'pts': torch.FloatTensor(np.array([cloud]*n_try)).cuda(), + 'rgb': torch.unsqueeze(rgb, 0).repeat(n_try,1, 1, 1).cuda(), + 'rgb_choose': torch.IntTensor(np.array([rgb_choose]*n_try)).long().cuda(), + 'model': torch.FloatTensor(obj_mesh['model_points']).unsqueeze(0).repeat(n_try, 1, 1).cuda(), + 'K': torch.FloatTensor(self.K).unsqueeze(0).repeat(n_try, 1, 1).cuda(), + 'dense_po' : obj_mesh['all_tem_pts'].repeat(n_try,1,1), 
+ 'dense_fo' : obj_mesh['all_tem_feat'].repeat(n_try,1,1), + "mesh": obj_mesh['mesh'], + "depth": torch.FloatTensor(depth).cuda(), + 'mask': torch.FloatTensor(mask).cuda(), + 'bbox': torch.FloatTensor([y1, y2, x1, x2]), + 'rgb_ori': torch.FloatTensor(rgb_ori).cuda(), + 'mask_ori': torch.FloatTensor(mask_ori).cuda(), + } + + return ret_dict + + + def inference(self, rgb, mask, depth, obj, scale = None): + + ret_dict = self.get_data(rgb, mask, depth, obj, scale) + + if ret_dict == None: + return None, None, None,None, None + + out, points = self.model(ret_dict) + + if out['pred_pose_score']!= None: + pose_scores = out['pred_pose_score'].detach().cpu().numpy() + else: + pose_scores = None + + pred_rot = out['pred_R'].detach().cpu().numpy() + pred_trans = out['pred_t'].detach().cpu().numpy() + + # pred_rot = out['init_R'].detach().cpu().numpy() + # pred_trans = out['init_t'].detach().cpu().numpy() + # pred_trans = pred_trans * (self.cad_cache[obj]['radius'] + 1e-6) + + vis = rgb.copy() + if self.vis: + + # vis = visualize(rgb, pred_rot, pred_trans, self.cad_cache[obj]["model_points"], ret_dict["K"].cpu(), f"SAM6D_{obj}.png") + if scale is not None: + vis = visualize(rgb, pred_rot, pred_trans, self.cad_cache['tmp']["model_points"], ret_dict["K"].cpu(), f"SAM6D_{obj}.png") + else: + vis = visualize(rgb, pred_rot, pred_trans, self.cad_cache[obj]["model_points"], ret_dict["K"].cpu(), f"SAM6D_{obj}.png") + + + # return pose_scores, pred_rot, pred_trans,vis + return pose_scores, pred_rot, pred_trans,vis, points + \ No newline at end of file diff --git a/project/grasp_box/submodules/SAM6D/prepare.sh b/project/grasp_box/submodules/SAM6D/prepare.sh new file mode 100644 index 0000000000000000000000000000000000000000..1bb749716535be3fdac2a232fcd5a9754809f390 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/prepare.sh @@ -0,0 +1,19 @@ +### Create conda environment +conda env create -f environment.yaml +conda activate sam6d + +### Install pointnet2 +cd 
class BOPTestset():
    """BOP-format test set pairing external detections (ISM output) with RGB-D crops.

    Each item corresponds to one (scene_id, image_id) pair and stacks all
    detections above `seg_filter_score` into batched tensors for the pose
    estimation model.
    """

    def __init__(self, cfg, eval_dataset_name='ycbv', detetion_path=None):
        # NOTE: `detetion_path` (sic — original spelling kept) is required;
        # it points at the JSON produced by the segmentation model.
        assert detetion_path is not None

        self.cfg = cfg
        self.dataset = eval_dataset_name
        self.data_dir = cfg.data_dir
        self.rgb_mask_flag = cfg.rgb_mask_flag
        self.img_size = cfg.img_size
        self.n_sample_observed_point = cfg.n_sample_observed_point
        self.n_sample_model_point = cfg.n_sample_model_point
        self.n_sample_template_point = cfg.n_sample_template_point
        self.n_template_view = cfg.n_template_view
        self.minimum_n_point = cfg.minimum_n_point
        self.seg_filter_score = cfg.seg_filter_score
        # ImageNet normalization applied to every RGB crop.
        self.transform = transforms.Compose([transforms.ToTensor(),
                                             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                                                  std=[0.229, 0.224, 0.225])])

        # T-LESS ships CAD models under a different folder name.
        if eval_dataset_name == 'tless':
            model_path = 'models_cad'
        else:
            model_path = 'models'
        self.template_folder = os.path.join(cfg.template_dir, eval_dataset_name)

        self.data_folder = os.path.join(self.data_dir, eval_dataset_name, 'test')
        self.model_folder = os.path.join(self.data_dir, eval_dataset_name, model_path)
        obj, obj_ids = load_objs(self.model_folder, self.template_folder, sample_num=self.n_sample_model_point, n_template_view=self.n_template_view)
        # Map BOP object id -> index into self.objects.
        obj_idxs = {obj_id: idx for idx, obj_id in enumerate(obj_ids)}
        self.objects = obj
        self.obj_idxs = obj_idxs

        with open(detetion_path) as f:
            dets = json.load(f)  # keys: scene_id, image_id, category_id, bbox, score, segmentation

        # Group detections by zero-padded "scene_image" key, preserving order.
        self.det_keys = []
        self.dets = {}
        for det in tqdm(dets, 'processing detection results'):
            scene_id = det['scene_id']  # data type: int
            img_id = det['image_id']  # data type: int
            key = str(scene_id).zfill(6) + '_' + str(img_id).zfill(6)
            if key not in self.det_keys:
                self.det_keys.append(key)
                self.dets[key] = []
            self.dets[key].append(det)
        del dets
        print('testing on {} images on {}...'.format(len(self.det_keys), eval_dataset_name))

    def __len__(self):
        # One item per (scene, image) pair, not per detection.
        return len(self.det_keys)

    def __getitem__(self, index):
        dets = self.dets[self.det_keys[index]]

        # Keep only confident detections that yield a valid instance crop.
        instances = []
        for det in dets:
            if det['score'] > self.seg_filter_score:
                instance = self.get_instance(det)
                if instance is not None:
                    instances.append(instance)

        # NOTE(review): if no detection survives filtering, `instances[0]`
        # raises IndexError — confirm this cannot happen for the eval split.
        ret_dict = {}
        for key in instances[0].keys():
            ret_dict[key] = torch.stack([instance[key] for instance in instances])
        # Key layout is "SSSSSS_IIIIII": chars 0-6 scene, 7-13 image.
        ret_dict['scene_id'] = torch.IntTensor([int(self.det_keys[index][0:6])])
        ret_dict['img_id'] = torch.IntTensor([int(self.det_keys[index][7:13])])
        ret_dict['seg_time'] = torch.FloatTensor([dets[0]['time']])
        return ret_dict

    def get_instance(self, data):
        """Build model inputs for one detection; returns None when the masked
        region is too small or too sparse after outlier filtering."""
        scene_id = data['scene_id']  # data type: int
        img_id = data['image_id']  # data type: int
        obj_id = data['category_id']  # data type: int
        bbox = data['bbox']  # list, len:4
        seg = data['segmentation']  # keys: counts, size
        score = data['score']

        scene_folder = os.path.join(self.data_folder, f'{scene_id:06d}')
        scene_camera = json.load(open(os.path.join(scene_folder, 'scene_camera.json')))
        K = np.array(scene_camera[str(img_id)]['cam_K']).reshape((3, 3)).copy()
        depth_scale = scene_camera[str(img_id)]['depth_scale']
        inst = dict(scene_id=scene_id, img_id=img_id, data_folder=self.data_folder)

        obj_idx = self.obj_idxs[obj_id]
        model_points, _ = get_model_info(self.objects[obj_idx])

        # depth
        depth = get_bop_depth_map(inst) * depth_scale

        # mask: segmentation may be polygon (needs frPyObjects) or already RLE.
        h, w = seg['size']
        try:
            rle = cocomask.frPyObjects(seg, h, w)
        except:
            rle = seg
        mask = cocomask.decode(rle)
        mask = np.logical_and(mask > 0, depth > 0)
        if np.sum(mask) > self.minimum_n_point:
            bbox = get_bbox(mask)
            y1, y2, x1, x2 = bbox
        else:
            return None
        mask = mask[y1:y2, x1:x2]
        choose = mask.astype(np.float32).flatten().nonzero()[0]

        # pts: back-project the crop and drop points far from the centroid
        # (beyond 0.6x the object diameter).
        cloud = get_point_cloud_from_depth(depth, K, [y1, y2, x1, x2])
        cloud = cloud.reshape(-1, 3)[choose, :]
        center = np.mean(cloud, axis=0)
        tmp_cloud = cloud - center[None, :]
        flag = np.linalg.norm(tmp_cloud, axis=1) < self.objects[obj_idx].diameter * 0.6
        if np.sum(flag) < self.minimum_n_point:
            return None
        choose = choose[flag]
        cloud = cloud[flag]

        # Sample a fixed point count (with replacement when too few remain).
        if len(choose) <= self.n_sample_observed_point:
            choose_idx = np.random.choice(np.arange(len(choose)), self.n_sample_observed_point)
        else:
            choose_idx = np.random.choice(np.arange(len(choose)), self.n_sample_observed_point, replace=False)
        choose = choose[choose_idx]
        cloud = cloud[choose_idx]

        # rgb
        rgb = get_bop_image(inst, [y1, y2, x1, x2], self.img_size, mask if self.rgb_mask_flag else None)
        rgb = self.transform(np.array(rgb))
        rgb_choose = get_resize_rgb_choose(choose, [y1, y2, x1, x2], self.img_size)

        ret_dict = {}
        ret_dict['pts'] = torch.FloatTensor(cloud)
        ret_dict['rgb'] = torch.FloatTensor(rgb)
        ret_dict['rgb_choose'] = torch.IntTensor(rgb_choose).long()
        ret_dict['obj'] = torch.IntTensor([obj_idx]).long()
        ret_dict['model'] = torch.FloatTensor(model_points)
        ret_dict['obj_id'] = torch.IntTensor([obj_id])
        ret_dict['score'] = torch.FloatTensor([score])
        return ret_dict

    def _get_template(self, obj, tem_index=1):
        """Load one template view of `obj`: normalized RGB crop, sampled pixel
        indices in the resized crop, and their 3D coordinates."""
        rgb, mask, xyz = obj.get_template(tem_index)

        bbox = get_bbox(mask)
        y1, y2, x1, x2 = bbox
        mask = mask[y1:y2, x1:x2]

        # Channel-order flip then crop, matching `get_instance`'s convention.
        rgb = rgb[:, :, ::-1][y1:y2, x1:x2, :]
        if self.rgb_mask_flag:
            rgb = rgb * (mask[:, :, None] > 0).astype(np.uint8)

        rgb = cv2.resize(rgb, (self.img_size, self.img_size), interpolation=cv2.INTER_LINEAR)
        rgb = self.transform(np.array(rgb))

        choose = (mask > 0).astype(np.float32).flatten().nonzero()[0]
        if len(choose) <= self.n_sample_template_point:
            choose_idx = np.random.choice(np.arange(len(choose)), self.n_sample_template_point)
        else:
            choose_idx = np.random.choice(np.arange(len(choose)), self.n_sample_template_point, replace=False)
        choose = choose[choose_idx]
        xyz = xyz[y1:y2, x1:x2, :].reshape((-1, 3))[choose, :]

        rgb_choose = get_resize_rgb_choose(choose, [y1, y2, x1, x2], self.img_size)
        return rgb, rgb_choose, xyz

    def get_templates(self):
        """Load every template view for every object, batched per view on GPU."""
        n_template_view = self.n_template_view
        all_tem_rgb = [[] for i in range(n_template_view)]
        all_tem_choose = [[] for i in range(n_template_view)]
        all_tem_pts = [[] for i in range(n_template_view)]

        for obj in self.objects:
            for i in range(n_template_view):
                tem_rgb, tem_choose, tem_pts = self._get_template(obj, i)
                all_tem_rgb[i].append(torch.FloatTensor(tem_rgb))
                all_tem_choose[i].append(torch.IntTensor(tem_choose).long())
                all_tem_pts[i].append(torch.FloatTensor(tem_pts))

        # Stack across objects so index i holds view i for all objects.
        for i in range(n_template_view):
            all_tem_rgb[i] = torch.stack(all_tem_rgb[i]).cuda()
            all_tem_choose[i] = torch.stack(all_tem_choose[i]).cuda()
            all_tem_pts[i] = torch.stack(all_tem_pts[i]).cuda()

        return all_tem_rgb, all_tem_pts, all_tem_choose
class Dataset():
    """MegaPose (GSO + ShapeNetCore) web-shard training dataset.

    Samples one visible instance per image, builds an observed point cloud
    plus two object templates, and applies color/rotation/translation
    augmentation. Items failing validity checks are resampled in __getitem__.
    """

    def __init__(self, cfg, num_img_per_epoch=-1):
        self.cfg = cfg

        self.data_dir = cfg.data_dir
        # -1 means "use the full dataset length" (resolved lazily in reset()).
        self.num_img_per_epoch = num_img_per_epoch
        self.min_visib_px = cfg.min_px_count_visib
        self.min_visib_frac = cfg.min_visib_fract
        self.dilate_mask = cfg.dilate_mask
        self.rgb_mask_flag = cfg.rgb_mask_flag
        self.shift_range = cfg.shift_range
        self.img_size = cfg.img_size
        self.n_sample_observed_point = cfg.n_sample_observed_point
        self.n_sample_model_point = cfg.n_sample_model_point
        self.n_sample_template_point = cfg.n_sample_template_point

        self.data_paths = [
            os.path.join('MegaPose-GSO', 'train_pbr_web'),
            os.path.join('MegaPose-ShapeNetCore', 'train_pbr_web')
        ]
        self.model_paths = [
            os.path.join(self.data_dir, 'MegaPose-GSO', 'Google_Scanned_Objects'),
            os.path.join(self.data_dir, 'MegaPose-ShapeNetCore', 'shapenetcorev2'),
        ]
        self.templates_paths = [
            os.path.join(self.data_dir, 'MegaPose-GSO', 'templates'),
            os.path.join(self.data_dir, 'MegaPose-ShapeNetCore', 'templates'),
        ]

        # Resolve each sample key to its shard directory.
        self.dataset_paths = []
        for f in self.data_paths:
            with open(os.path.join(self.data_dir, f, 'key_to_shard.json')) as fr:
                key_shards = json.load(fr)

            for k in key_shards.keys():
                path_name = os.path.join(f, "shard-" + f"{key_shards[k]:06d}", k)
                self.dataset_paths.append(path_name)

        self.length = len(self.dataset_paths)
        print('Total {} images .....'.format(self.length))

        # model_info[0] = GSO metadata, model_info[1] = ShapeNetCore metadata.
        with open(os.path.join(self.data_dir, self.data_paths[0], 'gso_models.json')) as fr:
            self.model_info = [json.load(fr)]
        with open(os.path.join(self.data_dir, self.data_paths[1], 'shapenet_models.json')) as fr:
            self.model_info.append(json.load(fr))

        # gdrnpp aug
        # NOTE: the pipeline is built from a constant string via eval(); the
        # string is hard-coded here (not user input).
        aug_code = (
            "Sequential(["
            "Sometimes(0.5, CoarseDropout( p=0.2, size_percent=0.05) ),"
            "Sometimes(0.4, GaussianBlur((0., 3.))),"
            "Sometimes(0.3, pillike.EnhanceSharpness(factor=(0., 50.))),"
            "Sometimes(0.3, pillike.EnhanceContrast(factor=(0.2, 50.))),"
            "Sometimes(0.5, pillike.EnhanceBrightness(factor=(0.1, 6.))),"
            "Sometimes(0.3, pillike.EnhanceColor(factor=(0., 20.))),"
            "Sometimes(0.5, Add((-25, 25), per_channel=0.3)),"
            "Sometimes(0.3, Invert(0.2, per_channel=True)),"
            "Sometimes(0.5, Multiply((0.6, 1.4), per_channel=0.5)),"
            "Sometimes(0.5, Multiply((0.6, 1.4))),"
            "Sometimes(0.1, AdditiveGaussianNoise(scale=10, per_channel=True)),"
            "Sometimes(0.5, iaa.contrast.LinearContrast((0.5, 2.2), per_channel=0.3)),"
            "Sometimes(0.5, Grayscale(alpha=(0.0, 1.0))),"
            "], random_order=True)"
            # cosy+aae
        )
        self.color_augmentor = eval(aug_code)
        # ImageNet normalization applied to every RGB crop.
        self.transform = transforms.Compose([transforms.ToTensor(),
                                             transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                                                  std=[0.229, 0.224, 0.225])])

    def __len__(self):
        return self.length if self.num_img_per_epoch == -1 else self.num_img_per_epoch

    def reset(self):
        """Resample the per-epoch image index list.

        Must be called before indexing — __getitem__ reads self.img_idx,
        which only exists after reset().
        """
        if self.num_img_per_epoch == -1:
            self.num_img_per_epoch = self.length

        num_img = self.length
        # With replacement when the epoch is longer than the dataset.
        if num_img <= self.num_img_per_epoch:
            self.img_idx = np.random.choice(num_img, self.num_img_per_epoch)
        else:
            self.img_idx = np.random.choice(num_img, self.num_img_per_epoch, replace=False)

    def __getitem__(self, index):
        while True:  # return valid data for train
            processed_data = self.read_data(self.img_idx[index])
            if processed_data is None:
                # Invalid sample (missing files / too few points): retry
                # with a different random index.
                index = self._rand_another(index)
                continue
            return processed_data

    def _rand_another(self, idx):
        # Uniformly pick any index other than `idx`.
        pool = [i for i in range(self.__len__()) if i != idx]
        return np.random.choice(pool)

    def read_data(self, index):
        """Load and augment one training sample; returns None when invalid."""
        path_head = self.dataset_paths[index]
        # Strip the 'MegaPose-' prefix to get 'GSO' or 'ShapeNetCore'.
        dataset_type = path_head.split('/')[0][9:]
        if not self._check_path(os.path.join(self.data_dir, path_head)):
            return None

        # gt_info: pick one instance that is visible enough.
        gt_info = io_load_gt(open(os.path.join(self.data_dir, path_head + '.gt_info.json'), 'rb'))
        valid_idx = []
        for k, item in enumerate(gt_info):
            if item['px_count_valid'] >= self.min_visib_px and item['visib_fract'] >= self.min_visib_frac:
                valid_idx.append(k)
        if len(valid_idx) == 0:
            return None
        num_instance = len(valid_idx)
        valid_idx = valid_idx[np.random.randint(0, num_instance)]
        gt_info = gt_info[valid_idx]
        # bbox = gt_info['bbox_visib']
        # x1, y1, x2, y2 = bbox[0], bbox[1], bbox[0]+bbox[2], bbox[1]+bbox[3]

        # gt: object id and ground-truth pose (translation converted mm -> m).
        gt = io_load_gt(open(os.path.join(self.data_dir, path_head + '.gt.json'), 'rb'))[valid_idx]
        obj_id = gt['obj_id']
        target_R = np.array(gt['cam_R_m2c']).reshape(3, 3).astype(np.float32)
        target_t = np.array(gt['cam_t_m2c']).reshape(3).astype(np.float32) / 1000.0

        # camera
        camera = json.load(open(os.path.join(self.data_dir, path_head + '.camera.json'), 'rb'))
        K = np.array(camera['cam_K']).reshape(3, 3)

        # template: two views of the same object.
        tem1_rgb, tem1_choose, tem1_pts = self._get_template(dataset_type, obj_id, 0)
        tem2_rgb, tem2_choose, tem2_pts = self._get_template(dataset_type, obj_id, 1)
        if tem1_rgb is None:
            return None

        # mask (optionally dilated as augmentation, 50% of the time).
        mask = io_load_masks(open(os.path.join(self.data_dir, path_head + '.mask_visib.json'), 'rb'))[valid_idx]
        if np.sum(mask) == 0:
            return None
        if self.dilate_mask and np.random.rand() < 0.5:
            mask = np.array(mask > 0).astype(np.uint8)
            mask = cv2.dilate(mask, cv2.getStructuringElement(cv2.MORPH_CROSS, (3, 3)), iterations=4)

        bbox = get_bbox(mask > 0)
        y1, y2, x1, x2 = bbox
        mask = mask[y1:y2, x1:x2]
        choose = mask.astype(np.float32).flatten().nonzero()[0]

        # depth -> observed point cloud for the masked crop.
        depth = load_im(os.path.join(self.data_dir, path_head + '.depth.png')).astype(np.float32)
        depth = depth * camera['depth_scale'] / 1000.0
        pts = get_point_cloud_from_depth(depth, K, [y1, y2, x1, x2])
        pts = pts.reshape(-1, 3)[choose, :]

        # Transform observed points into the object frame and drop points
        # farther than 1.2x the template radius.
        target_pts = (pts - target_t[None, :]) @ target_R
        tem_pts = np.concatenate([tem1_pts, tem2_pts], axis=0)
        radius = np.max(np.linalg.norm(tem_pts, axis=1))
        flag = np.linalg.norm(target_pts, axis=1) < radius * 1.2  # for outlier removal

        pts = pts[flag]
        choose = choose[flag]

        if len(choose) < 32:
            return None

        # Sample a fixed point count (with replacement when too few remain).
        if len(choose) <= self.n_sample_observed_point:
            choose_idx = np.random.choice(np.arange(len(choose)), self.n_sample_observed_point)
        else:
            choose_idx = np.random.choice(np.arange(len(choose)), self.n_sample_observed_point, replace=False)
        choose = choose[choose_idx]
        pts = pts[choose_idx]

        # rgb (color-augmented 80% of the time).
        rgb = load_im(os.path.join(self.data_dir, path_head + '.rgb.jpg')).astype(np.uint8)
        rgb = rgb[..., ::-1][y1:y2, x1:x2, :]
        if np.random.rand() < 0.8:
            rgb = self.color_augmentor.augment_image(rgb)
        if self.rgb_mask_flag:
            rgb = rgb * (mask[:, :, None] > 0).astype(np.uint8)
        rgb = cv2.resize(rgb, (self.img_size, self.img_size), interpolation=cv2.INTER_LINEAR)
        rgb = self.transform(np.array(rgb))
        rgb_choose = get_resize_rgb_choose(choose, [y1, y2, x1, x2], self.img_size)

        # rotation aug: rotate templates and the GT rotation consistently.
        rand_R = get_random_rotation()
        tem1_pts = tem1_pts @ rand_R
        tem2_pts = tem2_pts @ rand_R
        target_R = target_R @ rand_R

        # translation aug: shared shift plus small per-point jitter.
        add_t = np.random.uniform(-self.shift_range, self.shift_range, (1, 3))
        target_t = target_t + add_t[0]
        add_t = add_t + 0.001 * np.random.randn(pts.shape[0], 3)
        pts = np.add(pts, add_t)

        ret_dict = {
            'pts': torch.FloatTensor(pts),
            'rgb': torch.FloatTensor(rgb),
            'rgb_choose': torch.IntTensor(rgb_choose).long(),
            'translation_label': torch.FloatTensor(target_t),
            'rotation_label': torch.FloatTensor(target_R),
            'tem1_rgb': torch.FloatTensor(tem1_rgb),
            'tem1_choose': torch.IntTensor(tem1_choose).long(),
            'tem1_pts': torch.FloatTensor(tem1_pts),
            'tem2_rgb': torch.FloatTensor(tem2_rgb),
            'tem2_choose': torch.IntTensor(tem2_choose).long(),
            'tem2_pts': torch.FloatTensor(tem2_pts),
            'K': torch.FloatTensor(K),
        }
        return ret_dict

    def _get_template(self, type, obj_id, tem_index=1):
        """Load one template view for `obj_id` from the GSO or ShapeNetCore
        template folders; returns (None, None, None) when files are missing."""
        if type == 'GSO':
            info = self.model_info[0][obj_id]
            assert info['obj_id'] == obj_id
            file_base = os.path.join(
                self.templates_paths[0],
                info['gso_id'],
            )

        elif type == 'ShapeNetCore':
            info = self.model_info[1][obj_id]
            assert info['obj_id'] == obj_id
            file_base = os.path.join(
                self.templates_paths[1],
                info['shapenet_synset_id'],
                info['shapenet_source_id'],
            )

        rgb_path = os.path.join(file_base, 'rgb_' + str(tem_index) + '.png')
        xyz_path = os.path.join(file_base, 'xyz_' + str(tem_index) + '.npy')
        mask_path = os.path.join(file_base, 'mask_' + str(tem_index) + '.png')
        if not os.path.exists(rgb_path):
            return None, None, None

        # mask
        mask = load_im(mask_path).astype(np.uint8) == 255
        bbox = get_bbox(mask)
        y1, y2, x1, x2 = bbox
        mask = mask[y1:y2, x1:x2]

        # rgb (color-augmented 80% of the time, like the observed image).
        rgb = load_im(rgb_path).astype(np.uint8)[..., ::-1][y1:y2, x1:x2, :]
        if np.random.rand() < 0.8:
            rgb = self.color_augmentor.augment_image(rgb)
        if self.rgb_mask_flag:
            rgb = rgb * (mask[:, :, None] > 0).astype(np.uint8)
        rgb = cv2.resize(rgb, (self.img_size, self.img_size), interpolation=cv2.INTER_LINEAR)
        rgb = self.transform(np.array(rgb))

        # xyz: sample foreground pixels and scale coordinates by 0.1
        # (unit conversion — presumably dm -> m; TODO confirm against the
        # template generation pipeline).
        choose = mask.astype(np.float32).flatten().nonzero()[0]
        if len(choose) <= self.n_sample_template_point:
            choose_idx = np.random.choice(np.arange(len(choose)), self.n_sample_template_point)
        else:
            choose_idx = np.random.choice(np.arange(len(choose)), self.n_sample_template_point, replace=False)
        choose = choose[choose_idx]

        xyz = np.load(xyz_path).astype(np.float32)[y1:y2, x1:x2, :]
        xyz = xyz.reshape((-1, 3))[choose, :] * 0.1
        choose = get_resize_rgb_choose(choose, [y1, y2, x1, x2], self.img_size)

        return rgb, choose, xyz

    def _check_path(self, path_head):
        """Return True only if every per-sample file exists for this key."""
        keys = [
            '.camera.json',
            '.depth.png',
            '.gt_info.json',
            '.gt.json',
            '.mask_visib.json',
            '.rgb.jpg'
        ]

        for k in keys:
            if not os.path.exists(path_head + k):
                return False
        return True
default="pose_estimation_model", + help="path to model file") + parser.add_argument("--config", + type=str, + default="config/base.yaml", + help="path to config file, different config.yaml use different config") + parser.add_argument("--iter", + type=int, + default=600000, + help="epoch num. for testing") + parser.add_argument("--exp_id", + type=int, + default=0, + help="") + + # input + parser.add_argument("--output_dir", nargs="?", help="Path to root directory of the output") + parser.add_argument("--cad_path", nargs="?", help="Path to CAD(mm)") + parser.add_argument("--rgb_path", nargs="?", help="Path to RGB image") + parser.add_argument("--depth_path", nargs="?", help="Path to Depth image(mm)") + parser.add_argument("--cam_path", nargs="?", help="Path to camera information") + parser.add_argument("--seg_path", nargs="?", help="Path to segmentation information(generated by ISM)") + parser.add_argument("--det_score_thresh", default=0.2, help="The score threshold of detection") + args_cfg = parser.parse_args() + + return args_cfg + +def init(): + args = get_parser() + exp_name = args.model + '_' + \ + osp.splitext(args.config.split("/")[-1])[0] + '_id' + str(args.exp_id) + log_dir = osp.join("log", exp_name) + + cfg = gorilla.Config.fromfile(args.config) + cfg.exp_name = exp_name + cfg.gpus = args.gpus + cfg.model_name = args.model + cfg.log_dir = log_dir + cfg.test_iter = args.iter + + cfg.output_dir = args.output_dir + cfg.cad_path = args.cad_path + cfg.rgb_path = args.rgb_path + cfg.depth_path = args.depth_path + cfg.cam_path = args.cam_path + cfg.seg_path = args.seg_path + + cfg.det_score_thresh = args.det_score_thresh + gorilla.utils.set_cuda_visible_devices(gpu_ids = cfg.gpus) + + return cfg + + + +from data_utils import ( + load_im, + get_bbox, + get_point_cloud_from_depth, + get_resize_rgb_choose, +) +from draw_utils import draw_detections +import pycocotools.mask as cocomask +import trimesh + +rgb_transform = transforms.Compose([transforms.ToTensor(), + 
transforms.Normalize(mean=[0.485, 0.456, 0.406], + std=[0.229, 0.224, 0.225])]) + +def visualize(rgb, pred_rot, pred_trans, model_points, K, save_path): + img = draw_detections(rgb, pred_rot, pred_trans, model_points, K, color=(255, 0, 0)) + img = Image.fromarray(np.uint8(img)) + img.save(save_path) + prediction = Image.open(save_path) + + # concat side by side in PIL + rgb = Image.fromarray(np.uint8(rgb)) + img = np.array(img) + concat = Image.new('RGB', (img.shape[1] + prediction.size[0], img.shape[0])) + concat.paste(rgb, (0, 0)) + concat.paste(prediction, (img.shape[1], 0)) + return concat + + +def _get_template(path, cfg, tem_index=1): + rgb_path = os.path.join(path, 'rgb_'+str(tem_index)+'.png') + mask_path = os.path.join(path, 'mask_'+str(tem_index)+'.png') + xyz_path = os.path.join(path, 'xyz_'+str(tem_index)+'.npy') + + rgb = load_im(rgb_path).astype(np.uint8) + xyz = np.load(xyz_path).astype(np.float32) / 1000.0 + mask = load_im(mask_path).astype(np.uint8) == 255 + + bbox = get_bbox(mask) + y1, y2, x1, x2 = bbox + mask = mask[y1:y2, x1:x2] + + rgb = rgb[:,:,::-1][y1:y2, x1:x2, :] + if cfg.rgb_mask_flag: + rgb = rgb * (mask[:,:,None]>0).astype(np.uint8) + + rgb = cv2.resize(rgb, (cfg.img_size, cfg.img_size), interpolation=cv2.INTER_LINEAR) + rgb = rgb_transform(np.array(rgb)) + + choose = (mask>0).astype(np.float32).flatten().nonzero()[0] + if len(choose) <= cfg.n_sample_template_point: + choose_idx = np.random.choice(np.arange(len(choose)), cfg.n_sample_template_point) + else: + choose_idx = np.random.choice(np.arange(len(choose)), cfg.n_sample_template_point, replace=False) + choose = choose[choose_idx] + xyz = xyz[y1:y2, x1:x2, :].reshape((-1, 3))[choose, :] + + rgb_choose = get_resize_rgb_choose(choose, [y1, y2, x1, x2], cfg.img_size) + return rgb, rgb_choose, xyz + + +def get_templates(path, cfg): + n_template_view = cfg.n_template_view + all_tem = [] + all_tem_choose = [] + all_tem_pts = [] + + total_nView = 42 + for v in range(n_template_view): + i 
def get_test_data(rgb_path, depth_path, cam_path, cad_path, seg_path, det_score_thresh, cfg):
    """Build batched model inputs for every detection above the score threshold.

    Args:
        rgb_path/depth_path/cam_path/cad_path/seg_path: input file paths.
        det_score_thresh: minimum detection score (may arrive as a string
            from argparse, see below).
        cfg: test-dataset config (sample counts, img_size, rgb_mask_flag).

    Returns:
        (ret_dict, whole_image, whole_pts, model_points, all_dets) where
        ret_dict holds CUDA tensors keyed 'pts'/'rgb'/'rgb_choose'/'score'/
        'model'/'K', each with one row per kept detection.

    Raises:
        ValueError: when no detection survives filtering (the original code
            crashed inside torch.stack([]) with an opaque RuntimeError).
    """
    # argparse declares --det_score_thresh without type=float, so a value
    # passed on the command line arrives as a string; coerce defensively.
    det_score_thresh = float(det_score_thresh)

    with open(seg_path) as f:
        dets_ = json.load(f)  # keys: scene_id, image_id, category_id, bbox, score, segmentation
    dets = [det for det in dets_ if det['score'] > det_score_thresh]
    del dets_

    cam_info = json.load(open(cam_path))
    K = np.array(cam_info['cam_K']).reshape(3, 3)

    whole_image = load_im(rgb_path).astype(np.uint8)
    if len(whole_image.shape) == 2:
        # grayscale input -> replicate to 3 channels
        whole_image = np.concatenate(
            [whole_image[:, :, None], whole_image[:, :, None], whole_image[:, :, None]], axis=2)
    whole_depth = load_im(depth_path).astype(np.float32) * cam_info['depth_scale'] / 1000.0
    whole_pts = get_point_cloud_from_depth(whole_depth, K)

    mesh = trimesh.load_mesh(cad_path)
    # /1000: presumably CAD is in millimeters and depth in meters — confirm
    model_points = mesh.sample(cfg.n_sample_model_point).astype(np.float32) / 1000.0
    radius = np.max(np.linalg.norm(model_points, axis=1))

    all_rgb = []
    all_cloud = []
    all_rgb_choose = []
    all_score = []
    all_dets = []
    for inst in dets:
        seg = inst['segmentation']
        score = inst['score']

        # mask: segmentation may be uncompressed counts or already-encoded RLE
        h, w = seg['size']
        try:
            rle = cocomask.frPyObjects(seg, h, w)
        except Exception:  # narrowed from a bare except: seg is already RLE
            rle = seg
        mask = cocomask.decode(rle)
        mask = np.logical_and(mask > 0, whole_depth > 0)
        if np.sum(mask) <= 32:
            continue  # too few depth-valid pixels to be usable
        y1, y2, x1, x2 = get_bbox(mask)
        mask = mask[y1:y2, x1:x2]
        choose = mask.astype(np.float32).flatten().nonzero()[0]

        # pts: outlier removal against the model radius with a 1.2x margin
        cloud = whole_pts.copy()[y1:y2, x1:x2, :].reshape(-1, 3)[choose, :]
        center = np.mean(cloud, axis=0)
        flag = np.linalg.norm(cloud - center[None, :], axis=1) < radius * 1.2
        if np.sum(flag) < 4:
            continue
        choose = choose[flag]
        cloud = cloud[flag]

        if len(choose) <= cfg.n_sample_observed_point:
            # with replacement when the region is small
            choose_idx = np.random.choice(np.arange(len(choose)), cfg.n_sample_observed_point)
        else:
            choose_idx = np.random.choice(np.arange(len(choose)), cfg.n_sample_observed_point, replace=False)
        choose = choose[choose_idx]
        cloud = cloud[choose_idx]

        # rgb: crop, channel flip ([:,:,::-1]), optional background masking, resize
        rgb = whole_image.copy()[y1:y2, x1:x2, :][:, :, ::-1]
        if cfg.rgb_mask_flag:
            rgb = rgb * (mask[:, :, None] > 0).astype(np.uint8)
        rgb = cv2.resize(rgb, (cfg.img_size, cfg.img_size), interpolation=cv2.INTER_LINEAR)
        rgb = rgb_transform(np.array(rgb))
        rgb_choose = get_resize_rgb_choose(choose, [y1, y2, x1, x2], cfg.img_size)

        all_rgb.append(torch.FloatTensor(rgb))
        all_cloud.append(torch.FloatTensor(cloud))
        all_rgb_choose.append(torch.IntTensor(rgb_choose).long())
        all_score.append(score)
        all_dets.append(inst)

    if not all_cloud:
        # fail fast: torch.stack([]) raises an unhelpful RuntimeError
        raise ValueError('no detection above det_score_thresh survived filtering')

    ret_dict = {}
    ret_dict['pts'] = torch.stack(all_cloud).cuda()
    ret_dict['rgb'] = torch.stack(all_rgb).cuda()
    ret_dict['rgb_choose'] = torch.stack(all_rgb_choose).cuda()
    ret_dict['score'] = torch.FloatTensor(all_score).cuda()

    ninstance = ret_dict['pts'].size(0)
    ret_dict['model'] = torch.FloatTensor(model_points).unsqueeze(0).repeat(ninstance, 1, 1).cuda()
    ret_dict['K'] = torch.FloatTensor(K).unsqueeze(0).repeat(ninstance, 1, 1).cuda()
    return ret_dict, whole_image, whole_pts.reshape(-1, 3), model_points, all_dets
os.path.join(cfg.output_dir, 'templates') + all_tem, all_tem_pts, all_tem_choose = get_templates(tem_path, cfg.test_dataset) + with torch.no_grad(): + all_tem_pts, all_tem_feat = model.feature_extraction.get_obj_feats(all_tem, all_tem_pts, all_tem_choose) + + print("=> loading input data ...") + input_data, img, whole_pts, model_points, detections = get_test_data( + cfg.rgb_path, cfg.depth_path, cfg.cam_path, cfg.cad_path, cfg.seg_path, + cfg.det_score_thresh, cfg.test_dataset + ) + ninstance = input_data['pts'].size(0) + + print("=> running model ...") + with torch.no_grad(): + input_data['dense_po'] = all_tem_pts.repeat(ninstance,1,1) + input_data['dense_fo'] = all_tem_feat.repeat(ninstance,1,1) + out = model(input_data) + + if 'pred_pose_score' in out.keys(): + pose_scores = out['pred_pose_score'] * out['score'] + else: + pose_scores = out['score'] + pose_scores = pose_scores.detach().cpu().numpy() + pred_rot = out['pred_R'].detach().cpu().numpy() + pred_trans = out['pred_t'].detach().cpu().numpy() * 1000 + + print("=> saving results ...") + os.makedirs(f"{cfg.output_dir}/sam6d_results", exist_ok=True) + for idx, det in enumerate(detections): + detections[idx]['score'] = float(pose_scores[idx]) + detections[idx]['R'] = list(pred_rot[idx].tolist()) + detections[idx]['t'] = list(pred_trans[idx].tolist()) + + with open(os.path.join(f"{cfg.output_dir}/sam6d_results", 'detection_pem.json'), "w") as f: + json.dump(detections, f) + + print("=> visualizating ...") + save_path = os.path.join(f"{cfg.output_dir}/sam6d_results", 'vis_pem.png') + valid_masks = pose_scores == pose_scores.max() + K = input_data['K'].detach().cpu().numpy()[valid_masks] + vis_img = visualize(img, pred_rot[valid_masks], pred_trans[valid_masks], model_points*1000, K, save_path) + vis_img.save(save_path) + diff --git a/project/grasp_box/submodules/SAM6D/test_bop.py b/project/grasp_box/submodules/SAM6D/test_bop.py new file mode 100644 index 
0000000000000000000000000000000000000000..e1433b482ac277a71955618814022c35b92ff659 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/test_bop.py @@ -0,0 +1,245 @@ +import gorilla +from tqdm import tqdm +import argparse +import os +import sys +import os.path as osp +import time +import logging +import numpy as np +import random +import importlib +import pickle as cPickle +import json +import torch + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(os.path.join(BASE_DIR, 'provider')) +sys.path.append(os.path.join(BASE_DIR, 'utils')) +sys.path.append(os.path.join(BASE_DIR, 'model')) +sys.path.append(os.path.join(BASE_DIR, 'model', 'pointnet2')) + + +detetion_paths = { + 'ycbv': '../Instance_Segmentation_Model/log/sam/result_ycbv.json', + 'tudl': '../Instance_Segmentation_Model/log/sam/result_tudl.json', + 'tless': '../Instance_Segmentation_Model/log/sam/result_tless.json', + 'lmo': '../Instance_Segmentation_Model/log/sam/result_lmo.json', + 'itodd': '../Instance_Segmentation_Model/log/sam/result_itodd.json', + 'icbin': '../Instance_Segmentation_Model/log/sam/result_icbin.json', + 'hb': '../Instance_Segmentation_Model/log/sam/result_hb.json' +} + + +def get_parser(): + parser = argparse.ArgumentParser( + description="Pose Estimation") + + parser.add_argument("--gpus", + type=str, + default="0", + help="index of gpu") + parser.add_argument("--model", + type=str, + default="pose_estimation_model", + help="name of model") + parser.add_argument("--config", + type=str, + default="config/base.yaml", + help="path to config file") + parser.add_argument("--dataset", + type=str, + default="all", + help="") + parser.add_argument("--checkpoint_path", + type=str, + default="none", + help="path to checkpoint file") + parser.add_argument("--iter", + type=int, + default=0, + help="iter num. 
def test(model, cfg, save_path, dataset_name, detetion_path):
    """Run pose estimation over one BOP test set and write BOP-format results.

    Args:
        model: the pose-estimation network (moved to eval mode here).
        cfg: full config; reads cfg.test_dataloader and cfg.test_dataset.
        save_path: output CSV path; one line per predicted instance.
        dataset_name: BOP dataset key (e.g. 'ycbv').
        detetion_path: path to the ISM detection JSON for this dataset.
            (NOTE(review): 'detetion' is a typo for 'detection' — kept,
            callers pass it positionally.)
    """
    model.eval()
    bs = cfg.test_dataloader.bs  # instance mini-batch size within one image

    # build dataloader
    dataset = importlib.import_module(cfg.test_dataset.name)
    dataset = dataset.BOPTestset(cfg.test_dataset, dataset_name, detetion_path)
    # NOTE(review): 'dataloder' is a typo for 'dataloader' — kept as-is.
    # batch_size=1: each DataLoader item is one image carrying a variable
    # number of detected instances in dim 1.
    dataloder = torch.utils.data.DataLoader(
        dataset,
        batch_size=1,
        num_workers=cfg.test_dataloader.num_workers,
        shuffle=cfg.test_dataloader.shuffle,
        sampler=None,
        drop_last=cfg.test_dataloader.drop_last,
        pin_memory=cfg.test_dataloader.pin_memory
    )

    # prepare for target objects: template features are computed once and
    # indexed per-instance by object id below
    all_tem, all_tem_pts, all_tem_choose = dataset.get_templates()
    with torch.no_grad():
        dense_po, dense_fo = model.feature_extraction.get_obj_feats(all_tem, all_tem_pts, all_tem_choose)

    lines = []
    with tqdm(total=len(dataloder)) as t:
        for i, data in enumerate(dataloder):
            # synchronize so the timing below measures completed GPU work
            torch.cuda.synchronize()
            end = time.time()

            for key in data:
                data[key] = data[key].cuda()
            n_instance = data['pts'].size(1)
            n_batch = int(np.ceil(n_instance/bs))

            pred_Rs = []
            pred_Ts = []
            pred_scores = []
            # process the image's instances in chunks of bs to bound memory
            for j in range(n_batch):
                start_idx = j * bs
                # last chunk may be smaller than bs
                end_idx = n_instance if j == n_batch-1 else (j+1) * bs
                obj = data['obj'][0][start_idx:end_idx].reshape(-1)

                # process inputs ([0] drops the batch_size=1 image dim)
                inputs = {}
                inputs['pts'] = data['pts'][0][start_idx:end_idx].contiguous()
                inputs['rgb'] = data['rgb'][0][start_idx:end_idx].contiguous()
                inputs['rgb_choose'] = data['rgb_choose'][0][start_idx:end_idx].contiguous()
                inputs['model'] = data['model'][0][start_idx:end_idx].contiguous()
                # gather each instance's object template features by object id
                inputs['dense_po'] = dense_po[obj].contiguous()
                inputs['dense_fo'] = dense_fo[obj].contiguous()

                # make predictions
                with torch.no_grad():
                    end_points = model(inputs)
                pred_Rs.append(end_points['pred_R'])
                pred_Ts.append(end_points['pred_t'])
                pred_scores.append(end_points['pred_pose_score'])

            # flatten rotations to 9 values per instance; * 1000 converts
            # translations to millimeters for the BOP result format
            pred_Rs = torch.cat(pred_Rs, dim=0).reshape(-1, 9).detach().cpu().numpy()
            pred_Ts = torch.cat(pred_Ts, dim=0).detach().cpu().numpy() * 1000
            # final score = pose score * detection score
            pred_scores = torch.cat(pred_scores, dim=0) * data['score'][0,:,0]
            pred_scores = pred_scores.detach().cpu().numpy()
            image_time = time.time() - end

            # write results in BOP CSV format:
            # scene_id,im_id,obj_id,score,R (9 floats),t (3 floats),time
            scene_id = data['scene_id'].item()
            img_id = data['img_id'].item()
            # reported time includes the segmentation stage's time
            image_time += data['seg_time'].item()
            for k in range(n_instance):
                line = ','.join((
                    str(scene_id),
                    str(img_id),
                    str(data['obj_id'][0][k].item()),
                    str(pred_scores[k]),
                    ' '.join((str(v) for v in pred_Rs[k])),
                    ' '.join((str(v) for v in pred_Ts[k])),
                    f'{image_time}\n',
                ))
                lines.append(line)


            t.set_description(
                "Test [{}/{}]".format(i+1, len(dataloder))
            )
            t.update(1)

    with open(save_path, 'w+') as f:
        f.writelines(lines)
torch.nn.DataParallel(model, range(len(cfg.gpus.split(",")))) + model = model.cuda() + if cfg.checkpoint_path == 'none': + checkpoint = os.path.join(cfg.log_dir, 'checkpoint_iter' + str(cfg.test_iter).zfill(6) + '.pth') + else: + checkpoint = cfg.checkpoint_path + gorilla.solver.load_checkpoint(model=model, filename=checkpoint) + + + if cfg.dataset == 'all': + datasets = ['ycbv', 'tudl', 'lmo', 'icbin', 'tless', 'itodd' , 'hb'] + for dataset_name in datasets: + print('begining evaluation on {} ...'.format(dataset_name)) + + save_path = os.path.join(cfg.log_dir, dataset_name + '_eval_iter' + str(cfg.test_iter).zfill(6)) + if not os.path.isdir(save_path): + os.makedirs(save_path) + save_path = os.path.join(save_path,'result_' + dataset_name +'.csv') + test(model, cfg, save_path, dataset_name, detetion_paths[dataset_name]) + + print('saving to {} ...'.format(save_path)) + print('finishing evaluation on {} ...'.format(dataset_name)) + + else: + dataset_name = cfg.dataset + print('begining evaluation on {} ...'.format(dataset_name)) + + save_path = os.path.join(cfg.log_dir, dataset_name + '_eval_iter' + str(cfg.test_iter).zfill(6)) + if not os.path.isdir(save_path): + os.makedirs(save_path) + save_path = os.path.join(save_path,'result_' + dataset_name +'.csv') + test(model, cfg, save_path, dataset_name, detetion_paths[dataset_name]) + + print('saving to {} ...'.format(save_path)) + print('finishing evaluation on {} ...'.format(dataset_name)) + + + + + diff --git a/project/grasp_box/submodules/SAM6D/train.py b/project/grasp_box/submodules/SAM6D/train.py new file mode 100644 index 0000000000000000000000000000000000000000..70acf3264a631a248464bc88569dba041fafb200 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/train.py @@ -0,0 +1,140 @@ + +import gorilla +from tqdm import tqdm +import argparse +import os +import sys +import os.path as osp +import time +import logging +import numpy as np +import random +import importlib + +import torch +from torch.autograd import 
Variable +import torch.optim as optim + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(os.path.join(BASE_DIR, 'provider')) +sys.path.append(os.path.join(BASE_DIR, 'utils')) +sys.path.append(os.path.join(BASE_DIR, 'model')) +sys.path.append(os.path.join(BASE_DIR, 'model', 'pointnet2')) + +from solver import Solver, get_logger +from loss_utils import Loss + +def get_parser(): + parser = argparse.ArgumentParser( + description="Pose Estimation") + + parser.add_argument("--gpus", + type=str, + default="0", + help="index of gpu") + parser.add_argument("--model", + type=str, + default="pose_estimation_model", + help="name of model") + parser.add_argument("--config", + type=str, + default="config/base.yaml", + help="path to config file") + parser.add_argument("--exp_id", + type=int, + default=0, + help="experiment id") + parser.add_argument("--checkpoint_iter", + type=int, + default=-1, + help="iter num. of checkpoint") + args_cfg = parser.parse_args() + + return args_cfg + + +def init(): + args = get_parser() + exp_name = args.model + '_' + \ + osp.splitext(args.config.split("/")[-1])[0] + '_id' + str(args.exp_id) + log_dir = osp.join("log", exp_name) + + cfg = gorilla.Config.fromfile(args.config) + cfg.exp_name = exp_name + cfg.gpus = args.gpus + cfg.model_name = args.model + cfg.log_dir = log_dir + cfg.checkpoint_iter = args.checkpoint_iter + + if not os.path.isdir(log_dir): + os.makedirs(log_dir) + logger = get_logger( + level_print=logging.INFO, level_save=logging.WARNING, path_file=log_dir+"/training_logger.log") + gorilla.utils.set_cuda_visible_devices(gpu_ids=cfg.gpus) + + return logger, cfg + + +if __name__ == "__main__": + logger, cfg = init() + + logger.warning( + "************************ Start Logging ************************") + logger.info(cfg) + logger.info("using gpu: {}".format(cfg.gpus)) + + random.seed(cfg.rd_seed) + torch.manual_seed(cfg.rd_seed) + + # model + logger.info("=> creating model ...") + MODEL = 
importlib.import_module(cfg.model_name) + model = MODEL.Net(cfg.model) + if hasattr(cfg, 'pretrain_dir') and cfg.pretrain_dir is not None: + logger.info('loading pretrained backbone from {}'.format(cfg.pretrain_dir)) + key1, key2 = model.load_state_dict(torch.load(cfg.pretrain_dir)['model'], strict=False) + if len(cfg.gpus) > 1: + model = torch.nn.DataParallel(model, range(len(cfg.gpus.split(",")))) + model = model.cuda() + + loss = Loss().cuda() + count_parameters = sum(gorilla.parameter_count(model).values()) + logger.warning("#Total parameters : {}".format(count_parameters)) + + # dataloader + batchsize = cfg.train_dataloader.bs + num_epoch = cfg.training_epoch + + if cfg.lr_scheduler.type == 'WarmupCosineLR': + num_iter = cfg.lr_scheduler.max_iters + if hasattr(cfg, 'warmup_iter') and cfg.warmup_iter >0: + num_iter = num_iter + cfg.warmup_iter + iters_per_epoch = int(np.floor(num_iter / num_epoch)) + elif cfg.lr_scheduler.type == 'CyclicLR': + iters_per_epoch = cfg.lr_scheduler.step_size_up+cfg.lr_scheduler.step_size_down + train_dataset = importlib.import_module(cfg.train_dataset.name) + train_dataset = train_dataset.Dataset(cfg.train_dataset, iters_per_epoch*batchsize) + + + train_dataloader = torch.utils.data.DataLoader( + train_dataset, + batch_size=cfg.train_dataloader.bs, + num_workers=cfg.train_dataloader.num_workers, + shuffle=cfg.train_dataloader.shuffle, + sampler=None, + drop_last=cfg.train_dataloader.drop_last, + pin_memory=cfg.train_dataloader.pin_memory, + ) + + dataloaders = { + "train": train_dataloader, + } + + # solver + Trainer = Solver(model=model, loss=loss, + dataloaders=dataloaders, + logger=logger, + cfg=cfg) + Trainer.solve() + + logger.info('\nFinish!\n') diff --git a/project/grasp_box/submodules/SAM6D/utils/__pycache__/data_utils.cpython-310.pyc b/project/grasp_box/submodules/SAM6D/utils/__pycache__/data_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..60a1c0c09fe917fc9adb31798725d6b1a11b183e 
Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/utils/__pycache__/data_utils.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/utils/__pycache__/data_utils.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/utils/__pycache__/data_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..81c61ff2fe57b084f113dbdf8bd2199cb6dde4f9 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/utils/__pycache__/data_utils.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/utils/__pycache__/draw_utils.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/utils/__pycache__/draw_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f84be7e5e59002701c4d80fbb7fab5f1b08ea60 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/utils/__pycache__/draw_utils.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/utils/__pycache__/loss_utils.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/utils/__pycache__/loss_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e3d4f0cdb74221167f464ece3a0611cb715e645 Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/utils/__pycache__/loss_utils.cpython-38.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/utils/__pycache__/model_utils.cpython-310.pyc b/project/grasp_box/submodules/SAM6D/utils/__pycache__/model_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46d98b6cb537ffdd50c8a8b9ac10442f3dacaebe Binary files /dev/null and b/project/grasp_box/submodules/SAM6D/utils/__pycache__/model_utils.cpython-310.pyc differ diff --git a/project/grasp_box/submodules/SAM6D/utils/__pycache__/model_utils.cpython-38.pyc b/project/grasp_box/submodules/SAM6D/utils/__pycache__/model_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..541394bb6b0dd9047e09955241f1e6efcf7bf3f4 
class Obj:
    """A BOP object: mesh, pre-sampled model points, diameter, symmetry flag,
    and (optionally) pre-rendered template views loaded from disk."""

    def __init__(
        self, obj_id,
        mesh: "trimesh.Trimesh",  # string annotation: no import-time trimesh dependency
        model_points,
        diameter: float,
        symmetry_flag: int,
        template_path: str,
        n_template_view: int,
    ):
        self.obj_id = obj_id
        self.mesh = mesh
        self.model_points = model_points
        self.diameter = diameter
        self.symmetry_flag = symmetry_flag
        self._get_template(template_path, n_template_view)

    def get_item(self, return_color=False, sample_num=2048):
        """Return (model_points, symmetry_flag), or with colors when
        return_color is True (fresh surface sampling in that case)."""
        if return_color:
            model_points, _, model_colors = trimesh.sample.sample_surface(self.mesh, sample_num, sample_color=True)
            # /1000: presumably mm -> m — confirm against mesh units
            model_points = model_points.astype(np.float32) / 1000.0
            return (model_points, model_colors, self.symmetry_flag)
        else:
            return (self.model_points, self.symmetry_flag)

    def _get_template(self, path, nView):
        """Load nView evenly spaced template renders (rgb, mask, xyz) from
        `path`; with nView <= 0, all template attributes are set to None."""
        if nView > 0:
            total_nView = len(glob.glob(os.path.join(path, 'rgb_*.png')))

            self.template = []
            self.template_mask = []
            self.template_pts = []

            for v in range(nView):
                # evenly spaced subset of the available rendered views
                i = int(total_nView / nView * v)
                rgb_path = os.path.join(path, 'rgb_'+str(i)+'.png')
                xyz_path = os.path.join(path, 'xyz_'+str(i)+'.npy')
                mask_path = os.path.join(path, 'mask_'+str(i)+'.png')

                rgb = load_im(rgb_path).astype(np.uint8)
                # /1000: presumably mm -> m — confirm against render units
                xyz = np.load(xyz_path).astype(np.float32) / 1000.0
                mask = load_im(mask_path).astype(np.uint8) == 255

                self.template.append(rgb)
                self.template_mask.append(mask)
                self.template_pts.append(xyz)
        else:
            # BUG FIX: the original set `self.template_choose = None` here but
            # never defined `self.template_mask`, so get_template() raised
            # AttributeError for objects created with n_template_view <= 0.
            self.template = None
            self.template_mask = None
            self.template_pts = None
            self.template_choose = None  # kept for backward compatibility

    def get_template(self, view_idx):
        """Return (rgb, mask, xyz) for one loaded template view."""
        return self.template[view_idx], self.template_mask[view_idx], self.template_pts[view_idx]
sorted([int(p.split('/')[-1][4:10]) for p in glob.glob(os.path.join(model_path, '*.ply'))]) + + if n_template_view>0: + template_paths = sorted(glob.glob(os.path.join(template_path, '*'))) + assert len(template_paths) == len(obj_ids), '{} template_paths, {} obj_ids'.format(len(template_paths), len(obj_ids)) + else: + template_paths = [None for _ in range(len(obj_ids))] + + cnt = 0 + for obj_id in tqdm(obj_ids, 'loading objects') if show_progressbar else obj_ids: + objs.append( + load_obj(model_path, obj_id, sample_num, + template_paths[cnt], n_template_view) + ) + cnt+=1 + return objs, obj_ids + diff --git a/project/grasp_box/submodules/SAM6D/utils/data_utils.py b/project/grasp_box/submodules/SAM6D/utils/data_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..fe8eda0676fbe1baae9e05116aee2cac38a2fe91 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/utils/data_utils.py @@ -0,0 +1,237 @@ +import os +import numpy as np +import json +import imageio +import cv2 +import open3d as o3d +from PIL import Image + + +def trimesh_to_open3d(tri_mesh): + # Create the open3d TriangleMesh object from numpy arrays + o3d_mesh = o3d.geometry.TriangleMesh( + vertices=o3d.utility.Vector3dVector(tri_mesh.vertices.copy()), + triangles=o3d.utility.Vector3iVector(tri_mesh.faces.copy()) + ) + + # Check if vertex colors are present in the trimesh object + if hasattr(tri_mesh, 'visual'): + if hasattr(tri_mesh.visual, 'vertex_colors'): + # Convert the trimesh vertex colors to a format open3d understands + vertex_colors = tri_mesh.visual.vertex_colors[:, :3] / 255.0 + else: + vertex_colors = tri_mesh.visual.to_color().vertex_colors[:, :3] / 255.0 + o3d_mesh.vertex_colors = o3d.utility.Vector3dVector(vertex_colors) + + return o3d_mesh + +def load_im(path): + """Loads an image from a file. + + :param path: Path to the image file to load. + :return: ndarray with the loaded image. 
+ """ + im = imageio.imread(path) + return im + + +def io_load_gt( + gt_file, + instance_ids=None, +): + """Load ground truth from an I/O object. + Instance_ids can be specified to load only a + subset of object instances. + + :param gt_file: I/O object that can be read with json.load. + :param instance_ids: List of instance ids. + :return: List of ground truth annotations (one dict per object instance). + """ + gt = json.load(gt_file) + if instance_ids is not None: + gt = [gt_n for n, gt_n in enumerate(gt) if n in instance_ids] + gt = [_gt_as_numpy(gt_n) for gt_n in gt] + return gt + + +def io_load_masks( + mask_file, + instance_ids=None +): + """Load object masks from an I/O object. + Instance_ids can be specified to apply RLE + decoding to a subset of object instances contained + in the file. + + :param mask_file: I/O object that can be read with json.load. + :param masks_path: Path to json file. + :return: a [N,H,W] binary array containing object masks. + """ + masks_rle = json.load(mask_file) + masks_rle = {int(k): v for k, v in masks_rle.items()} + if instance_ids is None: + instance_ids = masks_rle.keys() + instance_ids = sorted(instance_ids) + masks = np.stack([ + rle_to_binary_mask(masks_rle[instance_id]) + for instance_id in instance_ids]) + return masks + + +def _gt_as_numpy(gt): + if 'cam_R_m2c' in gt.keys(): + gt['cam_R_m2c'] = \ + np.array(gt['cam_R_m2c'], np.float64).reshape((3, 3)) + if 'cam_t_m2c' in gt.keys(): + gt['cam_t_m2c'] = \ + np.array(gt['cam_t_m2c'], np.float64).reshape((3, 1)) + return gt + + +def rle_to_binary_mask(rle): + """Converts a COCOs run-length encoding (RLE) to binary mask. 
+ + :param rle: Mask in RLE format + :return: a 2D binary numpy array where '1's represent the object + """ + binary_array = np.zeros(np.prod(rle.get('size')), dtype=bool) + counts = rle.get('counts') + + start = 0 + for i in range(len(counts)-1): + start += counts[i] + end = start + counts[i+1] + binary_array[start:end] = (i + 1) % 2 + + binary_mask = binary_array.reshape(*rle.get('size'), order='F') + + return binary_mask + + +def get_point_cloud_from_depth(depth, K, bbox=None): + cam_fx, cam_fy, cam_cx, cam_cy = K[0,0], K[1,1], K[0,2], K[1,2] + + im_H, im_W = depth.shape + xmap = np.array([[i for i in range(im_W)] for j in range(im_H)]) + ymap = np.array([[j for i in range(im_W)] for j in range(im_H)]) + + if bbox is not None: + rmin, rmax, cmin, cmax = bbox + depth = depth[rmin:rmax, cmin:cmax].astype(np.float32) + xmap = xmap[rmin:rmax, cmin:cmax].astype(np.float32) + ymap = ymap[rmin:rmax, cmin:cmax].astype(np.float32) + + pt2 = depth.astype(np.float32) + pt0 = (xmap.astype(np.float32) - cam_cx) * pt2 / cam_fx + pt1 = (ymap.astype(np.float32) - cam_cy) * pt2 / cam_fy + + cloud = np.stack([pt0,pt1,pt2]).transpose((1,2,0)) + return cloud + + +def get_resize_rgb_choose(choose, bbox, img_size): + rmin, rmax, cmin, cmax = bbox + crop_h = rmax - rmin + ratio_h = img_size / crop_h + crop_w = cmax - cmin + ratio_w = img_size / crop_w + + row_idx = choose // crop_w + col_idx = choose % crop_w + choose = (np.floor(row_idx * ratio_h) * img_size + np.floor(col_idx * ratio_w)).astype(np.int64) + return choose + + +def get_bbox(label): + img_width, img_length = label.shape + rows = np.any(label, axis=1) + cols = np.any(label, axis=0) + + rmin, rmax = np.where(rows)[0][[0, -1]] + + cmin, cmax = np.where(cols)[0][[0, -1]] + rmax += 1 + cmax += 1 + r_b = rmax - rmin + c_b = cmax - cmin + b = min(max(r_b, c_b), min(img_width, img_length)) + center = [int((rmin + rmax) / 2), int((cmin + cmax) / 2)] + + rmin = center[0] - int(b / 2) + rmax = center[0] + int(b / 2) + cmin = 
center[1] - int(b / 2) + cmax = center[1] + int(b / 2) + + if rmin < 0: + delt = -rmin + rmin = 0 + rmax += delt + if cmin < 0: + delt = -cmin + cmin = 0 + cmax += delt + if rmax > img_width: + delt = rmax - img_width + rmax = img_width + rmin -= delt + if cmax > img_length: + delt = cmax - img_length + cmax = img_length + cmin -= delt + return [rmin, rmax, cmin, cmax] + +def get_random_rotation(): + angles = np.random.rand(3) * 2 * np.pi + rand_rotation = np.array([ + [1,0,0], + [0,np.cos(angles[0]),-np.sin(angles[0])], + [0,np.sin(angles[0]), np.cos(angles[0])] + ]) @ np.array([ + [np.cos(angles[1]),0,np.sin(angles[1])], + [0,1,0], + [-np.sin(angles[1]), 0, np.cos(angles[1])] + ]) @ np.array([ + [np.cos(angles[2]),-np.sin(angles[2]),0], + [np.sin(angles[2]), np.cos(angles[2]),0], + [0,0,1] + ]) + return rand_rotation + +def get_model_info(obj, return_color=False, sample_num=2048): + if return_color: + model_points, model_color, symmetry_flag = obj.get_item(return_color, sample_num) + return (model_points, model_color, symmetry_flag) + else: + model_points, symmetry_flag = obj.get_item() + return (model_points, symmetry_flag) + +def get_bop_depth_map(inst): + scene_id, img_id, data_folder = inst['scene_id'], inst['img_id'], inst['data_folder'] + try: + depth = np.array(Image.open(os.path.join(data_folder, f'{scene_id:06d}', 'depth', f'{img_id:06d}.png'))) / 1000.0 + except: + depth = np.array(Image.open(os.path.join(data_folder, f'{scene_id:06d}', 'depth', f'{img_id:06d}.tif'))) / 1000.0 + return depth + +def get_bop_image(inst, bbox, img_size, mask=None): + scene_id, img_id, data_folder = inst['scene_id'], inst['img_id'], inst['data_folder'] + rmin, rmax, cmin, cmax = bbox + img_path = os.path.join(data_folder, f'{scene_id:06d}/') + + strs = [f'rgb/{img_id:06d}.jpg', f'rgb/{img_id:06d}.png', f'gray/{img_id:06d}.tif'] + for s in strs: + if os.path.exists(os.path.join(img_path,s)): + img_path = os.path.join(img_path,s) + break + + rgb = 
load_im(img_path).astype(np.uint8) + if len(rgb.shape)==2: + rgb = np.concatenate([rgb[:,:,None], rgb[:,:,None], rgb[:,:,None]], axis=2) + rgb = rgb[..., ::-1][rmin:rmax, cmin:cmax, :3] + if mask is not None: + rgb = rgb * (mask[:,:,None]>0).astype(np.uint8) + rgb = cv2.resize(rgb, (img_size, img_size), interpolation=cv2.INTER_LINEAR) + return rgb + + + diff --git a/project/grasp_box/submodules/SAM6D/utils/draw_utils.py b/project/grasp_box/submodules/SAM6D/utils/draw_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e471ad5c9892c55aff6e6c0134f91554e2bea350 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/utils/draw_utils.py @@ -0,0 +1,99 @@ +import numpy as np +import os +import cv2 + +def calculate_2d_projections(coordinates_3d, intrinsics): + """ + Input: + coordinates: [3, N] + intrinsics: [3, 3] + Return + projected_coordinates: [N, 2] + """ + projected_coordinates = intrinsics @ coordinates_3d + projected_coordinates = projected_coordinates[:2, :] / projected_coordinates[2, :] + projected_coordinates = projected_coordinates.T + projected_coordinates = np.array(projected_coordinates, dtype=np.int32) + + return projected_coordinates + +def get_3d_bbox(scale, shift = 0): + """ + Input: + scale: [3] or scalar + shift: [3] or scalar + Return + bbox_3d: [3, N] + + """ + if hasattr(scale, "__iter__"): + bbox_3d = np.array([[scale[0] / 2, +scale[1] / 2, scale[2] / 2], + [scale[0] / 2, +scale[1] / 2, -scale[2] / 2], + [-scale[0] / 2, +scale[1] / 2, scale[2] / 2], + [-scale[0] / 2, +scale[1] / 2, -scale[2] / 2], + [+scale[0] / 2, -scale[1] / 2, scale[2] / 2], + [+scale[0] / 2, -scale[1] / 2, -scale[2] / 2], + [-scale[0] / 2, -scale[1] / 2, scale[2] / 2], + [-scale[0] / 2, -scale[1] / 2, -scale[2] / 2]]) + shift + else: + bbox_3d = np.array([[scale / 2, +scale / 2, scale / 2], + [scale / 2, +scale / 2, -scale / 2], + [-scale / 2, +scale / 2, scale / 2], + [-scale / 2, +scale / 2, -scale / 2], + [+scale / 2, -scale / 2, scale / 2], + 
[+scale / 2, -scale / 2, -scale / 2], + [-scale / 2, -scale / 2, scale / 2], + [-scale / 2, -scale / 2, -scale / 2]]) +shift + + bbox_3d = bbox_3d.transpose() + return bbox_3d + +def draw_3d_bbox(img, imgpts, color, size=1): + imgpts = np.int32(imgpts).reshape(-1, 2) + + # draw ground layer in darker color + color_ground = (int(color[0] * 0.3), int(color[1] * 0.3), int(color[2] * 0.3)) + for i, j in zip([4, 5, 6, 7],[5, 7, 4, 6]): + img = cv2.line(img, tuple(imgpts[i]), tuple(imgpts[j]), color_ground, size) + + # draw pillars in blue color + color_pillar = (int(color[0]*0.6), int(color[1]*0.6), int(color[2]*0.6)) + for i, j in zip(range(4),range(4,8)): + img = cv2.line(img, tuple(imgpts[i]), tuple(imgpts[j]), color_pillar, size) + + # finally, draw top layer in color + for i, j in zip([0, 1, 2, 3],[1, 3, 0, 2]): + img = cv2.line(img, tuple(imgpts[i]), tuple(imgpts[j]), color, size) + return img + +def draw_3d_pts(img, imgpts, color, size=1): + imgpts = np.int32(imgpts).reshape(-1, 2) + for point in imgpts: + img = cv2.circle(img, (point[0], point[1]), size, color, -1) + return img + +def draw_detections(image, pred_rots, pred_trans, model_points, intrinsics, color=(255, 0, 0)): + num_pred_instances = len(pred_rots) + draw_image_bbox = image.copy() + # 3d bbox + scale = (np.max(model_points, axis=0) - np.min(model_points, axis=0)) + shift = np.mean(model_points, axis=0) + print(scale, shift) + + bbox_3d = get_3d_bbox(scale, shift) + + # 3d point + choose = np.random.choice(np.arange(len(model_points)), 512) + pts_3d = model_points[choose].T + + for ind in range(num_pred_instances): + # draw 3d bounding box + transformed_bbox_3d = pred_rots[ind]@bbox_3d + pred_trans[ind][:,np.newaxis] + projected_bbox = calculate_2d_projections(transformed_bbox_3d, intrinsics[ind]) + draw_image_bbox = draw_3d_bbox(draw_image_bbox, projected_bbox, color) + # draw point cloud + transformed_pts_3d = pred_rots[ind]@pts_3d + pred_trans[ind][:,np.newaxis] + projected_pts = 
calculate_2d_projections(transformed_pts_3d, intrinsics[ind]) + draw_image_bbox = draw_3d_pts(draw_image_bbox, projected_pts, color) + + return draw_image_bbox diff --git a/project/grasp_box/submodules/SAM6D/utils/loss_utils.py b/project/grasp_box/submodules/SAM6D/utils/loss_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..bb93021d83f87130e3ce9f72412e3374733f8ac9 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/utils/loss_utils.py @@ -0,0 +1,69 @@ + +import torch +import torch.nn as nn + +from model_utils import pairwise_distance + +def compute_correspondence_loss( + end_points, + atten_list, + pts1, + pts2, + gt_r, + gt_t, + dis_thres=0.15, + loss_str='coarse' +): + CE = nn.CrossEntropyLoss(reduction ='none') + + gt_pts = (pts1-gt_t.unsqueeze(1))@gt_r + dis_mat = torch.sqrt(pairwise_distance(gt_pts, pts2)) + + dis1, label1 = dis_mat.min(2) + fg_label1 = (dis1<=dis_thres).float() + label1 = (fg_label1 * (label1.float()+1.0)).long() + + dis2, label2 = dis_mat.min(1) + fg_label2 = (dis2<=dis_thres).float() + label2 = (fg_label2 * (label2.float()+1.0)).long() + + # loss + for idx, atten in enumerate(atten_list): + l1 = CE(atten.transpose(1,2)[:,:,1:].contiguous(), label1).mean(1) + l2 = CE(atten[:,:,1:].contiguous(), label2).mean(1) + end_points[loss_str + '_loss' + str(idx)] = 0.5 * (l1 + l2) + + # acc + pred_label = torch.max(atten_list[-1][:,1:,:], dim=2)[1] + end_points[loss_str + '_acc'] = (pred_label==label1).float().mean(1) + + # pred foreground num + fg_mask = (pred_label > 0).float() + end_points[loss_str + '_fg_num'] = fg_mask.sum(1) + + # foreground point dis + fg_label = fg_mask * (pred_label - 1) + fg_label = fg_label.long() + pred_pts = torch.gather(pts2, 1, fg_label.unsqueeze(2).repeat(1,1,3)) + pred_dis = torch.norm(pred_pts-gt_pts, dim=2) + pred_dis = (pred_dis * fg_mask).sum(1) / (fg_mask.sum(1)+1e-8) + end_points[loss_str + '_dis'] = pred_dis + + return end_points + + + +class Loss(nn.Module): + def __init__(self): 
+ super(Loss, self).__init__() + + def forward(self, end_points): + out_dicts = {'loss': 0} + for key in end_points.keys(): + if 'coarse_' in key or 'fine_' in key: + out_dicts[key] = end_points[key].mean() + if 'loss' in key: + out_dicts['loss'] = out_dicts['loss'] + end_points[key] + out_dicts['loss'] = torch.clamp(out_dicts['loss'], max=100.0).mean() + return out_dicts + diff --git a/project/grasp_box/submodules/SAM6D/utils/model_utils.py b/project/grasp_box/submodules/SAM6D/utils/model_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d8294c402910c55c075ec3c6696096b6ec7a3433 --- /dev/null +++ b/project/grasp_box/submodules/SAM6D/utils/model_utils.py @@ -0,0 +1,1155 @@ +import torch +import torch.nn as nn +from torch.nn import functional as F +import open3d as o3d +import trimesh +import copy +import time +import cv2 +import logging +import numpy as np +import pytorch3d +from pytorch3d.io import load_objs_as_meshes, load_obj +from pytorch3d.structures import Meshes +from pytorch3d.vis.plotly_vis import AxisArgs, plot_batch_individually, plot_scene +from pytorch3d.vis.texture_vis import texturesuv_image_matplotlib +from pytorch3d.renderer import ( + PerspectiveCameras, + PointLights, + RasterizationSettings, + MeshRenderer, + MeshRasterizer, + SoftPhongShader, +) + +from transformers import AutoTokenizer, AutoImageProcessor, AutoModel +from transformers import AutoProcessor, CLIPVisionModelWithProjection +from transformers import CLIPProcessor, CLIPModel +from sklearn.metrics.pairwise import cosine_similarity +import ssl +import os +os.environ['CURL_CA_BUNDLE'] = '' +ssl._create_default_https_context = ssl._create_unverified_context + +from pointnet2_utils import ( + gather_operation, + furthest_point_sample, +) + + +class LayerNorm2d(nn.Module): + def __init__(self, num_channels: int, eps: float = 1e-6) -> None: + super().__init__() + self.weight = nn.Parameter(torch.ones(num_channels)) + self.bias = 
nn.Parameter(torch.zeros(num_channels)) + self.eps = eps + + def forward(self, x: torch.Tensor) -> torch.Tensor: + u = x.mean(1, keepdim=True) + s = (x - u).pow(2).mean(1, keepdim=True) + x = (x - u) / torch.sqrt(s + self.eps) + x = self.weight[:, None, None] * x + self.bias[:, None, None] + return x + + +def interpolate_pos_embed(model, checkpoint_model): + if 'pos_embed' in checkpoint_model: + pos_embed_checkpoint = checkpoint_model['pos_embed'] + embedding_size = pos_embed_checkpoint.shape[-1] + num_patches = model.patch_embed.num_patches + num_extra_tokens = model.pos_embed.shape[-2] - num_patches + # height (== width) for the checkpoint position embedding + orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5) + # height (== width) for the new position embedding + new_size = int(num_patches ** 0.5) + # class_token and dist_token are kept unchanged + if orig_size != new_size: + print("Position interpolate from %dx%d to %dx%d" % (orig_size, orig_size, new_size, new_size)) + extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens] + # only the position tokens are interpolated + pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:] + pos_tokens = pos_tokens.reshape(-1, orig_size, orig_size, embedding_size).permute(0, 3, 1, 2) + pos_tokens = torch.nn.functional.interpolate( + pos_tokens, size=(new_size, new_size), mode='bicubic', align_corners=False) + pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2) + new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1) + checkpoint_model['pos_embed'] = new_pos_embed + + + +def sample_pts_feats(pts, feats, npoint=2048, return_index=False): + ''' + pts: B*N*3 + feats: B*N*C + ''' + sample_idx = furthest_point_sample(pts, npoint) + pts = gather_operation(pts.transpose(1,2).contiguous(), sample_idx) + pts = pts.transpose(1,2).contiguous() + feats = gather_operation(feats.transpose(1,2).contiguous(), sample_idx) + feats = feats.transpose(1,2).contiguous() + if return_index: + return pts, 
feats, sample_idx + else: + return pts, feats + + +def get_chosen_pixel_feats(img, choose): + shape = img.size() + if len(shape) == 3: + pass + elif len(shape) == 4: + B, C, H, W = shape + img = img.reshape(B, C, H*W) + else: + assert False + + choose = choose.unsqueeze(1).repeat(1, C, 1) + x = torch.gather(img, 2, choose).contiguous() + return x.transpose(1,2).contiguous() + + +def pairwise_distance( + x: torch.Tensor, y: torch.Tensor, normalized: bool = False, channel_first: bool = False +) -> torch.Tensor: + r"""Pairwise distance of two (batched) point clouds. + + Args: + x (Tensor): (*, N, C) or (*, C, N) + y (Tensor): (*, M, C) or (*, C, M) + normalized (bool=False): if the points are normalized, we have "x2 + y2 = 1", so "d2 = 2 - 2xy". + channel_first (bool=False): if True, the points shape is (*, C, N). + + Returns: + dist: torch.Tensor (*, N, M) + """ + if channel_first: + channel_dim = -2 + xy = torch.matmul(x.transpose(-1, -2), y) # [(*, C, N) -> (*, N, C)] x (*, C, M) + else: + channel_dim = -1 + xy = torch.matmul(x, y.transpose(-1, -2)) # (*, N, C) x [(*, M, C) -> (*, C, M)] + if normalized: + sq_distances = 2.0 - 2.0 * xy + else: + x2 = torch.sum(x ** 2, dim=channel_dim).unsqueeze(-1) # (*, N, C) or (*, C, N) -> (*, N) -> (*, N, 1) + y2 = torch.sum(y ** 2, dim=channel_dim).unsqueeze(-2) # (*, M, C) or (*, C, M) -> (*, M) -> (*, 1, M) + sq_distances = x2 - 2 * xy + y2 + sq_distances = sq_distances.clamp(min=0.0) + return sq_distances + + +def compute_feature_similarity(feat1, feat2, type='cosine', temp=1.0, normalize_feat=True): + r''' + Args: + feat1 (Tensor): (B, N, C) + feat2 (Tensor): (B, M, C) + + Returns: + atten_mat (Tensor): (B, N, M) + ''' + if normalize_feat: + feat1 = F.normalize(feat1, p=2, dim=2) + feat2 = F.normalize(feat2, p=2, dim=2) + + if type == 'cosine': + atten_mat = feat1 @ feat2.transpose(1,2) + elif type == 'L2': + atten_mat = torch.sqrt(pairwise_distance(feat1, feat2)) + else: + assert False + + atten_mat = atten_mat / temp + + 
return atten_mat + +def compute_triangle_normals(pts): + pts = pts.squeeze(0) + A = pts[:, 1] - pts[:, 0] # (6000, 3) + B = pts[:, 2] - pts[:, 0] # (6000, 3) + N = torch.cross(A, B, dim=1) + normal_magnitude = torch.norm(N, dim=1, keepdim=True) + return normal_magnitude.unsqueeze(0) + +def aug_pose_noise(gt_r, gt_t, + std_rots=[15, 10, 5, 1.25, 1], + max_rot=45, + sel_std_trans=[0.2, 0.2, 0.2], + max_trans=0.8): + + B = gt_r.size(0) + device = gt_r.device + + std_rot = np.random.choice(std_rots) + angles = torch.normal(mean=0, std=std_rot, size=(B, 3)).to(device=device) + angles = angles.clamp(min=-max_rot, max=max_rot) + ones = gt_r.new(B, 1, 1).zero_() + 1 + zeros = gt_r.new(B, 1, 1).zero_() + a1 = angles[:,0].reshape(B, 1, 1) * np.pi / 180.0 + a1 = torch.cat( + [torch.cat([torch.cos(a1), -torch.sin(a1), zeros], dim=2), + torch.cat([torch.sin(a1), torch.cos(a1), zeros], dim=2), + torch.cat([zeros, zeros, ones], dim=2)], dim=1 + ) + a2 = angles[:,1].reshape(B, 1, 1) * np.pi / 180.0 + a2 = torch.cat( + [torch.cat([ones, zeros, zeros], dim=2), + torch.cat([zeros, torch.cos(a2), -torch.sin(a2)], dim=2), + torch.cat([zeros, torch.sin(a2), torch.cos(a2)], dim=2)], dim=1 + ) + a3 = angles[:,2].reshape(B, 1, 1) * np.pi / 180.0 + a3 = torch.cat( + [torch.cat([torch.cos(a3), zeros, torch.sin(a3)], dim=2), + torch.cat([zeros, ones, zeros], dim=2), + torch.cat([-torch.sin(a3), zeros, torch.cos(a3)], dim=2)], dim=1 + ) + rand_rot = a1 @ a2 @ a3 + + rand_trans = torch.normal( + mean=torch.zeros([B, 3]).to(device), + std=torch.tensor(sel_std_trans, device=device).view(1, 3), + ) + rand_trans = torch.clamp(rand_trans, min=-max_trans, max=max_trans) + + rand_rot = gt_r @ rand_rot + rand_trans = gt_t + rand_trans + rand_trans[:,2] = torch.clamp(rand_trans[:,2], min=1e-6) + + return rand_rot.detach(), rand_trans.detach() + + +def compute_coarse_Rt( + end_points, + atten, + pts1, + pts2, + depth, + radius, + mask, + bbox, + model_pts=None, + n_proposal1=6000, + n_proposal2=300, + 
+): + + WSVD = WeightedProcrustes() + + B, N1, _ = pts1.size() + N2 = pts2.size(1) + device = pts1.device + + # compute soft assignment matrix + pred_score = torch.softmax(atten, dim=2) * torch.softmax(atten, dim=1) + pred_label1 = torch.max(pred_score[:,1:,:], dim=2)[1] + pred_label2 = torch.max(pred_score[:,:,1:], dim=1)[1] + weights1 = (pred_label1>0).float() + weights2 = (pred_label2>0).float() + + pred_score = pred_score[:, 1:, 1:].contiguous() + pred_score = pred_score * weights1.unsqueeze(2) * weights2.unsqueeze(1) + pred_score = pred_score.reshape(B, N1*N2) ** 1.5 + + # sample pose hypothese + cumsum_weights = torch.cumsum(pred_score, dim=1) + cumsum_weights /= (cumsum_weights[:, -1].unsqueeze(1).contiguous()+1e-8) + idx = torch.searchsorted(cumsum_weights, torch.rand(B, n_proposal1*3, device=device)) + idx1, idx2 = idx.div(N2, rounding_mode='floor'), idx % N2 + idx1 = torch.clamp(idx1, max=N1-1).unsqueeze(2).repeat(1,1,3) + idx2 = torch.clamp(idx2, max=N2-1).unsqueeze(2).repeat(1,1,3) + + p1 = torch.gather(pts1, 1, idx1).reshape(B,n_proposal1,3,3).reshape(B*n_proposal1,3,3) + p2 = torch.gather(pts2, 1, idx2).reshape(B,n_proposal1,3,3).reshape(B*n_proposal1,3,3) + + + + + # # visualize the correspondences + # rgb_img = end_points['rgb_ori'].cpu().numpy() + # depth_img = end_points['depth'].cpu().numpy() * 1000 + # intrinsic = end_points['K'].reshape(3,3) + # rgb = o3d.geometry.Image((rgb_img).astype(np.uint8)) + # depth = o3d.geometry.Image((depth_img).astype(np.uint16)) + # rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth(rgb, depth, depth_scale=1000.0) + # width = rgb_img.shape[1] + # height = rgb_img.shape[0] + # cx = int(intrinsic[0,2]) + # cy = int(intrinsic[1,2]) + # fx = int(intrinsic[0,0]) + # fy = int(intrinsic[1,1]) + # intri = o3d.camera.PinholeCameraIntrinsic(width=width, height=height, fx=fx, fy=fy, cx=cx, cy=cy) + # o3d_points = o3d.geometry.PointCloud.create_from_rgbd_image(rgbd, intrinsic=intri) + + # pcd1 = 
o3d.geometry.PointCloud() + + + + # o3d.visualization.draw_geometries([o3d_points]) + + # template_list = [ + # [[1., 0., 0.,], [0., 1., 0.], [0., 0., 1.]], # identity + # # [[-1., 0., 0.], [0., 1., 0.], [0., 0., 1.]], # mirror along x + # # [[1., 0., 0.], [0., -1., 0.], [0., 0., 1.]], # mirror along y + # # [[1., 0., 0.], [0., 1., 0.], [0., 0., -1.]], # mirror along z + # [[1., 0., 0.], [0., 0., -1.], [0., 1., 0.]], # 90 around x + # # [[1., 0., 0.], [0., 0., 1.], [0., -1., 0.]], # -90 around x + # [[0., 0., -1.], [0., 1., 0.], [1., 0., 0.]], # 90 around y + # # [[0., 0., 1.], [0., 1., 0.], [-1., 0., 0.]], # -90 around y + # [[0., -1., 0.], [1., 0., 0.], [0., 0., 1.]], # 90 around z + # # [[0., 1., 0.], [-1., 0., 0.], [0., 0., 1.]] # -90 around z + # ] + # template_tensor = torch.tensor(template_list, device='cuda') + # template_tensor = template_tensor.unsqueeze(0).repeat(n_proposal1,1,1,1).reshape(-1,3,3) + + + pred_rs, pred_ts = WSVD(p2, p1, None) + pred_rs = pred_rs.reshape(B, n_proposal1, 3, 3) + pred_ts = pred_ts.reshape(B, n_proposal1, 1, 3) + + p1 = p1.reshape(B, n_proposal1, 3, 3) + p2 = p2.reshape(B, n_proposal1, 3, 3) + + + # original dis from sam6d + dis = torch.norm((p1 - pred_ts) @ pred_rs - p2, dim=3).mean(2) + + # add dis from normal vector + # mesh = end_points['mesh'] + # sample_pts = torch.tensor(trimesh.sample.sample_surface(mesh, 5000)[0], device = 'cuda',dtype=torch.float32) + # breakpoint() + # transformed_sample_pts = (pred_rs @ sample_pts.reshape(-1,3).T).T.reshape(-1,3) + pred_ts* (radius + 1e-6) + + # mesh_pts = (p1 - pred_ts) @ pred_rs + # mesh_normal = compute_triangle_normals(mesh_pts) + # obs_normal = compute_triangle_normals(p2) + # eps=1e-6 + # cos_sim = torch.sum(mesh_normal * obs_normal, dim=2).clamp(-1.0 + eps, 1.0 - eps) + # angle =(cos_sim - cos_sim.min()) / (cos_sim.max() - cos_sim.min() + 1e-8) # shape (1, 6000) + # dis_combien = (dis + angle)/2 + + ## customize dis + mesh = end_points['mesh'] + K = 
end_points['K'].reshape(3,3).to(dtype=torch.float64) + + # only project center of CAD model and make sure its depths is not overlimit + center = torch.tensor(mesh.centroid, device = 'cuda') + transformed_center = center.reshape(1,1,1,3) + pred_ts* (radius.reshape(-1, 1, 1) + 1e-6) + Z = transformed_center[..., 2].clamp(min=1e-6) + + point_2d = (K @ transformed_center.squeeze().T ) + point_2d[0] = (point_2d[0]/point_2d[2]).int() + point_2d[1] = (point_2d[1]/point_2d[2] ).int() + + x = point_2d[0].int() + y = point_2d[1].int() + + depth_center = torch.tensor(0.).repeat(n_proposal1).unsqueeze(0).cuda() + for i in range(n_proposal1): + try: + depth_center[0,i] = depth[y[i].item(), x[i].item()] + except IndexError: + depth_center[0,i] = -1 + depth_gap = Z.flatten() - depth_center.flatten() + + ## given that prediction center deoth should always be larger than gt depth + # idx_depth = torch.where(depth_gap > 0)[0] + + # if len(idx_depth) == 0: + + # print('all hypothesis are not correct') + # pred_R = torch.eye(3).unsqueeze(0).cuda() + # pred_t = torch.zeros(1,3).cuda() + + + # return pred_R,pred_t,None + + idx_dis = torch.topk(dis, n_proposal2, dim=1, largest=False)[1].sort()[0] + # idx = torch.tensor(np.intersect1d(idx_dis.cpu().numpy(), idx_depth.cpu().numpy())).cuda() + + + # if len(idx) == 0: + # print('no good selection') + # pred_R = torch.eye(3).unsqueeze(0).cuda() + # pred_t = torch.zeros(1,3).cuda() + + + # return pred_R,pred_t,None + idx = idx_dis + idx = idx.squeeze(0) + + pred_rs = torch.gather(pred_rs, 1, idx.reshape(B,idx.shape[0],1,1).repeat(1,1,3,3)) + pred_ts = torch.gather(pred_ts, 1, idx.reshape(B,idx.shape[0],1,1).repeat(1,1,1,3)) + + p1 = torch.gather(p1, 1, idx.reshape(B,idx.shape[0],1,1).repeat(1,1,3,3)) + p2 = torch.gather(p2, 1, idx.reshape(B,idx.shape[0],1,1).repeat(1,1,3,3)) + + # # # pose selection + transformed_pts = (pts1.unsqueeze(1) - pred_ts) @ pred_rs + transformed_pts = transformed_pts.reshape(B*idx.shape[0], -1, 3) + if model_pts is 
None: + model_pts = pts2 + expand_model_pts = model_pts.unsqueeze(1).repeat(1,idx.shape[0],1,1).reshape(B*idx.shape[0], -1, 3) + + dis = torch.sqrt(pairwise_distance(transformed_pts, expand_model_pts)) + dis = dis.min(2)[0].reshape(B, idx.shape[0], -1) + + scores = weights1.unsqueeze(1).sum(2) / ((dis * weights1.unsqueeze(1)).sum(2) +1e-8) + + # add pred-depth vs gt depth comparison score + + # cad_points = pts1.reshape(-1,3) + # # pcd = o3d.geometry.PointCloud() + # # pcd.points = o3d.utility.Vector3dVector(cad_points) + # # o3d.visualization.draw_geometries([pcd]) + # scene_pts = pred_ts * (cad_points.reshape(1,1,-1,3)) + pred_ts* (radius + 1e-6) + + # rgb_img = end_points['rgb_ori'].to(torch.uint8) + # depth_img = end_points['depth']* 1000 + # rgb = o3d.geometry.Image(rgb_img.cpu().numpy()) + # depth = o3d.geometry.Image(depth_img.cpu().numpy()) + + # rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth(rgb, depth, depth_scale=1000) + # width = rgb_img.shape[1] + # height = rgb_img.shape[0] + # cx = int(K[0,2]) + # cy = int(K[1,2]) + # fx = int(K[0,0]) + # fy = int(K[1,1]) + + # intri = o3d.camera.PinholeCameraIntrinsic(width=width, height=height, fx=fx, fy=fy, cx=cx, cy=cy) + # o3d_points = o3d.geometry.PointCloud.create_from_rgbd_image(rgbd, intrinsic=intri) + # scene_points = np.array(o3d_points.points) + + # cad_points = torch.tensor(trimesh.sample.volume_mesh(mesh, count=500), device='cuda') + # cad_points_exp = cad_points.unsqueeze(0).unsqueeze(0) + # pred_rs_exp = pred_rs.unsqueeze(2) + # cad_points_trans = cad_points_exp.transpose(-1, -2) + # cad_points = torch.matmul(pred_rs_exp.float(), cad_points_trans.float()) + # cad_points = cad_points.squeeze(2).transpose(-1, -2) + # cad_points += pred_ts * radius + + # cad_points[...,2] = cad_points[...,2] + 1e-6 # avoid divide by 0 + # x = cad_points[..., 0] / cad_points[..., 2] + # y = cad_points[..., 1] / cad_points[..., 2] + + # U = K[0, 0] * x + K[0, 2] + # V = K[1, 1] * y + K[1, 2] + + # in_bound = (U 
>= 0) & (U < width) & (V >= 0) & (V < height) + + # cad_points_filtered = cad_points.clone() + # cad_points_filtered[~in_bound] = np.nan + + + # grid_size = 0.01 + # x, y, z = scene_points[:, 0], scene_points[:, 1], scene_points[:, 2] + + # x_min, y_min = x.min(), y.min() + # x_idx = ((x - x_min) / grid_size).astype(int) + # y_idx = ((y - y_min) / grid_size).astype(int) + # from collections import defaultdict + # grid = defaultdict(list) + # for xi, yi, zi in zip(x_idx, y_idx, z): + # grid[(xi, yi)].append(zi) + + # averaged_points = [] + + + # for (xi, yi), z_vals in grid.items(): + # x_center = x_min + (xi + 0.5) * grid_size + # y_center = y_min + (yi + 0.5) * grid_size + # z_min = np.min(z_vals) + # averaged_points.append([x_center, y_center, z_min]) + + # averaged_points = np.array(averaged_points) + + # pcd = o3d.geometry.PointCloud() + # pcd.points = o3d.utility.Vector3dVector(averaged_points) + # pcd.paint_uniform_color([1, 0., 0.]) + + + # pcd2 = o3d.geometry.PointCloud() + # pcd2.points = o3d.utility.Vector3dVector(cad_points_filtered[0,0,:,:].cpu().numpy()) + # pcd2.paint_uniform_color([0, 0.1, 1.]) + # o3d.visualization.draw_geometries([pcd,pcd2,o3d_points]) + + # Step 1: Build the pillar lookup dictionary + # x_min = averaged_points[:, 0].min() + # y_min = averaged_points[:, 1].min() + + # pillar_dict = {} + # for x, y, z in averaged_points: + # xi = int((x - x_min) / grid_size) + # yi = int((y - y_min) / grid_size) + # pillar_dict[(xi, yi)] = z + + # qp_np = cad_points_filtered.squeeze(0).cpu().numpy() + # # qp_np = cad_points.squeeze(0).cpu().numpy() + # depth_diffs = np.full((qp_np.shape[0], qp_np.shape[1]), np.nan) + + + # for i in range(qp_np.shape[0]): + # for j in range(qp_np.shape[1]): + # x, y, z = qp_np[i, j] + # if np.isnan(x) or np.isnan(y) or np.isnan(z): + # continue + # xi = int((x - x_min) / grid_size) + # yi = int((y - y_min) / grid_size) + # key = (xi, yi) + # if key in pillar_dict: + # z_pillar = pillar_dict[key] + # depth_diffs[i, j] 
= z - z_pillar + + + + + # depth_diffs = torch.tensor(depth_diffs) + # negative_mask = depth_diffs < 0 + + # neg_diffs_only = torch.where(negative_mask, depth_diffs, torch.zeros_like(depth_diffs)) + # depth_scores = torch.abs(neg_diffs_only.sum(dim=1)) + # depth_scores = depth_scores / (depth_scores.sum() + 1e-8) + # depth_scores *= 1000 + + + # points = torch.tensor(mesh.sample(1000, return_index=False), device='cuda') + # obs_pts = pred_ts * (points.reshape(1,1,-1,3)) + pred_ts* (radius + 1e-6) + # obs_depth = obs_pts[..., 2].clamp(min=1e-6) + + # obs_img_pts = (K.unsqueeze(0).repeat(len(idx), 1, 1) @ obs_pts.squeeze().permute(0, 2, 1) ).permute(0,2,1) + # obs_img_pts[:,:,0] = (obs_img_pts[:,:,0]/obs_img_pts[:,:,0]).int() + # obs_img_pts[:,:,1] = (obs_img_pts[:,:,1]/obs_img_pts[:,:,2] ).int() + + # x = point_2d[0].int() + # y = point_2d[1].int() + + sorted_x, indices = torch.sort(scores,descending=True) + idx = indices[0,0] + # print('-----------------',idx,scores[0,idx]) + pred_R = torch.gather(pred_rs, 1, idx.reshape(B,1,1,1).repeat(1,1,3,3)).squeeze(1) + pred_t = torch.gather(pred_ts, 1, idx.reshape(B,1,1,1).repeat(1,1,1,3)).squeeze(2).squeeze(1) + pose = np.eye(4) + pose[:3,:3] = pred_R.cpu().numpy() + pose[:3,3] = pred_t.cpu().numpy() * radius[0].cpu().numpy() + mesh_o3d = o3d.geometry.TriangleMesh(o3d.utility.Vector3dVector(mesh.vertices), o3d.utility.Vector3iVector(mesh.faces)) + pcd_obj = mesh_o3d.sample_points_uniformly(number_of_points=1000) + pcd_obj_trans = copy.deepcopy(pcd_obj).transform(pose) + pcd_obj_trans.paint_uniform_color([0.1, 0.1, 1]) + + + if 0: + scores = scores - depth_scores.cuda() + sorted_x, indices = torch.sort(scores,descending=True) + idx = indices[0,1] + idx = torch.tensor(14) + # print('-----------------',idx,scores[0,idx]) + pred_R = torch.gather(pred_rs, 1, idx.reshape(B,1,1,1).repeat(1,1,3,3)).squeeze(1) + pred_t = torch.gather(pred_ts, 1, idx.reshape(B,1,1,1).repeat(1,1,1,3)).squeeze(2).squeeze(1) + pose = np.eye(4) + 
pose[:3,:3] = pred_R.cpu().numpy() + pose[:3,3] = pred_t.cpu().numpy() * radius[0].cpu().numpy() + mesh_o3d = o3d.geometry.TriangleMesh(o3d.utility.Vector3dVector(mesh.vertices), o3d.utility.Vector3iVector(mesh.faces)) + pcd_obj2 = mesh_o3d.sample_points_uniformly(number_of_points=1000) + pcd_obj_trans2 = copy.deepcopy(pcd_obj2).transform(pose) + pcd_obj_trans2.paint_uniform_color([1, 0.1, 0.1]) + o3d.visualization.draw_geometries([pcd_obj_trans,pcd_obj_trans2,o3d_points]) + breakpoint() + + #vis the center + # sphere = o3d.geometry.TriangleMesh.create_sphere(radius=0.02) + # sphere.translate() + # sphere.paint_uniform_color([1, 0, 0]) + + + # pcd2 = o3d.geometry.PointCloud() + # pcd2.points = o3d.utility.Vector3dVector(qp_np[10]) + # pcd2.paint_uniform_color([0.1, 0.1, 1]) + # o3d.visualization.draw_geometries([pcd,pcd2,o3d_points]) + + + # approach 2: try project cad points onto 2D img and compare with pillarized gt_depth + if 0: + + grid_size = 5 + depth_img = end_points['depth']* 1000 + height, width = depth_img.shape + H_trim = height - height % grid_size + W_trim = width - width % grid_size + depth_trimmed = depth_img[:H_trim, :W_trim] + + reshaped = depth_trimmed.reshape( + H_trim // grid_size, grid_size, + W_trim // grid_size, grid_size + ) + + reshaped = reshaped.permute(0, 2, 1, 3) + # min_pillars = reshaped.min(dim=3).values.min(dim=2).values + n_y, n_x = reshaped.shape[:2] + pillar_dict = {} + + for yi in range(n_y): + for xi in range(n_x): + block = reshaped[yi, xi] + min_val = torch.min(block) + pillar_dict[(xi, yi)] = min_val.item() + + # cad_points = torch.tensor(trimesh.sample.volume_mesh(mesh, count=1000), device='cuda') + cad_points = torch.tensor(mesh.sample(1000), device='cuda') + cad_points = torch.matmul( pred_rs.squeeze(0).float(), cad_points.T.float()) + cad_points = cad_points.transpose(-1, -2) + cad_points_trans = cad_points + pred_ts.squeeze(0) * radius + + cad_points_trans[...,2] = cad_points_trans[...,2] + 1e-6 # avoid divide by 0 + x 
= cad_points_trans[..., 0] / cad_points_trans[..., 2] + y = cad_points_trans[..., 1] / cad_points_trans[..., 2] + + U = K[0, 0] * x + K[0, 2] + V = K[1, 1] * y + K[1, 2] + + + + depth_diffs = np.full((cad_points_trans.shape[0], cad_points_trans.shape[1]), np.nan) + + + + + for i in range(U.shape[0]): + for j in range(U.shape[1]): + x,y = U[i,j], V[i,j] + z = cad_points_trans[i,j, 2] + xi = int(x/grid_size) + yi = int(y/grid_size) + key = (xi, yi) + if key in pillar_dict: + z_pillar = pillar_dict[key] + depth_diffs[i, j] = z - z_pillar + + depth_diffs = torch.tensor(depth_diffs) + negative_mask = depth_diffs < 0 + neg_diffs_only = torch.where(negative_mask, depth_diffs, torch.zeros_like(depth_diffs)) + depth_scores = torch.abs(neg_diffs_only.sum(dim=1)) + depth_scores = depth_scores / (depth_scores.sum() + 1e-8) + depth_scores *= 1000 + + scores = weights1.unsqueeze(1).sum(2) / ((dis * weights1.unsqueeze(1)).sum(2) +1e-8) + scores = scores - (depth_scores).cuda() + sorted_x, indices = torch.sort(scores,descending=True) + idx = indices[0,0] + # print('-----------------',idx,scores[0,idx]) + pred_R = torch.gather(pred_rs, 1, idx.reshape(B,1,1,1).repeat(1,1,3,3)).squeeze(1) + pred_t = torch.gather(pred_ts, 1, idx.reshape(B,1,1,1).repeat(1,1,1,3)).squeeze(2).squeeze(1) + pose = np.eye(4) + pose[:3,:3] = pred_R.cpu().numpy() + pose[:3,3] = pred_t.cpu().numpy() * radius[0].cpu().numpy() + mesh_o3d = o3d.geometry.TriangleMesh(o3d.utility.Vector3dVector(mesh.vertices), o3d.utility.Vector3iVector(mesh.faces)) + pcd_obj2 = mesh_o3d.sample_points_uniformly(number_of_points=1000) + pcd_obj_trans2 = copy.deepcopy(pcd_obj2).transform(pose) + pcd_obj_trans2.paint_uniform_color([1, 0.1, 0.1]) + # o3d.visualization.draw_geometries([pcd_obj_trans,pcd_obj_trans2,o3d_points]) + + + # breakpoint() + + + # pcd3 = o3d.geometry.PointCloud() + # pcd3.points = o3d.utility.Vector3dVector(cad_points_trans[0].cpu().numpy()) + # pcd3.paint_uniform_color([0.1, 0.1, 1]) + # 
o3d.visualization.draw_geometries([pcd,pcd3,o3d_points]) + + + # breakpoint() + # p1 = torch.gather(p1, 1, idx.reshape(B,1,1,1).repeat(1,1,3,3)).squeeze(1) + # p2 = torch.gather(p2, 1, idx.reshape(B,1,1,1).repeat(1,1,3,3)).squeeze(1) + # p1 = (p1 - pred_t *(radius.reshape(-1, 1, 1) + 1e-6)) @ pred_R + + # points = p1.cpu().numpy().reshape(-1,3) + # points = p2.cpu().numpy().reshape(-1,3) + # p1 = p1.repeat(4,1,1).unsqueeze(0) + # p2 = p2.repeat(4,1,1).unsqueeze(0) + + # pred_R_template = (pred_R @ template_tensor) + # pcd = torch.tensor(mesh.sample(1000), device='cuda') + # points_h = torch.hstack([pcd, torch.ones((pcd.shape[0], 1)).cuda()]) + + # pose = torch.eye(4).type(torch.float64).cuda() + # pose[:3,:3] = pred_R[0] + # pose[:3,3] = pred_t[0] + # points_transformed_h = (pose @ points_h.cuda().T).T + # points_transformed = points_transformed_h[:, :3] + + # mask = end_points['mask_ori']/255 + # rgb_img = end_points['rgb_ori'] + # depth_img = end_points['depth'] * 1000 + + # rgb_img = rgb_img * mask[:,:,None] + # depth_img = depth_img * mask + # rgb = o3d.geometry.Image(rgb_img.cpu().numpy()) + # depth = o3d.geometry.Image(depth_img.cpu().numpy()) + + # rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth(rgb, depth, depth_scale=1000) + # width = rgb_img.shape[1] + # height = rgb_img.shape[0] + # cx = int(K[0,2]) + # cy = int(K[1,2]) + # fx = int(K[0,0]) + # fy = int(K[1,1]) + + # intri = o3d.camera.PinholeCameraIntrinsic(width=width, height=height, fx=fx, fy=fy, cx=cx, cy=cy) + # o3d_points = o3d.geometry.PointCloud.create_from_rgbd_image(rgbd, intrinsic=intri) + # o3d_points.translate(-o3d_points.get_center()) + # breakpoint() + # o3d.visualization.draw_geometries([o3d_points]) + # pred_t = pred_t.repeat(4, 1).reshape(1,-1,1,3) + + + # p1 = (p1 - pred_t) @ pred_R + # mesh_normal = compute_triangle_normals(p1) + # obs_normal = compute_triangle_normals(p2) + # cos_sim = torch.sum(mesh_normal * obs_normal, dim=2).clamp(-1.0 + eps, 1.0 - eps) + # angle 
=(cos_sim - cos_sim.min()) / (cos_sim.max() - cos_sim.min() + 1e-8) # shape (1, 6000) + + # add rot error + + + + # device = 'cpu' + # mesh = load_objs_as_meshes(['/workspace/cad_model/box_02/box_02.obj'], device=device) + # meshes = mesh.extend(10) + # fx, fy = K[0,0].item(), K[1,1].item() + # cx, cy = K[0,2].item(), K[1,2].item() + # h = 480 + # w = 640 + # image_size = torch.tensor([[480, 640]]) + + # RT = torch.eye(4) + # RT[3,3] = 1 + # RT[:3,:3] = pred_R.reshape(3,3) + # RT[:3,3] = pred_t * (radius.reshape(-1, 1, 1) + 1e-6) + + # # transfom axis to pytorch3d format + # Rz = torch.tensor([[-1,0, 0, 0], + # [0, -1, 0, 0], + # [0, 0, 1, 0], + # [0, 0, 0, 1]]).float() + + # RT = torch.matmul(Rz, RT) + + # template_pose_rot = [ + # torch.tensor([[[1., 0., 0.,], [0., 1., 0.], [0., 0., 1.]]], device = 'cuda'), # identity + # torch.tensor([[[-1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]], device = 'cuda'), # mirror along x + # torch.tensor([[[1., 0., 0.], [0., -1., 0.], [0., 0., 1.]]], device = 'cuda'), # mirror along y + # torch.tensor([[[1., 0., 0.], [0., 1., 0.], [0., 0., -1.]]], device = 'cuda'), # mirror along z + # torch.tensor([[[1., 0., 0.], [0., 0., -1.], [0., 1., 0.]]], device = 'cuda'), # 90 around x + # torch.tensor([[[1., 0., 0.], [0., 0., 1.], [0., -1., 0.]]], device = 'cuda'), # -90 around x + # torch.tensor([[[0., 0., -1.], [0., 1., 0.], [1., 0., 0.]]], device = 'cuda'), # 90 around y + # torch.tensor([[[0., 0., 1.], [0., 1., 0.], [-1., 0., 0.]]], device = 'cuda'), # -90 around y + # torch.tensor([[[0., -1., 0.], [1., 0., 0.], [0., 0., 1.]]], device = 'cuda'), # 90 around z + # torch.tensor([[[0., 1., 0.], [-1., 0., 0.], [0., 0., 1.]]], device = 'cuda') # -90 around z + # ] + + # rot_tensor = torch.cat(template_pose_rot, dim=0) # shape (10,3,3) + + # base_rot = RT[:3, :3] + # new_rot = base_rot.unsqueeze(0) @ rot_tensor.cpu() + + # RT_batch = RT.unsqueeze(0).repeat(10, 1, 1) + # RT_batch[:, :3, :3] = new_rot + + # R = torch.transpose(RT_batch[:,:3, :3], 
1, 2).reshape(-1, 3, 3).detach() + # T = RT_batch[:,:3,3].reshape(-1,3).detach() + # f = torch.tensor((fx, fy), dtype=torch.float32).unsqueeze(0) + # p = torch.tensor((cx, cy), dtype=torch.float32).unsqueeze(0) + + + # cameras = PerspectiveCameras( + # R = R, + # T = T, + # focal_length=f, + # principal_point=p, + # image_size=image_size, + # in_ndc=False, + # device="cpu") + # raster_settings = RasterizationSettings( + # image_size=(h,w), + # blur_radius=0.0, + # faces_per_pixel=10, + # ) + + # rasterizer = MeshRasterizer( + # cameras=cameras, + # raster_settings=raster_settings + # ) + # lights = PointLights(device=device, location=[[0.0, 0.0, -3.0]]) + # renderer = MeshRenderer( + # rasterizer=rasterizer, + # shader=SoftPhongShader( + # device=device, + # cameras=cameras, + # lights=lights + # ) + # ) + + + # fragments = rasterizer(meshes) + # # depths_render = fragments.zbuf.detach().cpu().numpy() + # depths_render = fragments.zbuf[..., 0].detach().cpu().numpy() + + + # images = np.array(renderer(meshes)) + # images = images[:, ..., :3] + # emb_list = [] + # depth_emb_list = [] + # y1, y2, x1, x2 = end_points['bbox'] + + # for i in range(images.shape[0]): + # img = images[i] + # img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) + # img = (img*255).astype(np.int32) + # img = img[y1.int().item():y2.int().item(), x1.int().item():x2.int().item()] + # depth_render = depths_render[i][y1.int().item():y2.int().item(), x1.int().item():x2.int().item()] + # if len(depth_render.shape) != 3: + # depth_render = np.expand_dims(depth_render, axis=-1) + # depth_render = np.repeat(depth_render*1000, 3, axis=-1).astype(np.uint8) + # depth_render_emb = compute_embedding(depth_render) + # render_emb = compute_embedding(img) + # emb_list.append(render_emb) + # depth_emb_list.append(depth_render_emb) + + # rgb = end_points['rgb_ori'].cpu().numpy()[y1.int().item():y2.int().item(), x1.int().item():x2.int().item()] + # depth_crop = 
def compute_fine_Rt(
    atten,
    pts1,
    pts2,
    radius,
    end_points,
    model_pts=None,
    dis_thres=0.15
):
    """Estimate a fine 6D pose from a soft attention/matching matrix.

    Args:
        atten: (B, N1+1, N2+1) attention logits between observed points and
            model points, with an extra background row/column at index 0.
        pts1: (B, N1, 3) observed points.
        pts2: (B, N2, 3) model points used to build soft correspondences.
        radius: scene normalization radius (kept for interface
            compatibility; not used by the active code path).
        end_points: network output dict (kept for interface compatibility).
        model_pts: optional (B, M, 3) points used for the inlier distance
            check; defaults to `pts2`.
        dis_thres: inlier distance threshold used by the pose score.

    Returns:
        pred_R: (B, 3, 3) rotation.
        pred_t: (B, 3) translation.
        pose_score: (B,) matching-based confidence in [0, 1].
        None (reserved slot kept for caller compatibility).
    """
    if model_pts is None:
        model_pts = pts2

    # Mutual soft assignment: product of row-wise and column-wise softmax.
    wsvd = WeightedProcrustes(weight_thresh=0.0)
    assignment_mat = torch.softmax(atten, dim=2) * torch.softmax(atten, dim=1)

    # label == 0 means "best match is background"; keep only pairs where
    # both sides match a foreground entry.
    label1 = torch.max(assignment_mat[:, 1:, :], dim=2)[1]
    label2 = torch.max(assignment_mat[:, :, 1:], dim=1)[1]
    assignment_mat = (
        assignment_mat[:, 1:, 1:]
        * (label1 > 0).float().unsqueeze(2)
        * (label2 > 0).float().unsqueeze(1)
    )

    # Soft correspondence: each observed point maps to a weighted average
    # of the model points.
    normalized_assignment_mat = assignment_mat / (assignment_mat.sum(2, keepdim=True) + 1e-6)
    pred_pts = normalized_assignment_mat @ pts2

    assignment_score = assignment_mat.sum(2)
    pred_R, pred_t = wsvd(pred_pts, pts1, assignment_score)

    # Pose score: fraction of matched points whose distance to the model
    # (after applying the inverse predicted pose) is below the threshold,
    # down-weighted by the overall match ratio.
    pred_pts = (pts1 - pred_t.unsqueeze(1)) @ pred_R
    dis = torch.sqrt(pairwise_distance(pred_pts, model_pts)).min(2)[0]

    mask = (label1 > 0).float()
    pose_score = (dis < dis_thres).float()
    pose_score = (pose_score * mask).sum(1) / (mask.sum(1) + 1e-8)
    pose_score = pose_score * mask.mean(1)

    return pred_R, pred_t, pose_score, None
def backproject_points(xy_points, depth_image, K):
    """Back-project 2D pixel coordinates to 3D points in the camera frame.

    Each (u, v) coordinate is rounded to the nearest pixel, its depth z is
    read from ``depth_image[v, u]``, and the 3D point is ``z * K^-1 @ [u, v, 1]``
    (homogeneous pixel built from the *rounded* coordinates, matching the
    depth lookup).

    Args:
        xy_points: iterable of (u, v) pixel coordinates.
        depth_image: (H, W) depth map indexed as depth_image[v, u].
        K: (3, 3) camera intrinsic matrix.

    Returns:
        (N, 3) float array of 3D points; (0, 3) for empty input.
    """
    xy = np.asarray(list(xy_points), dtype=float)
    if xy.size == 0:
        # Keep a consistent 2D shape so callers can index columns safely.
        return np.zeros((0, 3))

    # np.rint matches Python round() (round-half-to-even).
    uv = np.rint(xy).astype(int)
    z = np.asarray(depth_image)[uv[:, 1], uv[:, 0]]

    pixels_homog = np.column_stack([uv.astype(float), np.ones(len(uv))])
    K_inv = np.linalg.inv(K)
    # Vectorized equivalent of the per-point z * (K_inv @ [u, v, 1]) loop.
    return np.asarray(z, dtype=float).reshape(-1, 1) * (pixels_homog @ K_inv.T)
def weighted_procrustes(
    src_points,
    ref_points,
    weights=None,
    weight_thresh=0.0,
    eps=1e-5,
    return_transform=False,
    src_centroid=None,
    ref_centroid=None,
):
    r"""Compute rigid transformation from `src_points` to `ref_points` using weighted SVD.

    Modified from [PointDSC](https://github.com/XuyangBai/PointDSC/blob/master/models/common.py).

    Args:
        src_points: torch.Tensor (B, N, 3) or (N, 3)
        ref_points: torch.Tensor (B, N, 3) or (N, 3)
        weights: torch.Tensor (B, N) or (N,) (default: None -> uniform)
        weight_thresh: float (default: 0.); weights below it are zeroed
        eps: float (default: 1e-5)
        return_transform: bool (default: False)
        src_centroid / ref_centroid: optional externally supplied centroids,
            (B, 3) or (B, 1, 3); computed from the weighted points when None.

    Returns:
        R: torch.Tensor (B, 3, 3) or (3, 3)
        t: torch.Tensor (B, 3) or (3,)
        transform: torch.Tensor (B, 4, 4) or (4, 4) (when return_transform)
    """
    if src_points.ndim == 2:
        # Promote unbatched input to batch size 1; squeeze back at the end.
        src_points = src_points.unsqueeze(0)
        ref_points = ref_points.unsqueeze(0)
        if weights is not None:
            weights = weights.unsqueeze(0)
        squeeze_first = True
    else:
        squeeze_first = False

    batch_size = src_points.shape[0]
    if weights is None:
        weights = torch.ones_like(src_points[:, :, 0])
    # Zero weights below the threshold, then normalize them to sum to one.
    weights = torch.where(torch.lt(weights, weight_thresh), torch.zeros_like(weights), weights)
    weights = weights / (torch.sum(weights, dim=1, keepdim=True) + eps)
    weights = weights.unsqueeze(2)  # (B, N, 1)

    if src_centroid is None:
        src_centroid = torch.sum(src_points * weights, dim=1, keepdim=True)  # (B, 1, 3)
    elif len(src_centroid.size()) == 2:
        src_centroid = src_centroid.unsqueeze(1)
    src_points_centered = src_points - src_centroid  # (B, N, 3)

    if ref_centroid is None:
        ref_centroid = torch.sum(ref_points * weights, dim=1, keepdim=True)  # (B, 1, 3)
    elif len(ref_centroid.size()) == 2:
        ref_centroid = ref_centroid.unsqueeze(1)
    ref_points_centered = ref_points - ref_centroid  # (B, N, 3)

    # Weighted covariance + SVD with reflection correction (Kabsch).
    H = src_points_centered.permute(0, 2, 1) @ (weights * ref_points_centered)
    U, _, V = torch.svd(H)
    Ut, V = U.transpose(1, 2), V
    eye = torch.eye(3).unsqueeze(0).repeat(batch_size, 1, 1).to(src_points.device)
    eye[:, -1, -1] = torch.sign(torch.det(V @ Ut))
    R = V @ eye @ Ut

    t = ref_centroid.permute(0, 2, 1) - R @ src_centroid.permute(0, 2, 1)
    t = t.squeeze(2)

    if return_transform:
        # BUGFIX: allocate on the input tensor's device instead of the
        # previous unconditional .cuda(), which crashed on CPU-only
        # machines (and was inconsistent with `eye` above).
        transform = torch.eye(4).unsqueeze(0).repeat(batch_size, 1, 1).to(src_points.device)
        transform[:, :3, :3] = R
        transform[:, :3, 3] = t
        if squeeze_first:
            transform = transform.squeeze(0)
        return transform
    else:
        if squeeze_first:
            R = R.squeeze(0)
            t = t.squeeze(0)
        return R, t
class Solver(gorilla.solver.BaseSolver):
    """Training driver built on gorilla's BaseSolver.

    Wraps a model and its loss with dataloaders, tensorboard logging,
    per-epoch checkpointing, optional warmup optimizer/scheduler, and an
    optional frozen `coarse_model` whose outputs feed the trained model.
    """

    def __init__(self, model, loss, dataloaders, logger, cfg, coarse_model=None):
        super(Solver, self).__init__(
            model=model,
            dataloaders=dataloaders,
            cfg=cfg,
        )
        self.loss = loss
        self.logger = logger
        # Stop records propagating to ancestor loggers (avoids duplicates).
        self.logger.propagate = 0

        # Optional first-stage model: kept in eval mode, never trained here.
        self.coarse_model = coarse_model.eval() if coarse_model is not None else coarse_model

        # Wrap the BaseSolver tensorboard writer with per-stream step
        # counters (counter 0: train, counter 1: eval — see write_summary).
        tb_writer_ = tools_writer(
            dir_project=cfg.log_dir, num_counter=2, get_sum=False)
        tb_writer_.writer = self.tb_writer
        self.tb_writer = tb_writer_
        self.iters_to_print = cfg.iters_to_print

        # Resume training state when cfg.checkpoint_iter is set (!= -1).
        if cfg.checkpoint_iter != -1:
            logger.info("=> loading checkpoint from iter {} ...".format(cfg.checkpoint_iter))
            checkpoint = os.path.join(cfg.log_dir, 'checkpoint_iter' + str(cfg.checkpoint_iter).zfill(6) + '.pth')
            checkpoint_file = gorilla.solver.resume(model=model, filename=checkpoint, optimizer=self.optimizer, scheduler=self.lr_scheduler)
            start_epoch = checkpoint_file['epoch']+1
            start_iter = checkpoint_file['iter']
            del checkpoint_file
        else:
            start_epoch = 1
            start_iter = 0
        self.epoch = start_epoch
        self.iter = start_iter

        # Separate optimizer/scheduler used for the first cfg.warmup_iter steps.
        if hasattr(cfg, 'warmup_iter') and cfg.warmup_iter > 0:
            self.warmup_optimizer = build_optimizer(model, cfg.warmup_optimizer)
            self.warmup_scheduler = build_lr_scheduler(self.warmup_optimizer, cfg.warmup_lr_scheduler)

    def solve(self):
        """Main loop: run train() once per epoch and checkpoint after each."""
        while self.epoch <= self.cfg.training_epoch:
            self.logger.info('\nEpoch {} :'.format(self.epoch))

            end = time.time()
            dict_info_train = self.train()
            train_time = time.time()-end

            # Keep only loss-related entries for the epoch summary.
            dict_info = {'train_time(min)': train_time/60.0}
            for key, value in dict_info_train.items():
                if 'loss' in key:
                    dict_info['train_'+key] = value

            # Checkpoints are named by global iteration, zero-padded to 6 digits.
            ckpt_path = os.path.join(
                self.cfg.log_dir, 'checkpoint_iter'+ str(self.iter).zfill(6) +'.pth')
            gorilla.solver.save_checkpoint(
                model=self.model, filename=ckpt_path, optimizer=self.optimizer, scheduler=self.lr_scheduler, meta={'iter': self.iter, "epoch": self.epoch})

            prefix = 'Epoch {} - '.format(self.epoch)
            write_info = self.get_logger_info(prefix, dict_info=dict_info)
            self.logger.warning(write_info)
            self.epoch += 1

    def train(self):
        """Train for one epoch; returns the averaged log-buffer statistics."""
        mode = 'train'
        self.model.train()
        end = time.time()
        self.dataloaders["train"].dataset.reset()

        for i, data in enumerate(self.dataloaders["train"]):
            torch.cuda.synchronize()
            data_time = time.time()-end

            # Select the warmup optimizer/scheduler until warmup_iter is
            # reached, then switch to the main pair.
            if hasattr(self.cfg, 'warmup_iter') and self.cfg.warmup_iter > 0:
                if self.iter >= self.cfg.warmup_iter:
                    optimizer = self.optimizer
                    lr_scheduler = self.lr_scheduler
                else:
                    optimizer = self.warmup_optimizer
                    lr_scheduler = self.warmup_scheduler
            else:
                optimizer = self.optimizer
                lr_scheduler = self.lr_scheduler

            optimizer.zero_grad()
            loss, dict_info_step = self.step(data, mode)
            dict_info_step['lr'] = lr_scheduler.get_last_lr()[0]
            forward_time = time.time()-end-data_time

            loss.backward()
            optimizer.step()
            backward_time = time.time() - end - forward_time-data_time

            # Record timing alongside the loss terms for the log buffer.
            dict_info_step.update({
                'T_data': data_time,
                'T_forward': forward_time,
                'T_back': backward_time,
            })
            self.log_buffer.update(dict_info_step)
            self.iter += 1

            if i % self.iters_to_print == 0:
                self.log_buffer.average(self.iters_to_print)
                prefix = 'Iter {} Train - '.format(str(self.iter).zfill(6))
                write_info = self.get_logger_info(
                    prefix, dict_info=self.log_buffer._output)
                self.logger.info(write_info)
                self.write_summary(self.log_buffer._output, mode)
            end = time.time()

            # Scheduler is stepped once per iteration, not per epoch.
            lr_scheduler.step()

        dict_info_epoch = self.log_buffer.avg
        self.log_buffer.clear()

        return dict_info_epoch

    def evaluate(self):
        """Evaluate on the eval dataloader; returns averaged statistics."""
        mode = 'eval'
        self.model.eval()

        for i, data in enumerate(self.dataloaders["eval"]):
            with torch.no_grad():
                _, dict_info_step = self.step(data, mode)
                self.log_buffer.update(dict_info_step)
                if i % self.iters_to_print == 0:
                    self.log_buffer.average(self.iters_to_print)
                    prefix = '[{}/{}][{}/{}] Test - '.format(
                        self.epoch, self.cfg.max_epoch, i, len(self.dataloaders["eval"]))
                    write_info = self.get_logger_info(
                        prefix, dict_info=self.log_buffer._output)
                    self.logger.info(write_info)
                    self.write_summary(self.log_buffer._output, mode)
        dict_info_epoch = self.log_buffer.avg
        self.log_buffer.clear()

        return dict_info_epoch

    def step(self, data, mode):
        """Run one forward pass + loss; returns (loss tensor, float metrics).

        NOTE(review): moves every value in `data` to CUDA — assumes all
        batch entries are tensors; confirm against the dataloader.
        """
        torch.cuda.synchronize()
        for key in data:
            data[key] = data[key].cuda()
        # Optionally pre-process the batch with the frozen coarse model.
        if self.coarse_model is not None:
            with torch.no_grad():
                data = self.coarse_model(data)
        end_points = self.model(data)
        dict_info = self.loss(end_points)
        loss_all = dict_info['loss']

        # Detach metrics to plain floats for logging.
        for key in dict_info:
            dict_info[key] = float(dict_info[key].item())

        return loss_all, dict_info

    def get_logger_info(self, prefix, dict_info):
        """Format a metrics dict into one tab-separated log line.

        Timing keys ('T_*') get 3 decimals, 'lr' gets 6, everything else 5.
        """
        info = prefix
        for key, value in dict_info.items():
            if 'T_' in key:
                info = info + '{}: {:.3f}\t'.format(key, value)
            elif 'lr' in key:
                info = info + '{}: {:.6f}\t'.format(key, value)
            else:
                info = info + '{}: {:.5f}\t'.format(key, value)

        return info

    def write_summary(self, dict_info, mode):
        """Push scalars to tensorboard on the stream matching `mode`."""
        keys = list(dict_info.keys())
        values = list(dict_info.values())
        if mode == "train":
            self.tb_writer.update_scalar(
                list_name=keys, list_value=values, index_counter=0, prefix="train_")
        elif mode == "eval":
            self.tb_writer.update_scalar(
                list_name=keys, list_value=values, index_counter=1, prefix="eval_")
        else:
            assert False
class tools_writer():
    """Thin wrapper around a tensorboard SummaryWriter that keeps one
    independent step counter per logging stream (e.g. train / eval)."""

    def __init__(self, dir_project, num_counter, get_sum):
        # Make sure the log directory exists before any writer touches it.
        if not os.path.isdir(dir_project):
            os.makedirs(dir_project)
        # Only build a real SummaryWriter when summaries are requested;
        # otherwise the caller is expected to inject one via `.writer`.
        self.writer = SummaryWriter(dir_project) if get_sum else None
        self.num_counter = num_counter
        self.list_couter = [0 for _ in range(num_counter)]

    def update_scalar(self, list_name, list_value, index_counter, prefix):
        """Log every (name, value) pair at the current step of the chosen
        counter, then advance that counter by one."""
        step = self.list_couter[index_counter]
        for tag, scalar in zip(list_name, list_value):
            self.writer.add_scalar(prefix + tag, float(scalar), step)
        self.list_couter[index_counter] += 1

    def refresh(self):
        """Reset every stream counter back to zero."""
        for idx in range(self.num_counter):
            self.list_couter[idx] = 0
def get_logger(level_print, level_save, path_file, name_logger = "logger"):
    """Build a logger that writes to both a file and the console.

    The logger itself is pinned to DEBUG so that the per-handler levels
    decide what is emitted: `level_save` for the file at `path_file`,
    `level_print` for the console (e.g. logging.INFO / logging.WARN).
    """
    logger = logging.getLogger(name_logger)
    logger.setLevel(level=logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s - %(message)s')

    # File handler first, console handler second — same shared formatter.
    file_handler = logging.FileHandler(path_file)
    console_handler = logging.StreamHandler()
    for handler, level in ((file_handler, level_save), (console_handler, level_print)):
        handler.setLevel(level)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger
+ """ + # Normalize features to unit length for cosine similarity + norms = np.linalg.norm(features, axis=1, keepdims=True) + 1e-6 # Avoid division by zero + features = features / norms # Normalize to unit length + + # Reduce to 3 dimensions if necessary + if features.shape[1] > 3: + pca = PCA(n_components=3) + features = pca.fit_transform(features) + + # Normalize reduced features to [0, 1] + features = (features - features.min(axis=0)) / (features.ptp(axis=0) + 1e-6) + + # Map directly to RGB + colors = features + return colors + +def visualize_points_3d(tem_pts, points_name, num_frames=360, **kwargs): + output_video_path=f'{points_name}_visualization.mp4' + fig = plt.figure(figsize=(10, 8)) + ax = fig.add_subplot(111, projection='3d') + + # Scatter plot of points + ax.scatter(tem_pts[:, 0], tem_pts[:, 1], tem_pts[:, 2], **kwargs) + + # Hide grid and axes + ax.grid(False) + ax.axis('off') + + # Configure axes limits for better visibility + max_extent = np.max(np.abs(tem_pts)) + ax.set_xlim([-max_extent, max_extent]) + ax.set_ylim([-max_extent, max_extent]) + ax.set_zlim([-max_extent, max_extent]) + + def update(frame): + ax.view_init(elev=30, azim=frame) + return fig, + + anim = FuncAnimation(fig, update, frames=num_frames, interval=100) + + # Save as a video + anim.save(output_video_path, fps=30, writer='ffmpeg') + print(f"Visualization saved to {output_video_path}") + + +def visualize_two_sets_3d( + points1, + points2, + vis_name, + points1_name='Set 1', + points2_name='Set 2', + color1='red', + color2='green', + num_frames=360, + output_video_path='two_sets_visualization.mp4', + **kwargs, +): + """ + Visualize two sets of 3D points in a single animated plot. + + Parameters: + - points1 (np.ndarray): First set of points, shape (N, 3). + - points2 (np.ndarray): Second set of points, shape (M, 3). + - points1_name (str): Label for the first point set. + - points2_name (str): Label for the second point set. + - color1 (str): Color for the first point set. 
def visualize_two_sets_3d(
    points1,
    points2,
    vis_name,
    points1_name='Set 1',
    points2_name='Set 2',
    color1='red',
    color2='green',
    num_frames=360,
    output_video_path='two_sets_visualization.mp4',
    **kwargs,
):
    """
    Render two 3D point sets as a rotating-camera animation saved to video.

    Parameters:
    - points1 (np.ndarray): First set of points, shape (N, 3).
    - points2 (np.ndarray): Second set of points, shape (M, 3).
    - vis_name (str): Prefix for the output video file name.
    - points1_name (str): Label for the first point set.
    - points2_name (str): Label for the second point set.
    - color1 (str): Color for the first point set.
    - color2 (str): Color for the second point set.
    - num_frames (int): Number of frames in the animation.
    - output_video_path (str): Suffix of the output video path; the file
      is written to f"{vis_name}_{output_video_path}".
    """
    fig = plt.figure(figsize=(10, 8))
    axis = fig.add_subplot(111, projection='3d')

    # Draw both sets on the same axes.
    axis.scatter(points1[:, 0], points1[:, 1], points1[:, 2], c=color1, **kwargs)
    axis.scatter(points2[:, 0], points2[:, 1], points2[:, 2], c=color2, **kwargs)

    # Clean look: no grid, no axes.
    axis.grid(False)
    axis.axis('off')

    # Symmetric limits covering both sets, with ~10% padding.
    combined = np.vstack((points1, points2))
    half_extent = np.max(np.abs(combined)) * 1.1
    for set_lim in (axis.set_xlim, axis.set_ylim, axis.set_zlim):
        set_lim([-half_extent, half_extent])

    def spin(frame):
        # Rotate the camera by one azimuth step per frame.
        axis.view_init(elev=30, azim=frame)
        return fig,

    animation = FuncAnimation(fig, spin, frames=num_frames, interval=50, blit=True)

    print('------------------>', output_video_path)
    animation.save(f"{vis_name}_{output_video_path}", fps=30, writer='ffmpeg')
    plt.close(fig)  # free the figure's memory
    print(f"Visualization saved to {vis_name}_{output_video_path}")
def iou_3d_boxes(extentsA, T1, extentsB, T2, grid_n=60):
    """
    Compute IoU between two 3D boxes (different sizes, different poses).

    Box A is discretized into grid_n^3 cell-center samples in its local
    frame; the fraction of samples that land inside box B estimates the
    intersection volume (Monte-Carlo-style on a regular grid).

    extentsA: (3,) full sizes of box A
    T1: (4,4) transform (world-from-boxA)
    extentsB: (3,) full sizes of box B
    T2: (4,4) transform (world-from-boxB)
    grid_n: number of grid subdivisions per axis for sampling inside box A
    """
    sizes_a = np.asarray(extentsA, float)
    sizes_b = np.asarray(extentsB, float)
    vol_a = sizes_a[0] * sizes_a[1] * sizes_a[2]
    vol_b = np.prod(sizes_b)

    # Cell-center sample coordinates along each axis of A's local frame.
    half_a = sizes_a / 2
    axes = [
        np.linspace(-h, h, grid_n, endpoint=False) + (full / grid_n) * 0.5
        for h, full in zip(half_a, sizes_a)
    ]
    grid_x, grid_y, grid_z = np.meshgrid(*axes, indexing="xy")
    samples_local = np.stack([grid_x.ravel(), grid_y.ravel(), grid_z.ravel()], axis=-1)

    # A-local -> world.
    rot_a, trans_a = T1[:3, :3], T1[:3, 3]
    samples_world = samples_local @ rot_a.T + trans_a

    # World -> B-local, then test against B's (slightly padded) half extents.
    rot_b, trans_b = T2[:3, :3], T2[:3, 3]
    samples_b_local = (samples_world - trans_b) @ rot_b
    half_b = sizes_b / 2 + 1e-9
    inside_b = np.all(np.abs(samples_b_local) <= half_b, axis=1)

    inter_vol = inside_b.mean() * vol_a
    if inter_vol == 0:
        return 0.0
    return inter_vol / (vol_a + vol_b - inter_vol)
<= halfB, axis=1) + + interVol = insideB.mean() * volA + if interVol == 0: + return 0.0 + return interVol / (volA + volB - interVol) +def get_connected_vertices(start_index: int): + + connected = [] + vertices_relation = np.array([ + [-1, -1, -1], # v0 + [ 1, -1, -1], # v1 + [-1, 1, -1], # v2 + [ 1, 1, -1], # v3 + [-1, -1, 1], # v4 + [ 1, -1, 1], # v5 + [-1, 1, 1], # v6 + [ 1, 1, 1], # v7 + ]) + + v0 = vertices_relation[start_index] + + for i, vi in enumerate(vertices_relation): + if i == start_index: + continue + if np.count_nonzero(np.abs(v0 - vi)) == 1: + connected.append(i) + + return connected + +def count_lines_passing_points(start_point, directions, points, threshold,rgb, vis = False): + start = np.array(start_point, dtype=float) + directions = np.array(directions, dtype=float) + points = np.array(points, dtype=float) + + #delete start pt from points if it is in + + # vis the start point and vertex points list + # vis = rgb.copy() + # cv2.circle(vis, tuple(start.astype(np.uint32)), 5, (255, 0, 0), -1) + # cv2.imwrite('a.png', vis) + + skip_index = None + if np.min(np.linalg.norm(points-start,axis = 1)) <=15: + #same point + skip_index = np.argmin(np.linalg.norm(points-start,axis = 1)) + + norms = np.linalg.norm(directions, axis=1, keepdims=True) + directions = directions / norms + + vis_img = rgb.copy() + count = 0 + for dir_vec in directions: + diff = points - start + proj_len = np.dot(diff, dir_vec) + closest_points = start + np.outer(proj_len, dir_vec) + dists = np.linalg.norm(points - closest_points, axis=1) + if skip_index is not None: + dists[skip_index] = np.inf + if np.min(dists) < threshold: + count += 1 + + #vis + if vis: + index = np.argmin(dists) + cv2.circle(vis_img, tuple(start_point), 5, (0, 0, 255), -1) + end_point = points[index] + end_u = int(round(start_point[0] + dir_vec[0] * 50)) + end_v = int(round(start_point[1] + dir_vec[1] * 50)) + cv2.arrowedLine(vis_img, (int(start_point[0]), int(start_point[1])), (end_u, end_v), (0, 255, 0), 2) + 
cv2.circle(vis_img, tuple(end_point.astype(np.uint32)), 5, (255, 0, 0), -1) + + + + + + if vis: + for u, v in points: + cv2.circle(vis_img, (int(u), int(v)), 2, (0, 255, 0), -1) + cv2.putText(vis_img, 'matched pt', (5,25), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,0,225), thickness= 2) + cv2.putText(vis_img, 'passed pt', (5,50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255,0,0), thickness= 2) + cv2.putText(vis_img, 'projected gt box pt', (5,75), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,255,0), thickness= 2) + cv2.imwrite(f'vertex_matching.png',vis_img) + + return count +def find_matched_points(points_a, points_b, threshold=7.0): + """ + Parameters: + points_a: (8, 2) numpy array + points_b: (6, 2) numpy array + threshold: float — max distance for a match + + Returns: + matched_pairs: list of tuples (index_b, index_a, distance) + """ + matched_pairs = [] + + for i_b, pb in enumerate(points_b): + + dists = np.linalg.norm(points_a - pb, axis=1) + min_idx = np.argmin(dists) + min_dist = dists[min_idx] + + if min_dist < threshold: + matched_pairs.append((min_idx, i_b,min_dist)) + + matched_pairs.sort(key=lambda x: x[2]) + + return matched_pairs +def compute_and_visualize_line_mask_intersection(mask, start, direction, threshold, step_size=0.5, vis = False, save = False): + H, W = mask.shape + u0, v0 = start + du, dv = direction / np.linalg.norm(direction) + + intersection_pixels = [] + t = 0.0 + inside = False + entry_t = None + exit_t = None + + for _ in range(10000): + u = int(round(u0 + du * t)) + v = int(round(v0 + dv * t)) + if 0 <= u < W and 0 <= v < H: + if np.any(mask[v-threshold:v+threshold, u-threshold:u+threshold]): + intersection_pixels.append((u, v)) + if not inside: + entry_t = t + inside = True + else: + if inside: + exit_t = t + inside = False + break + else: + if inside: + exit_t = t + inside = False + break + t += step_size + + if entry_t is not None and exit_t is not None: + length = exit_t - entry_t + elif entry_t is not None: + length = t - entry_t + else: + length = 0.0 + + if 
vis or save: + visualization = cv2.cvtColor((mask * 255).astype(np.uint8), cv2.COLOR_GRAY2BGR) + + # Draw intersection segment + for (u, v) in intersection_pixels: + visualization[v-5:v+5, u-5:u+5] = [0, 0, 255] # red pixels = intersection + # Draw entire ray + end_u = int(round(u0 + du * t)) + end_v = int(round(v0 + dv * t)) + cv2.arrowedLine(visualization, (int(u0), int(v0)), (end_u, end_v), (0, 255, 0), 2) + + + plt.figure(figsize=(6, 6)) + plt.imshow(visualization[..., ::-1]) + plt.title(f"Intersection length: {length:.2f} pixels") + plt.axis('off') + if save: + t = time.time() + plt.savefig(f"vis_{t}.png", dpi=300) + plt.close() + elif vis: + plt.show() + + return length + + +def intersection_in_xyz_axis(norm_vectors, start_pt, mask_r,mask,threshold,vis = False, save = False): + + intersection_gt_list = [] + intersection_obs_list = [] + for axis_index,norm_vector in enumerate(norm_vectors): + extended_start = start_pt + length_obs = compute_and_visualize_line_mask_intersection(mask_r, extended_start, norm_vector, threshold,step_size = 20, vis = vis, save = save) + length_gt = compute_and_visualize_line_mask_intersection(mask, extended_start, norm_vector, threshold,step_size = 20, vis=vis, save = save) + + intersection_obs_list.append(length_obs) + intersection_gt_list.append(length_gt) + + arr_gt = np.array(intersection_gt_list, dtype=np.float32) + arr_obs = np.array(intersection_obs_list, dtype=np.float32) + + return arr_gt, arr_obs \ No newline at end of file