diff --git a/.gitignore b/.gitignore index 6e1c83f276f877216362b71c94a841ed858929f6..7fc87a8a6a4c8b4084d83c0cd981de066118f689 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ get_the_metadata.py file_organize.py -hospital_HHA_normal_rename.py +hospital_rename.py hugging_face_git.txt \ No newline at end of file diff --git a/Hospital/instances/000_Hospital_equi_instance.npy b/Hospital/instances/000_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..7f718338f84897af9d5bcb97176d49a66626fe98 --- /dev/null +++ b/Hospital/instances/000_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4816b97f538334665e42b5d3e69ab55e90620fe96552d4f4a6eefbd92f0ef328 +size 134189184 diff --git a/Hospital/instances/001_Hospital_equi_instance.npy b/Hospital/instances/001_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..81b7c7813aa6e0b1a1547dbdb42da6e5dde2705d --- /dev/null +++ b/Hospital/instances/001_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9160140a358de1b6594aeb375425a93b7187a5e552ac060dcb6ae9955b423d47 +size 134189184 diff --git a/Hospital/instances/002_Hospital_equi_instance.npy b/Hospital/instances/002_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..463cf84925e586cda0fe6bfd35fe64c34d9a680c --- /dev/null +++ b/Hospital/instances/002_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c8ccdf2c436c0c3eb7a6a5b8a26091aa74cfdab8b59a2bf2c4767db586cb9950 +size 134189184 diff --git a/Hospital/instances/003_Hospital_equi_instance.npy b/Hospital/instances/003_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..d079f197d5721082ed51e6818bb266a3f5d20bbd --- /dev/null +++ b/Hospital/instances/003_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:33d5a425237ef50e22d5330292c0f865b833a3ca0e2aa025c3c36a52c891db22 +size 134189184 diff --git a/Hospital/instances/004_Hospital_equi_instance.npy b/Hospital/instances/004_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..3668b2d0eca1ccf4525e3b25a28246c21e6bfeeb --- /dev/null +++ b/Hospital/instances/004_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:615900e4e28fda3ea6a69f585d5eabc859066eb10f058529f3a799ff105b424d +size 134189184 diff --git a/Hospital/instances/005_Hospital_equi_instance.npy b/Hospital/instances/005_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..237c1ed13c8d54b4186b83398637ff46fccf6288 --- /dev/null +++ b/Hospital/instances/005_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f58bea9ddf7abcf78ada84df359946935d2896e5d26d68308c87e57d9874e356 +size 134189184 diff --git a/Hospital/instances/006_Hospital_equi_instance.npy b/Hospital/instances/006_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..0a37179d272a5554f44bf48aa1e705db4b64a0d4 --- /dev/null +++ b/Hospital/instances/006_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ffb171a6fa270ee849fcc68d8f0a98a8e9081500c5619101ea4e8cd875b8bd19 +size 134189184 diff --git a/Hospital/instances/007_Hospital_equi_instance.npy b/Hospital/instances/007_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..80bd49f4c6858425631006c0623740b0b8ced092 --- /dev/null +++ b/Hospital/instances/007_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:712f017aea15fbe2a02c03c09313f07504948c4a58758c40536e85b800a53090 +size 134189184 diff --git a/Hospital/instances/008_Hospital_equi_instance.npy 
b/Hospital/instances/008_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..61a9e63c402ebb848f76da8e694e3ccaa6aa3fcd --- /dev/null +++ b/Hospital/instances/008_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0441c114bf9409b3caba9cc514c2a556fc95380163872679e9d0d5d01d525656 +size 134189184 diff --git a/Hospital/instances/009_Hospital_equi_instance.npy b/Hospital/instances/009_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..e67806e0ceaef3e0a5ea7d30ed2315ce2d8f6867 --- /dev/null +++ b/Hospital/instances/009_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5f38d0b812540583326f80c7c07d4280faa4d0a45bd2accc3f1c43f94716210 +size 134189184 diff --git a/Hospital/instances/010_Hospital_equi_instance.npy b/Hospital/instances/010_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..6170a5b6e86d0f5f1305b06641d2decda9139cf9 --- /dev/null +++ b/Hospital/instances/010_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:41ba903b961349d758b2de781709258c9cc20b495ee994ad20add042d6f74f65 +size 134189184 diff --git a/Hospital/instances/011_Hospital_equi_instance.npy b/Hospital/instances/011_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..73c75dd7d303ffe81c4e056d83c16828213c0546 --- /dev/null +++ b/Hospital/instances/011_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b54ad777ce18190643f39dea8cb0ea16fba907771ce84b704c8a0bafe8d6ba8c +size 134189184 diff --git a/Hospital/instances/012_Hospital_equi_instance.npy b/Hospital/instances/012_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..5f0ee6aa8bef41db65cb196c3e97ce0eda1b552a --- /dev/null +++ 
b/Hospital/instances/012_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cb2be4507481076c8aa7bc952a476185b843e71cdfd8647de980e356d5ba92ef +size 134189184 diff --git a/Hospital/instances/013_Hospital_equi_instance.npy b/Hospital/instances/013_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..607f0977c76a8bad314ca3a7fa8722244f8cd975 --- /dev/null +++ b/Hospital/instances/013_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2692f4ec157011b68d87910eda3b9e99252e631f3974d686c20314c90dcd025e +size 134189184 diff --git a/Hospital/instances/014_Hospital_equi_instance.npy b/Hospital/instances/014_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..c3dd93cc6fd9f4f2e4d71084c3ad2028dcb14222 --- /dev/null +++ b/Hospital/instances/014_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eff050d6b8b86c58d3b2eb939396410732f805bf1a2e678675bcc531d5e7eaad +size 134189184 diff --git a/Hospital/instances/015_Hospital_equi_instance.npy b/Hospital/instances/015_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..52dbfe88501e965a13b12178da955084a8da6bb2 --- /dev/null +++ b/Hospital/instances/015_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ae7b76ad73908c623db6237226066f27b6ddbdcc9eda5be0140f08cd0ea6b6b3 +size 134189184 diff --git a/Hospital/instances/016_Hospital_equi_instance.npy b/Hospital/instances/016_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..7d4ce83f55caae2fcca097c3575c1e7877d76632 --- /dev/null +++ b/Hospital/instances/016_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9d64f3d1ec6ee067841fa1e1853911fc15e52727c6fce5d10455e7e9b7a79929 +size 134189184 
diff --git a/Hospital/instances/017_Hospital_equi_instance.npy b/Hospital/instances/017_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..c8fe8d860e92abc75a646e8be7e05b23a32f650f --- /dev/null +++ b/Hospital/instances/017_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd8047b5a67fe272825277f02c82f9fed909afe80fdf8e2fd4cc727f3394c6ad +size 134189184 diff --git a/Hospital/instances/018_Hospital_equi_instance.npy b/Hospital/instances/018_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..d1e0d3d5d889a64f40c4cff4494dc0627d9df457 --- /dev/null +++ b/Hospital/instances/018_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6f215756a47e18a2a8a1b0771046aa5c98b3e07f0e435696d3c2618ce3e6aab +size 134189184 diff --git a/Hospital/instances/019_Hospital_equi_instance.npy b/Hospital/instances/019_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..d40d06e8f20d653a4d491274396d048be7262cd1 --- /dev/null +++ b/Hospital/instances/019_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2f800322e162d0c8a80482865a24892d56cdcc25a3e07ded05d0e946cb5d97ac +size 134189184 diff --git a/Hospital/instances/020_Hospital_equi_instance.npy b/Hospital/instances/020_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..22ce548ec86532c7b53c084c264f14922e3da65b --- /dev/null +++ b/Hospital/instances/020_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:45f522c8ac2b2bb707554b032469609a02170c81438d06dc9da93db889a70422 +size 134189184 diff --git a/Hospital/instances/021_Hospital_equi_instance.npy b/Hospital/instances/021_Hospital_equi_instance.npy new file mode 100644 index 
0000000000000000000000000000000000000000..cf9bb1ed38d476d4aa4722dc17dcb97a49a2204b --- /dev/null +++ b/Hospital/instances/021_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d84222b9a2cab4aa2a79798d7d0dd0771841baef779a86c2eb5fc5dbf66226c8 +size 134189184 diff --git a/Hospital/instances/022_Hospital_equi_instance.npy b/Hospital/instances/022_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..3c9bb59c87f1f8a30354f8d12a195a4fe6e31e80 --- /dev/null +++ b/Hospital/instances/022_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6ade530b08e8eb1514dd923f9f5a9cf4fea2f7494a901f8e6ea144788998140a +size 134189184 diff --git a/Hospital/instances/023_Hospital_equi_instance.npy b/Hospital/instances/023_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..e5ab84fc6fccac49fbe8f8c0b6943981851cee2f --- /dev/null +++ b/Hospital/instances/023_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f32f58014699b0ed6818290966f5502b15e385cc0942c509819e9d41f8540c75 +size 134189184 diff --git a/Hospital/instances/024_Hospital_equi_instance.npy b/Hospital/instances/024_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..4b5b9c328bb016032fd27140b9b82719e38eafa0 --- /dev/null +++ b/Hospital/instances/024_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a63e0ff54036b382aa058538716f19a7a4ab94bdd230e5f26fe92da4a2a2aae5 +size 134189184 diff --git a/Hospital/instances/025_Hospital_equi_instance.npy b/Hospital/instances/025_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..0ca8c8b419c42587a09c60c3ebffa26625fda31e --- /dev/null +++ b/Hospital/instances/025_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:4a5fb994c5e54d96c1cc6567ac89fdf84ba21253e77261ccb33a2f8d4b645c0b +size 134189184 diff --git a/Hospital/instances/026_Hospital_equi_instance.npy b/Hospital/instances/026_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..98e197e379ca7d57c9b028850bb3b3b838d0ecba --- /dev/null +++ b/Hospital/instances/026_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6cd93db2c1aaaa7d2315dc52f578109eb6f555d61b2cd0f400002900a3a60f63 +size 134189184 diff --git a/Hospital/instances/027_Hospital_equi_instance.npy b/Hospital/instances/027_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..51b575506c6c3be4635d956a784362d1576c5253 --- /dev/null +++ b/Hospital/instances/027_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:86a624ee66e4e07326f4c29675170941ae742b7a07f9c0e2de90a21957c6730a +size 134189184 diff --git a/Hospital/instances/028_Hospital_equi_instance.npy b/Hospital/instances/028_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..1964390efbe085f7a1228b143c3dd45980cf9e3d --- /dev/null +++ b/Hospital/instances/028_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:29db1e3280de967a086b24fc175791f294ce8de615b4bde6de2ec5e887e5b7f0 +size 134189184 diff --git a/Hospital/instances/029_Hospital_equi_instance.npy b/Hospital/instances/029_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..7359304715bb8fb32e4fbe72e33b362e91f4037a --- /dev/null +++ b/Hospital/instances/029_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd2ba12414142a8b569c8b1f650088b9352d9a77b4936437966cbd5675290657 +size 134189184 diff --git a/Hospital/instances/030_Hospital_equi_instance.npy 
b/Hospital/instances/030_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..0b832f55b0518a31e27b5ce3cc22b1fdf83fce15 --- /dev/null +++ b/Hospital/instances/030_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be75fedd17b846b529e8a2a3c441897b5a39851aeb0b018493fe4c147cd3cf83 +size 134189184 diff --git a/Hospital/instances/031_Hospital_equi_instance.npy b/Hospital/instances/031_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..0310ab88e81fe4cc71b7f6d38e53a8aaea21985a --- /dev/null +++ b/Hospital/instances/031_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e4de6d29a0e959e0f7bf8ea70fb0207f67695a0398a1449ed3925277367dd63 +size 134189184 diff --git a/Hospital/instances/032_Hospital_equi_instance.npy b/Hospital/instances/032_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..fa0a6c0b2db0353c676f705dd86979dfe5090293 --- /dev/null +++ b/Hospital/instances/032_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:593a5facc4834bb4b0e0044714d4834efb0ebe26f3fe737378b8e765da6374af +size 134189184 diff --git a/Hospital/instances/033_Hospital_equi_instance.npy b/Hospital/instances/033_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..18aa81274f8c7ebbf73d3f145ce02aaeb3136eac --- /dev/null +++ b/Hospital/instances/033_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e62a8f9a1c103a4fbd74f4f49d3c9750c9a9c489e515ee67ce590639fdc49f4a +size 134189184 diff --git a/Hospital/instances/034_Hospital_equi_instance.npy b/Hospital/instances/034_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..51c8849f45e5a3579dae1ec966a199dc53423ce8 --- /dev/null +++ 
b/Hospital/instances/034_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9917bc333e026fe9a552daa4c5a67f57c8ef13c7521935175b152e3678dfc1d2 +size 134189184 diff --git a/Hospital/instances/035_Hospital_equi_instance.npy b/Hospital/instances/035_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..68888de2a6496c109905778ae4d77c66d9ecfa14 --- /dev/null +++ b/Hospital/instances/035_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:70a1c43722fa3c9cef26720d0c856a30da7f08f38b803f1fb7199455dda59449 +size 134189184 diff --git a/Hospital/instances/036_Hospital_equi_instance.npy b/Hospital/instances/036_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..0206560e2fde2f75ed7e10c2d0248367e07385ef --- /dev/null +++ b/Hospital/instances/036_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a970641f93a87875b853d5535d3adc93fc899ef06608d5041bef8396aa1967af +size 134189184 diff --git a/Hospital/instances/037_Hospital_equi_instance.npy b/Hospital/instances/037_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..016beb58dc2059930bef25833827800e78b10850 --- /dev/null +++ b/Hospital/instances/037_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16e72d721934eec61f3067961ebd884a8bf63c9d13574004df4dcff2d4e9edba +size 134189184 diff --git a/Hospital/instances/038_Hospital_equi_instance.npy b/Hospital/instances/038_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..ffac9f6c4cb1c10c1be9c047a70298f4717cd69c --- /dev/null +++ b/Hospital/instances/038_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8dee5891d57be05d13183be2860931d92b15916e9f2b55ffc23297b3b26dcfb9 +size 134189184 
diff --git a/Hospital/instances/039_Hospital_equi_instance.npy b/Hospital/instances/039_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..4c7769019348d42f8731c6b6dbd882a789d5c44e --- /dev/null +++ b/Hospital/instances/039_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d5d9874ffe5ee21ca0f485f1fb57fb2858c17a4011906379bf0130f528033aa5 +size 134189184 diff --git a/Hospital/instances/040_Hospital_equi_instance.npy b/Hospital/instances/040_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..6be52145f5a1f5f41e9b47d5808f5f5b9539d209 --- /dev/null +++ b/Hospital/instances/040_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e3a7b0e7da8a17eb67557e86a38578591b9df0655780c0e8216713aad1b38255 +size 134189184 diff --git a/Hospital/instances/041_Hospital_equi_instance.npy b/Hospital/instances/041_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..9e988e905db0512fe40c69f26f866c4e14aa8a72 --- /dev/null +++ b/Hospital/instances/041_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:635689614b68efbc0e0e417c536d76a7f2d89e20016a58f00228b320b8a83beb +size 134189184 diff --git a/Hospital/instances/042_Hospital_equi_instance.npy b/Hospital/instances/042_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..e03c3380685cce765cfed59cae912dcbeb4f4216 --- /dev/null +++ b/Hospital/instances/042_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59f7dc8c566f58fbf7f88e76f2be5b34718e84740fcab0447dfb444d09621ef3 +size 134189184 diff --git a/Hospital/instances/043_Hospital_equi_instance.npy b/Hospital/instances/043_Hospital_equi_instance.npy new file mode 100644 index 
0000000000000000000000000000000000000000..e0fee3e5a4542bd682a3968092c143dc072384b8 --- /dev/null +++ b/Hospital/instances/043_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:85ffcac437bacea61b9a152d14e60afde100be1922994c0e3e7d5b420844918c +size 134189184 diff --git a/Hospital/instances/044_Hospital_equi_instance.npy b/Hospital/instances/044_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..b4a37a760eebd40da0805c93c4f4d36b00aa0521 --- /dev/null +++ b/Hospital/instances/044_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1bd828821cb4dad6cd032b25e646a703c81c5713b1aae5a5375cf14d2a7cd6bf +size 134189184 diff --git a/Hospital/instances/045_Hospital_equi_instance.npy b/Hospital/instances/045_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..93176e54493be48705f64b5043e4503c7d2f6805 --- /dev/null +++ b/Hospital/instances/045_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8304e8ed8cd9880a7977ead58912348cb347cad0e875d823d97a3400f1e453e6 +size 134189184 diff --git a/Hospital/instances/046_Hospital_equi_instance.npy b/Hospital/instances/046_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..5769e14b4538453ea327087c7eb92241a9dd385a --- /dev/null +++ b/Hospital/instances/046_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5abc2a73514b2821107050d70d65f23733ce150cbf9604f0cb5c9cb41614b77 +size 134189184 diff --git a/Hospital/instances/047_Hospital_equi_instance.npy b/Hospital/instances/047_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..ea1cf24e9121cdfdfe96c01731f9b099e0d9ca5d --- /dev/null +++ b/Hospital/instances/047_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:77ecfcbdf3cbe9bbc12f47ab60e194a2366b14eaaef9705cfb98979637a9ecf7 +size 134189184 diff --git a/Hospital/instances/048_Hospital_equi_instance.npy b/Hospital/instances/048_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..ac88bdb7d0ebd7f0c5d0af2ba2a1fbc80f657d1a --- /dev/null +++ b/Hospital/instances/048_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:12532ac765100665e6736172797aa0e937b0c2d58ab03dda0f77b5e4cae3153e +size 134189184 diff --git a/Hospital/instances/049_Hospital_equi_instance.npy b/Hospital/instances/049_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..d6a0b5824de0d5b8109c82a59cee60f95241a206 --- /dev/null +++ b/Hospital/instances/049_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d9b8496155037aa19de558addec99d69b7604f9aa61e1268e249f25a673d4d6f +size 134189184 diff --git a/Hospital/instances/050_Hospital_equi_instance.npy b/Hospital/instances/050_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..3697f0951b52234395d0bacd042530437a48ad98 --- /dev/null +++ b/Hospital/instances/050_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6bcbb64da47ff685b8e453ddd5846aa70f557377d6c94b6e84a492820aae4178 +size 134189184 diff --git a/Hospital/instances/051_Hospital_equi_instance.npy b/Hospital/instances/051_Hospital_equi_instance.npy new file mode 100644 index 0000000000000000000000000000000000000000..94007d464d4f4c627bea009d61daca81daeed9df --- /dev/null +++ b/Hospital/instances/051_Hospital_equi_instance.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58962d7d3d1b94bc3917807d2a631ea033addcd23bae34e49445b4bbd52d822d +size 134189184 diff --git a/Hospital/semantics/000_Hospital_equi_semantic.npy 
b/Hospital/semantics/000_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..093fe7d78cd3dc7b2bc76b3831a210d4ea2ab660 --- /dev/null +++ b/Hospital/semantics/000_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f63267584cf36b7ed26a0cbaa6ec07c545654c0a38a2aa70ceacffe9d0b7974b +size 16773760 diff --git a/Hospital/semantics/001_Hospital_equi_semantic.npy b/Hospital/semantics/001_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..4d8788444eb50ab74f6ab3d3c1cef07910ada4d5 --- /dev/null +++ b/Hospital/semantics/001_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d208ca3c94aaaeb200ccc0777e66c4e26b3ea60acaad439a9e3336518abe5246 +size 16773760 diff --git a/Hospital/semantics/002_Hospital_equi_semantic.npy b/Hospital/semantics/002_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..e8675a9a196d4ba7a000fb271455d18f83d9474b --- /dev/null +++ b/Hospital/semantics/002_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd4e3fb6db34acd12a7fe42ac83d2bdba5ad58ceb2bb56b542dcb7047db7395d +size 16773760 diff --git a/Hospital/semantics/003_Hospital_equi_semantic.npy b/Hospital/semantics/003_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..06abd8ee139ce5a92dd78eb77ea6678a2483dd65 --- /dev/null +++ b/Hospital/semantics/003_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9874e4a8a2b80448c01d33592d5b7d5832b40e8f6f10917fcab648477df741c5 +size 16773760 diff --git a/Hospital/semantics/004_Hospital_equi_semantic.npy b/Hospital/semantics/004_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..4995ea2c903d301761638271277225d6dd855545 --- /dev/null +++ 
b/Hospital/semantics/004_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:66777bdaf2c47d82c16d17a2407128100d026040852772b54d975cd80d58760c +size 16773760 diff --git a/Hospital/semantics/005_Hospital_equi_semantic.npy b/Hospital/semantics/005_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..23b061d3d38604c74833decd8458690b26aeba21 --- /dev/null +++ b/Hospital/semantics/005_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:428012e63bf9835437607a60a31861314a159de7e26d338a6804e172114998a6 +size 16773760 diff --git a/Hospital/semantics/006_Hospital_equi_semantic.npy b/Hospital/semantics/006_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..370a9726acb8f329c8930667a7628be533510d7b --- /dev/null +++ b/Hospital/semantics/006_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19fa420b430c94d7abe829aeb19de1125fd07260a12ddc5faaacfa1b41983107 +size 16773760 diff --git a/Hospital/semantics/007_Hospital_equi_semantic.npy b/Hospital/semantics/007_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..0099a37748357123b2ff3aae7480ce1ed93cdc2e --- /dev/null +++ b/Hospital/semantics/007_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:81941460611cd2c468ce6f79c59ff21dd6183e98df20394e50930a41c0ffba21 +size 16773760 diff --git a/Hospital/semantics/008_Hospital_equi_semantic.npy b/Hospital/semantics/008_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..b98f179fad0bf81c30d0dbf46cf88d1bfada9324 --- /dev/null +++ b/Hospital/semantics/008_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:445b06ec00aa7a8402bc63c9960916e5f49493a0694fcef327dc49845048afe8 +size 16773760 diff 
--git a/Hospital/semantics/009_Hospital_equi_semantic.npy b/Hospital/semantics/009_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..eb152ecf7eef8cbffb3dc24146ed400788e4f475 --- /dev/null +++ b/Hospital/semantics/009_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03d423416053d5b6d89bcdf5e04bab5dbe48603cdb294784d964cc82ff799e41 +size 16773760 diff --git a/Hospital/semantics/010_Hospital_equi_semantic.npy b/Hospital/semantics/010_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..f32b6ac52f7f8c6dc7ae5bf358f33412aaa831a8 --- /dev/null +++ b/Hospital/semantics/010_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b943ce186230bf34c09e855b7cd66264e118fdd1a98687b77cd60ba4db6b7a71 +size 16773760 diff --git a/Hospital/semantics/011_Hospital_equi_semantic.npy b/Hospital/semantics/011_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..7a9c6b1be7fed4898f2a6ec1042304b5e8742911 --- /dev/null +++ b/Hospital/semantics/011_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf08c5968c2f01224b49ec4ed7f832f7cd6d8a349f97730f8edac77ccebec984 +size 16773760 diff --git a/Hospital/semantics/012_Hospital_equi_semantic.npy b/Hospital/semantics/012_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..01c678161487313a89f1a27c09c55e2bfa452b03 --- /dev/null +++ b/Hospital/semantics/012_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76dbc9326ee518a152b790ffbfa5b57168d4433db2e481ce2aa2d86c156fe118 +size 16773760 diff --git a/Hospital/semantics/013_Hospital_equi_semantic.npy b/Hospital/semantics/013_Hospital_equi_semantic.npy new file mode 100644 index 
0000000000000000000000000000000000000000..2b1f3e173fafeab24000bbe25260f5faae6d4dda --- /dev/null +++ b/Hospital/semantics/013_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:83559bfb4918daec7daa468952e50e7653e62de9b66f9511204974e46e5348a4 +size 16773760 diff --git a/Hospital/semantics/014_Hospital_equi_semantic.npy b/Hospital/semantics/014_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..a042d18da52fe753ad9eb5ea2c594d1fb66277f5 --- /dev/null +++ b/Hospital/semantics/014_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58b35fce2eb8275933f341b907d1d6dac67959d9c99d08fdf36f78281cbab643 +size 16773760 diff --git a/Hospital/semantics/015_Hospital_equi_semantic.npy b/Hospital/semantics/015_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..cb70d64a42feb22891ccab2d567f74a3cd0a0f9c --- /dev/null +++ b/Hospital/semantics/015_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c69603b5a66b30cb831fbb9bfe56dc80a08bfdde176501f9f0e3d1fbf7546023 +size 16773760 diff --git a/Hospital/semantics/016_Hospital_equi_semantic.npy b/Hospital/semantics/016_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..5a54e64cec251a622836534f23d589794fc3cd9a --- /dev/null +++ b/Hospital/semantics/016_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:519777fa732a6fe7dff918c4332f57470059774a19d2c3a8107fc443f9f984f1 +size 16773760 diff --git a/Hospital/semantics/017_Hospital_equi_semantic.npy b/Hospital/semantics/017_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..0c0c75bbec2ad6e6e1c069d1167576f122bf9f80 --- /dev/null +++ b/Hospital/semantics/017_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:01770f1f38d83abb313939363371dbe1112d6dfb139b3c50def2c197fcbe1f69 +size 16773760 diff --git a/Hospital/semantics/018_Hospital_equi_semantic.npy b/Hospital/semantics/018_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..1889e1ddb3a280207fd0011d26748269b237f982 --- /dev/null +++ b/Hospital/semantics/018_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:faf081ebfc5041a83c1650a2346c1ee32dc940fd38926a9427a519d4ea0b67ec +size 16773760 diff --git a/Hospital/semantics/019_Hospital_equi_semantic.npy b/Hospital/semantics/019_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..9c0ddcf58ea6c427a0a9141c05dcabdfd98f9248 --- /dev/null +++ b/Hospital/semantics/019_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b178eb4a736dcfb91e1a62657ef437caa20e76acb79fa866b455dcb514f21ca0 +size 16773760 diff --git a/Hospital/semantics/020_Hospital_equi_semantic.npy b/Hospital/semantics/020_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..6d586bbec9b3c219c168c2a6940bd0c661133c36 --- /dev/null +++ b/Hospital/semantics/020_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f8684dca0b71ae947c66e434724c17102a46b83a88155098a9f8e1fe98c4c08 +size 16773760 diff --git a/Hospital/semantics/021_Hospital_equi_semantic.npy b/Hospital/semantics/021_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..155f02e30e755d2456d1b8bce6aa14c4a2f526a1 --- /dev/null +++ b/Hospital/semantics/021_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a86d7f53b0d3abe0b601d67f36680ba2698555d744a8437e5f5b3ab38abaf5d4 +size 16773760 diff --git a/Hospital/semantics/022_Hospital_equi_semantic.npy 
b/Hospital/semantics/022_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..3a0cb58d6f5e0b3569843cb39469267907b24771 --- /dev/null +++ b/Hospital/semantics/022_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7c1f32e397fbaa820bedda6dd715f2420fc20ad9e089aaf1013d05cdd16ed78 +size 16773760 diff --git a/Hospital/semantics/023_Hospital_equi_semantic.npy b/Hospital/semantics/023_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..d92ae4aa0d3538e0c99af8c0a10ec75ee2db44a5 --- /dev/null +++ b/Hospital/semantics/023_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:02ebaf16afb450426776ff2ce4adce1bed353c9057551514a5fa90726ad12fc0 +size 16773760 diff --git a/Hospital/semantics/024_Hospital_equi_semantic.npy b/Hospital/semantics/024_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..f433cdedf48133cbd9885d35fd533eb574fb8407 --- /dev/null +++ b/Hospital/semantics/024_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6390ddcc19f1c521d1edfc78c1f18bda1a33c866657045f18ef488f73625c14 +size 16773760 diff --git a/Hospital/semantics/025_Hospital_equi_semantic.npy b/Hospital/semantics/025_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..c1c77e919f3be46e414ce1a50204db522ec33a7d --- /dev/null +++ b/Hospital/semantics/025_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2206314ea9941139a14f118ab60c1cd6a1dda6be604778575d775d3b2a6212fa +size 16773760 diff --git a/Hospital/semantics/026_Hospital_equi_semantic.npy b/Hospital/semantics/026_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..d72f1794748c07d42dbc4a0b71431178e726513d --- /dev/null +++ 
b/Hospital/semantics/026_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4b6ab69e70e5e61849adf972cee3ea65ed734b643aa5962a38175e390a45c231 +size 16773760 diff --git a/Hospital/semantics/027_Hospital_equi_semantic.npy b/Hospital/semantics/027_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..991dfdef71eea96ac3cdbd8813df102bd46a7e77 --- /dev/null +++ b/Hospital/semantics/027_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d2c326126d6a7e37d8ec2e81e4328e64d2ef65973e2bdde0792bb789fbacbf04 +size 16773760 diff --git a/Hospital/semantics/028_Hospital_equi_semantic.npy b/Hospital/semantics/028_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..d6cc6ce54e62d10ceef623b95fe8554020e36303 --- /dev/null +++ b/Hospital/semantics/028_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93a304b234caffd36721ac7797c307618ba5b6f75d7f9adbd5e531bd3f98f3a7 +size 16773760 diff --git a/Hospital/semantics/029_Hospital_equi_semantic.npy b/Hospital/semantics/029_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..61ef71a82a455b40debc4209f5e8ec485b9c65ef --- /dev/null +++ b/Hospital/semantics/029_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eccfdec596707e6466751fcc781a6cfd222e2cbb1409e72775f0d7831825a375 +size 16773760 diff --git a/Hospital/semantics/030_Hospital_equi_semantic.npy b/Hospital/semantics/030_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..b955332e13df8d230a206079686ce1e15ecc3fdb --- /dev/null +++ b/Hospital/semantics/030_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1f1f487afb6d6f1386cf79a1333d6d71f2d08a373e6fcb3a22f9421d9620380b +size 16773760 diff 
--git a/Hospital/semantics/031_Hospital_equi_semantic.npy b/Hospital/semantics/031_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..aacc92e872c65e476f721421105448b7e69ec486 --- /dev/null +++ b/Hospital/semantics/031_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a8632b5f714a8ac2c4d8c7bb08539a740345786e65599a20f30d17bb7295cefd +size 16773760 diff --git a/Hospital/semantics/032_Hospital_equi_semantic.npy b/Hospital/semantics/032_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..68ee8435520eaff84a3471e13e1bf85ce3e843bd --- /dev/null +++ b/Hospital/semantics/032_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21801950cadecdd2331ac41699bfd3febfe606cdd136f7cb739487686e3a5c5c +size 16773760 diff --git a/Hospital/semantics/033_Hospital_equi_semantic.npy b/Hospital/semantics/033_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..bf658bc0b538061b542c438acf3328df2baf0588 --- /dev/null +++ b/Hospital/semantics/033_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec7de8f5634409b703e3163ab65e5e2ec762a009dd9d649ad3f22c4af1b6239d +size 16773760 diff --git a/Hospital/semantics/034_Hospital_equi_semantic.npy b/Hospital/semantics/034_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..7b8b4b06bb25e58912cdaa225f0439a0e3b5e609 --- /dev/null +++ b/Hospital/semantics/034_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6f8a29c2e1a8fa58de65eed739e7a30977170a49d7bd9731134c6be8b902e1d1 +size 16773760 diff --git a/Hospital/semantics/035_Hospital_equi_semantic.npy b/Hospital/semantics/035_Hospital_equi_semantic.npy new file mode 100644 index 
0000000000000000000000000000000000000000..3f880241c3e8c286267d4c01cfadde2d9afae717 --- /dev/null +++ b/Hospital/semantics/035_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ba902b91accfcca96c729003ad30120bc3eee158ab248a15be86fec68592670e +size 16773760 diff --git a/Hospital/semantics/036_Hospital_equi_semantic.npy b/Hospital/semantics/036_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..95866afdd7dc81a2d30683bc78592c7a6f573b74 --- /dev/null +++ b/Hospital/semantics/036_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8eacbd8194358fee7f116f62dd135fd52057c54e2b1ed43f350eeeac457c661b +size 16773760 diff --git a/Hospital/semantics/037_Hospital_equi_semantic.npy b/Hospital/semantics/037_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..02fe03786b50db70c617d362b1c68a84677d4751 --- /dev/null +++ b/Hospital/semantics/037_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:52bad804e77b4d5e9e22ab90e5fff5ea67b5b54f162863250c9df7ff3a977440 +size 16773760 diff --git a/Hospital/semantics/038_Hospital_equi_semantic.npy b/Hospital/semantics/038_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..5c3bef4957672e6bfa90cfc444ec5f33c375512d --- /dev/null +++ b/Hospital/semantics/038_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:999af4f003c63d067a1c21b4d4ee33987a4bf5c8ff3c5fbb446935b52fb07906 +size 16773760 diff --git a/Hospital/semantics/039_Hospital_equi_semantic.npy b/Hospital/semantics/039_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..eacce145b2d7ee72c3ea595e2ed711b6e227731a --- /dev/null +++ b/Hospital/semantics/039_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:1bd26dd926433dd277d42fec856c3ad0072e360f5448a6baf39735f5bbacec14 +size 16773760 diff --git a/Hospital/semantics/040_Hospital_equi_semantic.npy b/Hospital/semantics/040_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..f943f399b7c9398e1cc589cba17e85381c18042e --- /dev/null +++ b/Hospital/semantics/040_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:952a4bf35f435d752b0005f9364886cf48b4311deeebe0bbbb65f68ff6db7732 +size 16773760 diff --git a/Hospital/semantics/041_Hospital_equi_semantic.npy b/Hospital/semantics/041_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..846041cfb9f67d03877e92b37850b67ae0bebad3 --- /dev/null +++ b/Hospital/semantics/041_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:caaf96bb96e29be030a83833d7f7b908a9c3e08c9417c46c0c21be49501214d8 +size 16773760 diff --git a/Hospital/semantics/042_Hospital_equi_semantic.npy b/Hospital/semantics/042_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..d4fd5335467c1f1cd5cfa5f6a2c2cbc0f7ad0e40 --- /dev/null +++ b/Hospital/semantics/042_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:198c27029bd83e87c4a903675478a65b102d7e986c996d29a2130272c591cb36 +size 16773760 diff --git a/Hospital/semantics/043_Hospital_equi_semantic.npy b/Hospital/semantics/043_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..9e913fdd3cd2a0563e8227b43701e84faf1ec35f --- /dev/null +++ b/Hospital/semantics/043_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b465fdf7589674e4bcd422933a8797ea1df7f53b46349e11e3ed98890777cc46 +size 16773760 diff --git a/Hospital/semantics/044_Hospital_equi_semantic.npy 
b/Hospital/semantics/044_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..594937724fea2cdc3daf9b70126c11acf4c9778a --- /dev/null +++ b/Hospital/semantics/044_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d57d8bb07fd6801b6d21ac734d3ca941c3f83055d8be0833c2ae5a7f2fd82979 +size 16773760 diff --git a/Hospital/semantics/045_Hospital_equi_semantic.npy b/Hospital/semantics/045_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..e5d4cf34cd91a1975a2ec24ae33b88898c4a8126 --- /dev/null +++ b/Hospital/semantics/045_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:929e7019d398f67e8e884425028ead1f764ff4490e7e6951e5f09ea6d59781cf +size 16773760 diff --git a/Hospital/semantics/046_Hospital_equi_semantic.npy b/Hospital/semantics/046_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..47609da9fbad7011dffdff534b4c0fd2600d0fec --- /dev/null +++ b/Hospital/semantics/046_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4443cabcd5d3b7e739c655256e63a5916e10b394e71a69b635231c5cc2a79312 +size 16773760 diff --git a/Hospital/semantics/047_Hospital_equi_semantic.npy b/Hospital/semantics/047_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..c84be5c20e859648b8ffe620a740203a5d2c6aca --- /dev/null +++ b/Hospital/semantics/047_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:88c540df9d14d6658240a571a0dbb9ad2e6153b82d7cc4ea81806313ec6b796c +size 16773760 diff --git a/Hospital/semantics/048_Hospital_equi_semantic.npy b/Hospital/semantics/048_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..23f068f630ffb20972e3c80e544911747356b0f8 --- /dev/null +++ 
b/Hospital/semantics/048_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1b4f7c3dd92ef34bbbca26c1e541ae1459bf77a5575bae71175ed05941b44f7c +size 16773760 diff --git a/Hospital/semantics/049_Hospital_equi_semantic.npy b/Hospital/semantics/049_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..65dc41d4c55a117520d139d04f66e06c98eb9f5f --- /dev/null +++ b/Hospital/semantics/049_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fbc4f88830165210bf2c094033468b859a6ea18ce053bfbd80f62fae090ecf6a +size 16773760 diff --git a/Hospital/semantics/050_Hospital_equi_semantic.npy b/Hospital/semantics/050_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..e8e83922875c50f77dcd449b175d56750cd4deb7 --- /dev/null +++ b/Hospital/semantics/050_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f1999bbd8b3fff0d852e43dd7cf7c73dddf20ef0e898fbabb13c8f30beedd84 +size 16773760 diff --git a/Hospital/semantics/051_Hospital_equi_semantic.npy b/Hospital/semantics/051_Hospital_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..4e4e7a06827772722517fe3457c65ce71c18c5f0 --- /dev/null +++ b/Hospital/semantics/051_Hospital_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5e3b576edb49842c8f940c683c9a342d694c947097060184adfaf23b64f9da39 +size 16773760 diff --git a/Office_Room_1/semantics/000_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/000_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..c45ae3f5fd6e0f56c5c374cf3145540b76099c9a --- /dev/null +++ b/Office_Room_1/semantics/000_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:1b23e9f2e6abdbd3532a1f71ab5767eb7159ea8be085ca7cf3d3c38b619e2f60 +size 16773760 diff --git a/Office_Room_1/semantics/001_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/001_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..9ba5478981d68bc7ec33f4ea0921fbc853438e8e --- /dev/null +++ b/Office_Room_1/semantics/001_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a55ff4dd1efcd81216ca4ad7cc78a772a1abf38f1904564161b738b220e1a4 +size 16773760 diff --git a/Office_Room_1/semantics/002_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/002_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..02a00701bc3a32d22272ee1d421c998436028328 --- /dev/null +++ b/Office_Room_1/semantics/002_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b461999ba9e587b428dac9077a65e5da517db6393f51603a86bd725453e97983 +size 16773760 diff --git a/Office_Room_1/semantics/003_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/003_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..39beeb8e7fec004ae9c1de3d7c495e53835c9210 --- /dev/null +++ b/Office_Room_1/semantics/003_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c17f6beb578521c9ac81a2af4549ec96bc82944cfdcaec5cde9b762ba7283f9 +size 16773760 diff --git a/Office_Room_1/semantics/004_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/004_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..945daebf95ce07d55b06ea4dd80c2a9dae83719d --- /dev/null +++ b/Office_Room_1/semantics/004_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0c2b884df58b8b91b11119dd9d8e4cf6a34aae3e4236ca2d3758f4ce02239daf +size 16773760 diff 
--git a/Office_Room_1/semantics/005_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/005_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..05979eba475ad3dd9c3f571b19097aaee78d9e49 --- /dev/null +++ b/Office_Room_1/semantics/005_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bcdea82e37c6d67fa2635ec2c220a7d1caf83f144aa01053843799ae4c3e5aea +size 16773760 diff --git a/Office_Room_1/semantics/006_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/006_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..2a2a68b7f017da3b06ba451ebfe8f92b1a4b06c8 --- /dev/null +++ b/Office_Room_1/semantics/006_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e5d2a32daa1a79c7f749ef4d4ce600b2a82089e97ad95b00ee9185f8f4b48e8 +size 16773760 diff --git a/Office_Room_1/semantics/007_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/007_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..bfd20aadc35d6479aa940c65073e2cdacd962b7d --- /dev/null +++ b/Office_Room_1/semantics/007_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f6d5558c5de6723f2626fd124d3112d617807739b1ec26f6f6c247156f4a250c +size 16773760 diff --git a/Office_Room_1/semantics/008_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/008_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..f8e7cdbf9caa44b6b756c6fb7bfc35abb2f7f160 --- /dev/null +++ b/Office_Room_1/semantics/008_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:181463e140b930630c33974fa9f34d2c61479c0b2f81c5356fb9c679ce96270e +size 16773760 diff --git a/Office_Room_1/semantics/009_Office_Room_1_equi_semantic.npy 
b/Office_Room_1/semantics/009_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..8ea8c248ace184f67b0fa3ad3d45cf6bce8b79c9 --- /dev/null +++ b/Office_Room_1/semantics/009_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:868e5e5bfa6e2203cd23e7ebc5cd4054187e9d2118a623cfe12b5dc8fb3fe5f6 +size 16773760 diff --git a/Office_Room_1/semantics/010_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/010_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..ef9f895886746f9d6fc36779de2be82a08bd345b --- /dev/null +++ b/Office_Room_1/semantics/010_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5deda51179a5bd9fcad6d8acb81755c4bf6c5b4031d41eabde8a17a9f2a70dec +size 16773760 diff --git a/Office_Room_1/semantics/011_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/011_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..7b0459770cc0fa2c37962e6f836dfd0f304349df --- /dev/null +++ b/Office_Room_1/semantics/011_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92c3ece48b7a37094ecc5f15a122f14c8636080a1e62556f1a31239e9e6ef975 +size 16773760 diff --git a/Office_Room_1/semantics/012_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/012_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..145b084305d8a3f8bb1934ced6e9a1126b1b9080 --- /dev/null +++ b/Office_Room_1/semantics/012_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:237aabfc44b879b98f9726e18dfaf2a9fb4ab1096f62ef7d17e11f3784a2349a +size 16773760 diff --git a/Office_Room_1/semantics/013_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/013_Office_Room_1_equi_semantic.npy new file mode 100644 index 
0000000000000000000000000000000000000000..9c2df2139a0fe787530f481a1ad2db13f1a63f23 --- /dev/null +++ b/Office_Room_1/semantics/013_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b729a7f5843dbf026ba740b0de936295a1452fae84b2ebc5d551ca098f8a9f2c +size 16773760 diff --git a/Office_Room_1/semantics/014_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/014_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..39e19f6595d0bbbc6ef04b1a84ea60d938a1c189 --- /dev/null +++ b/Office_Room_1/semantics/014_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:741927568d69c2c124a59594a246c320772a24a57c6cbed965faa3eb892e0fd5 +size 16773760 diff --git a/Office_Room_1/semantics/015_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/015_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..93375b1680fdbf0f37b6002d0769f267595a228f --- /dev/null +++ b/Office_Room_1/semantics/015_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:064c23715394ea42b6680a8277952a49e67e39076c74b4b8107c582987e57e82 +size 16773760 diff --git a/Office_Room_1/semantics/016_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/016_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..581bf23ab0bae031721628a7955bbef27520c899 --- /dev/null +++ b/Office_Room_1/semantics/016_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:97fe7f5f2bef3f9733a9df6d53f4763094b5c022ad9fb366680761b88fbc3b57 +size 16773760 diff --git a/Office_Room_1/semantics/017_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/017_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..13d294e6867eb18cf1034ed862c1fe509d9cc9e3 --- 
/dev/null +++ b/Office_Room_1/semantics/017_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2275a3ccc41e34c51621e729dfec221e667dc7dfa3f3d4824771611890324d91 +size 16773760 diff --git a/Office_Room_1/semantics/018_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/018_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..30f1e4e88bc5ad71388d0f17d63b048c524fffe4 --- /dev/null +++ b/Office_Room_1/semantics/018_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e10b66205efa44648ebcc5ce30743a4748295abcb65c11f11322bbbd206a3feb +size 16773760 diff --git a/Office_Room_1/semantics/019_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/019_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..243097888a413af7c8d20f44877f96bb138ad84e --- /dev/null +++ b/Office_Room_1/semantics/019_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed97bb0f2f0566e191d6253262e5b7b9530c1a3671dacdc698f0c39d7e3a5bde +size 16773760 diff --git a/Office_Room_1/semantics/020_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/020_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..c3c6b734d0a440cd7303e6ec2daab8202dfb1373 --- /dev/null +++ b/Office_Room_1/semantics/020_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ed690076c97cd87e4209499279dc2b06d9b85ce64ff960e0158128d32e3459f +size 16773760 diff --git a/Office_Room_1/semantics/021_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/021_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..4822812cee3a7f8958e074e8f7a259f687183e73 --- /dev/null +++ b/Office_Room_1/semantics/021_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ 
+version https://git-lfs.github.com/spec/v1 +oid sha256:a025815834ed7e8dd94f2344881128798f7f8d948359cb8b3d74e528edefcd2a +size 16773760 diff --git a/Office_Room_1/semantics/022_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/022_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..cf61061511d27825274c89c12928ba00e06adb05 --- /dev/null +++ b/Office_Room_1/semantics/022_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f81a0f47534fbc40ba375cb0e58dffbc8cef3f972b125b0e568b0d1313e5002c +size 16773760 diff --git a/Office_Room_1/semantics/023_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/023_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..0e56a8df8cff13d66b443aee7597e51a3053aa0d --- /dev/null +++ b/Office_Room_1/semantics/023_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dbaa99d0a55f6f7898d6cbd97043bded6dfa23e4626d028c2c9fd8e1869391d0 +size 16773760 diff --git a/Office_Room_1/semantics/024_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/024_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..48182f1d083356420e108c319a2ac099b9aa5be2 --- /dev/null +++ b/Office_Room_1/semantics/024_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5720b89ac5366016530bc898bf0453b284493950bf618f8d5981fdf1ee9c085b +size 16773760 diff --git a/Office_Room_1/semantics/025_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/025_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..ea198bf427c15cd7d61be7272af189e840b553a9 --- /dev/null +++ b/Office_Room_1/semantics/025_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:cf07c5ac7405352f3f3a79f14ff945f6863285ccc7ab2159ff384faf4cf72223 +size 16773760 diff --git a/Office_Room_1/semantics/026_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/026_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..1fcef92befec87a8dc945f1e80d0edd978c09457 --- /dev/null +++ b/Office_Room_1/semantics/026_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:89209314219b6a0154d516170523341846162515366a6115afbb4d55a39a7808 +size 16773760 diff --git a/Office_Room_1/semantics/027_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/027_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..3aba26161abddb500ff0839f99a7231bd71ed400 --- /dev/null +++ b/Office_Room_1/semantics/027_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:38b0e3c971aebb157465b8146d58cd69595b3f0f4b21d2b041f388b45d19b3d9 +size 16773760 diff --git a/Office_Room_1/semantics/028_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/028_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..0235624d3f4a4791171e6622795028a86dbe89e8 --- /dev/null +++ b/Office_Room_1/semantics/028_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1ccea0a3bcb1d812e1c68ea97aa07ec5acb4f60d412ee9431402c9523673f88f +size 16773760 diff --git a/Office_Room_1/semantics/029_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/029_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..580c4b33f805cbd370bff734b088e150eaaf2537 --- /dev/null +++ b/Office_Room_1/semantics/029_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed6652810ca206b90915606622cf2aa5991a586c55bd35875db90f06c9d9e971 +size 16773760 diff 
--git a/Office_Room_1/semantics/030_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/030_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..4f02843ca39ef74dc086d2abaf042cc857915bef --- /dev/null +++ b/Office_Room_1/semantics/030_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84ec0ecf00c805c596627d66b42970183792c0815a5b9bd52972c949231e611a +size 16773760 diff --git a/Office_Room_1/semantics/031_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/031_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..f7c71925ce38d8ba3c9204f0073f9435aeb74c7e --- /dev/null +++ b/Office_Room_1/semantics/031_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e369db00dc275188bc2511bd194f1720626960d293740989b683547b4f972d7 +size 16773760 diff --git a/Office_Room_1/semantics/032_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/032_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..fcaf59995f8ee3cb7dc46fc359066be4d03ce7b2 --- /dev/null +++ b/Office_Room_1/semantics/032_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0801347b14a01e70c41bc2095d36be2ff2258efc6b35d68f078fe538c53d0d5f +size 16773760 diff --git a/Office_Room_1/semantics/033_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/033_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..577f13d8e425637e4cf8735b7ec1ada6960347bf --- /dev/null +++ b/Office_Room_1/semantics/033_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f78727db925ed873ebc61beb72f06c5f589466f3479f5cf03a6132280f67d333 +size 16773760 diff --git a/Office_Room_1/semantics/034_Office_Room_1_equi_semantic.npy 
b/Office_Room_1/semantics/034_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..b676e3416a687da0d7fd8d558441a5097f4cb494 --- /dev/null +++ b/Office_Room_1/semantics/034_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5facafcaefa2fdbb41647776ca18244aa46a12bdba92593cc7cbca849b4adb26 +size 16773760 diff --git a/Office_Room_1/semantics/035_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/035_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..b526909fac2ceb658c9dba928d9e20b93b7b02f7 --- /dev/null +++ b/Office_Room_1/semantics/035_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8203e70a810e0eab44661a88fd6c4f508518d45aa5f9a617a928aa4929481d90 +size 16773760 diff --git a/Office_Room_1/semantics/036_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/036_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..eac8083a1755786c3e62fea0a72d1ff88a83d4fe --- /dev/null +++ b/Office_Room_1/semantics/036_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:88698383350eac913a5f1a181d66336326fc069d1a019f352e7700c70f63c135 +size 16773760 diff --git a/Office_Room_1/semantics/037_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/037_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..7698f911e7bd6afffa0269bf4282cf85eb1f84e1 --- /dev/null +++ b/Office_Room_1/semantics/037_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abdf0ac05a9feebaa649deb0feda7717eeb5a7ad7fe18bb47ad5ef0144445cff +size 16773760 diff --git a/Office_Room_1/semantics/038_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/038_Office_Room_1_equi_semantic.npy new file mode 100644 index 
0000000000000000000000000000000000000000..d915e8002c53a1104bd140d0c29f73c4f4f816c0 --- /dev/null +++ b/Office_Room_1/semantics/038_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0bc75af2771d6e9badaca01dde941f61c5c974a121786b48886d32827a998f53 +size 16773760 diff --git a/Office_Room_1/semantics/039_Office_Room_1_equi_semantic.npy b/Office_Room_1/semantics/039_Office_Room_1_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..0b9a2eae5d227e2371e692ad8423649c355f8638 --- /dev/null +++ b/Office_Room_1/semantics/039_Office_Room_1_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:35d52476102270aa5a8696eda350c2cde43db4271f0761945679ecd500adf865 +size 16773760 diff --git a/Office_Room_2/semantics/000_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/000_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..b873321758fdcd5c306181b1447387cbe55bbe2a --- /dev/null +++ b/Office_Room_2/semantics/000_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04413541b05c077cd210a9ec26e9796cdc9547f8c8e80486f03933db7dfc171f +size 16773760 diff --git a/Office_Room_2/semantics/001_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/001_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..14e021f1a460a79482d70b5a5de797f74ee8b02b --- /dev/null +++ b/Office_Room_2/semantics/001_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0b83d837e49e302446bf2104c2728cdc882efeaa28515d9f4769f8078b488e1 +size 16773760 diff --git a/Office_Room_2/semantics/002_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/002_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..7a19d2d6ca8674f8aaaa4143b4f5da82cfd1f77b --- 
/dev/null +++ b/Office_Room_2/semantics/002_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4873e172b596000019bfbe18b8a45fa85e4f7243b45f5159da45c57e38bf6585 +size 16773760 diff --git a/Office_Room_2/semantics/003_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/003_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..17d02c6319b900ce6c55d974e4e5c79dbcc4b6bf --- /dev/null +++ b/Office_Room_2/semantics/003_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e2cdf8b8c765910b65e7374bff781c25d763c6102caef7b1a7db06bc5306b10 +size 16773760 diff --git a/Office_Room_2/semantics/004_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/004_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..5e178233f0c5ac7a07b4260fa37dfe812c3cf3c7 --- /dev/null +++ b/Office_Room_2/semantics/004_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d5d25796e4007d91d3dcdd7b77abec32c77b4bb7c976da8a813b3c731219a25d +size 16773760 diff --git a/Office_Room_2/semantics/005_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/005_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..78bcd57c3cc69c1411d26c08728461863b634466 --- /dev/null +++ b/Office_Room_2/semantics/005_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1987ab0775bc6c3e973d4f03ce4ed2b5433667e42d3b360cf6f5bed4f214ea46 +size 16773760 diff --git a/Office_Room_2/semantics/006_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/006_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..e774277ebd6420d116526a1dba5afa11a5629ece --- /dev/null +++ b/Office_Room_2/semantics/006_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ 
+version https://git-lfs.github.com/spec/v1 +oid sha256:dbbe90e9171156abab8a0547e31113746901dfc1bce34273f663123e677b729c +size 16773760 diff --git a/Office_Room_2/semantics/007_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/007_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..2ea778aad13e29e75bd66e27ee7e1002d230c675 --- /dev/null +++ b/Office_Room_2/semantics/007_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ab3a8c4d28bdf72f7ed494bcb43fb9a18cc6bc75d5d59b69b08e72718091ad90 +size 16773760 diff --git a/Office_Room_2/semantics/008_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/008_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..8144220bfbe74bffda83d85646b6ba3ee2a5ae11 --- /dev/null +++ b/Office_Room_2/semantics/008_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a724705f6c03b2ed67035aed958f3fd7e6310e10731cd809506db5920607ae91 +size 16773760 diff --git a/Office_Room_2/semantics/009_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/009_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..3b5b3fda2658b8783ead6bfeb890d7106cdf133d --- /dev/null +++ b/Office_Room_2/semantics/009_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:194f383d20bd78d973b695b52d8019bf985f5e14433d9e4d074568dbc2094de5 +size 16773760 diff --git a/Office_Room_2/semantics/010_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/010_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..243bfa291ef83c7e53e01d3855d77984e5274fb9 --- /dev/null +++ b/Office_Room_2/semantics/010_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:56dfc60a12968c9de9daeea552f9d3c328fc447c8dd281dbabe975b45fa009c9 +size 16773760 diff --git a/Office_Room_2/semantics/011_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/011_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..4de7e7571f7acde75ae61b7c8dc0bed0d19dc42c --- /dev/null +++ b/Office_Room_2/semantics/011_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ca39cb3a26965e5c37a669cc8e4488448c18f46de3ab9f5ab54e7975b746564 +size 16773760 diff --git a/Office_Room_2/semantics/012_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/012_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..8a93a32388028305f6e5208b0d0ce188fdd57951 --- /dev/null +++ b/Office_Room_2/semantics/012_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:11c8bafdaa5161db1cfb51947fecb02785f91dfc2120d89e95c179886b7f7680 +size 16773760 diff --git a/Office_Room_2/semantics/013_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/013_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..a679e0cc787872cd31f6435bce15dc0251de1a4e --- /dev/null +++ b/Office_Room_2/semantics/013_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de12161793249ce8e7e239ae6a4b7c4405bbf856a9d0d5849cbbe363d401fc9d +size 16773760 diff --git a/Office_Room_2/semantics/014_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/014_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..6b8a8b3396d665732371f946b22f74cc5f7adebf --- /dev/null +++ b/Office_Room_2/semantics/014_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:575854edb3955e586f83f1cf271ce4dbc523dc6ec2c2f19a676caf612f6c0514 +size 16773760 diff 
--git a/Office_Room_2/semantics/015_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/015_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..ad884c7be9725f087772b9334b5d7b999c930221 --- /dev/null +++ b/Office_Room_2/semantics/015_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:441dfad49a3b91ad974f8de0b6d1dfa26bf705aaa9a74fcedf50670b84ca7c26 +size 16773760 diff --git a/Office_Room_2/semantics/016_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/016_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..2139c8c3134b853f50c6f652a9514a64618c0ce5 --- /dev/null +++ b/Office_Room_2/semantics/016_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:084edfdefad952e92a0482df9db508c1f0f73052a6a61fe90e84242070669042 +size 16773760 diff --git a/Office_Room_2/semantics/017_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/017_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..0d41c8a350a87e67de259769d921b724ada79ab4 --- /dev/null +++ b/Office_Room_2/semantics/017_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a5f83049870d5ef593db9064388b2c85815ded4d2adbf1daf203ccf5866cc830 +size 16773760 diff --git a/Office_Room_2/semantics/018_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/018_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..46c8a4710f1984e6ed2bb42ed6efadcdf1a0916c --- /dev/null +++ b/Office_Room_2/semantics/018_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:322272bfd62a37ba881ac9eb92903ed0da6be2f74bde0c408e23d1f866c3f404 +size 16773760 diff --git a/Office_Room_2/semantics/019_Office_Room_2_equi_semantic.npy 
b/Office_Room_2/semantics/019_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..0abd01188df21178bfaca6ee5ff79c996cacc860 --- /dev/null +++ b/Office_Room_2/semantics/019_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:24c2aca1fb1b2d13b6b17170bd45f9c2469d8b93e6409cc5414448c2b5437ba6 +size 16773760 diff --git a/Office_Room_2/semantics/020_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/020_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..2946450394f53a6a114f200a5c48052f179d5496 --- /dev/null +++ b/Office_Room_2/semantics/020_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76012d03454dbe54f5df7d6b36efbb9863490b4f349127ee750a5cea94d44779 +size 16773760 diff --git a/Office_Room_2/semantics/021_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/021_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..771d550b9d5835c4ea3148f3e6f078810d661e4c --- /dev/null +++ b/Office_Room_2/semantics/021_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1fa0f26ee3b1252a22a44fa8727974907118268ea55627be52f8edc85e2cfc3c +size 16773760 diff --git a/Office_Room_2/semantics/022_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/022_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..4162fcb611e117425edfeefdb78184872abd7c9f --- /dev/null +++ b/Office_Room_2/semantics/022_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c5d23ea50e11a5460922bd8d232c17a9c171c76d3a74e801e50d93bd21f9727 +size 16773760 diff --git a/Office_Room_2/semantics/023_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/023_Office_Room_2_equi_semantic.npy new file mode 100644 index 
0000000000000000000000000000000000000000..85f25e75242d34a45f9eb567e336e1e0329d9965 --- /dev/null +++ b/Office_Room_2/semantics/023_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9f76101a80d72621161c42c8ca08a65d4268bc2a71ddb740dc0473b642d589c4 +size 16773760 diff --git a/Office_Room_2/semantics/024_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/024_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..1679e3fd4ce5f1e205b1673492233bed5053e980 --- /dev/null +++ b/Office_Room_2/semantics/024_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b79f4e24b2b697bc6d325be7f7fc60949a31e29ccf8c07099f3dac707483deea +size 16773760 diff --git a/Office_Room_2/semantics/025_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/025_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..9ee00b4fae1a06d5971ba2efffb18303fd83e0ce --- /dev/null +++ b/Office_Room_2/semantics/025_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d5e33660373d257e4a7bc94ccecafec71f7fd5622c0986e59f3ce0998f2bc465 +size 16773760 diff --git a/Office_Room_2/semantics/026_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/026_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..dc3914e943d83bacfce1010e407b2dc766883a11 --- /dev/null +++ b/Office_Room_2/semantics/026_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0ffa9fcb58246151e1a64bfaadff0791d900630dfa8ea2d45a8db0f24821c83a +size 16773760 diff --git a/Office_Room_2/semantics/027_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/027_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..62f00474ead18e93822f2e40a2b434b912940d55 --- 
/dev/null +++ b/Office_Room_2/semantics/027_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d09c224a2147a084b1c3e58542f3cb82750c7c45b432887b0cfcc0620a4ad129 +size 16773760 diff --git a/Office_Room_2/semantics/028_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/028_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..f9029d5232ea2097ded6a4f216579b6e214826f7 --- /dev/null +++ b/Office_Room_2/semantics/028_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f9001b4d7434813788808227458c40468e5433ec8f70403d075fe3be40e6c5c +size 16773760 diff --git a/Office_Room_2/semantics/029_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/029_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..b1b6118ed461359d15be4ffed5b64555b72783bd --- /dev/null +++ b/Office_Room_2/semantics/029_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:339d31771a28e937ebd3f7fdd4c0ff7e83600d0f1db35b5a2d5bb0fe70abb2f4 +size 16773760 diff --git a/Office_Room_2/semantics/030_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/030_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..c8af975bea28082637147ade550d250be3078f46 --- /dev/null +++ b/Office_Room_2/semantics/030_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f6abc5848d4946401f98cdb968eb4240c876250df773befcd6e74f3ebcc3f55 +size 16773760 diff --git a/Office_Room_2/semantics/031_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/031_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..90fa1f3e68b8fdace3b1d878dbe0eb340375de51 --- /dev/null +++ b/Office_Room_2/semantics/031_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ 
+version https://git-lfs.github.com/spec/v1 +oid sha256:1d4d6b419f664e074661f85a5fb0f1ac03a11d48a1141dbf78138ce4c3049539 +size 16773760 diff --git a/Office_Room_2/semantics/032_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/032_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..3f3356dc328d0a12cc26e52d857079785c1e54bf --- /dev/null +++ b/Office_Room_2/semantics/032_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:612bcd9e22601b5d0cbe2b52d8027e7c2e9666d4da1b54d91a778dc4d0c6320c +size 16773760 diff --git a/Office_Room_2/semantics/033_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/033_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..3d3dcd246ea3290671563a2ecb9496ad1352b852 --- /dev/null +++ b/Office_Room_2/semantics/033_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7286c42cbb5d58cbe3edaa161eff6d7dcd1acd62030dcdb0e15bcca88c9427ae +size 16773760 diff --git a/Office_Room_2/semantics/034_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/034_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..b88975045d9ff3827e77e148694e73fdb2e34408 --- /dev/null +++ b/Office_Room_2/semantics/034_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5558bae2bf6c3a218db9f23517a41c3f3fae1f494b5d5ea72fc324c2e4b09eda +size 16773760 diff --git a/Office_Room_2/semantics/035_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/035_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..9071df7fd2950282d359a34e61d4490e769b1542 --- /dev/null +++ b/Office_Room_2/semantics/035_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:c6fe7750cd7e880a0958790fbba829aeed46d72a1f0c6c86ca454863bba37e50 +size 16773760 diff --git a/Office_Room_2/semantics/036_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/036_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..4ca317de30de684f5ca9bd1e1288d911532a3a43 --- /dev/null +++ b/Office_Room_2/semantics/036_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b6b4ff36e2082846737dcdb09b2790b65014a050d2185dbe6be94a930b541709 +size 16773760 diff --git a/Office_Room_2/semantics/037_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/037_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..d781afe67f4431d32c171da23d75b516f79eb93d --- /dev/null +++ b/Office_Room_2/semantics/037_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:276ddcca51b524bf2952258aa16b316a90a57087ad003013129a98d17385fcf3 +size 16773760 diff --git a/Office_Room_2/semantics/038_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/038_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..de67a9a634f780e09952ece19571a4d32e9f06c7 --- /dev/null +++ b/Office_Room_2/semantics/038_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:760a0d423c8640c2aae2d82214e02cc4500f8f3c07c93a7ab7e3815e3a173c83 +size 16773760 diff --git a/Office_Room_2/semantics/039_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/039_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..0054671658b5b9cd9dd2c1a36a03566285e1fe26 --- /dev/null +++ b/Office_Room_2/semantics/039_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ef5e5e636199ef1aba37db4d4bf0f68d1cac33486f6737e615cadf65bd7e76ba +size 16773760 diff 
--git a/Office_Room_2/semantics/040_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/040_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..f60c07990f7a8a0bf8a23271330ee72f8f802474 --- /dev/null +++ b/Office_Room_2/semantics/040_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f9f50abb026ec6c6ec6903dcf0cb64696d3beaf5c48ec41a3981377cdea4978 +size 16773760 diff --git a/Office_Room_2/semantics/041_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/041_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..f830dab553ea8061bf0cfc74b26956014c2c4241 --- /dev/null +++ b/Office_Room_2/semantics/041_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:806a23f184d8496534f90274347e8dfab35309ba75fd5b433aadd32ccf04a3cb +size 16773760 diff --git a/Office_Room_2/semantics/042_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/042_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..706ec8a6689c277dc51ae2151d44a95074d59c9b --- /dev/null +++ b/Office_Room_2/semantics/042_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f617abcf0654cd519f1987f470cd8db41b5560212a480d9cd5ce9e9f60844393 +size 16773760 diff --git a/Office_Room_2/semantics/043_Office_Room_2_equi_semantic.npy b/Office_Room_2/semantics/043_Office_Room_2_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..032273c7724253b2b7a2d15c063988d45e568b51 --- /dev/null +++ b/Office_Room_2/semantics/043_Office_Room_2_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:53bf5380031c72cb1e20193877681eee065da31fb58dbe349791dfdc6e6bc938 +size 16773760 diff --git a/Parking_Lot/semantics/000_Parking_Lot_equi_semantic.npy 
b/Parking_Lot/semantics/000_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..9f6cb8df0614c0f3c22d311e616cf6c1ca42c57e --- /dev/null +++ b/Parking_Lot/semantics/000_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b85f1246c0d42fe5787425fcfb1bd22d94a1d3c7da6e98f6c062b65306b1cf43 +size 16773760 diff --git a/Parking_Lot/semantics/001_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/001_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..a77ea7bb0a6de583bc422b04220cf717b9cac24b --- /dev/null +++ b/Parking_Lot/semantics/001_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4f0c2026cb8414c95093a27cdc090607ed59befdf300ffa7bc73ceb7ddaceb8d +size 16773760 diff --git a/Parking_Lot/semantics/002_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/002_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..8c338098fc2894b98ef1a3c2643b8399bbe91990 --- /dev/null +++ b/Parking_Lot/semantics/002_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:75440a06ada5dc5f83d1b65d1e22185822be94ee6e1fe22afb993371597654be +size 16773760 diff --git a/Parking_Lot/semantics/003_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/003_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..a8a58e6f132a59e8b71eb7fd8a440bbfd5a7265d --- /dev/null +++ b/Parking_Lot/semantics/003_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc8087d90987ad100448f87071031fea43e2670c8bee5b4a7f8c55a3e19d7d44 +size 16773760 diff --git a/Parking_Lot/semantics/004_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/004_Parking_Lot_equi_semantic.npy new file mode 100644 index 
0000000000000000000000000000000000000000..bf5cddd02e135e2ada927dcd40a022ae3d3826f7 --- /dev/null +++ b/Parking_Lot/semantics/004_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51221d3e9b1ebd73363b2541169e5fd30c113494d9103f05f3529e1ee2104d14 +size 16773760 diff --git a/Parking_Lot/semantics/005_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/005_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..12e63a098708b5b61fc9d580affb8a401077540d --- /dev/null +++ b/Parking_Lot/semantics/005_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:959e3d72a366a48cf008a5c3e8034bcae78d4757f62302fc009084b46d99b810 +size 16773760 diff --git a/Parking_Lot/semantics/006_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/006_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..d0f1b85a41d4158252ca8eedab2bba2d4f7b21fa --- /dev/null +++ b/Parking_Lot/semantics/006_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9ba8044e4c60b81971076d85779516db7b617620bef503bbc29b23f195e51940 +size 16773760 diff --git a/Parking_Lot/semantics/007_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/007_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..c03db69d3b648f14229718d80108d9fabde60a2c --- /dev/null +++ b/Parking_Lot/semantics/007_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ae03c6b47cd10aca6fcbe70c1badc40fad6bb03f49ef207f330df30d36467c3 +size 16773760 diff --git a/Parking_Lot/semantics/008_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/008_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..441d6baba064ce2ba59d5713a339280394b38937 --- /dev/null +++ 
b/Parking_Lot/semantics/008_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8b16e1e2ddb0383965406c91c22831431de5f3b494ed5460a7671f9dc825b49 +size 16773760 diff --git a/Parking_Lot/semantics/009_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/009_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..3814fb7258a3a185c9addbaebe446a11ca5698a6 --- /dev/null +++ b/Parking_Lot/semantics/009_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:50048a59515d08dd472e7c6a967a4e2b2590bcc891b887c46b777009d0942868 +size 16773760 diff --git a/Parking_Lot/semantics/010_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/010_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..5e157de9896c8dc9bb4ba221933c31e14fd26251 --- /dev/null +++ b/Parking_Lot/semantics/010_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18df7302b4617b521ab50c20d889b696ddb10db7572ef82cead9f03fe4533f6f +size 16773760 diff --git a/Parking_Lot/semantics/011_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/011_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..edbf4700d3a645825e002c9ee96abd55a26aaa06 --- /dev/null +++ b/Parking_Lot/semantics/011_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5adf988932a62aca73b4a6c297f902a1e77269682df117b706c8a5fcb94ed877 +size 16773760 diff --git a/Parking_Lot/semantics/012_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/012_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..7dbef042b42cb4f86ea31ab95e27b0ab87d6efff --- /dev/null +++ b/Parking_Lot/semantics/012_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:ea397fed3da6d158d8503e1289b1788874f77d1e6b0f65a8f2a1777b71eb4c0f +size 16773760 diff --git a/Parking_Lot/semantics/013_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/013_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..42f642c03b46f403ba385effae9f21ddfb4d5a38 --- /dev/null +++ b/Parking_Lot/semantics/013_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1230b71b1658d2edfcda75a564a2fb3ddb7a50a5994ba8f62ab3540336912917 +size 16773760 diff --git a/Parking_Lot/semantics/014_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/014_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..199e6c1324abd298ebe791d7909a72db695b2c17 --- /dev/null +++ b/Parking_Lot/semantics/014_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7131cfaba142d71dd6dac552cd4516cd475f1d4bacc2e27fa39d9c64619505ce +size 16773760 diff --git a/Parking_Lot/semantics/015_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/015_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..a2bc51749e8f69fa1fdfa8228c052920bede6647 --- /dev/null +++ b/Parking_Lot/semantics/015_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1d38640fca131372b17266a12386b347948847d12c5739c1b7543f6ed8577564 +size 16773760 diff --git a/Parking_Lot/semantics/016_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/016_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..24fdf022dc66f405ed79617ec2095c508e16bb84 --- /dev/null +++ b/Parking_Lot/semantics/016_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2fa50ec5ca4856e2c5a22a6474fca55cb144e5d09de673fe1d4e6e40b9330438 +size 16773760 diff --git 
a/Parking_Lot/semantics/017_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/017_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..ea39dbbb4e885f3f60641d6024bc60e82e6ba0da --- /dev/null +++ b/Parking_Lot/semantics/017_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f386e6852b6ff2d2cd63e1a2b1e203b632c131d297436d8d1c0cd5381865298 +size 16773760 diff --git a/Parking_Lot/semantics/018_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/018_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..8be1e599745babf1779bb4276b174f6b57284b38 --- /dev/null +++ b/Parking_Lot/semantics/018_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f89f404a168ff64a69ed5046d3c2b9201b566ce7b97afc4aab0cb3746d49c8f4 +size 16773760 diff --git a/Parking_Lot/semantics/019_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/019_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..35f2a3108bd287bc4ed1448876b1be23418bae8b --- /dev/null +++ b/Parking_Lot/semantics/019_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7fab4f59a9edbb23d98e1b65114b8ba44dfa4ff8a505323cd5001f299781f608 +size 16773760 diff --git a/Parking_Lot/semantics/020_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/020_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..7e24b452c48d57a733e4bc71b8a13ad0c1b879f2 --- /dev/null +++ b/Parking_Lot/semantics/020_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d42475720052560e28bba8d695f6333070842dc50c7ebe43978dd9f026f5b82b +size 16773760 diff --git a/Parking_Lot/semantics/021_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/021_Parking_Lot_equi_semantic.npy new file mode 100644 
index 0000000000000000000000000000000000000000..53742910232bf45b9a3e0fa89b559c8b5ca19622 --- /dev/null +++ b/Parking_Lot/semantics/021_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a069109f58211da3e26101493b838fdfd5206ca1ea5310741d5cedfbbd71d4b4 +size 16773760 diff --git a/Parking_Lot/semantics/022_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/022_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..347bdae606f136d4a03c737817fb357069383c60 --- /dev/null +++ b/Parking_Lot/semantics/022_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04d57e953f6e0b6e43ca5decca05e3e044b0dffe897088ec4929f143d5c20985 +size 16773760 diff --git a/Parking_Lot/semantics/023_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/023_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..cf5b985f05f82111ae7c40021827bd058c50d0d4 --- /dev/null +++ b/Parking_Lot/semantics/023_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d637d71c2d07bce91a41f219fde7f788b375240d76da89cc3b612dcc283133e +size 16773760 diff --git a/Parking_Lot/semantics/024_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/024_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..6acde41e4f6951c1d8ca726c965550402557173b --- /dev/null +++ b/Parking_Lot/semantics/024_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2749a69d342f54e9d86f6cadf2578436e3c9f2af0b821ab8621124e4f91f6944 +size 16773760 diff --git a/Parking_Lot/semantics/025_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/025_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..6586a118955cdd236ad64e313877d13ec2bc75f0 --- /dev/null +++ 
b/Parking_Lot/semantics/025_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d78d9b660c2beb0cf1ffd304ec95f2cce0ad96943d512497532787ff4b8a046 +size 16773760 diff --git a/Parking_Lot/semantics/026_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/026_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..f629b76915836f78e669e1f601b5f926f03fe349 --- /dev/null +++ b/Parking_Lot/semantics/026_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82f17ebcdefe5e2c2f19a9568b551f47631a43f5baea9454b6213f75ff783feb +size 16773760 diff --git a/Parking_Lot/semantics/027_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/027_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..7a41f00ebc1e3bdcc274ed73be6fa1366b2ff819 --- /dev/null +++ b/Parking_Lot/semantics/027_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:11dd6921aee95a924f05f64431efff64dcd83026917ba6a717b2888c7cdde9fd +size 16773760 diff --git a/Parking_Lot/semantics/028_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/028_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..08bb594a19e29ba213d3756b9e227a1b2628baad --- /dev/null +++ b/Parking_Lot/semantics/028_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d4b871ca91faee2ee675db2533e8b534386a0c23b79c45595be3ee0882b2147e +size 16773760 diff --git a/Parking_Lot/semantics/029_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/029_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..bea0c303a7e865a686ae0c26bc7968371e7a66d4 --- /dev/null +++ b/Parking_Lot/semantics/029_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:3deadca582221315ca30fb897a8341d0f86affbe10b85a5920ff8d2b1ae94119 +size 16773760 diff --git a/Parking_Lot/semantics/030_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/030_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..2f3f8664b1811c44ce87e36827e7ed77afa07ee9 --- /dev/null +++ b/Parking_Lot/semantics/030_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d65c1feb1a52278eb0027555beafc9b7e5534a638e63ac40b260b983599abc4b +size 16773760 diff --git a/Parking_Lot/semantics/031_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/031_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..d78bb40fb2d3f4fcb059c41de92da46d8a799f95 --- /dev/null +++ b/Parking_Lot/semantics/031_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a09a3ffa56c3d3a51a6d63f15f27a646de86c8f4fce270f81848d8223686511 +size 16773760 diff --git a/Parking_Lot/semantics/032_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/032_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..b6e03582494daf2cd37ef077c622b6d46b59e068 --- /dev/null +++ b/Parking_Lot/semantics/032_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:30e407d12ea91ee3b163d0f5d3b53ef08f36e649ca2d1f6e4f532a3e19688d96 +size 16773760 diff --git a/Parking_Lot/semantics/033_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/033_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..9dd956f4a681c4ac2b234bb9340817a14dfd6d5c --- /dev/null +++ b/Parking_Lot/semantics/033_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4dfbd5c74d4e6a713ad08d9b034c89bb47f8f8d7c553fbdb39e673ee799ddf70 +size 16773760 diff --git 
a/Parking_Lot/semantics/034_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/034_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..5a7f3c6903155c99f8a7918bda2a8bd42d7dd70d --- /dev/null +++ b/Parking_Lot/semantics/034_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a2c38e55fe4ab4c7084fc1042331e5c4edfbbc5ffe002b37f4ae34a36727a50 +size 16773760 diff --git a/Parking_Lot/semantics/035_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/035_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..77c799d7f876da51c8aa859ac6dfeb7731a33c3e --- /dev/null +++ b/Parking_Lot/semantics/035_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d486a4e27b47b46c3f2a694326d7acb37af6205575e7224b69ca552bf029e8b +size 16773760 diff --git a/Parking_Lot/semantics/036_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/036_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..de0d76452afd29cb38f75fdfb6701d8cdd022f81 --- /dev/null +++ b/Parking_Lot/semantics/036_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6f53335769ac79b3db279cee4c10b420eb4cb12f2d9ecfc826f1d9b8e6a626bf +size 16773760 diff --git a/Parking_Lot/semantics/037_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/037_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..fc48c75ff70db8316088e213bf4b7ff8e129747c --- /dev/null +++ b/Parking_Lot/semantics/037_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6ea5e4b440e71173b4c0215668da6b089115d439a1a91d31070a5a40f66401b5 +size 16773760 diff --git a/Parking_Lot/semantics/038_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/038_Parking_Lot_equi_semantic.npy new file mode 100644 
index 0000000000000000000000000000000000000000..cd26f0cd6aa32d0b806c406d21d59f6910ed3d55 --- /dev/null +++ b/Parking_Lot/semantics/038_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a6cd49b9a2027495a62f4daaa5fdee2ddd0271684532983013702c947f38cc1 +size 16773760 diff --git a/Parking_Lot/semantics/039_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/039_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..caf14d2962e41d8863cddb3f1e470b8b6298153f --- /dev/null +++ b/Parking_Lot/semantics/039_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5dec6a5a7ecde7b4f8d509f3559bad011545b7f0e8a2bba85ac5100b0828ac5 +size 16773760 diff --git a/Parking_Lot/semantics/040_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/040_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..10bb0505272821fbaa2da387e961cbef083db68c --- /dev/null +++ b/Parking_Lot/semantics/040_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f3f42b432fda17d47c345e63c906357716557e1ac8f69919ef985b2ffcd2ad4 +size 16773760 diff --git a/Parking_Lot/semantics/041_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/041_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..9a949ef8736423627347917167fa771538c24c49 --- /dev/null +++ b/Parking_Lot/semantics/041_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:900a167cefcbe9ae7575407acc847be831f9e1af68143c00b5e12d35984f6eb4 +size 16773760 diff --git a/Parking_Lot/semantics/042_Parking_Lot_equi_semantic.npy b/Parking_Lot/semantics/042_Parking_Lot_equi_semantic.npy new file mode 100644 index 0000000000000000000000000000000000000000..29d07bac2d3d161d1560b3e4cd505b35f40c8e95 --- /dev/null +++ 
b/Parking_Lot/semantics/042_Parking_Lot_equi_semantic.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:667920df00c37901fb6db3ffb42d348177a6e28f816d79908b5d4260f8afedef +size 16773760 diff --git a/README.md b/README.md index faa37523aa384eb124c2152ac1a9563cf48b70f0..b106ba04a2a1b7dae5cf8492dfb7e7830a96c420 100644 --- a/README.md +++ b/README.md @@ -7,47 +7,42 @@ license: cc-by-nc-sa-4.0 ![Figure showing multiple modalities](assets/figure/figure_1.png?raw=true) ## Overview -The ToF-360 dataset consists of spherical RGB-D images with instance-level semantic and room ayout annotations, which include 4 unique scenes. It contains 179 equirectangular RGB images along with the corresponding depths, surface normals, XYZ images, and HHA images, labeled with building-defining object categories and image based layout boundaries (ceiling-wall, wall-floor). The dataset enables development of scene understanding tasks based on single-shot reconstruction without the need for global alignment in indoor spaces. - +The ToF-360 dataset consists of spherical RGB-D images with instance-level semantic and room layout annotations, which include 4 unique scenes. It contains 179 equirectangular RGB images along with the corresponding depths, surface normals, XYZ images, and HHA images, labeled with building-defining object categories and image based layout boundaries (ceiling-wall, wall-floor). The dataset enables development of scene understanding tasks based on single-shot reconstruction without the need for global alignment in indoor spaces. ## Dataset Modalities -Each scenes in the dataset has its own folder in the dataset. All the modalities and metadata for each area are contained in that folder as `/`. - -**HHA images:** -We followed [[Depth2HHA-python]](https://github.com/charlesCXK/Depth2HHA-python) to create it. +Each scenes in the dataset has its own folder in the dataset. All the modalities for each area are contained in that folder as `/`. 
**RGB images:** -RGB images contain equirectangular 24-bit color images converted from raw dual fisheye image. +RGB images contain equirectangular 24-bit color and it is converted from raw dual fisheye image taken by a sensor. **Manhattan aligned RGB images:** -We followed [[LGT-Net]](https://github.com/zhigangjiang/LGT-Net) to create Manhattan aligned RGB images. - -**XYZ images:** -XYZ images are saved as `.npy` binary file format in NumPy. It contains pixel-aligned set of data points in space with a sensitivity of mm. It must be the size of (Height, Width, 3[xyz]). - -**Annotation:** +We followed the preprocessing code proposed by [[LGT-Net]](https://github.com/zhigangjiang/LGT-Net) to create Manhattan aligned RGB images. Sample code for our dataset is in `assets/preprocessing/align_manhattan.py`. **depth:** -Depth images are stored as 16-bit PNGs having a maximum depth of 128m and a sensitivity of 1/512m. Missing values are encoded with the value 0. Note that while depth is defined as the distance from the point-center of the camera in the panoramics. +Depth images are stored as 16-bit grayscale PNGs having a maximum depth of 128m and a sensitivity of 1/512m. Missing values are encoded with the value 0. Note that while depth is defined as the distance from the point-center of the camera in the panoramics. -**Room layout annotation:** -Room layout annotations are stored as same json format as [PanoAnnotator](https://github.com/SunDaDenny/PanoAnnotator). Please refer to this repo for more details. +**XYZ images:** +XYZ images are saved as `.npy` binary file format in [NumPy](https://numpy.org/). It contains pixel-aligned set of data points in space with a sensitivity of mm. It must be the size of (Height, Width, 3[xyz]). **Normal images:** -Normals are 127.5-centered per-channel surface normal images. 
The normal vector is saved as 24-bit RGB PNGs where Red is the horizontal value (more red to the right), Green is vertical (more green downwards), and Blue is towards the camera. It is computed by [normal estimation function](https://www.open3d.org/docs/0.7.0/python_api/open3d.geometry.estimate_normals.html) in [Open3D](https://github.com/isl-org/Open3D). The tool for creating normal images from 3D is located in the `assets/compute_normal.py` - -## Tools -This repository provides some basic tools for interacting with the dataset and how to get preprocessed data. The tools are located in the `assets/preprocessing` folder. - -## Evaluation -**Semantic segmentation (image-based):** +Normals are 127.5-centered per-channel surface normal images. The normal vector is saved as 24-bit RGB PNGs where Red is the horizontal value (more red to the right), Green is vertical (more green downwards), and Blue is towards the camera. It is computed by [normal estimation function](https://www.open3d.org/docs/0.7.0/python_api/open3d.geometry.estimate_normals.html) in [Open3D](https://github.com/isl-org/Open3D). The tool for creating normal images from 3D is located in the `assets/preprocessing/depth2normal.py`. +**HHA images:** +HHA images contains horizontal disparity, height above ground and angle with gravity, respectively. +We followed [Depth2HHA-python](https://github.com/charlesCXK/Depth2HHA-python) to create it. Code is located in `assets/preprocessing/getHHA.py`. -**Semantic segmentation (pointcloud-based):** +**Annotation:** +We used the [COCO Annotator](https://github.com/jsbroks/coco-annotator) for labelling the RGB data. We follow [ontology-based annotation guidelines](https://www.dfki.de/fileadmin/user_upload/import/13246_EC3_2023_Ontology_based_annotation_of_RGB_D_images_and_point_clouds_for_a_domain_adapted_dataset.pdf) developed for both RGB-D and point cloud data. 
+`/annotation` contains json format files, `/semantics` and `/instances>` have image-like labeled data stored as `.npy` binary file. +**Room layout annotation:** +Room layout annotations are stored as same json format as [PanoAnnotator](https://github.com/SunDaDenny/PanoAnnotator). Please refer to this repo for more details. -**Layout estimation:** +## Tools +This repository provides some basic tools for getting preprocessed data and evaluating dataset. The tools are located in the `assets/` folder. +## Croissant metadata +You can use [this instruction](https://huggingface.co/docs/datasets-server/croissant) provided by HuggingFace. `croissant_metadata.json` is also available. ## Citations Coming soon... \ No newline at end of file diff --git a/assets/layout_eval/README.md b/assets/layout_eval/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9f06f09ebfea9438f8b7875a80627a0ff4fabe00 --- /dev/null +++ b/assets/layout_eval/README.md @@ -0,0 +1,58 @@ +# Evaluation with LGT-Net +This is instruction for evaluating our dataset with "[LGT-Net: Indoor Panoramic Room Layout Estimation with Geometry-Aware Transformer Network](https://arxiv.org/abs/2203.01824)". + +# Downloading Pre-trained Weights +Pre-trained weights are provided by authors on individual datasets at [here](https://drive.google.com/drive/folders/1bOZyXeuNnwFEC9nw7EgJUwMiI685obdT?usp=sharing). +These models are used in our dataset paper below. 
+- [mp3d/best.pkl](https://drive.google.com/file/d/1o97oAmd-yEP5bQrM0eAWFPLq27FjUDbh/view?usp=sharing): Training on MatterportLayout dataset +- [pano/best.pkl](https://drive.google.com/file/d/1JoeqcPbm_XBPOi6O9GjjWi3_rtyPZS8m/view?usp=sharing): Training on PanoContext(train)+Stanford2D-3D(whole) dataset +- [s2d3d/best.pkl](https://drive.google.com/file/d/1PfJzcxzUsbwwMal7yTkBClIFgn8IdEzI/view?usp=sharing): Training on Stanford2D-3D(train)+PanoContext(whole) dataset + +Make sure the pre-trained weight files are stored as follows: +``` +checkpoints +|-- SWG_Transformer_LGT_Net +| |-- mp3d +| | |-- best.pkl +| |-- pano +| | |-- best.pkl +| |-- s2d3d +| | |-- best.pkl +``` + +# Preparing Dataset +You can use `assets/layout_eval/convert4LGTNet.py` to get proper data structure for evaluation. + +### Evaluation with MatterportLayout +Make sure the dataset files are stored as follows: +``` +src/dataset/mp3d +|-- image +| |-- 000__equi_rgb.png +|-- label +| |-- 000__equi_layout.json +|-- split + |-- test.txt # it needs to contain all of files. 
+``` + +### Evaluation with PanoContext +Make sure the dataset files are stored as follows: +``` +src/dataset/pano_s2d3d +|-- test +| |-- img +| | |-- pano_000__equi_rgb.png +| |-- label_cor +| |-- pano_000__equi_layout.txt +``` + +### Evaluation with Stanford 2D-3D +Make sure the dataset files are stored as follows: +``` +src/dataset/pano_s2d3d +|-- test +| |-- img +| | |-- camera_000__equi_rgb.png +| |-- label_cor +| |-- camera_000__equi_layout.txt +``` \ No newline at end of file diff --git a/assets/layout_eval/config/eval_mp3d.yaml b/assets/layout_eval/config/eval_mp3d.yaml new file mode 100644 index 0000000000000000000000000000000000000000..cc03283aa34245011449fe70d296649e197fc47b --- /dev/null +++ b/assets/layout_eval/config/eval_mp3d.yaml @@ -0,0 +1,43 @@ +COMMENT: 'Training on MatterportLayout' +TAG: 'mp3d' +SEED: 123 +MODEL: + NAME: 'LGT_Net' + ARGS: [ { + 'decoder_name': 'SWG_Transformer', + 'win_size': 16, + 'rpe': 'lr_parameter_mirror', + 'dropout': 0.0, + 'depth': 8, + 'output_name': 'LGT' + } ] +TRAIN: + DEVICE: 'cuda:0' + SCRATCH: False + DETERMINISTIC: True + CRITERION: + DEPTH: + WEIGHT: 0.9 + RATIO: + WEIGHT: 0.1 + GRAD: + WEIGHT: 0.1 + WEIGHTS: [ 1.0, 1.0 ] + BASE_LR: + 1e-4 + RESUME_LAST: False + OPTIMIZER: + NAME: 'adam' + EPOCHS: 1000 +DATA: + DATASET: 'mp3d' + DIR: 'src/dataset/mp3d' + BATCH_SIZE: 6 + NUM_WORKERS: 6 + FOR_TEST_INDEX: None + AUG: + FLIP: True + STRETCH: True + ROTATE: True + GAMMA: True +AMP_OPT_LEVEL: 'O0' diff --git a/assets/layout_eval/config/eval_pano.yaml b/assets/layout_eval/config/eval_pano.yaml new file mode 100644 index 0000000000000000000000000000000000000000..230fbf61316b6388d6ef234193c0d00eb3b90823 --- /dev/null +++ b/assets/layout_eval/config/eval_pano.yaml @@ -0,0 +1,44 @@ +COMMENT: 'Training on PanoContext(train)+Stanford2D-3D(whole)' +TAG: 'pano' +SEED: 123 +MODEL: + NAME: 'LGT_Net' + ARGS: [ { + 'decoder_name': 'SWG_Transformer', + 'win_size': 16, + 'rpe': 'lr_parameter_mirror', + 'dropout': 0.0, + 'depth': 6, 
+ 'output_name': 'LGT' + } ] +TRAIN: + DEVICE: 'cuda:0' + SCRATCH: False + DETERMINISTIC: True + CRITERION: + DEPTH: + WEIGHT: 0.9 + RATIO: + WEIGHT: 0.1 + GRAD: + WEIGHT: 0.1 + WEIGHTS: [ 1.0, 1.0 ] + BASE_LR: + 1e-4 + RESUME_LAST: False + OPTIMIZER: + NAME: 'adam' + EPOCHS: 1000 +DATA: + DATASET: 'pano_s2d3d_mix' + DIR: 'src/dataset/pano' + SUBSET: 'pano' + BATCH_SIZE: 6 + NUM_WORKERS: 6 + FOR_TEST_INDEX: None + AUG: + FLIP: True + STRETCH: True + ROTATE: True + GAMMA: True +AMP_OPT_LEVEL: 'O0' diff --git a/assets/layout_eval/config/eval_s2d3d.yaml b/assets/layout_eval/config/eval_s2d3d.yaml new file mode 100644 index 0000000000000000000000000000000000000000..60c8eb64292e5c431eb707025fa6ba1aa585c971 --- /dev/null +++ b/assets/layout_eval/config/eval_s2d3d.yaml @@ -0,0 +1,44 @@ +COMMENT: 'Training on Stanford2D-3D(train)+PanoContext(whole)' +TAG: 's2d3d' +SEED: 123 +MODEL: + NAME: 'LGT_Net' + ARGS: [ { + 'decoder_name': 'SWG_Transformer', + 'win_size': 16, + 'rpe': 'lr_parameter_mirror', + 'dropout': 0.0, + 'depth': 6, + 'output_name': 'LGT' + } ] +TRAIN: + DEVICE: 'cuda:0' + SCRATCH: False + DETERMINISTIC: True + CRITERION: + DEPTH: + WEIGHT: 0.9 + RATIO: + WEIGHT: 0.1 + GRAD: + WEIGHT: 0.1 + WEIGHTS: [ 1.0, 1.0 ] + BASE_LR: + 1e-4 + RESUME_LAST: False + OPTIMIZER: + NAME: 'adam' + EPOCHS: 1000 +DATA: + DATASET: 'pano_s2d3d_mix' + DIR: 'src/dataset/s2d3d' + SUBSET: 's2d3d' + BATCH_SIZE: 6 + NUM_WORKERS: 6 + FOR_TEST_INDEX: None + AUG: + FLIP: True + STRETCH: True + ROTATE: True + GAMMA: True +AMP_OPT_LEVEL: 'O0' diff --git a/assets/layout_eval/convert4LGTNet.py b/assets/layout_eval/convert4LGTNet.py new file mode 100644 index 0000000000000000000000000000000000000000..9bec4a752eefc46f126afd8d157451fa5ac817ef --- /dev/null +++ b/assets/layout_eval/convert4LGTNet.py @@ -0,0 +1,106 @@ +import os +import shutil +import glob +from tqdm import tqdm +import cv2 +from natsort import natsorted +import json +import math + +def config_setup(): + config = {} + 
config["img_width_for_resize"] = 1024 + config["input_scenes"] = ["Hospital", "Office_Room_1", "Office_Room_2", "Parking_Lot"] + config["output_folders"] = ["src/dataset/mp3d/", "src/dataset/pano/", "src/dataset/s2d3d/"] + config["output_RGB_rules"] = ["image/", "test/img/pano_", "test/img/camera_"] + config["output_json_rules"] = ["label/", "test/label_cor/pano_", "test/label_cor/camera_"] + + # remove un-Manhattan aligned images + config["except_list"] = ["017_Hospital", + "044_Hospital", + "049_Hospital", + "012_Office_Room_2", + "034_Office_Room_2", + "008_Office_Room_1", + "010_Office_Room_1", + "014_Office_Room_1", + "017_Office_Room_1", + "025_Office_Room_1", + "037_Office_Room_1" + ] + return config + +def xyz2uv(xyz): + + normXZ = math.sqrt( math.pow(xyz[0], 2) + math.pow(xyz[2], 2) ) + if normXZ < 0.000001: + normXZ = 0.000001 + + normXYZ = math.sqrt(math.pow(xyz[0], 2) + + math.pow(xyz[1], 2) + + math.pow(xyz[2], 2) ) + + v = math.asin(xyz[1] / normXYZ) + u = math.asin(xyz[0] / normXZ) + + if xyz[2] > 0 and u > 0: + u = math.pi - u + elif xyz[2] > 0 and u < 0: + u = -math.pi - u + + uv = (u, v) + + return uv + +def uv2coords(uv): + + coordsX = uv[0] / (2 * math.pi) + 0.5 + coordsY = -uv[1] / math.pi + 0.5 + + coords = (coordsX, coordsY) + + return coords + +def write_json2txt(input_json, output_txt, img_width): + output_list = [] + with open(input_json) as f: + dict_json = json.load(f) + + for point in dict_json["layoutPoints"]["points"]: + layout_up = uv2coords(xyz2uv([point["xyz"][0], point["xyz"][1] + dict_json["layoutHeight"] - dict_json["cameraHeight"], point["xyz"][2]])) + layout_down = uv2coords(xyz2uv([point["xyz"][0], point["xyz"][1] - dict_json["cameraHeight"], point["xyz"][2]])) + output_list.append(" ".join([str(int(layout_up[0]*img_width)), str(int(layout_up[1]*img_width/2))]) + "\n") + output_list.append(" ".join([str(int(layout_down[0]*img_width)), str(int(layout_down[1]*img_width/2))]) + "\n") + + with open(output_txt, "a") as t: + 
t.writelines(output_list) + return 0 + +def main(): + config = config_setup() + + for input_scene in config["input_scenes"]: + img_files = natsorted(glob.glob(input_scene+"/RGB_mh_aligned/*_equi_rgb_aligned.png")) + json_files = natsorted(glob.glob(input_scene+"/layout/*_equi_layout.json")) + + for img_file, json_file in zip(img_files, json_files): + idx = img_file.split(".")[0].split("/")[-1].split(input_scene)[0] + + if idx + "_" + input_scene in config["except_list"]: + continue + + else: + for output_folder, output_RGB_rule, output_json_rule in zip(config["output_folders"], config["output_RGB_rules"], config["output_json_rules"]): + os.makedirs(output_folder+output_RGB_rule.split("/")[:-1], exist_ok=True) + os.makedirs(output_folder+output_json_rule.split("/")[:-1], exist_ok=True) + output_file = output_folder+output_RGB_rule+idx+"_"+input_scene+"_equi_rgb_aligned.png" + output_txt = output_folder+output_json_rule+idx+"_"+input_scene+"_equi_layout.txt" + + img = cv2.resize(cv2.imread(img_file), (config["img_width_for_resize"], int(config["img_width_for_resize"]/2))) + cv2.imwrite(output_file, img) + + img_width = img.shape[1] + write_json2txt(json_file, output_txt, img_width) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/assets/pointcloud_eval/README.md b/assets/pointcloud_eval/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8ee332c8d1059825c23b8708f03e4e9051563bb1 --- /dev/null +++ b/assets/pointcloud_eval/README.md @@ -0,0 +1,5 @@ +### Point Cloud Evaluation + +Point cloud evaluation uses PointTransformerV3 from the original repository [Pointcept](https://github.com/Pointcept/Pointcept). The preprocessing script prepares the data in the S3DIS format used by Pointcept and the config file is a modified version of the config file for S3DIS. + +The training and evaluation follow the same setup described by the state of the art method. 
\ No newline at end of file diff --git a/assets/pointcloud_eval/preprocess_tof_360.py b/assets/pointcloud_eval/preprocess_tof_360.py new file mode 100644 index 0000000000000000000000000000000000000000..1712b535a92ea4ba1df17173b35b0f7a6d3a338c --- /dev/null +++ b/assets/pointcloud_eval/preprocess_tof_360.py @@ -0,0 +1,139 @@ +""" +Preprocessing Script for ToF-360 + +Author: Mahdi Chamseddine (mahdi.chamseddine@dfki.de) +Please cite our work if the code is helpful to you. +""" +from pathlib import Path + +import cv2 +import numpy as np +import open3d as o3d + + +def map_label(label: int) -> int: + + match label: + case 0 | 33 | 34: # + return -1 + case 2 | 20 | 42: # ceiling + return 0 + case 3 | 18: # floor + return 1 + case 1 | 40: # wall + return 2 + # case 17: # beam + # return 3 + case 14: # column + return 4 + case 8: # window + return 5 + case 7: # door + return 6 + case 12: # table + return 7 + case 5: # chair + return 8 + case 4: # sofa + return 9 + case 31: # bookcase + return 10 + # case 26: # board + # return 11 + case _: # clutter + return 12 + + +def downsample(xyz: np.ndarray, voxel_size: float = 0.01) -> np.ndarray: + min_vals = np.min(xyz, axis=0) + max_vals = np.max(xyz, axis=0) + point_cloud = o3d.geometry.PointCloud() + point_cloud.points = o3d.utility.Vector3dVector(xyz) + _, _, indices = point_cloud.voxel_down_sample_and_trace( + voxel_size, min_vals, max_vals + ) + indices = [np.random.choice(idx) for idx in indices] + return np.asarray(indices) + + +def preprocess_scans(area_path: Path) -> None: + xyz_dir = area_path / "XYZ" + for scan_path in xyz_dir.glob("*.npy*"): + scan_name = scan_path.stem[: -len("_xxx")] + parse_scan(scan_name, area_path) + + +def parse_scan(scan_name: str, area_path: Path, debug: bool = False): + output_name = area_path.stem + "_" + scan_name + print(f"Parsing scan: {output_name}", flush=True) + processed_path = ( + area_path.parent.parent / "preprocessed" / area_path.parent.stem / output_name + ) + # if 
processed_path.exists(): + # return + processed_path.mkdir(parents=True, exist_ok=True) + + print(f"--- [{output_name}] reading point cloud", flush=True) + xyz_path = Path(area_path / "XYZ", scan_name + "_XYZ.npy") + temp = np.load(xyz_path) + + temp = temp.reshape(-1, 3) / 1000 # mm to m + coord = temp.copy() + coord[:, 1] = temp[:, 2] + coord[:, 2] = -temp[:, 1] + png_path = Path(area_path / "RGB", scan_name + "_rgb.png") + color = cv2.imread(png_path.resolve()) + color = cv2.cvtColor(color, cv2.COLOR_BGR2RGB).reshape(-1, 3) / 255 + + print(f"--- [{output_name}] loading labels", flush=True) + semantic_path = Path(area_path / "semantics", scan_name + "_semantic.npy") + segment = np.load(semantic_path).reshape(-1) + segment = np.vectorize(map_label)(segment) + + normal_path = Path(area_path / "normal", scan_name + "_normal.png") + temp = cv2.imread(normal_path.resolve()) + temp = cv2.cvtColor(temp, cv2.COLOR_BGR2RGB).reshape(-1, 3) * 2 / 255 + temp = temp - 1 + normal = temp.copy() + normal[:, 1] = temp[:, 2] + normal[:, 2] = -temp[:, 1] + + print(f"--- [{output_name}] down sampling", flush=True) + idx = downsample(coord) + + print(f"--- [{output_name}] saving", flush=True) + coord = np.ascontiguousarray(coord[idx, :], dtype=np.float32) + np.save(Path(processed_path, "coord.npy"), coord) + color = np.ascontiguousarray(color[idx, :], dtype=np.float32) + np.save(Path(processed_path, "color.npy"), color) + normal = np.ascontiguousarray(normal[idx, :], dtype=np.float32) + np.save(Path(processed_path, "normal.npy"), normal) + segment = np.ascontiguousarray(segment[idx], dtype=np.int32) + np.save(Path(processed_path, "segment.npy"), segment) + + +def main(): + # splits = ["test", "train", "val"] + # splits = ["train"] + splits = [""] + dataset_directory = "path/to/ToF-360/" + + areas = ["Hospital", "Office_Room_1", "Office_Room_2", "Parking_Lot"] + + for split in splits: + split_directory = dataset_directory + split + split_path = Path(split_directory) + # Check if the 
parent directory exists + if not split_path.is_dir(): + print( + f"Error: '{split_path.resolve()}' is not a valid directory.", + flush=True, + ) + return + for area_path in split_path.iterdir(): + if area_path.is_dir() and area_path.stem in areas: + preprocess_scans(area_path) + + +if __name__ == "__main__": + main() diff --git a/assets/pointcloud_eval/semseg-pt-v3m1-2-ppt-extreme_tof_360_eval.py b/assets/pointcloud_eval/semseg-pt-v3m1-2-ppt-extreme_tof_360_eval.py new file mode 100644 index 0000000000000000000000000000000000000000..ebe8274e8a0d8bf9464bf42c2e4093964041a712 --- /dev/null +++ b/assets/pointcloud_eval/semseg-pt-v3m1-2-ppt-extreme_tof_360_eval.py @@ -0,0 +1,489 @@ +""" +PTv3 + PPT +Pre-trained on ScanNet + Structured3D +(S3DIS is commented by default as a long data time issue of S3DIS: https://github.com/Pointcept/Pointcept/issues/103) +In the original PPT paper, 3 datasets are jointly trained and validated on the three datasets jointly with +one shared weight model. In PTv3, we trained on multi-dataset but only validated on one single dataset to +achieve extreme performance on one single dataset. + +To enable joint training on three datasets, uncomment config for the S3DIS dataset and change the "loop" of + Structured3D and ScanNet to 4 and 2 respectively. 
+ +Modified to test on ToF-360 dataset +""" + +_base_ = ["../_base_/default_runtime.py"] + +# misc custom setting +batch_size = 24 # bs: total bs in all gpus +num_worker = 48 +mix_prob = 0.8 +empty_cache = False +enable_amp = True +find_unused_parameters = True + +# trainer +train = dict( + type="MultiDatasetTrainer", +) + +# model settings +model = dict( + type="PPT-v1m1", + backbone=dict( + type="PT-v3m1", + in_channels=6, + order=("z", "z-trans", "hilbert", "hilbert-trans"), + stride=(2, 2, 2, 2), + enc_depths=(2, 2, 2, 6, 2), + enc_channels=(32, 64, 128, 256, 512), + enc_num_head=(2, 4, 8, 16, 32), + enc_patch_size=(1024, 1024, 1024, 1024, 1024), + dec_depths=(2, 2, 2, 2), + dec_channels=(64, 64, 128, 256), + dec_num_head=(4, 4, 8, 16), + dec_patch_size=(1024, 1024, 1024, 1024), + mlp_ratio=4, + qkv_bias=True, + qk_scale=None, + attn_drop=0.0, + proj_drop=0.0, + drop_path=0.3, + shuffle_orders=True, + pre_norm=True, + enable_rpe=False, + enable_flash=True, + upcast_attention=False, + upcast_softmax=False, + cls_mode=False, + pdnorm_bn=True, + pdnorm_ln=True, + pdnorm_decouple=True, + pdnorm_adaptive=False, + pdnorm_affine=True, + pdnorm_conditions=("ScanNet", "S3DIS", "Structured3D"), + ), + criteria=[ + dict(type="CrossEntropyLoss", loss_weight=1.0, ignore_index=-1), + dict(type="LovaszLoss", mode="multiclass", loss_weight=1.0, ignore_index=-1), + ], + backbone_out_channels=64, + context_channels=256, + conditions=("Structured3D", "ScanNet", "S3DIS"), + template="[x]", + clip_model="ViT-B/16", + # fmt: off + class_name=( + "wall", "floor", "cabinet", "bed", "chair", "sofa", "table", "door", + "window", "bookshelf", "bookcase", "picture", "counter", "desk", "shelves", "curtain", + "dresser", "pillow", "mirror", "ceiling", "refrigerator", "television", "shower curtain", "nightstand", + "toilet", "sink", "lamp", "bathtub", "garbagebin", "board", "beam", "column", + "clutter", "otherstructure", "otherfurniture", "otherprop", + ), + valid_index=( + (0, 1, 2, 3, 4, 
5, 6, 7, 8, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 23, 25, 26, 33, 34, 35), + (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 15, 20, 22, 24, 25, 27, 34), + (0, 1, 4, 5, 6, 7, 8, 10, 19, 29, 30, 31, 32), + ), + # fmt: on + backbone_mode=False, +) + +# scheduler settings +epoch = 100 +optimizer = dict(type="AdamW", lr=0.005, weight_decay=0.05) +scheduler = dict( + type="OneCycleLR", + max_lr=[0.005, 0.0005], + pct_start=0.05, + anneal_strategy="cos", + div_factor=10.0, + final_div_factor=1000.0, +) +param_dicts = [dict(keyword="block", lr=0.0005)] + +# dataset settings +data = dict( + num_classes=13, + ignore_index=-1, + names=[ + "ceiling", + "floor", + "wall", + "beam", + "column", + "window", + "door", + "table", + "chair", + "sofa", + "bookcase", + "board", + "clutter", + ], + train=dict( + type="ConcatDataset", + datasets=[ + # Structured3D + dict( + type="Structured3DDataset", + split=["train", "val", "test"], + data_root="data/structured3d", + transform=[ + dict(type="CenterShift", apply_z=True), + dict( + type="RandomDropout", + dropout_ratio=0.2, + dropout_application_ratio=0.2, + ), + # dict(type="RandomRotateTargetAngle", angle=(1/2, 1, 3/2), center=[0, 0, 0], axis="z", p=0.75), + dict( + type="RandomRotate", + angle=[-1, 1], + axis="z", + center=[0, 0, 0], + p=0.5, + ), + dict(type="RandomRotate", angle=[-1 / 64, 1 / 64], axis="x", p=0.5), + dict(type="RandomRotate", angle=[-1 / 64, 1 / 64], axis="y", p=0.5), + dict(type="RandomScale", scale=[0.9, 1.1]), + # dict(type="RandomShift", shift=[0.2, 0.2, 0.2]), + dict(type="RandomFlip", p=0.5), + dict(type="RandomJitter", sigma=0.005, clip=0.02), + # dict( + # type="ElasticDistortion", + # distortion_params=[[0.2, 0.4], [0.8, 1.6]], + # ), + dict(type="ChromaticAutoContrast", p=0.2, blend_factor=None), + dict(type="ChromaticTranslation", p=0.95, ratio=0.05), + dict(type="ChromaticJitter", p=0.95, std=0.05), + # dict(type="HueSaturationTranslation", hue_max=0.2, saturation_max=0.2), + # 
dict(type="RandomColorDrop", p=0.2, color_augment=0.0), + dict( + type="GridSample", + grid_size=0.02, + hash_type="fnv", + mode="train", + return_grid_coord=True, + ), + dict(type="SphereCrop", sample_rate=0.8, mode="random"), + dict(type="SphereCrop", point_max=204800, mode="random"), + dict(type="CenterShift", apply_z=False), + dict(type="NormalizeColor"), + # dict(type="ShufflePoint"), + dict(type="Add", keys_dict={"condition": "Structured3D"}), + dict(type="ToTensor"), + dict( + type="Collect", + keys=("coord", "grid_coord", "segment", "condition"), + feat_keys=("color", "normal"), + ), + ], + test_mode=False, + loop=4, # sampling weight + ), + # ScanNet + dict( + type="ScanNetDataset", + split="train", + data_root="data/scannet", + transform=[ + dict(type="CenterShift", apply_z=True), + dict( + type="RandomDropout", + dropout_ratio=0.2, + dropout_application_ratio=0.2, + ), + # dict(type="RandomRotateTargetAngle", angle=(1/2, 1, 3/2), center=[0, 0, 0], axis="z", p=0.75), + dict( + type="RandomRotate", + angle=[-1, 1], + axis="z", + center=[0, 0, 0], + p=0.5, + ), + dict(type="RandomRotate", angle=[-1 / 64, 1 / 64], axis="x", p=0.5), + dict(type="RandomRotate", angle=[-1 / 64, 1 / 64], axis="y", p=0.5), + dict(type="RandomScale", scale=[0.9, 1.1]), + # dict(type="RandomShift", shift=[0.2, 0.2, 0.2]), + dict(type="RandomFlip", p=0.5), + dict(type="RandomJitter", sigma=0.005, clip=0.02), + # dict( + # type="ElasticDistortion", + # distortion_params=[[0.2, 0.4], [0.8, 1.6]], + # ), + dict(type="ChromaticAutoContrast", p=0.2, blend_factor=None), + dict(type="ChromaticTranslation", p=0.95, ratio=0.05), + dict(type="ChromaticJitter", p=0.95, std=0.05), + # dict(type="HueSaturationTranslation", hue_max=0.2, saturation_max=0.2), + # dict(type="RandomColorDrop", p=0.2, color_augment=0.0), + dict( + type="GridSample", + grid_size=0.02, + hash_type="fnv", + mode="train", + return_grid_coord=True, + ), + dict(type="SphereCrop", point_max=102400, mode="random"), + 
dict(type="CenterShift", apply_z=False), + dict(type="NormalizeColor"), + # dict(type="ShufflePoint"), + dict(type="Add", keys_dict={"condition": "ScanNet"}), + dict(type="ToTensor"), + dict( + type="Collect", + keys=("coord", "grid_coord", "segment", "condition"), + feat_keys=("color", "normal"), + ), + ], + test_mode=False, + loop=2, # sampling weight + ), + # S3DIS + dict( + type="S3DISDataset", + split=("Area_1", "Area_2", "Area_3", "Area_4", "Area_6"), + data_root="data/s3dis", + transform=[ + dict(type="CenterShift", apply_z=True), + dict( + type="RandomDropout", + dropout_ratio=0.2, + dropout_application_ratio=0.2, + ), + # dict(type="RandomRotateTargetAngle", angle=(1/2, 1, 3/2), center=[0, 0, 0], axis="z", p=0.75), + dict( + type="RandomRotate", + angle=[-1, 1], + axis="z", + center=[0, 0, 0], + p=0.5, + ), + dict(type="RandomRotate", angle=[-1 / 64, 1 / 64], axis="x", p=0.5), + dict(type="RandomRotate", angle=[-1 / 64, 1 / 64], axis="y", p=0.5), + dict(type="RandomScale", scale=[0.9, 1.1]), + # dict(type="RandomShift", shift=[0.2, 0.2, 0.2]), + dict(type="RandomFlip", p=0.5), + dict(type="RandomJitter", sigma=0.005, clip=0.02), + # dict( + # type="ElasticDistortion", + # distortion_params=[[0.2, 0.4], [0.8, 1.6]], + # ), + dict(type="ChromaticAutoContrast", p=0.2, blend_factor=None), + dict(type="ChromaticTranslation", p=0.95, ratio=0.05), + dict(type="ChromaticJitter", p=0.95, std=0.05), + # dict(type="HueSaturationTranslation", hue_max=0.2, saturation_max=0.2), + # dict(type="RandomColorDrop", p=0.2, color_augment=0.0), + dict( + type="GridSample", + grid_size=0.02, + hash_type="fnv", + mode="train", + return_grid_coord=True, + ), + dict(type="SphereCrop", sample_rate=0.6, mode="random"), + dict(type="SphereCrop", point_max=204800, mode="random"), + dict(type="CenterShift", apply_z=False), + dict(type="NormalizeColor"), + # dict(type="ShufflePoint"), + dict(type="Add", keys_dict={"condition": "S3DIS"}), + dict(type="ToTensor"), + dict( + type="Collect", 
+ keys=("coord", "grid_coord", "segment", "condition"), + feat_keys=("color", "normal"), + ), + ], + test_mode=False, + loop=1, # sampling weight + ), + ], + ), + val=dict( + type="S3DISDataset", + split="Area_5", + data_root="data/s3dis", + transform=[ + dict(type="CenterShift", apply_z=True), + dict( + type="Copy", + keys_dict={"coord": "origin_coord", "segment": "origin_segment"}, + ), + dict( + type="GridSample", + grid_size=0.02, + hash_type="fnv", + mode="train", + return_grid_coord=True, + ), + dict(type="CenterShift", apply_z=False), + dict(type="NormalizeColor"), + dict(type="ToTensor"), + dict(type="Add", keys_dict={"condition": "S3DIS"}), + dict( + type="Collect", + keys=( + "coord", + "grid_coord", + "origin_coord", + "segment", + "origin_segment", + "condition", + ), + offset_keys_dict=dict(offset="coord", origin_offset="origin_coord"), + feat_keys=("color", "normal"), + ), + ], + test_mode=False, + ), + test=dict( + type="S3DISDataset", + split=["Hospital", "Office_Room_1", "Office_Room_2", "Parking_Lot"], + data_root="data/tof-360/preprocessed", + transform=[ + dict(type="CenterShift", apply_z=True), + dict(type="NormalizeColor"), + ], + test_mode=True, + test_cfg=dict( + voxelize=dict( + type="GridSample", + grid_size=0.02, + hash_type="fnv", + mode="test", + keys=("coord", "color", "normal"), + return_grid_coord=True, + ), + crop=None, + post_transform=[ + dict(type="CenterShift", apply_z=False), + dict(type="Add", keys_dict={"condition": "S3DIS"}), + dict(type="ToTensor"), + dict( + type="Collect", + keys=("coord", "grid_coord", "index", "condition"), + feat_keys=("color", "normal"), + ), + ], + aug_transform=[ + [ + dict( + type="RandomRotateTargetAngle", + angle=[0], + axis="z", + center=[0, 0, 0], + p=1, + ) + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[1 / 2], + axis="z", + center=[0, 0, 0], + p=1, + ) + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[1], + axis="z", + center=[0, 0, 0], + p=1, + ) + ], + [ + dict( + 
type="RandomRotateTargetAngle", + angle=[3 / 2], + axis="z", + center=[0, 0, 0], + p=1, + ) + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[0], + axis="z", + center=[0, 0, 0], + p=1, + ), + dict(type="RandomScale", scale=[0.95, 0.95]), + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[1 / 2], + axis="z", + center=[0, 0, 0], + p=1, + ), + dict(type="RandomScale", scale=[0.95, 0.95]), + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[1], + axis="z", + center=[0, 0, 0], + p=1, + ), + dict(type="RandomScale", scale=[0.95, 0.95]), + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[3 / 2], + axis="z", + center=[0, 0, 0], + p=1, + ), + dict(type="RandomScale", scale=[0.95, 0.95]), + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[0], + axis="z", + center=[0, 0, 0], + p=1, + ), + dict(type="RandomScale", scale=[1.05, 1.05]), + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[1 / 2], + axis="z", + center=[0, 0, 0], + p=1, + ), + dict(type="RandomScale", scale=[1.05, 1.05]), + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[1], + axis="z", + center=[0, 0, 0], + p=1, + ), + dict(type="RandomScale", scale=[1.05, 1.05]), + ], + [ + dict( + type="RandomRotateTargetAngle", + angle=[3 / 2], + axis="z", + center=[0, 0, 0], + p=1, + ), + dict(type="RandomScale", scale=[1.05, 1.05]), + ], + [dict(type="RandomFlip", p=1)], + ], + ), + ), +) diff --git a/assets/preprocessing/align_manhattan.py b/assets/preprocessing/align_manhattan.py new file mode 100644 index 0000000000000000000000000000000000000000..71f540aeb59a74fdb9b9227fae79188decf1320a --- /dev/null +++ b/assets/preprocessing/align_manhattan.py @@ -0,0 +1,53 @@ +from .utils_from_LGT_Net import * +import sys +import numpy as np +import cv2 +import os +from PIL import Image +import glob +import json +from natsort import natsorted +from tqdm import tqdm + +def config_setup(): + config = {} + config["home_param"] = "/" + return config + +def main(): + config = 
config_setup() + print(f"Now Processing: {config["home_param"]}...") + input_folder = f"{config["home_param"]}/RGB" + output_folder = f"{config["home_param"]}/RGB_mh_aligned" + os.makedirs(output_folder, exist_ok=True) + + input_files = natsorted(glob.glob(f"{input_folder}/*_rgb.png")) + mat_dict = {} + mat_dict["data"] = [] + + for input_file in tqdm(input_files): + + # disable OpenCV3's non thread safe OpenCL option + cv2.ocl.setUseOpenCL(False) + + # Read image + img_ori = np.array(Image.open(input_file)) + + olines, vp, views, edges, panoEdge, score, angle = panoEdgeDetection(img_ori, + qError=0.7, + refineIter=3) + + img, R = rotatePanorama(img_ori / 255.0, vp[2::-1]) + + file_name = input_file.split("/")[-1].split(".")[0] + file_path = f"{output_folder}/{file_name}_aligned.png" + Image.fromarray((img * 255).astype(np.uint8)).save(file_path) + + each_dict = {"input_file": input_file, "output_file": file_path, "rotation_matrix": R.tolist()} + mat_dict["data"].append(each_dict) + + with open(f'{output_folder}/rotation_matrix.json', 'w') as f: + json.dump(mat_dict, f, indent=2) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/assets/preprocessing/depth2normal.py b/assets/preprocessing/depth2normal.py new file mode 100644 index 0000000000000000000000000000000000000000..ff0baff404cf35ee4441df8e6e3a160d02f66a30 --- /dev/null +++ b/assets/preprocessing/depth2normal.py @@ -0,0 +1,131 @@ +# common +import numpy as np +import open3d as o3d +import os +import glob +import argparse +import yaml +import cv2 +from tqdm import tqdm + + +def config_setup(): + config = {} + config["home_param"] = "/" + config["depth_max"] = 10 + config["depth_min"] = 0.5 + return config + +def load_depth(depth_path, config): + # depth follows stanford-2D-3D-S (URL: http://buildingparser.stanford.edu/images/2D-3D-S_2017.pdf) + # one unit of change in pixel intensity (e.g. 
a value from 45 to 46) corresponds to a 1/512 m change in depth (16 bit) + depth_img = cv2.imread(depth_path, cv2.IMREAD_ANYDEPTH) / 512 + + # depth is defined from depth_min to depth_max + depth_img[depth_img > config["depth_max"]] = config["depth_max"] + depth_img[depth_img < config["depth_min"]] = 0 + return depth_img + +def equi2pcd(depth): + # ----------------------------------------- + # image: [h,w,c], range:[0-255] + # depth: [h,w], unit:meter + # ----------------------------------------- + + H,W = depth.shape + + # intrinsic parameter + int_mtx = np.array([[max(H, W), 0, W/2], [0, max(H, W), H/2], [0, 0, 1]]) + if int_mtx.max() > 1: + int_mtx[0, :] = int_mtx[0, :] / float(W) + int_mtx[1, :] = int_mtx[1, :] / float(H) + int_mtx_pix = int_mtx * np.array([[W], [H], [1.]]) + int_mtx_pix = int_mtx * np.array([[W], [H], [1.]]) + cam_param_pix_inv = np.linalg.inv(int_mtx_pix) + k_00, k_02, k_11, k_12 = cam_param_pix_inv[0, 0], cam_param_pix_inv[0, 2], \ + cam_param_pix_inv[1, 1], cam_param_pix_inv[1, 2] + + # Convert from meshgrid of depth images to xyz 3D coordinates + xyz = np.zeros((H*W,3)) + sx = np.arange(H).repeat(W) + sy = np.arange(W)[None,:].repeat(H,axis=0).reshape(-1) + sd = depth.reshape(-1) + yaw = 2 * np.pi * ((sy+0.5) * k_00 + k_02) # yaw:-π~πの範囲に変換 + pitch = 2 * np.pi * ((sx+0.5) * k_11 + k_12) # pitch:-π/2~π/2の範囲に変換 + xyz[:,0] = np.cos(pitch) * np.sin(yaw) * abs(sd) + xyz[:,1] = np.sin(pitch) * abs(sd) + xyz[:,2] = np.cos(pitch) * np.cos(yaw) * abs(sd) + + # Stored as Open3d pointcloud + pcd = o3d.geometry.PointCloud() + pcd.points = o3d.utility.Vector3dVector(xyz) + + return pcd + +def pcd2normalimg(pcd, depth): + # -------------------------------- + # Normals are calculated on pointcloud + # -------------------------------- + H, W = depth.shape + pcd.estimate_normals() + normal = np.asarray(pcd.normals) + normal = normal_align(normal, pcd) + + # stanford-2D-3D-S adapts left-handed system but open3d follows right-handed system + normal[:,2] *= 
-1 + + return normal + +def normal_align(normal, pcd): + # -------------------------------- + # All of the vector are aligned with the direction to the camera + # -------------------------------- + points = np.asarray(pcd.points) + vec2cam = np.array([0,0,0])[None,:].repeat(points.shape[0], axis=0) - points + direction = np.sum(np.multiply(vec2cam, normal),axis=1) < 0 + normal[direction, :] *= -1 + return normal + +def main(): + config = config_setup() + print("home_path:", config["home_param"]) + + save_folder_path = config["home_param"] + "normal/" + if not os.path.exists(save_folder_path): + os.mkdir(save_folder_path) + + # Search depth images defined as png + depth_paths = sorted(glob.glob(config["home_param"] + "depth/*.png")) + + for idx, depth_path in tqdm(enumerate(depth_paths)): + print("\n") + print("depth file:", depth_path.split("/")[-1]) + depth = load_depth(depth_path, config) + + # To reduce the calculation costs + H, W = (int(depth.shape[0]/4), int(depth.shape[1]/4)) + depth_img = cv2.resize(depth, (W, H), interpolation=cv2.INTER_NEAREST) + + pcd = equi2pcd(depth_img, config) + normal = pcd2normalimg(pcd, depth_img) + + # To visualize the normal as colored pointcloud (It contains normal information itself) + pcd.normals = o3d.utility.Vector3dVector(normal) + pcd.colors = o3d.utility.Vector3dVector((normal+1)/2) + # o3d.io.write_point_cloud(save_folder_path + f"{idx:03d}_" + "equi_normal.ply", pcd) + + + save_path = save_folder_path + f"{idx:03d}_" + "equi_normal.png" + print("output image:", save_path.split("/")[-1]) + + # 8 bit value centered at 127.5 (-1~1 >> 0~255) + normal_img = 127.5*(normal.reshape(H,W,3)+1.) 
+ + # outlier, blank + normal_img[depth_img1, but in MATLAB, uint8: 256->255 + I[I>255] = 255 + HHA = I.astype(np.uint8) + return HHA + +def main(): + # D, RD = getImage() + # camera_matrix = getCameraParam('color') + # print('max gray value: ', np.max(D)) # make sure that the image is in 'meter' + # hha = getHHA(camera_matrix, D, RD) + # hha_complete = getHHA(camera_matrix, D, D) + # cv2.imwrite('demo/hha.png', hha) + # cv2.imwrite('demo/hha_complete.png', hha_complete) + config = config_setup() + + depth_paths = sorted(glob.glob(config["scene_path"] + "depth/*.png")) + + for i, depth_path in enumerate(depth_paths): + depth = cv2.imread(depth_path, -1) + ## make HHA + depth = depth / 1000 + H_ori, W_ori = (depth.shape[0], depth.shape[1]) + camera_matrix = np.array([[max(H_ori, W_ori), 0, W_ori/2], [0, max(H_ori, W_ori), H_ori/2], [0, 0, 1]]) + H, W = (int(depth.shape[0]/4), int(depth.shape[1]/4)) + depth_resize = cv2.resize(depth, (W, H), interpolation=cv2.INTER_NEAREST) + hha = getHHA(camera_matrix, depth_resize, depth_resize) + cv2.imwrite(config["scene_path"]+f'HHA/{i:03d}_equi_hha.png', cv2.resize(hha, (W_ori, H_ori), interpolation=cv2.INTER_NEAREST)) + + +if __name__ == "__main__": + main() + + ''' multi-peocessing example ''' + ''' + from multiprocessing import Pool + + def generate_hha(i): + # generate hha for the i-th image + return + + processNum = 16 + pool = Pool(processNum) + + for i in range(img_num): + print(i) + pool.apply_async(generate_hha, args=(i,)) + pool.close() + pool.join() + ''' diff --git a/assets/preprocessing/utils_from_Depth2HHA_python/__pycache__/getCameraParam.cpython-38.pyc b/assets/preprocessing/utils_from_Depth2HHA_python/__pycache__/getCameraParam.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..16acebb3ee8df0beb6c569554ea4e44920861813 Binary files /dev/null and b/assets/preprocessing/utils_from_Depth2HHA_python/__pycache__/getCameraParam.cpython-38.pyc differ diff --git 
a/assets/preprocessing/utils_from_Depth2HHA_python/__pycache__/rgbd_util.cpython-38.pyc b/assets/preprocessing/utils_from_Depth2HHA_python/__pycache__/rgbd_util.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9cc9b4be3ff4f042e7a60e74b7599b3c349081b9 Binary files /dev/null and b/assets/preprocessing/utils_from_Depth2HHA_python/__pycache__/rgbd_util.cpython-38.pyc differ diff --git a/assets/preprocessing/utils_from_Depth2HHA_python/__pycache__/util.cpython-38.pyc b/assets/preprocessing/utils_from_Depth2HHA_python/__pycache__/util.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1d839c322e532f77cf258f9d827007e9673123a8 Binary files /dev/null and b/assets/preprocessing/utils_from_Depth2HHA_python/__pycache__/util.cpython-38.pyc differ diff --git a/assets/preprocessing/utils_from_Depth2HHA_python/getCameraParam.py b/assets/preprocessing/utils_from_Depth2HHA_python/getCameraParam.py new file mode 100644 index 0000000000000000000000000000000000000000..edf66b587448dff00fd60435a6818b9e59e212d0 --- /dev/null +++ b/assets/preprocessing/utils_from_Depth2HHA_python/getCameraParam.py @@ -0,0 +1,21 @@ +# --*-- coding:utf-8 --*-- +import numpy as np + +''' +getCameraParam: get the camera matrix +colOrZ: color or depth +''' +def getCameraParam(colorOrZ='color'): + if colorOrZ == 'color': + fx_rgb = 5.1885790117450188e+02 + fy_rgb = 5.1946961112127485e+02 + cx_rgb = 3.2558244941119034e+02 + cy_rgb = 2.5373616633400465e+02 + C = np.array([[fx_rgb, 0, cx_rgb], [0, fy_rgb, cy_rgb], [0, 0, 1]]) + else: + fx_d = 5.8262448167737955e+02 + fy_d = 5.8269103270988637e+02 + cx_d = 3.1304475870804731e+02 + cy_d = 2.3844389626620386e+02 + C = np.array([[fx_d, 0, cx_d], [0, fy_d, cy_d], [0, 0, 1]]) + return C \ No newline at end of file diff --git a/assets/preprocessing/utils_from_Depth2HHA_python/principle.png b/assets/preprocessing/utils_from_Depth2HHA_python/principle.png new file mode 100644 index 
0000000000000000000000000000000000000000..d4ac38c22aa24ed51a551716f977eb1ee22ae4ed --- /dev/null +++ b/assets/preprocessing/utils_from_Depth2HHA_python/principle.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92d9897422aff392b7614d85ea297a2a3bce1ab303729a77ebfca5e9b0bb9e33 +size 276546 diff --git a/assets/preprocessing/utils_from_Depth2HHA_python/rgbd_util.py b/assets/preprocessing/utils_from_Depth2HHA_python/rgbd_util.py new file mode 100644 index 0000000000000000000000000000000000000000..38b455906d74feea26927d198675a2cb4aea01f9 --- /dev/null +++ b/assets/preprocessing/utils_from_Depth2HHA_python/rgbd_util.py @@ -0,0 +1,150 @@ +# --*-- coding:utf-8 --*-- +import numpy as np +from utils.util import * +np.seterr(divide='ignore', invalid='ignore') + +''' +z: depth image in 'centimetres' +missingMask: a mask +C: camera matrix +''' +def processDepthImage(z, missingMask, C): + yDirParam_angleThresh = np.array([45, 15]) # threshold to estimate the direction of the gravity + yDirParam_iter = np.array([5, 5]) + yDirParam_y0 = np.array([0, 1, 0]) + + normalParam_patchSize = np.array([3, 10]) + + X, Y, Z = getPointCloudFromZ(z, C, 1) + + # with open('pd.txt', 'w', encoding='utf-8') as f: + # for i in range(X.shape[0]): + # for j in range(X.shape[1]): + # f.write('{} {} {}\n'.format(str(X[i,j]), str(Y[i,j]), str(Z[i,j]))) + + # restore x-y-z position + pc = np.zeros([z.shape[0], z.shape[1], 3]) + pc[:,:,0] = X + pc[:,:,1] = Y + pc[:,:,2] = Z + + N1, b1 = computeNormalsSquareSupport(z/100, missingMask, normalParam_patchSize[0], + 1, C, np.ones(z.shape)) + N2, b2 = computeNormalsSquareSupport(z/100, missingMask, normalParam_patchSize[1], + 1, C, np.ones(z.shape)) + + N = N1 + + # Compute the direction of gravity + yDir = getYDir(N2, yDirParam_angleThresh, yDirParam_iter, yDirParam_y0) + y0 = np.array([[0, 1, 0]]).T + R = getRMatrix(y0, yDir) + + # rotate the pc and N + NRot = rotatePC(N, R.T) + + pcRot = rotatePC(pc, R.T) + h = -pcRot[:,:,1] + yMin 
= np.percentile(h, 0) + if (yMin > -90): + yMin = -130 + h = h - yMin + + return pc, N, yDir, h, pcRot, NRot + +''' +getPointCloudFromZ: use depth image and camera matrix to get pointcloud +Z is in 'centimetres' +C: camera matrix +s: is the factor by which Z has been upsampled +''' +def getPointCloudFromZ(Z, C, s=1): + h, w= Z.shape + xx, yy = np.meshgrid(np.array(range(w))+1, np.array(range(h))+1) + # color camera parameters + cc_rgb = C[0:2,2] * s # the first two lines of colomn-3, x0 and the y0 + fc_rgb = np.diag(C[0:2,0:2]) * s # number on the diagonal line + x3 = np.multiply((xx - cc_rgb[0]), Z) / fc_rgb[0] + y3 = np.multiply((yy - cc_rgb[1]), Z) / fc_rgb[1] + z3 = Z + return x3, y3, z3 + +''' + Clip out a 2R+1 x 2R+1 window at each point and estimate + the normal from points within this window. In case the window + straddles more than a single superpixel, only take points in the + same superpixel as the centre pixel. + +Input: + depthImage: in meters + missingMask: boolean mask of what data was missing + R: radius of clipping + sc: to upsample or not + superpixels: superpixel map to define bounadaries that should + not be straddled +''' +def computeNormalsSquareSupport(depthImage, missingMask, R, sc, cameraMatrix, superpixels): + depthImage = depthImage*100 # convert to centi metres + X, Y, Z = getPointCloudFromZ(depthImage, cameraMatrix, sc) + Xf = X + Yf = Y + Zf = Z + pc = np.zeros([depthImage.shape[0], depthImage.shape[1], 3]) + pc[:,:,0] = Xf + pc[:,:,1] = Yf + pc[:,:,2] = Zf + XYZf = np.copy(pc) + + # find missing value + ind = np.where(missingMask == 1) + X[ind] = np.nan + Y[ind] = np.nan + Z[ind] = np.nan + + one_Z = np.expand_dims(1 / Z, axis=2) + X_Z = np.divide(X, Z) + Y_Z = np.divide(Y, Z) + one = np.copy(Z) + one[np.invert(np.isnan(one[:, :]))] = 1 + ZZ = np.multiply(Z, Z) + X_ZZ = np.expand_dims(np.divide(X, ZZ), axis=2) + Y_ZZ = np.expand_dims(np.divide(Y, ZZ), axis=2) + + X_Z_2 = np.expand_dims(np.multiply(X_Z, X_Z), axis=2) + XY_Z = 
np.expand_dims(np.multiply(X_Z, Y_Z), axis=2) + Y_Z_2 = np.expand_dims(np.multiply(Y_Z, Y_Z), axis=2) + + AtARaw = np.concatenate((X_Z_2, XY_Z, np.expand_dims(X_Z, axis=2), Y_Z_2, + np.expand_dims(Y_Z, axis=2), np.expand_dims(one, axis=2)), axis=2) + + AtbRaw = np.concatenate((X_ZZ, Y_ZZ, one_Z), axis=2) + + # with clipping + AtA = filterItChopOff(np.concatenate((AtARaw, AtbRaw), axis=2), R, superpixels) + Atb = AtA[:, :, AtARaw.shape[2]:] + AtA = AtA[:, :, :AtARaw.shape[2]] + + AtA_1, detAtA = invertIt(AtA) + N = mutiplyIt(AtA_1, Atb) + + divide_fac = np.sqrt(np.sum(np.multiply(N, N), axis=2)) + # with np.errstate(divide='ignore'): + b = np.divide(-detAtA, divide_fac) + for i in range(3): + N[:, :, i] = np.divide(N[:, :, i], divide_fac) + + # Reorient the normals to point out from the scene. + # with np.errstate(invalid='ignore'): + SN = np.sign(N[:, :, 2]) + SN[SN == 0] = 1 + extend_SN = np.expand_dims(SN, axis=2) + extend_SN = np.concatenate((extend_SN, extend_SN, extend_SN), axis=2) + N = np.multiply(N, extend_SN) + b = np.multiply(b, SN) + sn = np.sign(np.sum(np.multiply(N, XYZf), axis=2)) + sn[np.isnan(sn)] = 1 + sn[sn == 0] = 1 + extend_sn = np.expand_dims(sn, axis=2) + N = np.multiply(extend_sn, N) + b = np.multiply(b, sn) + return N, b \ No newline at end of file diff --git a/assets/preprocessing/utils_from_Depth2HHA_python/util.py b/assets/preprocessing/utils_from_Depth2HHA_python/util.py new file mode 100644 index 0000000000000000000000000000000000000000..4f247a862fea40703dccb5944a90c67da202287d --- /dev/null +++ b/assets/preprocessing/utils_from_Depth2HHA_python/util.py @@ -0,0 +1,200 @@ +# --*-- coding:utf-8 --*-- +import numpy as np +import cv2 +from scipy import signal + +''' +helper function +''' +def filterItChopOff(f, r, sp): + f[np.isnan(f)] = 0 + H, W, d = f.shape + B = np.ones([2 * r + 1, 2 * r + 1]) # 2r+1 * 2r+1 neighbourhood + + minSP = cv2.erode(sp, B, iterations=1) + maxSP = cv2.dilate(sp, B, iterations=1) + + ind = 
np.where(np.logical_or(minSP != sp, maxSP != sp)) + + spInd = np.reshape(range(np.size(sp)), sp.shape,'F') + + delta = np.zeros(f.shape) + delta = np.reshape(delta, (H * W, d), 'F') + f = np.reshape(f, (H * W, d),'F') + + # calculate delta + + I, J = np.unravel_index(ind, [H, W], 'C') + for i in range(np.size(ind)): + x = I[i] + y = J[i] + clipInd = spInd[max(0, x - r):min(H-1, x + r), max(0, y - r):min(W-1, y + r)] + diffInd = clipInd[sp[clipInd] != sp[x, y]] + delta[ind[i], :] = np.sum(f[diffInd, :], 1) + delta = np.reshape(delta, (H, W, d), 'F') + f = np.reshape(f, (H, W, d), 'F') + fFilt = np.zeros([H, W, d]) + + for i in range(f.shape[2]): + # fFilt(:,:,i) = filter2(B, f(:,:,i)); + tmp = cv2.filter2D(np.rot90(f[:, :, i], 2), -1, np.rot90(np.rot90(B, 2), 2)) + tmp = signal.convolve2d(np.rot90(f[:, :, i], 2), np.rot90(np.rot90(B, 2), 2), mode="same") + fFilt[:, :, i] = np.rot90(tmp, 2) + fFilt = fFilt - delta + return fFilt + +''' +helper function +''' +def mutiplyIt(AtA_1, Atb): + result = np.zeros([Atb.shape[0], Atb.shape[1], 3]) + result[:, :, 0] = np.multiply(AtA_1[:, :, 0], Atb[:, :, 0]) + np.multiply(AtA_1[:, :, 1], + Atb[:, :, 1]) + np.multiply( + AtA_1[:, :, 2], Atb[:, :, 2]) + result[:, :, 1] = np.multiply(AtA_1[:, :, 1], Atb[:, :, 0]) + np.multiply(AtA_1[:, :, 3], + Atb[:, :, 1]) + np.multiply( + AtA_1[:, :, 4], Atb[:, :, 2]) + result[:, :, 2] = np.multiply(AtA_1[:, :, 2], Atb[:, :, 0]) + np.multiply(AtA_1[:, :, 4], + Atb[:, :, 1]) + np.multiply( + AtA_1[:, :, 5], Atb[:, :, 2]) + return result + +''' +helper function +''' +def invertIt(AtA): + AtA_1 = np.zeros([AtA.shape[0], AtA.shape[1], 6]) + AtA_1[:, :, 0] = np.multiply(AtA[:, :, 3], AtA[:, :, 5]) - np.multiply(AtA[:, :, 4], AtA[:, :, 4]) + AtA_1[:, :, 1] = -np.multiply(AtA[:, :, 1], AtA[:, :, 5]) + np.multiply(AtA[:, :, 2], AtA[:, :, 4]) + AtA_1[:, :, 2] = np.multiply(AtA[:, :, 1], AtA[:, :, 4]) - np.multiply(AtA[:, :, 2], AtA[:, :, 3]) + AtA_1[:, :, 3] = np.multiply(AtA[:, :, 0], AtA[:, :, 5]) - 
np.multiply(AtA[:, :, 2], AtA[:, :, 2]) + AtA_1[:, :, 4] = -np.multiply(AtA[:, :, 0], AtA[:, :, 4]) + np.multiply(AtA[:, :, 1], AtA[:, :, 2]) + AtA_1[:, :, 5] = np.multiply(AtA[:, :, 0], AtA[:, :, 3]) - np.multiply(AtA[:, :, 1], AtA[:, :, 1]) + + x1 = np.multiply(AtA[:, :, 0], AtA_1[:, :, 0]) + x2 = np.multiply(AtA[:, :, 1], AtA_1[:, :, 1]) + x3 = np.multiply(AtA[:, :, 2], AtA_1[:, :, 2]) + + detAta = x1 + x2 + x3 + return AtA_1, detAta + +''' +Compute the direction of gravity +N: normal field +iter: number of 'big' iterations +''' +def getYDir(N, angleThresh, iter, y0): + y = y0 + for i in range(len(angleThresh)): + thresh = np.pi * angleThresh[i] / 180 # convert it to radian measure + y = getYDirHelper(N, y, thresh, iter[i]) + return y + +''' +N: HxWx3 matrix with normal at each pixel. +y0: the initial gravity direction +thresh: in degrees the threshold for mapping to parallel to gravity and perpendicular to gravity +iter: number of iterations to perform +''' +def getYDirHelper(N, y0, thresh, num_iter): + dim = N.shape[0] * N.shape[1] + + # change the third dimension to the first-order. (480, 680, 3) => (3, 480, 680) + nn = np.swapaxes(np.swapaxes(N,0,2),1,2) + nn = np.reshape(nn, (3, dim), 'F') + + # remove these whose number is NAN + idx = np.where(np.invert(np.isnan(nn[0,:])))[0] + nn = nn[:,idx] + + # Set it up as a optimization problem + yDir = y0; + for i in range(num_iter): + sim0 = np.dot(yDir.T, nn) + indF = abs(sim0) > np.cos(thresh) # calculate 'floor' set. |sin(theta)| < sin(thresh) ==> |cos(theta)| > cos(thresh) + indW = abs(sim0) < np.sin(thresh) # calculate 'wall' set. 
+ if(len(indF.shape) == 2): + NF = nn[:, indF[0,:]] + NW = nn[:, indW[0,:]] + else: + NF = nn[:, indF] + NW = nn[:, indW] + A = np.dot(NW, NW.T) - np.dot(NF, NF.T) + b = np.zeros([3,1]) + c = NF.shape[1] + w,v = np.linalg.eig(A) # w:eigenvalues; v:eigenvectors + min_ind = np.argmin(w) # min index + newYDir = v[:,min_ind] + yDir = newYDir * np.sign(np.dot(yDir.T, newYDir)) + return yDir + +''' +getRMatrix: Generate a rotation matrix that + if yf is a scalar, rotates about axis yi by yf degrees + if yf is an axis, rotates yi to yf in the direction given by yi x yf +Input: yi is an axis 3x1 vector + yf could be a scalar of axis + +''' +# def getRMatrix(yi, yf): +# if (np.isscalar(yf)): +# ax = yi / np.linalg.norm(yi) # norm(A) = max(svd(A)) +# phi = yf +# else: +# yi = yi / np.linalg.norm(yi) +# yf = yf / np.linalg.norm(yf) +# ax = np.cross(yi.T, yf.T).T +# ax = ax / np.linalg.norm(ax) +# # find angle of rotation +# phi = np.degrees(np.arccos(np.dot(yi.T, yf))) + +# if (abs(phi) > 0.1): +# phi = phi * (np.pi / 180) + +# s_hat = np.array([[0, -ax[2], ax[1]], +# [ax[2], 0, -ax[0]], +# [-ax[1], ax[0], 0]]) +# R = np.eye(3) + np.sin(phi) * s_hat + (1 - np.cos(phi)) * np.dot(s_hat, s_hat) # dot??? +# else: +# R = np.eye(3) +# return R + +def getRMatrix(yi, yf): + if (np.isscalar(yf)): + ax = yi / np.linalg.norm(yi) # norm(A) = max(svd(A)) + phi = yf + else: + yi = yi / np.linalg.norm(yi) + yf = yf / np.linalg.norm(yf) + ax = np.cross(yi.T, yf.T).T + ax = ax / np.linalg.norm(ax) + # find angle of rotation + phi = np.degrees(np.arccos(np.dot(yi.T, yf))) + ax = np.squeeze(ax, axis=-1) + + if (abs(phi) > 0.1): + phi = phi * (np.pi / 180) + + s_hat = np.array([[0, -ax[2], ax[1]], + [ax[2], 0, -ax[0]], + [-ax[1], ax[0], 0]], dtype=np.float64) + R = np.eye(3) + np.sin(phi) * s_hat + (1 - np.cos(phi)) * np.dot(s_hat, s_hat) # dot??? 
+ else: + R = np.eye(3) + return R + +''' +Calibration of gravity direction +''' +def rotatePC(pc, R): + if(np.array_equal(R, np.eye(3))): + return pc + else: + R = R.astype(np.float64) + dim = pc.shape[0] * pc.shape[1] + pc = np.swapaxes(np.swapaxes(pc, 0, 2), 1, 2) + res = np.reshape(pc, (3, dim), 'F') + res = np.dot(R, res) + res = np.reshape(res, pc.shape, 'F') + res = np.swapaxes(np.swapaxes(res, 0, 1), 1, 2) + return res \ No newline at end of file diff --git a/assets/preprocessing/utils_from_LGT_Net/__init__.py b/assets/preprocessing/utils_from_LGT_Net/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..11899be3b477996772cd1ae754815056f22ca205 --- /dev/null +++ b/assets/preprocessing/utils_from_LGT_Net/__init__.py @@ -0,0 +1,4 @@ +""" +@date: 2021/7/5 +@description: +""" diff --git a/assets/preprocessing/utils_from_LGT_Net/filter.py b/assets/preprocessing/utils_from_LGT_Net/filter.py new file mode 100644 index 0000000000000000000000000000000000000000..6e95ec7c48be2ab545cbd7bab5b33c93ada38022 --- /dev/null +++ b/assets/preprocessing/utils_from_LGT_Net/filter.py @@ -0,0 +1,90 @@ +""" +@date: 2021/7/5 +@description: +""" +import json +import math +import shutil + +import numpy as np +from utils.boundary import * +import dataset +import os +from tqdm import tqdm +from PIL import Image +from visualization.boundary import * +from visualization.floorplan import * +from shapely.geometry import Polygon, Point + + +def filter_center(ceil_corners): + xyz = uv2xyz(ceil_corners, plan_y=1.6) + xz = xyz[:, ::2] + poly = Polygon(xz).buffer(-0.01) + return poly.contains(Point(0, 0)) + + +def filter_boundary(corners): + if is_ceil_boundary(corners): + return True + elif is_floor_boundary(corners): + return True + else: + # An intersection occurs and an exception is considered + return False + + +def filter_self_intersection(corners): + xz = uv2xyz(corners)[:, ::2] + poly = Polygon(xz) + return poly.is_valid + + +def filter_dataset(dataset, 
show=False, output_dir=None): + if output_dir is None: + output_dir = os.path.join(dataset.root_dir, dataset.mode) + output_img_dir = os.path.join(output_dir, 'img_align') + output_label_dir = os.path.join(output_dir, 'label_cor_align') + else: + output_dir = os.path.join(output_dir, dataset.mode) + output_img_dir = os.path.join(output_dir, 'img') + output_label_dir = os.path.join(output_dir, 'label_cor') + + if not os.path.exists(output_img_dir): + os.makedirs(output_img_dir) + + if not os.path.exists(output_label_dir): + os.makedirs(output_label_dir) + + bar = tqdm(dataset, total=len(dataset)) + for data in bar: + name = data['name'] + bar.set_description(f"Processing {name}") + img = data['img'] + corners = data['corners'] + + if not filter_center(corners[1::2]): + if show: + draw_boundaries(img, corners_list=[corners[0::2], corners[1::2]], show=True) + if not os.path.exists(data['img_path']): + print("already remove") + else: + print(f"move {name}") + shutil.move(data['img_path'], os.path.join(output_img_dir, os.path.basename(data['img_path']))) + shutil.move(data['label_path'], os.path.join(output_label_dir, os.path.basename(data['label_path']))) + + +def execute_filter_dataset(root_dir, dataset_name="PanoS2D3DDataset", modes=None, output_dir=None): + if modes is None: + modes = ["train", "test", "valid"] + + for mode in modes: + print("mode: {}".format(mode)) + + filter_dataset(getattr(dataset, dataset_name)(root_dir, mode), show=False, output_dir=output_dir) + + +if __name__ == '__main__': + execute_filter_dataset(root_dir='/root/data/hd/hnet_dataset', + dataset_name="PanoS2D3DDataset", modes=['train', "test", "valid"], + output_dir='/root/data/hd/hnet_dataset_close') diff --git a/assets/preprocessing/utils_from_LGT_Net/pano_lsd_align.py b/assets/preprocessing/utils_from_LGT_Net/pano_lsd_align.py new file mode 100644 index 0000000000000000000000000000000000000000..2df4285d7ce61726f0b5582d9e0ac8da0702999c --- /dev/null +++ 
'''
Helper functions for panorama preprocessing.
Most of the code is converted from LayoutNet official's matlab code.
All functions, naming rules and data flow follow the official code for
easier converting and comparing.
Code is not optimized for python or numpy yet.
'''

import sys
import numpy as np
from scipy.ndimage import map_coordinates
import cv2
from pylsd.lsd import lsd


def computeUVN(n, in_, planeID):
    '''
    compute v given u and normal.
    '''
    # Cyclically permute the normal so the math below is written once for plane 1.
    if planeID == 2:
        n = np.array([n[1], n[2], n[0]])
    elif planeID == 3:
        n = np.array([n[2], n[0], n[1]])
    bc = n[0] * np.sin(in_) + n[1] * np.cos(in_)
    bs = n[2]
    # 1e-9 guards against division by zero when the normal lies in the plane.
    out = np.arctan(-bc / (bs + 1e-9))
    return out


def computeUVN_vec(n, in_, planeID):
    '''
    vectorization version of computeUVN
    @n N x 3
    @in_ MN x 1
    @planeID N
    '''
    n = n.copy()
    # np.roll reproduces the per-plane cyclic permutation of computeUVN.
    if (planeID == 2).sum():
        n[planeID == 2] = np.roll(n[planeID == 2], 2, axis=1)
    if (planeID == 3).sum():
        n[planeID == 3] = np.roll(n[planeID == 3], 1, axis=1)
    n = np.repeat(n, in_.shape[0] // n.shape[0], axis=0)
    assert n.shape[0] == in_.shape[0]
    bc = n[:, [0]] * np.sin(in_) + n[:, [1]] * np.cos(in_)
    bs = n[:, [2]]
    out = np.arctan(-bc / (bs + 1e-9))
    return out


def xyz2uvN(xyz, planeID=1):
    '''Convert N x 3 points on the sphere to (u, v) angles w.r.t. plane planeID.'''
    ID1 = (int(planeID) - 1 + 0) % 3
    ID2 = (int(planeID) - 1 + 1) % 3
    ID3 = (int(planeID) - 1 + 2) % 3
    normXY = np.sqrt(xyz[:, [ID1]] ** 2 + xyz[:, [ID2]] ** 2)
    normXY[normXY < 0.000001] = 0.000001
    normXYZ = np.sqrt(xyz[:, [ID1]] ** 2 + xyz[:, [ID2]] ** 2 + xyz[:, [ID3]] ** 2)
    v = np.arcsin(xyz[:, [ID3]] / normXYZ)
    u = np.arcsin(xyz[:, [ID1]] / normXY)
    # Fold u into (-pi, pi] when the second axis is negative (back hemisphere).
    valid = (xyz[:, [ID2]] < 0) & (u >= 0)
    u[valid] = np.pi - u[valid]
    valid = (xyz[:, [ID2]] < 0) & (u <= 0)
    u[valid] = -np.pi - u[valid]
    uv = np.hstack([u, v])
    uv[np.isnan(uv[:, 0]), 0] = 0
    return uv


def uv2xyzN(uv, planeID=1):
    '''Convert N x 2 (u, v) angles back to unit 3-D points w.r.t. plane planeID.'''
    ID1 = (int(planeID) - 1 + 0) % 3
    ID2 = (int(planeID) - 1 + 1) % 3
    ID3 = (int(planeID) - 1 + 2) % 3
    xyz = np.zeros((uv.shape[0], 3))
    xyz[:, ID1] = np.cos(uv[:, 1]) * np.sin(uv[:, 0])
    xyz[:, ID2] = np.cos(uv[:, 1]) * np.cos(uv[:, 0])
    xyz[:, ID3] = np.sin(uv[:, 1])
    return xyz


def uv2xyzN_vec(uv, planeID):
    '''
    vectorization version of uv2xyzN
    @uv N x 2
    @planeID N
    '''
    assert (planeID.astype(int) != planeID).sum() == 0
    planeID = planeID.astype(int)
    ID1 = (planeID - 1 + 0) % 3
    ID2 = (planeID - 1 + 1) % 3
    ID3 = (planeID - 1 + 2) % 3
    ID = np.arange(len(uv))
    xyz = np.zeros((len(uv), 3))
    xyz[ID, ID1] = np.cos(uv[:, 1]) * np.sin(uv[:, 0])
    xyz[ID, ID2] = np.cos(uv[:, 1]) * np.cos(uv[:, 0])
    xyz[ID, ID3] = np.sin(uv[:, 1])
    return xyz


def warpImageFast(im, XXdense, YYdense):
    '''Bilinear warp: sample im at the dense (XXdense, YYdense) 1-based coordinates.'''
    minX = max(1., np.floor(XXdense.min()) - 1)
    minY = max(1., np.floor(YYdense.min()) - 1)

    maxX = min(im.shape[1], np.ceil(XXdense.max()) + 1)
    maxY = min(im.shape[0], np.ceil(YYdense.max()) + 1)

    # Crop to the bounding box of the requested samples before interpolating.
    im = im[int(round(minY-1)):int(round(maxY)),
            int(round(minX-1)):int(round(maxX))]

    assert XXdense.shape == YYdense.shape
    out_shape = XXdense.shape
    coordinates = [
        (YYdense - minY).reshape(-1),
        (XXdense - minX).reshape(-1),
    ]
    im_warp = np.stack([
        map_coordinates(im[..., c], coordinates, order=1).reshape(out_shape)
        for c in range(im.shape[-1])],
        axis=-1)

    return im_warp


def rotatePanorama(img, vp=None, R=None):
    '''
    Rotate panorama
    if R is given, vp (vanishing point) will be overlooked
    otherwise R is computed from vp
    '''
    sphereH, sphereW, C = img.shape

    # new uv coordinates
    TX, TY = np.meshgrid(range(1, sphereW + 1), range(1, sphereH + 1))
    TX = TX.reshape(-1, 1, order='F')
    TY = TY.reshape(-1, 1, order='F')
    ANGx = (TX - sphereW/2 - 0.5) / sphereW * np.pi * 2
    ANGy = -(TY - sphereH/2 - 0.5) / sphereH * np.pi
    uvNew = np.hstack([ANGx, ANGy])
    xyzNew = uv2xyzN(uvNew, 1)

    # rotation matrix
    if R is None:
        R = np.linalg.inv(vp.T)

    xyzOld = np.linalg.solve(R, xyzNew.T).T
    uvOld = xyz2uvN(xyzOld, 1)

    Px = (uvOld[:, 0] + np.pi) / (2*np.pi) * sphereW + 0.5
    Py = (-uvOld[:, 1] + np.pi/2) / np.pi * sphereH + 0.5

    Px = Px.reshape(sphereH, sphereW, order='F')
    Py = Py.reshape(sphereH, sphereW, order='F')

    # boundary: pad one pixel on every side so warpImageFast can interpolate
    # across the wrap-around seam and the poles.
    imgNew = np.zeros((sphereH+2, sphereW+2, C), np.float64)
    imgNew[1:-1, 1:-1, :] = img
    imgNew[1:-1, 0, :] = img[:, -1, :]
    imgNew[1:-1, -1, :] = img[:, 0, :]
    imgNew[0, 1:sphereW//2+1, :] = img[0, sphereW-1:sphereW//2-1:-1, :]
    imgNew[0, sphereW//2+1:-1, :] = img[0, sphereW//2-1::-1, :]
    imgNew[-1, 1:sphereW//2+1, :] = img[-1, sphereW-1:sphereW//2-1:-1, :]
    # BUGFIX: the bottom pole must mirror the LAST image row; the original
    # copy-pasted img[0, ...] from the top-pole line above.
    imgNew[-1, sphereW//2+1:-1, :] = img[-1, sphereW//2-1::-1, :]
    imgNew[0, 0, :] = img[0, 0, :]
    imgNew[-1, -1, :] = img[-1, -1, :]
    imgNew[0, -1, :] = img[0, -1, :]
    imgNew[-1, 0, :] = img[-1, 0, :]

    rotImg = warpImageFast(imgNew, Px+1, Py+1)

    return rotImg, R


def imgLookAt(im, CENTERx, CENTERy, new_imgH, fov):
    '''Render a new_imgH x new_imgH perspective view of the panorama im,
    looking at spherical direction (CENTERx, CENTERy) with the given fov.'''
    sphereH = im.shape[0]
    sphereW = im.shape[1]
    TX, TY = np.meshgrid(range(1, new_imgH + 1), range(1, new_imgH + 1))
    TX = TX.reshape(-1, 1, order='F')
    TY = TY.reshape(-1, 1, order='F')
    TX = TX - 0.5 - new_imgH/2
    TY = TY - 0.5 - new_imgH/2
    r = new_imgH / 2 / np.tan(fov/2)

    # convert to 3D
    R = np.sqrt(TY ** 2 + r ** 2)
    ANGy = np.arctan(- TY / r)
    ANGy = ANGy + CENTERy

    X = np.sin(ANGy) * R
    Y = -np.cos(ANGy) * R
    Z = TX

    INDn = np.nonzero(np.abs(ANGy) > np.pi/2)

    # project back to sphere
    ANGx = np.arctan(Z / -Y)
    RZY = np.sqrt(Z ** 2 + Y ** 2)
    ANGy = np.arctan(X / RZY)

    ANGx[INDn] = ANGx[INDn] + np.pi
    ANGx = ANGx + CENTERx

    INDy = np.nonzero(ANGy < -np.pi/2)
    ANGy[INDy] = -np.pi - ANGy[INDy]
    ANGx[INDy] = ANGx[INDy] + np.pi

    # Wrap ANGx into (-pi, pi]; the subtraction is applied repeatedly because
    # the accumulated offsets above can exceed pi by more than one full turn.
    INDx = np.nonzero(ANGx <= -np.pi); ANGx[INDx] = ANGx[INDx] + 2 * np.pi
    INDx = np.nonzero(ANGx > np.pi); ANGx[INDx] = ANGx[INDx] - 2 * np.pi
    INDx = np.nonzero(ANGx > np.pi); ANGx[INDx] = ANGx[INDx] - 2 * np.pi
    INDx = np.nonzero(ANGx > np.pi); ANGx[INDx] = ANGx[INDx] - 2 * np.pi

    Px = (ANGx + np.pi) / (2*np.pi) * sphereW + 0.5
    Py = ((-ANGy) + np.pi/2) / np.pi * sphereH + 0.5

    INDxx = np.nonzero(Px < 1)
    Px[INDxx] = Px[INDxx] + sphereW
    # Duplicate the first two columns so sampling past the seam interpolates correctly.
    im = np.concatenate([im, im[:, :2]], 1)

    Px = Px.reshape(new_imgH, new_imgH, order='F')
    Py = Py.reshape(new_imgH, new_imgH, order='F')

    warped_im = warpImageFast(im, Px, Py)

    return warped_im


def separatePano(panoImg, fov, x, y, imgSize=320):
    '''cut a panorama image into several separate views'''
    assert x.shape == y.shape
    if not isinstance(fov, np.ndarray):
        fov = fov * np.ones_like(x)

    sepScene = [
        {
            'img': imgLookAt(panoImg.copy(), xi, yi, imgSize, fovi),
            'vx': xi,
            'vy': yi,
            'fov': fovi,
            'sz': imgSize,
        }
        for xi, yi, fovi in zip(x, y, fov)
    ]

    return sepScene


def lsdWrap(img, qError=0.7):
    '''
    Opencv implementation of
    Rafael Grompone von Gioi, Jérémie Jakubowicz, Jean-Michel Morel, and Gregory Randall,
    LSD: a Line Segment Detector, Image Processing On Line, vol. 2012.
    [Rafael12] http://www.ipol.im/pub/art/2012/gjmr-lsd/?utm_source=doi
    @img
        input image
    @qError
        LSD gradient quantization error (smaller -> more segments);
        previously hard-coded to 0.7, now a backward-compatible parameter.
    '''
    if len(img.shape) == 3:
        img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)

    lines = lsd(img, quant=qError)
    if lines is None:
        return np.zeros_like(img), np.array([])
    edgeMap = np.zeros_like(img)
    for i in range(lines.shape[0]):
        pt1 = (int(lines[i, 0]), int(lines[i, 1]))
        pt2 = (int(lines[i, 2]), int(lines[i, 3]))
        width = lines[i, 4]
        cv2.line(edgeMap, pt1, pt2, 255, int(np.ceil(width / 2)))
    # Append a unit score column so downstream code sees [x1 y1 x2 y2 w 1 1].
    edgeList = np.concatenate([lines, np.ones_like(lines[:, :2])], 1)
    return edgeMap, edgeList


def edgeFromImg2Pano(edge):
    '''Lift 2-D line segments of one perspective view onto the panorama sphere.
    Returns [normal(3) coord1(3) coord2(3) score] per segment.'''
    edgeList = edge['edgeLst']
    if len(edgeList) == 0:
        return np.array([])

    vx = edge['vx']
    vy = edge['vy']
    fov = edge['fov']
    imH, imW = edge['img'].shape

    R = (imW/2) / np.tan(fov/2)

    # im is the tangent plane, contacting with ball at [x0 y0 z0]
    x0 = R * np.cos(vy) * np.sin(vx)
    y0 = R * np.cos(vy) * np.cos(vx)
    z0 = R * np.sin(vy)
    vecposX = np.array([np.cos(vx), -np.sin(vx), 0])
    vecposY = np.cross(np.array([x0, y0, z0]), vecposX)
    vecposY = vecposY / np.sqrt(vecposY @ vecposY.T)
    vecposX = vecposX.reshape(1, -1)
    vecposY = vecposY.reshape(1, -1)
    Xc = (0 + imW-1) / 2
    Yc = (0 + imH-1) / 2

    vecx1 = edgeList[:, [0]] - Xc
    vecy1 = edgeList[:, [1]] - Yc
    vecx2 = edgeList[:, [2]] - Xc
    vecy2 = edgeList[:, [3]] - Yc

    vec1 = np.tile(vecx1, [1, 3]) * vecposX + np.tile(vecy1, [1, 3]) * vecposY
    vec2 = np.tile(vecx2, [1, 3]) * vecposX + np.tile(vecy2, [1, 3]) * vecposY
    coord1 = [[x0, y0, z0]] + vec1
    coord2 = [[x0, y0, z0]] + vec2

    # The great-circle normal of each segment is the cross product of its endpoints.
    normal = np.cross(coord1, coord2, axis=1)
    normal = normal / np.linalg.norm(normal, axis=1, keepdims=True)

    panoList = np.hstack([normal, coord1, coord2, edgeList[:, [-1]]])

    return panoList


def _intersection(range1, range2):
    '''Whether two circular [0, 1) ranges overlap (ranges may wrap past 1).'''
    if range1[1] < range1[0]:
        range11 = [range1[0], 1]
        range12 = [0, range1[1]]
    else:
        range11 = range1
        range12 = [0, 0]

    if range2[1] < range2[0]:
        range21 = [range2[0], 1]
        range22 = [0, range2[1]]
    else:
        range21 = range2
        range22 = [0, 0]

    b = max(range11[0], range21[0]) < min(range11[1], range21[1])
    if b:
        return b
    b2 = max(range12[0], range22[0]) < min(range12[1], range22[1])
    b = b or b2
    return b


def _insideRange(pt, rng):
    '''Whether pt lies inside the circular [0, 1) range rng (rng may wrap).'''
    if rng[1] > rng[0]:
        b = pt >= rng[0] and pt <= rng[1]
    else:
        b1 = pt >= rng[0] and pt <= 1
        b2 = pt >= 0 and pt <= rng[1]
        b = b1 or b2
    return b


def combineEdgesN(edges):
    '''
    Combine some small line segments, should be very conservative
    OUTPUT
        lines: combined line segments
        ori_lines: original line segments
        line format [nx ny nz projectPlaneID umin umax LSfov score]
    '''
    arcList = []
    for edge in edges:
        panoLst = edge['panoLst']
        if len(panoLst) == 0:
            continue
        arcList.append(panoLst)
    arcList = np.vstack(arcList)

    # ori lines
    numLine = len(arcList)
    ori_lines = np.zeros((numLine, 8))
    areaXY = np.abs(arcList[:, 2])
    areaYZ = np.abs(arcList[:, 0])
    areaZX = np.abs(arcList[:, 1])
    planeIDs = np.argmax(np.stack([areaXY, areaYZ, areaZX], -1), 1) + 1  # XY YZ ZX

    for i in range(numLine):
        ori_lines[i, :3] = arcList[i, :3]
        ori_lines[i, 3] = planeIDs[i]
        coord1 = arcList[i, 3:6]
        coord2 = arcList[i, 6:9]
        uv = xyz2uvN(np.stack([coord1, coord2]), planeIDs[i])
        umax = uv[:, 0].max() + np.pi
        umin = uv[:, 0].min() + np.pi
        # Pick the orientation that keeps the arc shorter than half a circle.
        if umax - umin > np.pi:
            ori_lines[i, 4:6] = np.array([umax, umin]) / 2 / np.pi
        else:
            ori_lines[i, 4:6] = np.array([umin, umax]) / 2 / np.pi
        ori_lines[i, 6] = np.arccos((
            np.dot(coord1, coord2) / (np.linalg.norm(coord1) * np.linalg.norm(coord2))
            ).clip(-1, 1))
        ori_lines[i, 7] = arcList[i, 9]

    # additive combination
    lines = ori_lines.copy()
    for _ in range(3):
        numLine = len(lines)
        valid_line = np.ones(numLine, bool)
        for i in range(numLine):
            if not valid_line[i]:
                continue
            # Candidates: near-parallel great circles (normals within 1 degree).
            dotProd = (lines[:, :3] * lines[[i], :3]).sum(1)
            valid_curr = np.logical_and((np.abs(dotProd) > np.cos(np.pi / 180)), valid_line)
            valid_curr[i] = False
            for j in np.nonzero(valid_curr)[0]:
                range1 = lines[i, 4:6]
                range2 = lines[j, 4:6]
                valid_rag = _intersection(range1, range2)
                if not valid_rag:
                    continue

                # combine: length-weighted average of the two normals
                I = np.argmax(np.abs(lines[i, :3]))
                if lines[i, I] * lines[j, I] > 0:
                    nc = lines[i, :3] * lines[i, 6] + lines[j, :3] * lines[j, 6]
                else:
                    nc = lines[i, :3] * lines[i, 6] - lines[j, :3] * lines[j, 6]
                nc = nc / np.linalg.norm(nc)

                if _insideRange(range1[0], range2):
                    nrmin = range2[0]
                else:
                    nrmin = range1[0]

                if _insideRange(range1[1], range2):
                    nrmax = range2[1]
                else:
                    nrmax = range1[1]

                u = np.array([[nrmin], [nrmax]]) * 2 * np.pi - np.pi
                v = computeUVN(nc, u, lines[i, 3])
                xyz = uv2xyzN(np.hstack([u, v]), lines[i, 3])
                l = np.arccos(np.dot(xyz[0, :], xyz[1, :]).clip(-1, 1))
                scr = (lines[i,6]*lines[i,7] + lines[j,6]*lines[j,7]) / (lines[i,6]+lines[j,6])

                lines[i] = [*nc, lines[i, 3], nrmin, nrmax, l, scr]
                valid_line[j] = False

        lines = lines[valid_line]

    return lines, ori_lines


def icosahedron2sphere(level):
    # this function use a icosahedron to sample uniformly on a sphere
    a = 2 / (1 + np.sqrt(5))
    M = np.array([
        0, a, -1, a, 1, 0, -a, 1, 0,
        0, a, 1, -a, 1, 0, a, 1, 0,
        0, a, 1, 0, -a, 1, -1, 0, a,
        0, a, 1, 1, 0, a, 0, -a, 1,
        0, a, -1, 0, -a, -1, 1, 0, -a,
        0, a, -1, -1, 0, -a, 0, -a, -1,
        0, -a, 1, a, -1, 0, -a, -1, 0,
        0, -a, -1, -a, -1, 0, a, -1, 0,
        -a, 1, 0, -1, 0, a, -1, 0, -a,
        -a, -1, 0, -1, 0, -a, -1, 0, a,
        a, 1, 0, 1, 0, -a, 1, 0, a,
        a, -1, 0, 1, 0, a, 1, 0, -a,
        0, a, 1, -1, 0, a, -a, 1, 0,
        0, a, 1, a, 1, 0, 1, 0, a,
        0, a, -1, -a, 1, 0, -1, 0, -a,
        0, a, -1, 1, 0, -a, a, 1, 0,
        0, -a, -1, -1, 0, -a, -a, -1, 0,
        0, -a, -1, a, -1, 0, 1, 0, -a,
        0, -a, 1, -a, -1, 0, -1, 0, a,
        0, -a, 1, 1, 0, a, a, -1, 0])

    coor = M.T.reshape(3, 60, order='F').T
    coor, idx = np.unique(coor, return_inverse=True, axis=0)
    tri = idx.reshape(3, 20, order='F').T

    # extrude
    coor = list(coor / np.tile(np.linalg.norm(coor, axis=1, keepdims=True), (1, 3)))

    for _ in range(level):
        triN = []
        for t in range(len(tri)):
            n = len(coor)
            coor.append((coor[tri[t, 0]] + coor[tri[t, 1]]) / 2)
            coor.append((coor[tri[t, 1]] + coor[tri[t, 2]]) / 2)
            coor.append((coor[tri[t, 2]] + coor[tri[t, 0]]) / 2)

            triN.append([n, tri[t, 0], n+2])
            triN.append([n, tri[t, 1], n+1])
            triN.append([n+1, tri[t, 2], n+2])
            triN.append([n, n+1, n+2])
        tri = np.array(triN)

        # uniquefy
        coor, idx = np.unique(coor, return_inverse=True, axis=0)
        tri = idx[tri]

        # extrude
        coor = list(coor / np.tile(np.sqrt(np.sum(coor * coor, 1, keepdims=True)), (1, 3)))

    return np.array(coor), np.array(tri)


def curveFitting(inputXYZ, weight):
    '''
    @inputXYZ: N x 3
    @weight  : N x 1
    Fit the best-fit great-circle normal by taking the smallest singular
    vector of the weighted scatter matrix.
    '''
    l = np.linalg.norm(inputXYZ, axis=1, keepdims=True)
    inputXYZ = inputXYZ / l
    weightXYZ = inputXYZ * weight
    XX = np.sum(weightXYZ[:, 0] ** 2)
    YY = np.sum(weightXYZ[:, 1] ** 2)
    ZZ = np.sum(weightXYZ[:, 2] ** 2)
    XY = np.sum(weightXYZ[:, 0] * weightXYZ[:, 1])
    YZ = np.sum(weightXYZ[:, 1] * weightXYZ[:, 2])
    ZX = np.sum(weightXYZ[:, 2] * weightXYZ[:, 0])

    A = np.array([
        [XX, XY, ZX],
        [XY, YY, YZ],
        [ZX, YZ, ZZ]])
    U, S, Vh = np.linalg.svd(A)
    outputNM = Vh[-1, :]
    outputNM = outputNM / np.linalg.norm(outputNM)

    return outputNM


def sphereHoughVote(segNormal, segLength, segScores, binRadius, orthTolerance, candiSet, force_unempty=True):
    '''Vote on the candidate sphere directions for the best mutually-orthogonal
    triplet of vanishing directions, then refine each by weighted curve fitting.
    Returns (refined 3x3 directions, last vote gain, last step angles) or
    (None, 0, 0) when no orthogonal triplet received votes.'''
    # initial guess
    numLinesg = len(segNormal)

    voteBinPoints = candiSet.copy()
    voteBinPoints = voteBinPoints[~(voteBinPoints[:,2] < 0)]
    # Flip normals to the upper hemisphere so antipodal pairs vote together.
    reversValid = (segNormal[:, 2] < 0).reshape(-1)
    segNormal[reversValid] = -segNormal[reversValid]

    voteBinUV = xyz2uvN(voteBinPoints)
    numVoteBin = len(voteBinPoints)
    voteBinValues = np.zeros(numVoteBin)
    for i in range(numLinesg):
        tempNorm = segNormal[[i]]
        tempDots = (voteBinPoints * tempNorm).sum(1)

        valid = np.abs(tempDots) < np.cos((90 - binRadius) * np.pi / 180)

        voteBinValues[valid] = voteBinValues[valid] + segScores[i] * segLength[i]

    checkIDs1 = np.nonzero(voteBinUV[:, [1]] > np.pi / 3)[0]
    voteMax = 0
    checkID1Max = 0
    checkID2Max = 0
    checkID3Max = 0

    for j in range(len(checkIDs1)):
        checkID1 = checkIDs1[j]
        vote1 = voteBinValues[checkID1]
        if voteBinValues[checkID1] == 0 and force_unempty:
            continue
        checkNormal = voteBinPoints[[checkID1]]
        dotProduct = (voteBinPoints * checkNormal).sum(1)
        checkIDs2 = np.nonzero(np.abs(dotProduct) < np.cos((90 - orthTolerance) * np.pi / 180))[0]

        for i in range(len(checkIDs2)):
            checkID2 = checkIDs2[i]
            if voteBinValues[checkID2] == 0 and force_unempty:
                continue
            vote2 = vote1 + voteBinValues[checkID2]
            cpv = np.cross(voteBinPoints[checkID1], voteBinPoints[checkID2]).reshape(1, 3)
            cpn = np.linalg.norm(cpv)
            dotProduct = (voteBinPoints * cpv).sum(1) / cpn
            checkIDs3 = np.nonzero(np.abs(dotProduct) > np.cos(orthTolerance * np.pi / 180))[0]

            for k in range(len(checkIDs3)):
                checkID3 = checkIDs3[k]
                if voteBinValues[checkID3] == 0 and force_unempty:
                    continue
                vote3 = vote2 + voteBinValues[checkID3]
                if vote3 > voteMax:
                    lastStepCost = vote3 - voteMax
                    if voteMax != 0:
                        tmp = (voteBinPoints[[checkID1Max, checkID2Max, checkID3Max]] * \
                               voteBinPoints[[checkID1, checkID2, checkID3]]).sum(1)
                        lastStepAngle = np.arccos(tmp.clip(-1, 1))
                    else:
                        lastStepAngle = np.zeros(3)

                    checkID1Max = checkID1
                    checkID2Max = checkID2
                    checkID3Max = checkID3

                    voteMax = vote3

    if checkID1Max == 0:
        print('[WARN] sphereHoughVote: no orthogonal voting exist', file=sys.stderr)
        return None, 0, 0
    initXYZ = voteBinPoints[[checkID1Max, checkID2Max, checkID3Max]]

    # refine
    refiXYZ = np.zeros((3, 3))
    dotprod = (segNormal * initXYZ[[0]]).sum(1)
    valid = np.abs(dotprod) < np.cos((90 - binRadius) * np.pi / 180)
    validNm = segNormal[valid]
    validWt = segLength[valid] * segScores[valid]
    validWt = validWt / validWt.max()
    refiNM = curveFitting(validNm, validWt)
    refiXYZ[0] = refiNM.copy()

    dotprod = (segNormal * initXYZ[[1]]).sum(1)
    valid = np.abs(dotprod) < np.cos((90 - binRadius) * np.pi / 180)
    validNm = segNormal[valid]
    validWt = segLength[valid] * segScores[valid]
    validWt = validWt / validWt.max()
    # Append the first refined axis with a small weight to bias orthogonality.
    validNm = np.vstack([validNm, refiXYZ[[0]]])
    validWt = np.vstack([validWt, validWt.sum(0, keepdims=1) * 0.1])
    refiNM = curveFitting(validNm, validWt)
    refiXYZ[1] = refiNM.copy()

    # The third axis is the cross product of the first two.
    refiNM = np.cross(refiXYZ[0], refiXYZ[1])
    refiXYZ[2] = refiNM / np.linalg.norm(refiNM)

    return refiXYZ, lastStepCost, lastStepAngle


def findMainDirectionEMA(lines):
    '''compute vp from set of lines'''

    # initial guess
    segNormal = lines[:, :3]
    segLength = lines[:, [6]]
    segScores = np.ones((len(lines), 1))

    # Drop segments shorter than 5 degrees of arc.
    shortSegValid = (segLength < 5 * np.pi / 180).reshape(-1)
    segNormal = segNormal[~shortSegValid, :]
    segLength = segLength[~shortSegValid]
    segScores = segScores[~shortSegValid]

    numLinesg = len(segNormal)
    candiSet, tri = icosahedron2sphere(3)
    ang = np.arccos((candiSet[tri[0,0]] * candiSet[tri[0,1]]).sum().clip(-1, 1)) / np.pi * 180
    binRadius = ang / 2
    initXYZ, score, angle = sphereHoughVote(segNormal, segLength, segScores, 2*binRadius, 2, candiSet)

    if initXYZ is None:
        print('[WARN] findMainDirectionEMA: initial failed', file=sys.stderr)
        return None, score, angle

    # iterative refine
    iter_max = 3
    candiSet, tri = icosahedron2sphere(5)
    numCandi = len(candiSet)
    angD = np.arccos((candiSet[tri[0, 0]] * candiSet[tri[0, 1]]).sum().clip(-1, 1)) / np.pi * 180
    binRadiusD = angD / 2
    curXYZ = initXYZ.copy()
    tol = np.linspace(4*binRadius, 4*binRadiusD, iter_max)  # shrink down ls and candi
    for it in range(iter_max):
        dot1 = np.abs((segNormal * curXYZ[[0]]).sum(1))
        dot2 = np.abs((segNormal * curXYZ[[1]]).sum(1))
        dot3 = np.abs((segNormal * curXYZ[[2]]).sum(1))
        valid1 = dot1 < np.cos((90 - tol[it]) * np.pi / 180)
        valid2 = dot2 < np.cos((90 - tol[it]) * np.pi / 180)
        valid3 = dot3 < np.cos((90 - tol[it]) * np.pi / 180)
        valid = valid1 | valid2 | valid3

        if np.sum(valid) == 0:
            print('[WARN] findMainDirectionEMA: zero line segments for voting', file=sys.stderr)
            break

        subSegNormal = segNormal[valid]
        subSegLength = segLength[valid]
        subSegScores = segScores[valid]

        dot1 = np.abs((candiSet * curXYZ[[0]]).sum(1))
        dot2 = np.abs((candiSet * curXYZ[[1]]).sum(1))
        dot3 = np.abs((candiSet * curXYZ[[2]]).sum(1))
        valid1 = dot1 > np.cos(tol[it] * np.pi / 180)
        valid2 = dot2 > np.cos(tol[it] * np.pi / 180)
        valid3 = dot3 > np.cos(tol[it] * np.pi / 180)
        valid = valid1 | valid2 | valid3

        if np.sum(valid) == 0:
            print('[WARN] findMainDirectionEMA: zero line segments for voting', file=sys.stderr)
            break

        subCandiSet = candiSet[valid]

        tcurXYZ, _, _ = sphereHoughVote(subSegNormal, subSegLength, subSegScores, 2*binRadiusD, 2, subCandiSet)

        if tcurXYZ is None:
            print('[WARN] findMainDirectionEMA: no answer found', file=sys.stderr)
            break
        curXYZ = tcurXYZ.copy()

    mainDirect = curXYZ.copy()
    mainDirect[0] = mainDirect[0] * np.sign(mainDirect[0,2])
    mainDirect[1] = mainDirect[1] * np.sign(mainDirect[1,2])
    mainDirect[2] = mainDirect[2] * np.sign(mainDirect[2,2])

    # Order the axes: I1 most vertical, I2 closest to the u=0 direction.
    uv = xyz2uvN(mainDirect)
    I1 = np.argmax(uv[:,1])
    J = np.setdiff1d(np.arange(3), I1)
    I2 = np.argmin(np.abs(np.sin(uv[J,0])))
    I2 = J[I2]
    I3 = np.setdiff1d(np.arange(3), np.hstack([I1, I2]))
    mainDirect = np.vstack([mainDirect[I1], mainDirect[I2], mainDirect[I3]])

    mainDirect[0] = mainDirect[0] * np.sign(mainDirect[0,2])
    mainDirect[1] = mainDirect[1] * np.sign(mainDirect[1,1])
    mainDirect[2] = mainDirect[2] * np.sign(mainDirect[2,0])

    mainDirect = np.vstack([mainDirect, -mainDirect])

    return mainDirect, score, angle


def multi_linspace(start, stop, num):
    '''Row-wise linspace: returns len(start) x num samples from start[i] to stop[i].'''
    div = (num - 1)
    y = np.arange(0, num, dtype=np.float64)
    steps = (stop - start) / div
    return steps.reshape(-1, 1) * y + start.reshape(-1, 1)


def assignVanishingType(lines, vp, tol, area=10):
    '''Assign each line to the vanishing direction it is most perpendicular to;
    lines passing within `area` degrees of a vp are disqualified for that vp.'''
    numLine = len(lines)
    numVP = len(vp)
    typeCost = np.zeros((numLine, numVP))
    # perpendicular
    for vid in range(numVP):
        cosint = (lines[:, :3] * vp[[vid]]).sum(1)
        typeCost[:, vid] = np.arcsin(np.abs(cosint).clip(-1, 1))

    # infinity
    u = np.stack([lines[:, 4], lines[:, 5]], -1)
    u = u.reshape(-1, 1) * 2 * np.pi - np.pi
    v = computeUVN_vec(lines[:, :3], u, lines[:, 3])
    xyz = uv2xyzN_vec(np.hstack([u, v]), np.repeat(lines[:, 3], 2))
    xyz = multi_linspace(xyz[0::2].reshape(-1), xyz[1::2].reshape(-1), 100)
    xyz = np.vstack([blk.T for blk in np.split(xyz, numLine)])
    xyz = xyz / np.linalg.norm(xyz, axis=1, keepdims=True)
    for vid in range(numVP):
        ang = np.arccos(np.abs((xyz * vp[[vid]]).sum(1)).clip(-1, 1))
        notok = (ang < area * np.pi / 180).reshape(numLine, 100).sum(1) != 0
        typeCost[notok, vid] = 100

    I = typeCost.min(1)
    tp = typeCost.argmin(1)
    tp[I > tol] = numVP + 1

    return tp, typeCost


def refitLineSegmentB(lines, vp, vpweight=0.1):
    '''
    Refit direction of line segments
    INPUT:
        lines: original line segments
        vp: vannishing point
        vpweight: if set to 0, lines will not change; if set to inf, lines will
                  be forced to pass vp
    '''
    numSample = 100
    numLine = len(lines)
    xyz = np.zeros((numSample+1, 3))
    wei = np.ones((numSample+1, 1))
    wei[numSample] = vpweight * numSample
    lines_ali = lines.copy()
    for i in range(numLine):
        n = lines[i, :3]
        sid = lines[i, 4] * 2 * np.pi
        eid = lines[i, 5] * 2 * np.pi
        if eid < sid:
            x = np.linspace(sid, eid + 2 * np.pi, numSample) % (2 * np.pi)
        else:
            x = np.linspace(sid, eid, numSample)
        u = -np.pi + x.reshape(-1, 1)
        v = computeUVN(n, u, lines[i, 3])
        xyz[:numSample] = uv2xyzN(np.hstack([u, v]), lines[i, 3])
        xyz[numSample] = vp
        outputNM = curveFitting(xyz, wei)
        lines_ali[i, :3] = outputNM

    return lines_ali


def paintParameterLine(parameterLine, width, height):
    '''Rasterize parameterized great-circle segments into a width x height
    equirectangular map; pixel value is the (0-based) line index.'''
    lines = parameterLine.copy()
    panoEdgeC = np.zeros((height, width))

    num_sample = max(height, width)
    for i in range(len(lines)):
        n = lines[i, :3]
        sid = lines[i, 4] * 2 * np.pi
        eid = lines[i, 5] * 2 * np.pi
        if eid < sid:
            x = np.linspace(sid, eid + 2 * np.pi, num_sample)
            x = x % (2 * np.pi)
        else:
            x = np.linspace(sid, eid, num_sample)
        u = -np.pi + x.reshape(-1, 1)
        v = computeUVN(n, u, lines[i, 3])
        xyz = uv2xyzN(np.hstack([u, v]), lines[i, 3])
        uv = xyz2uvN(xyz, 1)
        # 1-based pixel coordinates, clamped to the image (matlab convention).
        cs = np.minimum(np.floor((uv[:,0] + np.pi) / (2 * np.pi) * width) + 1,
                        width).astype(np.int32)
        rs = np.minimum(np.floor(((np.pi / 2) - uv[:, 1]) / np.pi * height) + 1,
                        height).astype(np.int32)
        panoEdgeC[rs-1, cs-1] = i

    return panoEdgeC


def panoEdgeDetection(img, viewSize=320, qError=0.7, refineIter=3):
    '''
    line detection on panorama
    INPUT:
        img: image waiting for detection, double type, range 0~1
        viewSize: image size of croped views
        qError: set smaller if more line segment wanted
    OUTPUT:
        oLines: detected line segments
        vp: vanishing point
        views: separate views of panorama
        edges: original detection of line segments in separate views
        panoEdge: image for visualize line segments
    '''
    cutSize = viewSize
    fov = np.pi / 3
    xh = np.arange(-np.pi, np.pi*5/6, np.pi/6)
    yh = np.zeros(xh.shape[0])
    xp = np.array([-3/3, -2/3, -1/3, 0/3, 1/3, 2/3, -3/3, -2/3, -1/3, 0/3, 1/3, 2/3]) * np.pi
    yp = np.array([ 1/4,  1/4,  1/4, 1/4, 1/4, 1/4, -1/4, -1/4, -1/4, -1/4, -1/4, -1/4]) * np.pi
    x = np.concatenate([xh, xp, [0, 0]])
    y = np.concatenate([yh, yp, [np.pi/2., -np.pi/2]])

    sepScene = separatePano(img.copy(), fov, x, y, cutSize)
    edge = []
    for i, scene in enumerate(sepScene):
        # BUGFIX: qError was documented but never used; it is now forwarded to LSD.
        edgeMap, edgeList = lsdWrap(scene['img'], qError)
        edge.append({
            'img': edgeMap,
            'edgeLst': edgeList,
            'vx': scene['vx'],
            'vy': scene['vy'],
            'fov': scene['fov'],
        })
        edge[-1]['panoLst'] = edgeFromImg2Pano(edge[-1])
    lines, olines = combineEdgesN(edge)

    clines = lines.copy()
    for _ in range(refineIter):
        mainDirect, score, angle = findMainDirectionEMA(clines)

        tp, typeCost = assignVanishingType(lines, mainDirect[:3], 0.1, 10)
        lines1 = lines[tp==0]
        lines2 = lines[tp==1]
        lines3 = lines[tp==2]

        lines1rB = refitLineSegmentB(lines1, mainDirect[0], 0)
        lines2rB = refitLineSegmentB(lines2, mainDirect[1], 0)
        lines3rB = refitLineSegmentB(lines3, mainDirect[2], 0)

        clines = np.vstack([lines1rB, lines2rB, lines3rB])

    panoEdge1r = paintParameterLine(lines1rB, img.shape[1], img.shape[0])
    panoEdge2r = paintParameterLine(lines2rB, img.shape[1], img.shape[0])
    panoEdge3r = paintParameterLine(lines3rB, img.shape[1], img.shape[0])
    panoEdger = np.stack([panoEdge1r, panoEdge2r, panoEdge3r], -1)

    # output
    olines = clines
    vp = mainDirect
    views = sepScene
    edges = edge
    panoEdge = panoEdger

    return olines, vp, views, edges, panoEdge, score, angle


if __name__ == '__main__':

    # disable OpenCV3's non thread safe OpenCL option
    cv2.ocl.setUseOpenCL(False)

    import argparse
    import time
    from PIL import Image

    parser = argparse.ArgumentParser()
    parser.add_argument('--i', required=True)
    parser.add_argument('--o_prefix', required=True)
    parser.add_argument('--qError', default=0.7, type=float)
    parser.add_argument('--refineIter', default=3, type=int)
    args = parser.parse_args()

    # Read image
    img_ori = np.array(Image.open(args.i).resize((1024, 512)))

    # Vanishing point estimation & Line segments detection
    s_time = time.time()
    olines, vp, views, edges, panoEdge, score, angle = panoEdgeDetection(img_ori,
                                                                         qError=args.qError,
                                                                         refineIter=args.refineIter)
    print('Elapsed time: %.2f' % (time.time() - s_time))
    panoEdge = (panoEdge > 0)

    print('Vanishing point:')
    for v in vp[2::-1]:
        print('%.6f %.6f %.6f' % tuple(v))

    # Visualization
    # BUGFIX: rotatePanorama returns (rotImg, R); the original assigned the
    # tuple itself to edg/img, which crashed on the array operations below.
    edg, _ = rotatePanorama(panoEdge.astype(np.float64), vp[2::-1])
    img, _ = rotatePanorama(img_ori / 255.0, vp[2::-1])
    one = img.copy() * 0.5
    one[(edg > 0.5).sum(-1) > 0] = 0
    one[edg[..., 0] > 0.5, 0] = 1
    one[edg[..., 1] > 0.5, 1] = 1
    one[edg[..., 2] > 0.5, 2] = 1
    Image.fromarray((edg * 255).astype(np.uint8)).save('%s_edg.png' % args.o_prefix)
    Image.fromarray((img * 255).astype(np.uint8)).save('%s_img.png' % args.o_prefix)
    Image.fromarray((one * 255).astype(np.uint8)).save('%s_one.png' % args.o_prefix)