aderylo committed on
Commit
60102fa
·
verified ·
1 Parent(s): 809072d

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/__init__.py +0 -0
  3. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/__pycache__/__init__.cpython-310.pyc +0 -0
  4. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__init__.py +27 -0
  5. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  6. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/aeo.cpython-310.pyc +0 -0
  7. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/builder.cpython-310.pyc +0 -0
  8. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/dataloader.cpython-310.pyc +0 -0
  9. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/defaults.cpython-310.pyc +0 -0
  10. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/hm3d.cpython-310.pyc +0 -0
  11. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/modelnet.cpython-310.pyc +0 -0
  12. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/nuscenes.cpython-310.pyc +0 -0
  13. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/precomputed_features.cpython-310.pyc +0 -0
  14. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/s3dis.cpython-310.pyc +0 -0
  15. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/scannet.cpython-310.pyc +0 -0
  16. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/scannet_pair.cpython-310.pyc +0 -0
  17. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/scannetpp.cpython-310.pyc +0 -0
  18. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/semantic_kitti.cpython-310.pyc +0 -0
  19. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/shapenet_part.cpython-310.pyc +0 -0
  20. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/structure3d.cpython-310.pyc +0 -0
  21. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/transform.cpython-310.pyc +0 -0
  22. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/utils.cpython-310.pyc +0 -0
  23. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/waymo.cpython-310.pyc +0 -0
  24. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/aeo.py +29 -0
  25. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/builder.py +15 -0
  26. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/dataloader.py +112 -0
  27. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/defaults.py +236 -0
  28. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/hm3d.py +45 -0
  29. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/modelnet.py +156 -0
  30. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/nuscenes.py +125 -0
  31. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/precomputed_features.py +80 -0
  32. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/arkitscenes/preprocess_arkitscenes_mesh.py +93 -0
  33. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/hm3d/filter_hm3d.py +92 -0
  34. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/hm3d/hm3d_constants.py +45 -0
  35. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/hm3d/hm3dsem_category_mappings.tsv +2368 -0
  36. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/hm3d/preprocess_hm3d.py +209 -0
  37. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/meta_data/category_mapping.tsv +0 -0
  38. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/meta_data/scenes_test.txt +18 -0
  39. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/meta_data/scenes_train.txt +61 -0
  40. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/meta_data/scenes_val.txt +11 -0
  41. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/preprocess_matterport3d_mesh.py +240 -0
  42. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/unzip_matterport3d_region_segmentation.py +66 -0
  43. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/nuscenes/preprocess_nuscenes_info.py +607 -0
  44. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/s3dis/preprocess_s3dis.py +233 -0
  45. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/sampling_chunking_data.py +149 -0
  46. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/dino/prepare_scene_list.py +27 -0
  47. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/dino/preprocess_dino_feature.py +362 -0
  48. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/extract_partition.py +71 -0
  49. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/meta_data/__pycache__/scannet200_constants.cpython-310.pyc +0 -0
  50. gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/meta_data/classes_ObjClassification-ShapeNetCore55.txt +17 -0
.gitattributes CHANGED
@@ -38,3 +38,4 @@ gf_s3dis_ss_0.05/baseline/wandb/run-20251102_201734-ccwbjmvu/run-ccwbjmvu.wandb
38
  gf_s3dis_ss_0.05/baseline/wandb/run-20251102_213550-2e8vphue/run-2e8vphue.wandb filter=lfs diff=lfs merge=lfs -text
39
  gf_s3dis_ss_0.05/early-fusion-r-0.5/wandb/run-20251102_213832-fwfae11r/run-fwfae11r.wandb filter=lfs diff=lfs merge=lfs -text
40
  gf_s3dis_ss_0.05/early-fusion-r-0.5/wandb/run-20251114_011300-hlpbmdin/run-hlpbmdin.wandb filter=lfs diff=lfs merge=lfs -text
 
 
38
  gf_s3dis_ss_0.05/baseline/wandb/run-20251102_213550-2e8vphue/run-2e8vphue.wandb filter=lfs diff=lfs merge=lfs -text
39
  gf_s3dis_ss_0.05/early-fusion-r-0.5/wandb/run-20251102_213832-fwfae11r/run-fwfae11r.wandb filter=lfs diff=lfs merge=lfs -text
40
  gf_s3dis_ss_0.05/early-fusion-r-0.5/wandb/run-20251114_011300-hlpbmdin/run-hlpbmdin.wandb filter=lfs diff=lfs merge=lfs -text
41
+ gf_s3dis_ss_0.05/latent-fusion-r-0.5/wandb/run-20251110_114502-texly8ki/run-texly8ki.wandb filter=lfs diff=lfs merge=lfs -text
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/__init__.py ADDED
File without changes
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (211 Bytes). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__init__.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .defaults import DefaultDataset, ConcatDataset
2
+ from .builder import build_dataset
3
+ from .utils import point_collate_fn, collate_fn
4
+
5
+ # generic
6
+ from .precomputed_features import PrecomputedFeaturesDataset
7
+
8
+ # indoor scene
9
+ from .s3dis import S3DISDataset
10
+ from .scannet import ScanNetDataset, ScanNet200Dataset
11
+ from .scannetpp import ScanNetPPDataset
12
+ from .scannet_pair import ScanNetPairDataset
13
+ from .hm3d import HM3DDataset
14
+ from .structure3d import Structured3DDataset
15
+ from .aeo import AEODataset
16
+
17
+ # outdoor scene
18
+ from .semantic_kitti import SemanticKITTIDataset
19
+ from .nuscenes import NuScenesDataset
20
+ from .waymo import WaymoDataset
21
+
22
+ # object
23
+ from .modelnet import ModelNetDataset
24
+ from .shapenet_part import ShapeNetPartDataset
25
+
26
+ # dataloader
27
+ from .dataloader import MultiDatasetDataloader
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.14 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/aeo.cpython-310.pyc ADDED
Binary file (1.25 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/builder.cpython-310.pyc ADDED
Binary file (581 Bytes). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/dataloader.cpython-310.pyc ADDED
Binary file (3.7 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/defaults.cpython-310.pyc ADDED
Binary file (6.82 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/hm3d.cpython-310.pyc ADDED
Binary file (1.72 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/modelnet.cpython-310.pyc ADDED
Binary file (4.92 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/nuscenes.cpython-310.pyc ADDED
Binary file (3.61 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/precomputed_features.cpython-310.pyc ADDED
Binary file (2.64 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/s3dis.cpython-310.pyc ADDED
Binary file (908 Bytes). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/scannet.cpython-310.pyc ADDED
Binary file (3.35 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/scannet_pair.cpython-310.pyc ADDED
Binary file (3.48 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/scannetpp.cpython-310.pyc ADDED
Binary file (2 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/semantic_kitti.cpython-310.pyc ADDED
Binary file (4.37 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/shapenet_part.cpython-310.pyc ADDED
Binary file (5.35 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/structure3d.cpython-310.pyc ADDED
Binary file (1.41 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/transform.cpython-310.pyc ADDED
Binary file (37 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/utils.cpython-310.pyc ADDED
Binary file (2.64 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/__pycache__/waymo.cpython-310.pyc ADDED
Binary file (3.27 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/aeo.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ AEO Dataset
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import os
9
+
10
+ import numpy as np
11
+
12
+ from .defaults import DefaultDataset
13
+ from .builder import DATASETS
14
+
15
+
16
@DATASETS.register_module()
class AEODataset(DefaultDataset):
    """AEO dataset: remaps the raw 41-way labels onto 16 training classes."""

    # Raw label ids that are kept; every other raw id maps to -1 (ignored).
    _KEPT_LABELS = [0, 1, 3, 4, 13, 16, 19, 21, 22, 28, 29, 34, 36, 37, 38, 39]

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Lookup table: raw id -> contiguous class id in [0, 16), -1 for dropped ids.
        mapping = np.full(41, -1, dtype=int)
        mapping[self._KEPT_LABELS] = np.arange(len(self._KEPT_LABELS))
        self.label_mapping = mapping

    def get_data(self, idx):
        """Load a sample via the base class, then remap its segment labels."""
        data_dict = super().get_data(idx)
        data_dict["segment"] = self.label_mapping[data_dict["segment"]]
        return data_dict
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/builder.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Dataset Builder
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ from pointcept.utils.registry import Registry
9
+
10
# Global registry that dataset classes register themselves into.
DATASETS = Registry("datasets")


def build_dataset(cfg):
    """Instantiate a dataset object from its registry config node."""
    return DATASETS.build(cfg)
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/dataloader.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from functools import partial
2
+ import weakref
3
+ import torch
4
+ import torch.utils.data
5
+
6
+ import pointcept.utils.comm as comm
7
+ from pointcept.datasets.utils import point_collate_fn
8
+ from pointcept.datasets import ConcatDataset
9
+ from pointcept.utils.env import set_seed
10
+
11
+
12
class MultiDatasetDummySampler:
    """Stand-in sampler exposing ``set_epoch`` for trainers.

    Forwards the epoch to every sub-dataloader's distributed sampler; it is a
    no-op in single-process runs.
    """

    def __init__(self):
        # Back-reference to the owning MultiDatasetDataloader (set by owner).
        self.dataloader = None

    def set_epoch(self, epoch):
        # Only distributed samplers need per-epoch re-seeding.
        if comm.get_world_size() > 1:
            for sub_loader in self.dataloader.dataloaders:
                sub_loader.sampler.set_epoch(epoch)
        return
21
+
22
+
23
class MultiDatasetDataloader:
    """
    Multiple Datasets Dataloader, batch data from a same dataset and mix up ratio determined by loop of each sub dataset.
    The overall length is determined by the main dataset (first) and loop of concat dataset.
    """

    def __init__(
        self,
        concat_dataset: ConcatDataset,
        batch_size_per_gpu: int,
        num_worker_per_gpu: int,
        mix_prob=0,
        seed=None,
    ):
        """
        Args:
            concat_dataset: ConcatDataset whose sub-dataset ``loop`` values act
                as mixing ratios (the first dataset drives the epoch length).
            batch_size_per_gpu: batch size of every sub-dataloader.
            num_worker_per_gpu: total worker budget, split evenly across the
                sub-dataloaders.
            mix_prob: probability forwarded to ``point_collate_fn`` for batch mixing.
            seed: base seed for deterministic worker seeding (None disables it).
        """
        self.datasets = concat_dataset.datasets
        self.ratios = [dataset.loop for dataset in self.datasets]
        # reset data loop, original loop serve as ratios
        for dataset in self.datasets:
            dataset.loop = 1
        # determine union training epoch by main dataset
        self.datasets[0].loop = concat_dataset.loop
        # Split the per-GPU worker budget evenly across sub-dataloaders
        # (at least one each; persistent_workers requires num_workers > 0).
        num_workers = max(num_worker_per_gpu // len(self.datasets), 1)
        self.dataloaders = []
        for dataset_id, dataset in enumerate(self.datasets):
            if comm.get_world_size() > 1:
                sampler = torch.utils.data.distributed.DistributedSampler(dataset)
            else:
                sampler = None

            init_fn = (
                partial(
                    self._worker_init_fn,
                    dataset_id=dataset_id,
                    num_workers=num_workers,
                    num_datasets=len(self.datasets),
                    rank=comm.get_rank(),
                    seed=seed,
                )
                if seed is not None
                else None
            )
            self.dataloaders.append(
                torch.utils.data.DataLoader(
                    dataset,
                    batch_size=batch_size_per_gpu,
                    shuffle=(sampler is None),
                    # BUG FIX: previously passed the full ``num_worker_per_gpu``
                    # here, which oversubscribed workers by a factor of
                    # len(self.datasets) and broke _worker_init_fn's seed
                    # arithmetic (computed from the divided ``num_workers``),
                    # producing colliding worker seeds across datasets.
                    num_workers=num_workers,
                    sampler=sampler,
                    collate_fn=partial(point_collate_fn, mix_prob=mix_prob),
                    pin_memory=True,
                    worker_init_fn=init_fn,
                    drop_last=True,
                    persistent_workers=True,
                )
            )
        # Dummy sampler so trainers can call ``dataloader.sampler.set_epoch``;
        # weakref.proxy avoids a reference cycle with self.
        self.sampler = MultiDatasetDummySampler()
        self.sampler.dataloader = weakref.proxy(self)

    def __iter__(self):
        """Yield ``ratios[i]`` batches from dataset i, round-robin.

        Iteration ends when the main (first) dataset is exhausted; the other
        sub-dataloaders restart transparently when they run out.
        """
        iterator = [iter(dataloader) for dataloader in self.dataloaders]
        while True:
            for i in range(len(self.ratios)):
                for _ in range(self.ratios[i]):
                    try:
                        batch = next(iterator[i])
                    except StopIteration:
                        if i == 0:
                            # Main dataset defines the epoch length.
                            return
                        else:
                            iterator[i] = iter(self.dataloaders[i])
                            batch = next(iterator[i])
                    yield batch

    def __len__(self):
        # Complete "rounds" through all ratios plus leftover main-dataset batches.
        main_data_loader_length = len(self.dataloaders[0])
        return (
            main_data_loader_length // self.ratios[0] * sum(self.ratios)
            + main_data_loader_length % self.ratios[0]
        )

    @staticmethod
    def _worker_init_fn(worker_id, num_workers, dataset_id, num_datasets, rank, seed):
        # Unique deterministic seed per (rank, dataset, worker) triple.
        worker_seed = (
            num_workers * num_datasets * rank
            + num_workers * dataset_id
            + worker_id
            + seed
        )
        set_seed(worker_seed)
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/defaults.py ADDED
@@ -0,0 +1,236 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Default Datasets
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import os
9
+ import glob
10
+ import json
11
+ from re import split
12
+
13
+ import numpy as np
14
+ from copy import deepcopy
15
+ from torch.utils.data import Dataset
16
+ from collections.abc import Sequence
17
+
18
+ from pointcept.utils.logger import get_root_logger
19
+ from pointcept.utils.cache import shared_dict
20
+
21
+ from .builder import DATASETS, build_dataset
22
+ from .transform import Compose, TRANSFORMS
23
+
24
+
25
@DATASETS.register_module()
class DefaultDataset(Dataset):
    """Generic folder-per-sample point cloud dataset.

    Each sample is a directory of ``<asset>.npy`` arrays; only assets listed
    in ``VALID_ASSETS`` are loaded into the returned data dict.
    """

    # File stems (without ".npy") that get_data is allowed to load.
    VALID_ASSETS = [
        "coord",
        "color",
        "normal",
        "strength",
        "segment",
        "instance",
        "pose",
    ]

    def __init__(
        self,
        split="train",
        data_root="data/dataset",
        transform=None,
        test_mode=False,
        test_cfg=None,
        cache=False,
        ignore_index=-1,
        loop=1,
    ):
        """
        Args:
            split: split name (or sequence of names) under ``data_root``.
            data_root: dataset root directory.
            transform: transform config applied to every sample.
            test_mode: enables the test-time voxelize/crop/augment pipeline.
            test_cfg: config consulted only when ``test_mode`` is True.
            cache: if True, samples are fetched from a shared-memory cache
                (assumes the cache was populated beforehand).
            ignore_index: label value to be ignored downstream.
            loop: virtual dataset repetition factor per epoch (training only).
        """
        super(DefaultDataset, self).__init__()
        self.data_root = data_root
        self.split = split
        self.transform = Compose(transform)
        self.cache = cache
        self.ignore_index = ignore_index
        self.loop = (
            loop if not test_mode else 1
        )  # force make loop = 1 while in test mode
        self.test_mode = test_mode
        self.test_cfg = test_cfg if test_mode else None

        if test_mode:
            # Build test-time pipeline pieces from test_cfg.
            self.test_voxelize = (
                TRANSFORMS.build(self.test_cfg.voxelize)
                if self.test_cfg.voxelize
                else None
            )
            self.test_crop = (
                TRANSFORMS.build(self.test_cfg.crop) if self.test_cfg.crop else None
            )
            self.post_transform = Compose(self.test_cfg.post_transform)
            self.aug_transform = [Compose(aug) for aug in self.test_cfg.aug_transform]

        self.data_list = self.get_data_list()
        logger = get_root_logger()
        logger.info(
            "Totally {} x {} samples in {} {} set.".format(
                len(self.data_list), self.loop, os.path.basename(self.data_root), split
            )
        )

    def get_data_list(self):
        """Return the list of sample directories for the configured split(s)."""
        if isinstance(self.split, str):
            split_list = [self.split]
        elif isinstance(self.split, Sequence):
            split_list = self.split
        else:
            raise NotImplementedError

        data_list = []
        for split in split_list:
            # A split is either a JSON file listing relative sample paths,
            # or a sub-directory whose children are the samples.
            if os.path.isfile(os.path.join(self.data_root, split)):
                with open(os.path.join(self.data_root, split)) as f:
                    data_list += [
                        os.path.join(self.data_root, data) for data in json.load(f)
                    ]
            else:
                data_list += glob.glob(os.path.join(self.data_root, split, "*"))
        return data_list

    def get_data(self, idx):
        """Load one sample's valid ``.npy`` assets into a dict.

        Guarantees ``segment`` and ``instance`` keys (filled with -1 when the
        corresponding file is absent) and float32 coord/color/normal arrays.
        """
        data_path = self.data_list[idx % len(self.data_list)]
        name = self.get_data_name(idx)
        split = self.get_split_name(idx)
        if self.cache:
            # Served straight from shared memory; assumes prior population.
            cache_name = f"pointcept-{name}"
            return shared_dict(cache_name)

        data_dict = {}
        assets = os.listdir(data_path)
        for asset in assets:
            if not asset.endswith(".npy"):
                continue
            if asset[:-4] not in self.VALID_ASSETS:
                continue
            data_dict[asset[:-4]] = np.load(os.path.join(data_path, asset))
        data_dict["name"] = name
        data_dict["split"] = split

        if "coord" in data_dict.keys():
            data_dict["coord"] = data_dict["coord"].astype(np.float32)

        if "color" in data_dict.keys():
            data_dict["color"] = data_dict["color"].astype(np.float32)

        if "normal" in data_dict.keys():
            data_dict["normal"] = data_dict["normal"].astype(np.float32)

        if "segment" in data_dict.keys():
            data_dict["segment"] = data_dict["segment"].reshape([-1]).astype(np.int32)
        else:
            # No annotation on disk: fill with -1 so downstream code ignores it.
            data_dict["segment"] = (
                np.ones(data_dict["coord"].shape[0], dtype=np.int32) * -1
            )

        if "instance" in data_dict.keys():
            data_dict["instance"] = data_dict["instance"].reshape([-1]).astype(np.int32)
        else:
            data_dict["instance"] = (
                np.ones(data_dict["coord"].shape[0], dtype=np.int32) * -1
            )
        return data_dict

    def get_data_name(self, idx):
        """Sample name is the basename of its directory."""
        return os.path.basename(self.data_list[idx % len(self.data_list)])

    def get_split_name(self, idx):
        """Split name is the name of the directory containing the sample."""
        return os.path.basename(
            os.path.dirname(self.data_list[idx % len(self.data_list)])
        )

    def prepare_train_data(self, idx):
        """Load one sample and apply the training transform pipeline."""
        # load data
        data_dict = self.get_data(idx)
        data_dict = self.transform(data_dict)
        return data_dict

    def prepare_test_data(self, idx):
        """Build the multi-augmentation, multi-fragment test-time sample.

        Returns a dict with the ground-truth ``segment``, the sample ``name``,
        optional ``origin_segment``/``inverse`` pairs, and ``fragment_list``:
        one post-transformed fragment per (augmentation x voxel-part x crop).
        """
        # load data
        data_dict = self.get_data(idx)
        data_dict = self.transform(data_dict)
        result_dict = dict(segment=data_dict.pop("segment"), name=data_dict.pop("name"))
        if "origin_segment" in data_dict:
            assert "inverse" in data_dict
            result_dict["origin_segment"] = data_dict.pop("origin_segment")
            result_dict["inverse"] = data_dict.pop("inverse")

        # One copy of the sample per test-time augmentation.
        data_dict_list = []
        for aug in self.aug_transform:
            data_dict_list.append(aug(deepcopy(data_dict)))

        fragment_list = []
        for data in data_dict_list:
            if self.test_voxelize is not None:
                data_part_list = self.test_voxelize(data)
            else:
                # No voxelization: keep the whole cloud as a single part,
                # with an identity index for later un-mapping.
                data["index"] = np.arange(data["coord"].shape[0])
                data_part_list = [data]
            for data_part in data_part_list:
                if self.test_crop is not None:
                    data_part = self.test_crop(data_part)
                else:
                    data_part = [data_part]
                fragment_list += data_part

        for i in range(len(fragment_list)):
            fragment_list[i] = self.post_transform(fragment_list[i])
        result_dict["fragment_list"] = fragment_list
        return result_dict

    def __getitem__(self, idx):
        if self.test_mode:
            return self.prepare_test_data(idx)
        else:
            return self.prepare_train_data(idx)

    def __len__(self):
        # loop > 1 virtually repeats the dataset within one epoch.
        return len(self.data_list) * self.loop
197
+
198
+
199
@DATASETS.register_module()
class ConcatDataset(Dataset):
    """Concatenation of several registry-built datasets.

    Indexing is flattened over (dataset_idx, sample_idx) pairs; ``loop``
    virtually repeats the whole concatenation.
    """

    def __init__(self, datasets, loop=1):
        super(ConcatDataset, self).__init__()
        # Build every sub-dataset from its config.
        self.datasets = [build_dataset(dataset) for dataset in datasets]
        self.loop = loop
        self.data_list = self.get_data_list()
        logger = get_root_logger()
        logger.info(
            "Totally {} x {} samples in the concat set.".format(
                len(self.data_list), self.loop
            )
        )

    def get_data_list(self):
        """Return flattened (dataset_idx, sample_idx) pairs over all sub-datasets."""
        data_list = []
        for dataset_idx, dataset in enumerate(self.datasets):
            size = len(dataset)
            data_list.extend(
                zip(np.full(size, dataset_idx, dtype=int), np.arange(size))
            )
        return data_list

    def get_data(self, idx):
        """Fetch the sample behind flattened index ``idx`` (wraps modulo size)."""
        dataset_idx, data_idx = self.data_list[idx % len(self.data_list)]
        return self.datasets[dataset_idx][data_idx]

    def get_data_name(self, idx):
        """Delegate naming to the owning sub-dataset."""
        dataset_idx, data_idx = self.data_list[idx % len(self.data_list)]
        return self.datasets[dataset_idx].get_data_name(data_idx)

    def __getitem__(self, idx):
        return self.get_data(idx)

    def __len__(self):
        return len(self.data_list) * self.loop
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/hm3d.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Habitat-Matterport 3D Dataset
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import glob
9
+ import os
10
+ from collections.abc import Sequence
11
+ from .builder import DATASETS
12
+ from .defaults import DefaultDataset
13
+
14
+
15
@DATASETS.register_module()
class HM3DDataset(DefaultDataset):
    """Habitat-Matterport 3D dataset loader."""

    def __init__(
        self,
        force_label=True,
        **kwargs,
    ):
        # When force_label is set, only scenes shipping a segment.npy are listed.
        self.force_label = force_label
        super().__init__(**kwargs)

    def get_single_data_list(self, split):
        """Collect scene directories for one split."""
        if not self.force_label:
            return glob.glob(os.path.join(self.data_root, split, "*"))
        # Keep only scene folders that actually contain annotations.
        segment_files = glob.glob(
            os.path.join(self.data_root, split, "*", "segment.npy")
        )
        return [os.path.dirname(path) for path in segment_files]

    def get_data_list(self):
        """Gather scene directories across one or several splits."""
        if isinstance(self.split, str):
            return self.get_single_data_list(self.split)
        if isinstance(self.split, Sequence):
            data_list = []
            for split in self.split:
                data_list += self.get_single_data_list(split)
            return data_list
        raise NotImplementedError
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/modelnet.py ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ModelNet40 Dataset
3
+
4
+ get sampled point clouds of ModelNet40 (XYZ and normal from mesh, 10k points per shape)
5
+ at "https://shapenet.cs.stanford.edu/media/modelnet40_normal_resampled.zip"
6
+
7
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
8
+ Please cite our work if the code is helpful to you.
9
+ """
10
+
11
+ import os
12
+ import numpy as np
13
+ import copy
14
+ import torch
15
+ from torch.utils.data import Dataset
16
+ from copy import deepcopy
17
+
18
+
19
+ from pointcept.utils.logger import get_root_logger
20
+ from .builder import DATASETS
21
+ from .transform import Compose
22
+
23
+ try:
24
+ import pointops
25
+ except ImportError:
26
+ pointops = None
27
+ print("Warning: pointops could not be imported. Some features may be unavailable.")
28
+
29
+
30
@DATASETS.register_module()
class ModelNetDataset(Dataset):
    """ModelNet40 shape-classification dataset.

    Loads the resampled point clouds (txt files of per-point rows), optionally
    downsamples each shape, and caches all parsed shapes in a single ``.pth``
    record for fast subsequent runs.
    """

    def __init__(
        self,
        split="train",
        data_root="data/modelnet40",
        class_names=None,
        transform=None,
        num_points=8192,
        uniform_sampling=True,
        save_record=True,
        test_mode=False,
        test_cfg=None,
        loop=1,
    ):
        """
        Args:
            split: split name; must be a string (see ``get_data_list``).
            data_root: dataset root directory.
            class_names: ordered class names; index becomes the category id.
            transform: transform config applied per sample.
            num_points: points kept per shape (None keeps all).
            uniform_sampling: use farthest-point sampling (requires the
                ``pointops`` CUDA extension) instead of truncation.
            save_record: persist the parsed-shape cache to disk.
            test_mode: enables the voting (multi-augmentation) pipeline.
            test_cfg: config consulted only when ``test_mode`` is True.
            loop: virtual dataset repetition factor per epoch (training only).
        """
        super().__init__()
        self.data_root = data_root
        # Map class name -> integer category id by position.
        self.class_names = dict(zip(class_names, range(len(class_names))))
        self.split = split
        self.num_point = num_points
        self.uniform_sampling = uniform_sampling
        self.transform = Compose(transform)
        self.loop = (
            loop if not test_mode else 1
        )  # force make loop = 1 while in test mode
        self.test_mode = test_mode
        self.test_cfg = test_cfg if test_mode else None
        if test_mode:
            self.post_transform = Compose(self.test_cfg.post_transform)
            self.aug_transform = [Compose(aug) for aug in self.test_cfg.aug_transform]

        self.data_list = self.get_data_list()
        logger = get_root_logger()
        logger.info(
            "Totally {} x {} samples in {} set.".format(
                len(self.data_list), self.loop, split
            )
        )

        # check, prepare record
        # Record name encodes the sampling settings so different configs
        # don't share a stale cache.
        record_name = f"modelnet40_{self.split}"
        if num_points is not None:
            record_name += f"_{num_points}points"
            if uniform_sampling:
                record_name += "_uniform"
        record_path = os.path.join(self.data_root, f"{record_name}.pth")
        if os.path.isfile(record_path):
            logger.info(f"Loading record: {record_name} ...")
            # weights_only=False: the record stores plain numpy dicts, not tensors.
            self.data = torch.load(record_path, weights_only=False)
        else:
            logger.info(f"Preparing record: {record_name} ...")
            self.data = {}
            for idx in range(len(self.data_list)):
                data_name = self.data_list[idx]
                logger.info(f"Parsing data [{idx}/{len(self.data_list)}]: {data_name}")
                self.data[data_name] = self.get_data(idx)
            if save_record:
                torch.save(self.data, record_path)

    def get_data(self, idx):
        """Return one shape as ``dict(coord, normal, category)``.

        Served from the in-memory record when available; otherwise parsed from
        the txt file (uniform sampling path requires pointops + CUDA).
        """
        data_idx = idx % len(self.data_list)
        data_name = self.data_list[data_idx]
        if data_name in self.data.keys():
            # Deep copy so downstream transforms can't corrupt the cache.
            return copy.deepcopy(self.data[data_name])
        else:
            # e.g. "airplane_0001" -> class folder "airplane".
            data_shape = "_".join(data_name.split("_")[0:-1])
            data_path = os.path.join(
                self.data_root, data_shape, self.data_list[data_idx] + ".txt"
            )
            # Rows are comma-separated; columns 0:3 are xyz, 3:6 normals.
            data = np.loadtxt(data_path, delimiter=",").astype(np.float32)
            if self.num_point is not None:
                if self.uniform_sampling:
                    with torch.no_grad():
                        mask = pointops.farthest_point_sampling(
                            torch.tensor(data).float().cuda(),
                            torch.tensor([len(data)]).long().cuda(),
                            torch.tensor([self.num_point]).long().cuda(),
                        )
                    data = data[mask.cpu()]
                else:
                    # Cheap fallback: keep the first num_point rows.
                    data = data[: self.num_point]
            coord, normal = data[:, 0:3], data[:, 3:6]
            category = np.array([self.class_names[data_shape]])
            return dict(coord=coord, normal=normal, category=category)

    def get_data_list(self):
        """Read the shape-name list for this split from modelnet40_<split>.txt."""
        assert isinstance(self.split, str)
        split_path = os.path.join(
            self.data_root, "modelnet40_{}.txt".format(self.split)
        )
        data_list = np.loadtxt(split_path, dtype="str")
        return data_list

    def get_data_name(self, idx):
        data_idx = idx % len(self.data_list)
        return self.data_list[data_idx]

    def __getitem__(self, idx):
        if self.test_mode:
            return self.prepare_test_data(idx)
        else:
            return self.prepare_train_data(idx)

    def __len__(self):
        # loop > 1 virtually repeats the dataset within one epoch.
        return len(self.data_list) * self.loop

    def prepare_train_data(self, idx):
        """Load one shape and apply the training transform pipeline."""
        data_dict = self.get_data(idx)
        data_dict = self.transform(data_dict)
        return data_dict

    def prepare_test_data(self, idx):
        """Build the voting sample: one post-transformed copy per augmentation."""
        assert idx < len(self.data_list)
        data_dict = self.get_data(idx)
        # Category is held out so augmentations only touch the geometry.
        category = data_dict.pop("category")
        data_dict = self.transform(data_dict)
        data_dict_list = []
        for aug in self.aug_transform:
            data_dict_list.append(aug(deepcopy(data_dict)))
        for i in range(len(data_dict_list)):
            data_dict_list[i] = self.post_transform(data_dict_list[i])
        data_dict = dict(
            voting_list=data_dict_list,
            category=category,
            name=self.get_data_name(idx),
        )
        return data_dict
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/nuscenes.py ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ nuScenes Dataset
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com), Zheng Zhang
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import os
9
+ import numpy as np
10
+ from collections.abc import Sequence
11
+ import pickle
12
+
13
+ from .builder import DATASETS
14
+ from .defaults import DefaultDataset
15
+
16
+
17
@DATASETS.register_module()
class NuScenesDataset(DefaultDataset):
    """nuScenes LiDAR segmentation dataset driven by pickled info files."""

    def __init__(self, sweeps=10, ignore_index=-1, **kwargs):
        """
        Args:
            sweeps: number of aggregated LiDAR sweeps encoded in the info-file name.
            ignore_index: training id assigned to all dropped raw classes.
        """
        self.sweeps = sweeps
        self.ignore_index = ignore_index
        # Raw lidarseg id -> training id lookup (dropped ids -> ignore_index).
        self.learning_map = self.get_learning_map(ignore_index)
        super().__init__(ignore_index=ignore_index, **kwargs)

    def get_info_path(self, split):
        """Return the path of the pickled info file for one split."""
        assert split in ["train", "val", "test"]
        if split == "train":
            return os.path.join(
                self.data_root, "info", f"nuscenes_infos_{self.sweeps}sweeps_train.pkl"
            )
        elif split == "val":
            return os.path.join(
                self.data_root, "info", f"nuscenes_infos_{self.sweeps}sweeps_val.pkl"
            )
        elif split == "test":
            return os.path.join(
                self.data_root, "info", f"nuscenes_infos_{self.sweeps}sweeps_test.pkl"
            )
        else:
            raise NotImplementedError

    def get_data_list(self):
        """Concatenate the per-sample info records of all configured splits."""
        if isinstance(self.split, str):
            info_paths = [self.get_info_path(self.split)]
        elif isinstance(self.split, Sequence):
            info_paths = [self.get_info_path(s) for s in self.split]
        else:
            raise NotImplementedError
        data_list = []
        for info_path in info_paths:
            with open(info_path, "rb") as f:
                info = pickle.load(f)
            data_list.extend(info)
        return data_list

    def get_data(self, idx):
        """Load one LiDAR frame as ``dict(coord, strength, segment, name)``."""
        data = self.data_list[idx % len(self.data_list)]
        lidar_path = os.path.join(self.data_root, "raw", data["lidar_path"])
        # Raw file is flat float32; 5 values per point
        # (xyz + intensity + one more channel, unused here).
        points = np.fromfile(str(lidar_path), dtype=np.float32, count=-1).reshape(
            [-1, 5]
        )
        coord = points[:, :3]
        strength = points[:, 3].reshape([-1, 1]) / 255  # scale strength to [0, 1]

        if "gt_segment_path" in data.keys():
            gt_segment_path = os.path.join(
                self.data_root, "raw", data["gt_segment_path"]
            )
            # lidarseg labels are stored as one uint8 per point.
            segment = np.fromfile(
                str(gt_segment_path), dtype=np.uint8, count=-1
            ).reshape([-1])
            # Remap raw ids to training ids via the lookup dict.
            segment = np.vectorize(self.learning_map.__getitem__)(segment).astype(
                np.int64
            )
        else:
            # No annotation (e.g. test split): mark all points as ignored.
            segment = np.ones((points.shape[0],), dtype=np.int64) * self.ignore_index
        data_dict = dict(
            coord=coord,
            strength=strength,
            segment=segment,
            name=self.get_data_name(idx),
        )
        return data_dict

    def get_data_name(self, idx):
        # return data name for lidar seg, optimize the code when need to support detection
        return self.data_list[idx % len(self.data_list)]["lidar_token"]

    @staticmethod
    def get_learning_map(ignore_index):
        """Map raw nuScenes lidarseg ids (0-31) onto 16 training classes.

        Ids with no training counterpart map to ``ignore_index``; several raw
        ids are merged into one training class (e.g. 2/3/4/6 -> class 6).
        """
        learning_map = {
            0: ignore_index,
            1: ignore_index,
            2: 6,
            3: 6,
            4: 6,
            5: ignore_index,
            6: 6,
            7: ignore_index,
            8: ignore_index,
            9: 0,
            10: ignore_index,
            11: ignore_index,
            12: 7,
            13: ignore_index,
            14: 1,
            15: 2,
            16: 2,
            17: 3,
            18: 4,
            19: ignore_index,
            20: ignore_index,
            21: 5,
            22: 8,
            23: 9,
            24: 10,
            25: 11,
            26: 12,
            27: 13,
            28: 14,
            29: ignore_index,
            30: 15,
            31: ignore_index,
        }
        return learning_map
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/precomputed_features.py ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import numpy as np
3
+ from torch.utils.data import Dataset
4
+ from collections.abc import Sequence
5
+ from pointcept.utils.logger import get_root_logger
6
+ from .builder import DATASETS
7
+ from .defaults import DefaultDataset
8
+ import json
9
+ import glob
10
+
11
+
12
+ @DATASETS.register_module()
13
+ class PrecomputedFeaturesDataset(DefaultDataset):
14
+ """
15
+ Flexible dataset for loading precomputed features from user-specified assets in each sample directory.
16
+ The user provides a mapping from data_dict keys to asset filenames (without .npy extension).
17
+ Example config:
18
+ mapping = {
19
+ 'coord': 'my_coord', (or a list of file names ['X', 'Y', 'Z'])
20
+ 'segment': 'my_label',
21
+ 'feature1': 'my_feature',
22
+ # ...
23
+ }
24
+ """
25
+
26
+ def __init__(
27
+ self,
28
+ mapping,
29
+ **kwargs,
30
+ ):
31
+ assert "coord" in mapping, "'coord' key must be present in mapping."
32
+ assert "segment" in mapping, "'segment' key must be present in mapping."
33
+ self.mapping = mapping
34
+ super().__init__(**kwargs)
35
+
36
+ def get_data(self, idx):
37
+ data_path = self.data_list[idx % len(self.data_list)]
38
+ name = self.get_data_name(idx)
39
+ split = self.get_split_name(idx)
40
+ if self.cache:
41
+ cache_name = f"pointcept-{name}"
42
+ return shared_dict(cache_name)
43
+
44
+ data_dict = {}
45
+ for key, asset in self.mapping.items():
46
+ if isinstance(asset, str):
47
+ asset_file = os.path.join(data_path, asset + ".npy")
48
+ if os.path.exists(asset_file):
49
+ asset = np.load(asset_file)
50
+ if asset.ndim == 1:
51
+ asset = np.expand_dims(asset, axis=1)
52
+ data_dict[key] = asset
53
+ else:
54
+ raise FileNotFoundError(
55
+ f"Asset file {asset_file} not found for key {key}."
56
+ )
57
+ elif isinstance(asset, (list, tuple)):
58
+ arrays = []
59
+ for a in asset:
60
+ asset_file = os.path.join(data_path, a + ".npy")
61
+ if os.path.exists(asset_file):
62
+ asset = np.load(asset_file)
63
+ if asset.ndim == 1:
64
+ asset = np.expand_dims(asset, axis=1)
65
+ arrays.append(asset)
66
+ else:
67
+ raise FileNotFoundError(
68
+ f"Asset file {asset_file} not found for key {key}."
69
+ )
70
+ data_dict[key] = np.concatenate(arrays, axis=1)
71
+ else:
72
+ raise ValueError(
73
+ f"Mapping for key {key} must be a string or list/tuple of strings."
74
+ )
75
+
76
+ data_dict["name"] = name
77
+ data_dict["split"] = split
78
+ data_dict["segment"] = data_dict["segment"].astype(np.int32).squeeze()
79
+
80
+ return data_dict
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/arkitscenes/preprocess_arkitscenes_mesh.py ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Preprocessing ArkitScenes
3
+ """
4
+
5
+ import os
6
+ import argparse
7
+ import glob
8
+ import plyfile
9
+ import numpy as np
10
+ import pandas as pd
11
+ import multiprocessing as mp
12
+ from concurrent.futures import ProcessPoolExecutor
13
+ from itertools import repeat
14
+ from pathlib import Path
15
+
16
+
17
+ def read_plymesh(filepath):
18
+ """Read ply file and return it as numpy array. Returns None if emtpy."""
19
+ with open(filepath, "rb") as f:
20
+ plydata = plyfile.PlyData.read(f)
21
+ if plydata.elements:
22
+ vertices = pd.DataFrame(plydata["vertex"].data).values
23
+ faces = np.stack(plydata["face"].data["vertex_indices"], axis=0)
24
+ return vertices, faces
25
+
26
+
27
+ def face_normal(vertex, face):
28
+ v01 = vertex[face[:, 1]] - vertex[face[:, 0]]
29
+ v02 = vertex[face[:, 2]] - vertex[face[:, 0]]
30
+ vec = np.cross(v01, v02)
31
+ length = np.sqrt(np.sum(vec**2, axis=1, keepdims=True)) + 1.0e-8
32
+ nf = vec / length
33
+ area = length * 0.5
34
+ return nf, area
35
+
36
+
37
+ def vertex_normal(vertex, face):
38
+ nf, area = face_normal(vertex, face)
39
+ nf = nf * area
40
+
41
+ nv = np.zeros_like(vertex)
42
+ for i in range(face.shape[0]):
43
+ nv[face[i]] += nf[i]
44
+
45
+ length = np.sqrt(np.sum(nv**2, axis=1, keepdims=True)) + 1.0e-8
46
+ nv = nv / length
47
+ return nv
48
+
49
+
50
+ def parse_scene(scene_path, output_dir):
51
+ print(f"Parsing scene {scene_path}")
52
+ split = os.path.basename(os.path.dirname(os.path.dirname(scene_path)))
53
+ scene_id = os.path.basename(os.path.dirname(scene_path))
54
+ vertices, faces = read_plymesh(scene_path)
55
+ coords = vertices[:, :3]
56
+ colors = vertices[:, 3:6]
57
+ normals = vertex_normal(coords, faces)
58
+ data_dict = dict(coord=coords, color=colors, normal=normals)
59
+ os.makedirs(output_dir / split / scene_id, exist_ok=True)
60
+ for key in data_dict.keys():
61
+ np.save(output_dir / split / scene_id / f"{key}.npy", data_dict[key])
62
+
63
+
64
+ if __name__ == "__main__":
65
+ parser = argparse.ArgumentParser()
66
+ parser.add_argument(
67
+ "--dataset_root",
68
+ required=True,
69
+ help="Path to the ArkitScenes dataset containing 3dod folder",
70
+ )
71
+ parser.add_argument(
72
+ "--output_root",
73
+ required=True,
74
+ help="Output path where train/val folders will be located",
75
+ )
76
+ parser.add_argument(
77
+ "--num_workers",
78
+ default=mp.cpu_count(),
79
+ type=int,
80
+ help="Num workers for preprocessing.",
81
+ )
82
+ opt = parser.parse_args()
83
+ # Create output directories
84
+ train_output_dir = os.path.join(opt.output_root, "Training")
85
+ os.makedirs(train_output_dir, exist_ok=True)
86
+ val_output_dir = os.path.join(opt.output_root, "Validation")
87
+ os.makedirs(val_output_dir, exist_ok=True)
88
+ # Load scene paths
89
+ scene_paths = sorted(glob.glob(opt.dataset_root + "/3dod/*/*/*_mesh.ply"))
90
+ # Preprocess data.
91
+ pool = ProcessPoolExecutor(max_workers=opt.num_workers)
92
+ print("Processing scenes...")
93
+ _ = list(pool.map(parse_scene, scene_paths, repeat(Path(opt.output_root))))
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/hm3d/filter_hm3d.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Filtering Script for Habitat-Matterport 3D Dataset
3
+
4
+ filter out and only keep top 10,000 size of processed HM3D
5
+
6
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
7
+ Please cite our work if the code is helpful to you.
8
+ """
9
+
10
+ import os
11
+ import glob
12
+ import argparse
13
+ import numpy as np
14
+ import shutil
15
+ import tqdm
16
+ from pathlib import Path
17
+ import multiprocessing as mp
18
+ from concurrent.futures import ProcessPoolExecutor
19
+ from itertools import repeat
20
+
21
+
22
+ def count_scene(data_path, info_list, lock):
23
+ data_path = Path(data_path)
24
+ data_name = data_path.name
25
+ data_split = data_path.parent.name
26
+ print(f"Counting {data_name} in {data_split}..")
27
+ data_size = np.load(data_path / "color.npy").shape[0]
28
+ with lock:
29
+ info_list.append(dict(name=data_name, split=data_split, size=data_size))
30
+
31
+
32
+ def main_process():
33
+ parser = argparse.ArgumentParser()
34
+ parser.add_argument(
35
+ "--dataset_root",
36
+ required=True,
37
+ help="Path to the Habitat-Matterport 3D dataset containing scene folders",
38
+ )
39
+
40
+ parser.add_argument(
41
+ "--num_keep",
42
+ default=10000,
43
+ type=int,
44
+ help="Number of scenes that kept for the dataset.",
45
+ )
46
+ parser.add_argument(
47
+ "--num_workers",
48
+ default=mp.cpu_count(),
49
+ type=int,
50
+ help="Num workers for preprocessing.",
51
+ )
52
+ args = parser.parse_args()
53
+
54
+ scene_list = glob.glob(os.path.join(args.dataset_root, "*", "*"))
55
+ if len(scene_list) <= args.num_keep:
56
+ return
57
+ manager = mp.Manager()
58
+ lock = manager.Lock()
59
+ info_list = manager.list()
60
+
61
+ # Preprocess data.
62
+ print("Processing scenes...")
63
+ pool = ProcessPoolExecutor(max_workers=args.num_workers)
64
+ _ = list(
65
+ pool.map(
66
+ count_scene,
67
+ scene_list,
68
+ repeat(info_list),
69
+ repeat(lock),
70
+ )
71
+ )
72
+ name_list = np.array([scene["name"] for scene in info_list])
73
+ split_list = np.array([scene["split"] for scene in info_list])
74
+ size_list = np.array([scene["size"] for scene in info_list])
75
+ remove_index = np.argsort(size_list)[: len(scene_list) - args.num_keep]
76
+
77
+ for split in np.unique(split_list):
78
+ os.makedirs(os.path.join(args.dataset_root, f"{split}_rm"), exist_ok=True)
79
+
80
+ source = [
81
+ os.path.join(args.dataset_root, split_list[i], name_list[i])
82
+ for i in remove_index
83
+ ]
84
+ target = [
85
+ os.path.join(args.dataset_root, f"{split_list[i]}_rm") for i in remove_index
86
+ ]
87
+ for s, t in zip(source, target):
88
+ shutil.move(s, t)
89
+
90
+
91
+ if __name__ == "__main__":
92
+ main_process()
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/hm3d/hm3d_constants.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pathlib import Path
2
+
3
+
4
+ CLASS_LABELS_40 = (
5
+ "wall", # 0
6
+ "floor", # 1
7
+ "chair", # 2
8
+ "door", # 3
9
+ "table", # 4
10
+ "picture", # 5
11
+ "cabinet", # 6
12
+ "cushion", # 7
13
+ "window", # 8
14
+ "sofa", # 9
15
+ "bed", # 10
16
+ "curtain", # 11
17
+ "chest_of_drawers", # 12
18
+ "plant", # 13
19
+ "sink", # 14
20
+ "stairs", # 15
21
+ "ceiling", # 16
22
+ "toilet", # 17
23
+ "stool", # 18
24
+ "towel", # 19
25
+ "mirror", # 20
26
+ "tv_monitor", # 21
27
+ "shower", # 22
28
+ "column", # 23
29
+ "bathtub", # 24
30
+ "counter", # 25
31
+ "fireplace", # 26
32
+ "lighting", # 27
33
+ "beam", # 28
34
+ "railing", # 29
35
+ "shelving", # 30
36
+ "blinds", # 31
37
+ "gym_equipment", # 32
38
+ "seating", # 33
39
+ "board_panel", # 34
40
+ "furniture", # 35
41
+ "appliances", # 36
42
+ "clothes", # 37
43
+ "objects", # 38
44
+ "misc", # 39
45
+ )
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/hm3d/hm3dsem_category_mappings.tsv ADDED
@@ -0,0 +1,2368 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ raw_category category mpcat40
2
+ ,1 unknown unlabeled
3
+ ,4 unknown unlabeled
4
+ ,5 unknown unlabeled
5
+ ,6 unknown unlabeled
6
+ /unknown/ probably decoration unknown unlabeled
7
+ acoustic panel acoustic panel misc
8
+ advertisement advertisement misc
9
+ aemchair armchair chair
10
+ affice chair office chair chair
11
+ air conditioner air conditioner objects
12
+ air conditioning air conditioning objects
13
+ air duct air duct misc
14
+ air freshener air freshener objects
15
+ air heater air heater misc
16
+ air hockey air hockey objects
17
+ air purifier air purifier objects
18
+ air refresher air refresher objects
19
+ air vent air vent misc
20
+ air vent fan air vent fan objects
21
+ air vent installation air vent installation misc
22
+ aisle frame aisle frame misc
23
+ alarm alarm objects
24
+ alarm clock alarm clock objects
25
+ alarm control alarm control misc
26
+ alarm controller alarm controller misc
27
+ album album objects
28
+ albums albums objects
29
+ alcohol bottles alcohol bottles objects
30
+ american flag american flag misc
31
+ amp amplifier objects
32
+ amplifier amplifier objects
33
+ antique clock antique clock objects
34
+ antique telehone antique telephone objects
35
+ antique telephone antique telephone objects
36
+ antlers antlers misc
37
+ apple apple objects
38
+ appliance appliance misc
39
+ apron apron misc
40
+ aquarium aquarium misc
41
+ arcade arcade misc
42
+ arcade game arcade game misc
43
+ arch arch misc
44
+ archway archway door
45
+ arm chair armchair chair
46
+ armchair armchair chair
47
+ armchir armchair chair
48
+ art picture picture
49
+ art frame art frame misc
50
+ art piece art piece misc
51
+ art work artwork picture
52
+ art/clutter art/clutter misc
53
+ art/muscle shell art/muscle shell misc
54
+ art/statue art/statue misc
55
+ artframe art frame misc
56
+ artwork artwork picture
57
+ artwork frame artwork frame misc
58
+ ashtray ashtray objects
59
+ attic door attic door misc
60
+ attic entrance attic entrance misc
61
+ attic hatch attic hatch misc
62
+ audio cable audio cable misc
63
+ audio cables audio cables misc
64
+ audio player audio player objects
65
+ axe axe objects
66
+ baby chair baby chair misc
67
+ baby changing station baby changing station misc
68
+ baby changing table baby changing table misc
69
+ baby seat baby seat misc
70
+ backpack backpack objects
71
+ backrest backrest objects
72
+ backsplash backsplash misc
73
+ bad unknown unlabeled
74
+ badside cabinet bedside cabinet chest_of_drawers
75
+ bag bag objects
76
+ bag with sheets bag with sheets objects
77
+ bag with something bag with something objects
78
+ bags bags objects
79
+ balcony balcony misc
80
+ balcony railing balcony railing misc
81
+ balcony reiling balcony railing misc
82
+ balk ball misc
83
+ ball ball misc
84
+ ball chair ball chair chair
85
+ ball pool ball pool objects
86
+ ball pouffe ball pouffe seating
87
+ balustrade balustrade railing
88
+ banister banister railing
89
+ banner banner misc
90
+ bar bar objects
91
+ bar cabinet bar cabinet cabinet
92
+ bar chair bar chair chair
93
+ bar soap bar soap misc
94
+ barbecue barbecue misc
95
+ barbell barbell gym_equipment
96
+ barrel barrel misc
97
+ bascet basket objects
98
+ base base misc
99
+ baseball bat baseball bat objects
100
+ baseball cap baseball cap objects
101
+ baseboard baseboard wall
102
+ basin basin sink
103
+ basin faucet basin faucet objects
104
+ basket basket objects
105
+ basket /w books basket with books objects
106
+ basket of fruits basket of fruits objects
107
+ basket of something basket of something misc
108
+ basket of towels basket of towels misc
109
+ basket with clothes basket with clothes objects
110
+ basket wqith clothes basket with clothes objects
111
+ basketball hoop basketball hoop gym_equipment
112
+ basketballs basketballs objects
113
+ baskets baskets objects
114
+ bat tube bath tub bathtub
115
+ bath bath bathtub
116
+ bath bar bath bar misc
117
+ bath cabinet bath cabinet cabinet
118
+ bath carpet bath carpet floor
119
+ bath cosmetics bath cosmetics objects
120
+ bath curtain bath curtain curtain
121
+ bath curtain bar bath curtain bar misc
122
+ bath dial bath dial misc
123
+ bath door bath door door
124
+ bath door frame bath door frame door
125
+ bath faucet bath faucet objects
126
+ bath floor bath floor floor
127
+ bath grab bar bath grab bar misc
128
+ bath hanger bath hanger objects
129
+ bath mat bath mat floor
130
+ bath shelf bath shelf shelving
131
+ bath shower bath shower shower
132
+ bath side table bath side table table
133
+ bath sink bath sink misc
134
+ bath tap bath tap objects
135
+ bath towel bath towel towel
136
+ bath towels bath towels towel
137
+ bath tub bath tub bathtub
138
+ bath tube bath tub bathtub
139
+ bath utensil bath utensil misc
140
+ bath wall bath wall misc
141
+ bathmat bathmat floor
142
+ bathrobe bathrobe clothes
143
+ bathroom mat bathroom mat floor
144
+ bathroom accessory bathroom accessory clothes
145
+ bathroom art bathroom art objects
146
+ bathroom cabinet bathroom cabinet cabinet
147
+ bathroom cabinet door bathroom cabinet door misc
148
+ bathroom cabinet drawer bathroom cabinet drawer chest_of_drawers
149
+ bathroom cabint bathroom cabinet cabinet
150
+ bathroom counter bathroom counter counter
151
+ bathroom floor bathroom floor floor
152
+ bathroom glass bathroom glass objects
153
+ bathroom mat bathroom mat floor
154
+ bathroom mirror mirror mirror
155
+ bathroom rug bathroom rug floor
156
+ bathroom shelf bathroom shelf shelving
157
+ bathroom stuff bathroom stuff objects
158
+ bathroom towel bathroom towel towel
159
+ bathroom utencills bathroom utensils objects
160
+ bathroom utencils bathroom utensil objects
161
+ bathroom utensil bathroom utensil objects
162
+ bathroom wall bathroom wall wall
163
+ bathroom window bathroom window window
164
+ bathtub bathtub bathtub
165
+ bathtub knob bathtub knob misc
166
+ bathtub platform bathtub platform misc
167
+ bathtub tap bathtub tap objects
168
+ bathtub utensil bathtub utensil misc
169
+ bathtube bathtub bathtub
170
+ batroom art bathroom art objects
171
+ beam beam beam
172
+ bean bag chair bean bag chair chair
173
+ beanbag beanbag chair
174
+ beanbag chair beanbag chair chair
175
+ bed bed bed
176
+ bed base bed base bed
177
+ bed cabinet bed cabinet chest_of_drawers
178
+ bed cabinet lamp bed cabinet lamp lighting
179
+ bed comforter bed comforter misc
180
+ bed curtain bed curtain misc
181
+ bed for pet bed for pet misc
182
+ bed frame bedframe bed
183
+ bed ladder bed ladder stairs
184
+ bed lamp bedside lamp lighting
185
+ bed light bed light misc
186
+ bed sheet bed sheet bed
187
+ bed small bed small misc
188
+ bed stand bed stand misc
189
+ bed table bed table table
190
+ bedding bedding objects
191
+ bedframe bedframe bed
192
+ bedpost bedpost bed
193
+ bedroom ceiling bedroom ceiling misc
194
+ bedroom table bedroom table chest_of_drawers
195
+ bedside cabinet bedside cabinet chest_of_drawers
196
+ bedside cabinet door bedside cabinet door misc
197
+ bedside cabinet drawer bedside cabinet drawer chest_of_drawers
198
+ bedside lamp bedside lamp lighting
199
+ bedside table bedside table chest_of_drawers
200
+ beer crate beer crate misc
201
+ bell bell misc
202
+ belt belt misc
203
+ bench bench seating
204
+ beside table bedside table chest_of_drawers
205
+ bicycle bicycle objects
206
+ bicycle helmet bicycle helmet objects
207
+ bicycle helmets bicycle helmets objects
208
+ bidet bidet toilet
209
+ big bag big bag objects
210
+ bike bicycle objects
211
+ billiard balls billiard balls objects
212
+ billiard cue billiard cue objects
213
+ billiard cues billiard cues objects
214
+ bin bin objects
215
+ binder binder misc
216
+ binders binders misc
217
+ birdhouse birdhouse misc
218
+ brackboard blackboard objects
219
+ blackboard blackboard objects
220
+ blanket blanket objects
221
+ blanket basket blanket basket objects
222
+ blankets blankets objects
223
+ blind blinds blinds
224
+ blinder blinds blinds
225
+ blinds blinds blinds
226
+ blouse blouse clothes
227
+ board board board_panel
228
+ board game board game misc
229
+ board games board games misc
230
+ board with keys board with keys objects
231
+ boards boards board_panel
232
+ boat model boat model misc
233
+ boiler boiler misc
234
+ bojler boiler misc
235
+ bonsai tree bonsai tree misc
236
+ book book objects
237
+ book cabinet book cabinet cabinet
238
+ book display book display misc
239
+ book rack book rack misc
240
+ book shape bookshelf shelving
241
+ book shelf bookshelf shelving
242
+ bookcase bookshelf shelving
243
+ books book objects
244
+ bookshelf bookshelf shelving
245
+ bookstand bookstand shelving
246
+ bootles bottles objects
247
+ boots boots objects
248
+ bottle bottle objects
249
+ bottle dispenser bottle dispenser objects
250
+ bottle of soap bottle of soap misc
251
+ bottle of detergent bottle of detergent misc
252
+ bottle of soap bottle of soap misc
253
+ bottle of water bottle of water objects
254
+ bottle of wine bottle of wine objects
255
+ bottle opener bottle opener misc
256
+ bottlels bottles objects
257
+ bottles bottles objects
258
+ bottles of water bottles of water objects
259
+ bottles of wine bottles of wine misc
260
+ bottom of stairs bottom of stairs misc
261
+ bouquet bouquet objects
262
+ bowl bowl objects
263
+ bowl of fruit bowl of fruit objects
264
+ bowl of fruits bowl of fruits objects
265
+ bowl of sweets bowl of sweets objects
266
+ bowl with sweets bowl with sweets objects
267
+ bowls bowls objects
268
+ box box objects
269
+ box with books box with books misc
270
+ box /w books box with books misc
271
+ box /w shoes box with shoes misc
272
+ box /w toys box with toys misc
273
+ box of tissues box of tissues misc
274
+ box of food box of food misc
275
+ box of fruit box of fruit misc
276
+ box of paper box of paper misc
277
+ box of something box of something misc
278
+ box of tissue box of tissue misc
279
+ box of tissues box of tissues misc
280
+ box pen box pen misc
281
+ box tissues box of tissues misc
282
+ box with jewellery box with jewelry misc
283
+ box with photos box with photos misc
284
+ box with tea box with tea misc
285
+ boxes boxes objects
286
+ boxes /w books boxes with books misc
287
+ boxing ring boxing ring objects
288
+ bread bread misc
289
+ bread bin bread bin misc
290
+ bread box bread box misc
291
+ breadbox breadbox misc
292
+ breifcase briefcase objects
293
+ bricks bricks misc
294
+ bridge bridge misc
295
+ briefcase briefcase objects
296
+ brochure brochure objects
297
+ brochures brochures objects
298
+ broom broom objects
299
+ broom stick broomstick misc
300
+ broomstick broomstick misc
301
+ brtochure brochure objects
302
+ brush brush objects
303
+ brushes brushes objects
304
+ bucket bucket objects
305
+ buckets buckets objects
306
+ buffet buffet counter
307
+ bulletin board bulletin board tv_monitor
308
+ bunk bed bunk bed bed
309
+ bureau bureau chest_of_drawers
310
+ bust bust objects
311
+ button button misc
312
+ cabidet cabinet cabinet
313
+ cabient cabinet cabinet
314
+ cabiinet cabinet cabinet
315
+ cabinet cabinet cabinet
316
+ cabinet door cabinet door misc
317
+ cabinet /otherroom cabinet /otherroom cabinet
318
+ cabinet /w clutter cabinet /w clutter cabinet
319
+ cabinet /w cluttered art cabinet /w cluttered art misc
320
+ cabinet clutter cabinet clutter misc
321
+ cabinet counter cabinet counter misc
322
+ cabinet door cabinet door misc
323
+ cabinet drawer cabinet drawer cabinet
324
+ cabinet kitchen cabinet kitchen cabinet
325
+ cabinet mirror cabinet mirror cabinet
326
+ cabinet parts cabinet parts cabinet
327
+ cabinet table cabinet table table
328
+ cable cable misc
329
+ cables cables misc
330
+ cage cage objects
331
+ cailing lamp ceiling lamp lighting
332
+ cair chair chair
333
+ cake cake misc
334
+ calander calendar misc
335
+ calculator calculator appliances
336
+ calendar calendar misc
337
+ camera camera objects
338
+ camping chair camping chair chair
339
+ can can objects
340
+ can of paint can of paint misc
341
+ canal canal misc
342
+ candel candle objects
343
+ candelabra candelabra objects
344
+ candle candle objects
345
+ candle holder candle holder objects
346
+ candle stand candle stand objects
347
+ candles candle objects
348
+ candlestick candlestick objects
349
+ canister canister misc
350
+ canoe canoe objects
351
+ canopy canopy misc
352
+ cans cans objects
353
+ cans of paint cans of paint objects
354
+ canvas canvas misc
355
+ cap cap clothes
356
+ car car objects
357
+ car model car model objects
358
+ carboard box cardboard box misc
359
+ card card misc
360
+ cardboard cardboard misc
361
+ cardboard box cardboard box misc
362
+ cardboardbox cardboard box misc
363
+ carpet carpet floor
364
+ carpet roll carpet roll misc
365
+ cart cart objects
366
+ cartboard cardboard misc
367
+ case case misc
368
+ cases cases misc
369
+ cash register cash register objects
370
+ casket casket misc
371
+ cat cat misc
372
+ cat bed cat bed bed
373
+ cat food cat food misc
374
+ cat food bag cat food bag misc
375
+ cat litter box cat litter box toilet
376
+ cat tree cat tree objects
377
+ cats toilet cat toilet toilet
378
+ cd cd objects
379
+ cd player cd player appliances
380
+ cd's cds objects
381
+ cds cds objects
382
+ cebinet cabinet cabinet
383
+ ceiiling ceiling ceiling
384
+ ceiling balk unknown unlabeled
385
+ ceiilng ceiling ceiling
386
+ ceiliing ceiling ceiling
387
+ ceiliing lamp ceiling lamp lighting
388
+ ceiling ceiling ceiling
389
+ ceiling lamp ceiling lamp lighting
390
+ ceiling light ceiling light lighting
391
+ ceiling vent ceiling vent misc
392
+ ceiling /otheroom ceiling ceiling
393
+ ceiling /otherroom ceiling ceiling
394
+ ceiling air vent ceiling vent misc
395
+ ceiling arch ceiling arch ceiling
396
+ ceiling bedroom ceiling bedroom misc
397
+ ceiling boarder ceiling boarder misc
398
+ ceiling border ceiling border ceiling
399
+ ceiling chasis ceiling chassis ceiling
400
+ ceiling corridor ceiling corridor misc
401
+ ceiling decorative lamp ceiling decorative lamp lighting
402
+ ceiling dome ceiling dome misc
403
+ ceiling door ceiling door misc
404
+ ceiling duck ceiling duct ceiling
405
+ ceiling duct ceiling duct ceiling
406
+ ceiling fan ceiling fan objects
407
+ ceiling fan lamp ceiling fan lamp lighting
408
+ ceiling fan vent ceiling fan vent misc
409
+ ceiling fire detector ceiling fire detector objects
410
+ ceiling fixture ceiling fixture misc
411
+ ceiling floor ceiling floor floor
412
+ ceiling hanger ceiling hanger objects
413
+ ceiling ladder ceiling ladder objects
414
+ ceiling lamp ceiling lamp lighting
415
+ ceiling lamp hanger ceiling lamp hanger lighting
416
+ ceiling lamp rail ceiling lamp rail lighting
417
+ ceiling lan ceiling fan objects
418
+ ceiling light ceiling light lighting
419
+ ceiling light fixture connection ceiling light fixture connection misc
420
+ ceiling llower ceiling lower misc
421
+ ceiling lower ceiling lower misc
422
+ ceiling molding ceiling molding misc
423
+ ceiling panel ceiling panel ceiling
424
+ ceiling pipe ceiling pipe ceiling
425
+ ceiling pipes ceiling pipe ceiling
426
+ ceiling support ceiling support ceiling
427
+ ceiling under staircase ceiling under staircase misc
428
+ ceiling under stairs ceiling under stairs misc
429
+ ceiling vent ceiling vent misc
430
+ ceiling wall ceiling wall misc
431
+ ceiling window ceiling window misc
432
+ ceiling/ wall west ceiling/west wall misc
433
+ ceiling/ west wall ceiling/west wall misc
434
+ ceiling/west wall ceiling/west wall misc
435
+ ceilling ceiling ceiling
436
+ ceilling lamp ceiling lamp lighting
437
+ celing ceiling ceiling
438
+ celing lamp ceiling lamp lighting
439
+ celing lower ceiling lower misc
440
+ central heating furnace central heating furnace appliances
441
+ ceramics ceramics objects
442
+ cerpet carpet floor
443
+ certain curtain curtain
444
+ chain chain objects
445
+ chair chair chair
446
+ chair /otherroom chair chair
447
+ chair /w clutter chair /w clutter chair
448
+ chair stand chair stand misc
449
+ chaise chaise chair
450
+ chaise longue chaise longue sofa
451
+ chamber pot chamber pot toilet
452
+ chandelier chandelier lighting
453
+ changing table changing table table
454
+ charger charger objects
455
+ chasis chassis objects
456
+ chess chess misc
457
+ chest chest misc
458
+ chest bench chest bench misc
459
+ chest drawer chest drawer misc
460
+ chest drawers chest of drawers chest_of_drawers
461
+ chest of drawer chest of drawer chest_of_drawers
462
+ chest of drawers chest of drawers chest_of_drawers
463
+ chest of drawres chest of drawers chest_of_drawers
464
+ chest of dreawers chest of drawers chest_of_drawers
465
+ chiar chair chair
466
+ child car seat child car seat objects
467
+ chimney chimney misc
468
+ christmas tree christmas tree misc
469
+ cieling ceiling ceiling
470
+ circular sofa circular sofa sofa
471
+ clarinet clarinet objects
472
+ cleaner cleaner objects
473
+ cleaner bottle cleaner bottle objects
474
+ cleaner brush cleaner brush objects
475
+ cleaning clutter cleaning clutter misc
476
+ cleaning fluid cleaning fluid objects
477
+ cleaning liquid cleaning liquid objects
478
+ cleaning paper cleaning paper objects
479
+ cleaning paste cleaning paste objects
480
+ cleaning powders/ liquids cleaning powders/ liquids objects
481
+ cleaning sponge cleaning sponge objects
482
+ cleaning spray cleaning spray objects
483
+ clock clock objects
484
+ closet closet cabinet
485
+ closet area for hanging clothes closet area for hanging clothes misc
486
+ closet door closet door door
487
+ closet floor closet floor misc
488
+ closet mirror wall closet mirror wall misc
489
+ closet rod closet rod objects
490
+ closet shelf closet shelf shelving
491
+ closet shelving closet shelving misc
492
+ closet storage area closet storage area misc
493
+ clotes dryer clothes dryer appliances
494
+ cloth cloth objects
495
+ cloth dryer cloth dryer appliances
496
+ cloth hanger cloth hanger objects
497
+ cloth hangers cloth hangers objects
498
+ cloth holder cloth holder objects
499
+ clothes clothes clothes
500
+ clothes rack clothes rack misc
501
+ clothes bag clothes bag misc
502
+ clothes container clothes container misc
503
+ clothes dryer clothes dryer appliances
504
+ clothes hamper clothes hamper objects
505
+ clothes hanger clothes hanger objects
506
+ clothes hanger rod clothes hanger rod objects
507
+ clothes on shelf clothes on shelf clothes
508
+ clothes rack clothes rack misc
509
+ clothing stand clothing stand misc
510
+ clutter clutter misc
511
+ coach coach misc
512
+ coaster coaster objects
513
+ coat coat clothes
514
+ coat hanger coat hanger objects
515
+ coat rack coat rack misc
516
+ cofee table coffee table table
517
+ coffe machine coffee machine appliances
518
+ coffe table coffee table table
519
+ coffee machine coffee machine appliances
520
+ coffee maker coffee maker appliances
521
+ coffee mug coffee mug objects
522
+ coffee table coffee table table
523
+ coffie machine coffee machine appliances
524
+ coffiee machine coffee machine appliances
525
+ coffiee table coffee table table
526
+ coffy table coffee table table
527
+ column column misc
528
+ compound wall compound wall misc
529
+ compressor compressor appliances
530
+ computer computer objects
531
+ computer chair computer chair chair
532
+ computer desk computer desk table
533
+ computer equipment computer equipment misc
534
+ computer mouse computer mouse objects
535
+ computer tower computer tower objects
536
+ condensation furnace condensation furnace objects
537
+ condiment condiment misc
538
+ cone cone misc
539
+ conference phone conference phone appliances
540
+ console console objects
541
+ console pad console pad objects
542
+ console pad charger console pad charger misc
543
+ construction stuff construction stuff misc
544
+ container container objects
545
+ containers containers objects
546
+ control control objects
547
+ control panel control panel objects
548
+ controller controller objects
549
+ coocker cooker objects
550
+ cooffe machine coffee machine appliances
551
+ cookbook cook book objects
552
+ cooker cooker objects
553
+ cooker hood cooker hood appliances
554
+ cookies cookies misc
555
+ copier copier objects
556
+ copier machine copier machine misc
557
+ cork board cork board objects
558
+ cornice cornice misc
559
+ cosmetic cosmetic objects
560
+ cosmetics cosmetics objects
561
+ couch couch sofa
562
+ counter counter counter
563
+ counter desk counter desk table
564
+ counter door counter door misc
565
+ countertop countertop counter
566
+ countertop /otherroom countertop /otherroom counter
567
+ countertop item countertop item misc
568
+ courtain curtain curtain
569
+ cover cover misc
570
+ cradle cradle bed
571
+ crate crate objects
572
+ crayon crayon objects
573
+ crib crib misc
574
+ cross cross objects
575
+ cross-trainer cross-trainer gym_equipment
576
+ crutches crutches objects
577
+ cuddly toy cuddly toy misc
578
+ cup cup objects
579
+ cupboard cabinet cabinet
580
+ cups cups objects
581
+ curatin curtain curtain
582
+ curb curb objects
583
+ curtain curtain curtain
584
+ curtain rod curtain rod curtain
585
+ curtain /otheroom curtain curtain
586
+ curtain bar curtain bar curtain
587
+ curtain box curtain box curtain
588
+ curtain hanger curtain hanger curtain
589
+ curtain rail curtain rail misc
590
+ curtain rod curtain rod curtain
591
+ curtain rod cover curtain rod cover curtain
592
+ curtain valence curtain valence curtain
593
+ curtains curtain curtain
594
+ cushiom cushion cushion
595
+ cushion cushion cushion
596
+ cutlery cutlery objects
597
+ cutting board cutting board objects
598
+ cutting boards cutting board objects
599
+ cyp cyp misc
600
+ dacorative plant decorative plant plant
601
+ dartboard dartboard objects
602
+ deccoration decoration objects
603
+ deck chair deck chair chair
604
+ decoartive plate decorative plate misc
605
+ decoder decoder misc
606
+ decooration decoration objects
607
+ decor decor objects
608
+ decor plate decorative plate misc
609
+ decoratinon decoration objects
610
+ decoratiom decoration objects
611
+ decoration decoration objects
612
+ decoration bowl decorative bowl objects
613
+ decoration plan decorative plant misc
614
+ decoration plant decorative plant misc
615
+ decoration window decorative window misc
616
+ decorations decoration objects
617
+ decorative bottle decorative bottle objects
618
+ decorative bowl decorative bowl objects
619
+ decorative ceramic decorative ceramic objects
620
+ decorative cloth decorative cloth objects
621
+ decorative dinnerware decorative dinnerware objects
622
+ decorative frame decorative frame objects
623
+ decorative lamp decorative lamp objects
624
+ decorative lantern decorative lantern objects
625
+ decorative mask decorative mask objects
626
+ decorative pl;ant decorative plant misc
627
+ decorative plant decorative plant misc
628
+ decorative plants decorative plant misc
629
+ decorative plate decorative plate misc
630
+ decorative quilt decorative quilt misc
631
+ decorative tray decorative tray objects
632
+ decorative vase decorative vase objects
633
+ decorative vessel decorative vessel objects
634
+ decoratrion decoration objects
635
+ decoratrive plant decorative plant misc
636
+ decorayion decoration objects
637
+ decotarion decoration objects
638
+ decration decoration objects
639
+ dehumidifier dehumidifier objects
640
+ den den misc
641
+ desk desk table
642
+ desk cabinet desk cabinet cabinet
643
+ desk chair desk chair chair
644
+ desk clutter desk clutter misc
645
+ desk door desk door cabinet
646
+ desk lamp desk lamp lighting
647
+ desk organizer desk organizer objects
648
+ detergent bottle detergent bottle objects
649
+ detergent bottles detergent bottle objects
650
+ detergents detergent misc
651
+ device device misc
652
+ device/ probably paper shredder device misc
653
+ dining chair dining chair chair
654
+ dining table dining table table
655
+ dinner chair dinner chair chair
656
+ dinner table dinner table table
657
+ dinnerware dinnerware objects
658
+ dinnerware decoration decorative dinnerware objects
659
+ diplom diploma misc
660
+ diploma diploma misc
661
+ dirt ground dirt ground misc
662
+ dish dish objects
663
+ dish cabinet dish cabinet cabinet
664
+ dish dryer dish dryer appliances
665
+ dish rack dish rack objects
666
+ dish washer dishwasher appliances
667
+ dish with food dish objects
668
+ dishawasher dishwasher appliances
669
+ dishes dishes objects
670
+ dishes dryer dish dryer appliances
671
+ dishrag dishrag objects
672
+ dishwasher dishwasher appliances
673
+ dishwasher machine dishwasher appliances
674
+ disk dish objects
675
+ dispenser dispenser objects
676
+ display display tv_monitor
677
+ display cabinet display cabinet cabinet
678
+ display case display case objects
679
+ display of pictures display of pictures picture
680
+ display table display table table
681
+ document document misc
682
+ document holder document holder objects
683
+ documents document misc
684
+ dog bed dog bed bed
685
+ dog leash dog leash misc
686
+ dog toy dog toy objects
687
+ doile doily misc
688
+ doily doily misc
689
+ doll doll objects
690
+ dool doll objects
691
+ door door door
692
+ door frame door frame door
693
+ door hinge door hinge misc
694
+ door /otherroom door /otherroom door
695
+ door cabinet door cabinet cabinet
696
+ door drame door frame door
697
+ door f door frame door
698
+ door fame door frame door
699
+ door fframe door frame door
700
+ door frame door frame door
701
+ door framr door frame door
702
+ door handle door handle misc
703
+ door hanger door handle misc
704
+ door hine door hinge misc
705
+ door hinge door hinge misc
706
+ door kinge door hinge misc
707
+ door knob door knob misc
708
+ door mat doormat floor
709
+ door rame door frame door
710
+ door slide sliding door door
711
+ door stoper door stopper misc
712
+ door stopper door stopper misc
713
+ door stoppper door stopper misc
714
+ door window door window misc
715
+ door/window door/window misc
716
+ door/window frame door/window frame misc
717
+ doorbell doorbell misc
718
+ doorf rame door frame door
719
+ doorframe door frame door
720
+ doormat doormat floor
721
+ doorpost doorpost objects
722
+ doors door door
723
+ doorstep doorstep objects
724
+ doorway doorway door
725
+ dorr door door
726
+ double armchair double armchair chair
727
+ drain drain objects
728
+ drainage drainage misc
729
+ drainpipe drainpipe objects
730
+ draw draw objects
731
+ drawer drawer chest_of_drawers
732
+ drawer cabinet drawer cabinet cabinet
733
+ drawer cart drawer cart objects
734
+ drawer desk drawer desk misc
735
+ drawer sink table drawer sink table misc
736
+ drawers drawers chest_of_drawers
737
+ drawers for clothes drawers for clothes misc
738
+ drawing drawing misc
739
+ dreser dresser chest_of_drawers
740
+ dress dress clothes
741
+ dresser dresser chest_of_drawers
742
+ dressing table dressing table table
743
+ dried flowers dried flowers objects
744
+ drill drill misc
745
+ drum drum objects
746
+ drums drum objects
747
+ dryer clothes dryer appliances
748
+ drywall board drywall board misc
749
+ drywall boards drywall board misc
750
+ duct duct misc
751
+ dumb bell dumbbell gym_equipment
752
+ dumb-bells dumbbell gym_equipment
753
+ dumbbel dumbbell gym_equipment
754
+ dumbbell dumbbell gym_equipment
755
+ dumbbell handles dumbbell gym_equipment
756
+ dumbbells dumbbell gym_equipment
757
+ dust bin dustbin objects
758
+ dustbin dustbin objects
759
+ dustpan dustpan objects
760
+ duvet duvet misc
761
+ dvd dvd objects
762
+ dvd movies dvd objects
763
+ dvd player dvd player objects
764
+ dvds dvd objects
765
+ easel easel shelving
766
+ easels easel shelving
767
+ easy chair easy chair chair
768
+ electric box electric box misc
769
+ electric cable electric cable misc
770
+ electric cord electric cord misc
771
+ electric device electric device misc
772
+ electric drums electric drum objects
773
+ electric freshener electric freshener appliances
774
+ electric guitar electric guitar objects
775
+ electric guitar pack electric guitar objects
776
+ electric heater electric heater appliances
777
+ electric hub electric hub objects
778
+ electric installation electric installation misc
779
+ electric kettle electric kettle appliances
780
+ electric outlet electric outlet objects
781
+ electric percussion electric percussion appliances
782
+ electric plug electric plug objects
783
+ electric toothbrush electric toothbrush appliances
784
+ electric wire electric wire misc
785
+ electric wire casing electric wire casing misc
786
+ electrical box electrical box objects
787
+ electrical controller electrical controller misc
788
+ electrical device electrical device misc
789
+ electrical installation electrical installation misc
790
+ electrical switchboard electrical switchboard misc
791
+ electricity box electricity box misc
792
+ electronics electronics misc
793
+ elephant sculpture elephant sculpture misc
794
+ elevator elevator misc
795
+ elevator door elevator door door
796
+ emergency sign emergency sign misc
797
+ end table end table table
798
+ entertainment set entertainment set misc
799
+ entrance arch entrance arch misc
800
+ entry phone entry phone objects
801
+ excercise mat exercise mat gym_equipment
802
+ excersie ball exercise ball gym_equipment
803
+ excersie equipment exercise equipment gym_equipment
804
+ exercise ball exercise ball gym_equipment
805
+ exercise bike exercise bike gym_equipment
806
+ exercise equipment exercise equipment gym_equipment
807
+ exercise ladder exercise ladder gym_equipment
808
+ exercise machine exercise machine misc
809
+ exercise mat exercise mat gym_equipment
810
+ exercise mat roll exercise mat roll misc
811
+ exercising blocks exercising blocks gym_equipment
812
+ exhaust pipe exhaust pipe misc
813
+ exhibition panel exhibition panel misc
814
+ exhibition picture exhibition picture picture
815
+ exhibition table exhibition table table
816
+ exhibition window exhibition window window
817
+ exhibition window frame exhibition window frame window
818
+ exit sign exit sign misc
819
+ extension cord extension cord misc
820
+ extension lead extension lead misc
821
+ extinguisher fire extinguisher objects
822
+ extractor extractor misc
823
+ extractor hood extractor hood misc
824
+ eyeglasses eyeglasses objects
825
+ fan fan objects
826
+ fan-coil fan coil objects
827
+ fan air vent fan air vent misc
828
+ fans fan objects
829
+ faucet faucet objects
830
+ fence fence misc
831
+ fframe frame misc
832
+ figure figure misc
833
+ figurine figurine misc
834
+ file file misc
835
+ file binder file binder misc
836
+ file cabinet file cabinet cabinet
837
+ files file misc
838
+ fire alarm fire alarm objects
839
+ fire detector fire detector objects
840
+ fire etenguisher fire extinguisher objects
841
+ fire extinguisher fire extinguisher objects
842
+ fire pit fire pit objects
843
+ fire screen fire screen objects
844
+ fire sprinkler fire sprinkler misc
845
+ firebox firebox objects
846
+ fireplace fireplace fireplace
847
+ fireplace brush fireplace brush misc
848
+ fireplace floor fireplace floor misc
849
+ fireplace mirror fireplace mirror misc
850
+ fireplace sconce fireplace sconce misc
851
+ fireplace shelf fireplace shelf misc
852
+ fireplace tool set fireplace tool set misc
853
+ fireplace utensil fireplace utensil misc
854
+ fireplace wall fireplace wall misc
855
+ firewood firewood misc
856
+ firewood bag firewood bag misc
857
+ firewood chest firewood chest misc
858
+ firewood holder firewood holder misc
859
+ fish tank fish tank misc
860
+ fishing pole fishing pole misc
861
+ fishing rod fishing rod misc
862
+ fitness ball fitness ball gym_equipment
863
+ flag flag misc
864
+ flashlight flashlight objects
865
+ floer floor floor
866
+ floor floor floor
867
+ floor lamp floor lamp lighting
868
+ floor mat floor mat floor
869
+ floor vent floor vent misc
870
+ floor /otherroom floor /otherroom floor
871
+ floor /outside floor /outside floor
872
+ floor lamp floor lamp lighting
873
+ floor mat floor mat floor
874
+ floor stand floor stand misc
875
+ floor vent floor vent misc
876
+ floormat floor mat floor
877
+ florr floor floor
878
+ flower flower plant
879
+ flower vase flower vase objects
880
+ flower pot flowerpot objects
881
+ flower stand flower stand misc
882
+ flower vase flower vase objects
883
+ flowerbed flowerbed objects
884
+ flowerpot flowerpot objects
885
+ flowers flowers plant
886
+ flowers in vase flower vase objects
887
+ fluorescent light fluorescent light misc
888
+ flush flush misc
889
+ flush button flush button misc
890
+ flush buttons flush button misc
891
+ flush push flush push misc
892
+ folded chair folded chair chair
893
+ folded chairs folded chair chair
894
+ folded table folded table table
895
+ folder folder objects
896
+ folders folder objects
897
+ folding chair folding chair chair
898
+ folding stand folding stand misc
899
+ food food misc
900
+ food processor food processor appliances
901
+ food stand food stand misc
902
+ food tray food tray objects
903
+ foor frame floor frame floor
904
+ foor lamp floor lamp lighting
905
+ foosball game table foosball game table table
906
+ foosball table foosball table table
907
+ foot rest footrest stool
908
+ foot spa foot spa misc
909
+ foot stand foot stand misc
910
+ football football objects
911
+ footrest footrest stool
912
+ footstool footstool stool
913
+ fork fork objects
914
+ frame frame misc
915
+ frame door frame misc
916
+ frame part frame misc
917
+ framed document document misc
918
+ framed mirror mirror mirror
919
+ framed photo photo picture
920
+ framed photos photos picture
921
+ framed picture picture picture
922
+ framed pictures pictures picture
923
+ framed text framed text misc
924
+ freezer freezer appliances
925
+ fridge refrigerator appliances
926
+ fruit fruit objects
927
+ fruit bowl fruit bowl objects
928
+ fruits fruit objects
929
+ frying pan frying pan objects
930
+ fume cupboard fume cupboard misc
931
+ fur fur misc
932
+ fur carpet fur carpet floor
933
+ furnace furnace misc
934
+ furniture furniture furniture
935
+ furniture parts furniture furniture
936
+ fuse box fuse box misc
937
+ fuse panel fuse panel misc
938
+ game board game board objects
939
+ game console game console objects
940
+ gap gap objects
941
+ garage door garage door door
942
+ garage door frame garage door frame misc
943
+ garage door motor garage door motor misc
944
+ garage door opener garage door opener misc
945
+ garage door opener bar garage door opener bar misc
946
+ garage door opener motor garage door opener motor misc
947
+ garage door opener railing garage door opener railing misc
948
+ garage door railing garage door railing misc
949
+ garage light garage light misc
950
+ garden bench garden bench seating
951
+ garden chair garden chair chair
952
+ garden deck garden deck misc
953
+ garden hose hose misc
954
+ garden swing swing objects
955
+ gas box gas box objects
956
+ gas container gas container objects
957
+ gas furnace gas furnace appliances
958
+ gas meter gas meter misc
959
+ gate gate misc
960
+ gauge gauge misc
961
+ gift gift misc
962
+ glass glass objects
963
+ glass bottle bottle objects
964
+ glass container glass container objects
965
+ glass door glass door door
966
+ glass pane glass pane misc
967
+ glasses glasses objects
968
+ globe globe misc
969
+ globe stand globe stand misc
970
+ glove gloves objects
971
+ gloves gloves objects
972
+ glue glue objects
973
+ goal goalpost objects
974
+ golf sticks golf sticks objects
975
+ grab bar grab bar misc
976
+ gramophone gramophone objects
977
+ grandfather clock grandfather clock objects
978
+ grass grass plant
979
+ grate grate misc
980
+ gravel gravel floor
981
+ grill grill misc
982
+ groceries groceries objects
983
+ guitar guitar objects
984
+ guitar amp amplifier objects
985
+ guitar amplifier amplifier objects
986
+ guitar box amplifier objects
987
+ guitar case guitar case objects
988
+ guitar case suit guitar case cover objects
989
+ guitar cases guitar cases objects
990
+ guitar frame guitar frame objects
991
+ guitar pedals guitar pedals objects
992
+ guitar stand guitar stand objects
993
+ guitar straps guitar straps objects
994
+ gun gun objects
995
+ gutter gutter misc
996
+ gym equipment gym equipment gym_equipment
997
+ gym mat gym mat floor
998
+ gym rope gym rope objects
999
+ gym stepper gym stepper objects
1000
+ gym-equipment gym equipment gym_equipment
1001
+ hair dryer hair dryer objects
1002
+ hairbrush hair brush objects
1003
+ hairdryer hair dryer objects
1004
+ hammer hammer objects
1005
+ hammock hammock bed
1006
+ hand cart handcart objects
1007
+ hand cloth hand cloth misc
1008
+ hand drier hand dryer objects
1009
+ hand soap hand soap objects
1010
+ hand towel hand towel objects
1011
+ handbag handbag objects
1012
+ handkerchiefs handkerchiefs objects
1013
+ handle handle objects
1014
+ handrail handrail railing
1015
+ handhold handrail railing
1016
+ handshower showerhead shower
1017
+ hanger hanger objects
1018
+ hangers hanger objects
1019
+ hanging clothes hanging clothes clothes
1020
+ hat hat clothes
1021
+ hatch hatch misc
1022
+ hats hats objects
1023
+ headboard headboard bed
1024
+ headphones headphones objects
1025
+ headset headset objects
1026
+ hearth hearth misc
1027
+ heat vent vent misc
1028
+ heater heater objects
1029
+ heater piping heater piping misc
1030
+ heater piping heater piping misc
1031
+ heating furnace part furnace misc
1032
+ heating vent vent misc
1033
+ herb basket basket objects
1034
+ hi hat hi-hat objects
1035
+ hi hat stand hi-hat stand objects
1036
+ high chair highchair chair
1037
+ high shelf high shelf misc
1038
+ highchair highchair chair
1039
+ highchairs highchair chair
1040
+ holder holder objects
1041
+ holy cross holy cross misc
1042
+ hood range hood objects
1043
+ hook hook misc
1044
+ hose hose misc
1045
+ hot drinks machine beverage dispenser objects
1046
+ hot water/cold water knob hot water/cold water knob misc
1047
+ hourglass hourglass objects
1048
+ hoverboard hoverboard objects
1049
+ hunting trophy hunting trophy misc
1050
+ hutch hutch misc
1051
+ ice maker ice maker objects
1052
+ icebox icebox appliances
1053
+ identifier identifier misc
1054
+ image picture picture
1055
+ indow curtain window curtain curtain
1056
+ induction hob stovetop misc
1057
+ information information misc
1058
+ insect door door screen door
1059
+ door screen door screen door
1060
+ installation installation objects
1061
+ instrument instrument objects
1062
+ iron iron objects
1063
+ iron board iron board misc
1064
+ ironing board ironing board objects
1065
+ island island counter
1066
+ jacket jacket clothes
1067
+ jacuzzi jacuzzi misc
1068
+ jar jar objects
1069
+ jars jars objects
1070
+ jewellery jewelry objects
1071
+ jewellery ehibition jewelry objects
1072
+ jewellery exhibition jewelry objects
1073
+ jewellery exposition jewelry objects
1074
+ jewelry jewelry objects
1075
+ jewelry box jewelry box misc
1076
+ jewlery box jewelry box misc
1077
+ joga mat yoga mat misc
1078
+ jug jug objects
1079
+ junk junk objects
1080
+ keg keg objects
1081
+ kegerator keg objects
1082
+ kettle kettle objects
1083
+ keyboard keyboard objects
1084
+ keyboard box keyboard box objects
1085
+ keyboard case suit keyboard cover objects
1086
+ keyboard piano keyboard piano objects
1087
+ keyboard stand keyboard stand objects
1088
+ keys keys objects
1089
+ kitchen appliance kitchen appliance appliances
1090
+ kitchen board cutting board objects
1091
+ kitchen cabinet kitchen cabinet cabinet
1092
+ kitchen cabinet door kitchen cabinet door cabinet
1093
+ kitchen cabinet drawer kitchen cabinet drawer cabinet
1094
+ kitchen cabinet lower kitchen cabinet lower cabinet
1095
+ kitchen ceiling kitchen ceiling ceiling
1096
+ kitchen chair kitchen chair chair
1097
+ kitchen coocking cabinet kitchen cabinet cabinet
1098
+ kitchen counter kitchen counter counter
1099
+ kitchen counter support kitchen counter support misc
1100
+ kitchen countertop kitchen counter counter
1101
+ kitchen countertop item kitchen countertop item misc
1102
+ kitchen countertop items kitchen countertop items misc
1103
+ kitchen coutertop item kitchen counter item objects
1104
+ kitchen decoration kitchen decoration objects
1105
+ kitchen extractor kitchen extractor misc
1106
+ kitchen glowes kitchen gloves objects
1107
+ kitchen handle kitchen handle objects
1108
+ kitchen hood range hood objects
1109
+ kitchen island kitchen island counter
1110
+ kitchen knife set kitchen knife set objects
1111
+ kitchen lower cabinet kitchen lower cabinet cabinet
1112
+ kitchen lower shelf kitchen lower shelf shelving
1113
+ kitchen shelf kitchen shelf shelving
1114
+ kitchen sink kitchen sink sink
1115
+ kitchen sink cabinet kitchen sink cabinet cabinet
1116
+ kitchen sitting kitchen seating seating
1117
+ kitchen stuff clutter misc
1118
+ kitchen table kitchen table table
1119
+ kitchen tools kitchen utensils objects
1120
+ kitchen top kitchen top counter
1121
+ kitchen towel kitchen towel objects
1122
+ kitchen untensils kitchen utensils objects
1123
+ kitchen upper cabinet kitchen cabinet cabinet
1124
+ kitchen utensil kitchen utensil objects
1125
+ kitchen utensils kitchen utensils objects
1126
+ kitchen wall kitchen wall wall
1127
+ kitchen walll kitchen wall wall
1128
+ kitchenware kitchenware objects
1129
+ kitchhen cabinet door kitchen cabinet door cabinet
1130
+ kitcyhen cabinet kitchen cabinet cabinet
1131
+ knife knife objects
1132
+ knife holder knife holder misc
1133
+ knife set knife set objects
1134
+ knife stand knife stand objects
1135
+ knob knob misc
1136
+ knofe set knife set objects
1137
+ l-shaped sofa l-shaped sofa sofa
1138
+ lace doily lace doily objects
1139
+ lacy doily lace doily objects
1140
+ ladder ladder stairs
1141
+ lamp lamp lighting
1142
+ lamp ceiling ceiling lamp lighting
1143
+ lamp desk desk lamp lighting
1144
+ lamp shade lamp shade lighting
1145
+ lamp stand lamp stand misc
1146
+ lamp table lamp table table
1147
+ lamps lamp lighting
1148
+ lampshade lampshade lighting
1149
+ landing landing floor
1150
+ lantern lantern lighting
1151
+ laptop laptop objects
1152
+ laundry laundry clothes
1153
+ laundry bag laundry bag misc
1154
+ laundry basket laundry basket objects
1155
+ laundry machine laundry machine misc
1156
+ launger lounger seating
1157
+ lawn lawn floor
1158
+ lawn mower lawn mower objects
1159
+ lawnmower lawn mower objects
1160
+ leaflet leaflets objects
1161
+ leaflets leaflets objects
1162
+ led tv led tv misc
1163
+ led tv led tv misc
1164
+ ledge ledge objects
1165
+ ledtv led tv misc
1166
+ leg rest leg rest misc
1167
+ letter document misc
1168
+ level level objects
1169
+ lid lid misc
1170
+ lids lid misc
1171
+ light light lighting
1172
+ light fixture light fixture lighting
1173
+ light switch light switch objects
1174
+ lighter lighter objects
1175
+ lighting fixture lighting fixture objects
1176
+ lighting grid lighting fixture objects
1177
+ lights light lighting
1178
+ liquid liquid misc
1179
+ liquid container liquid container objects
1180
+ liquid cleaner liquid cleaner objects
1181
+ liquid soap liquid soap misc
1182
+ lmap lamp lighting
1183
+ locker locker misc
1184
+ loft hatch hatch misc
1185
+ logs firewood misc
1186
+ lounge chair lounge chair chair
1187
+ lounger lounger seating
1188
+ lower cabinet lower cabinet cabinet
1189
+ luggage luggage objects
1190
+ lundry basket laundry basket objects
1191
+ machine machine objects
1192
+ magazine magazine objects
1193
+ magazine rack magazine rack misc
1194
+ magazines magazines objects
1195
+ magazines, albums magazines objects
1196
+ magazines, albums, bookshelf magazines objects
1197
+ magazines, books and albums magazines objects
1198
+ magic marker magic marker objects
1199
+ magic marker box magic marker box objects
1200
+ mail mail objects
1201
+ mailbox mailbox objects
1202
+ make up accesories makeup accessories objects
1203
+ mannequin mannequin objects
1204
+ mantel mantel fireplace
1205
+ mantle mantle fireplace
1206
+ map map objects
1207
+ mascot mascot objects
1208
+ mascots mascots objects
1209
+ mask decoration decoration objects
1210
+ massage bed massage bed bed
1211
+ mat mat floor
1212
+ mat floor mat floor
1213
+ material material misc
1214
+ mattress bed bed
1215
+ measuring tape measuring tape objects
1216
+ medal medal objects
1217
+ medal collection medal collection objects
1218
+ media console media console misc
1219
+ medical lamp medical lamp lighting
1220
+ medical object medical object objects
1221
+ menu menu objects
1222
+ menu board menu board objects
1223
+ meshwork meshwork misc
1224
+ meter meter misc
1225
+ microphone microphone objects
1226
+ microphone accesory microphone accessory objects
1227
+ microvawe microwave appliances
1228
+ microwave microwave appliances
1229
+ microweave microwave appliances
1230
+ midi keyboard keyboard piano objects
1231
+ mini fridge mini fridge appliances
1232
+ miocrowave microwave appliances
1233
+ mircowave microwave appliances
1234
+ miror mirror mirror
1235
+ mirror mirror mirror
1236
+ mirror /otherroom mirror /otherroom mirror
1237
+ mirror door mirror door misc
1238
+ mirror frame mirror frame mirror
1239
+ mirror stand mirror mirror
1240
+ mixer mixer misc
1241
+ mobile mobile objects
1242
+ model model objects
1243
+ modem modem objects
1244
+ molding molding wall
1245
+ monitor monitor tv_monitor
1246
+ moose head/sculpture/hunting trophy moose head/sculpture/hunting trophy misc
1247
+ mop mop objects
1248
+ mortar mortar misc
1249
+ motion detector motion detector misc
1250
+ motion sensor motion detector misc
1251
+ motorcycle motorcycle objects
1252
+ mouse mouse objects
1253
+ mousepad mousepad objects
1254
+ mug mug objects
1255
+ multi-gym gym equipment gym_equipment
1256
+ music album shelf music album shelf shelving
1257
+ music player music player objects
1258
+ music stand music equipment stand objects
1259
+ musical equipment music equipment objects
1260
+ napkin napkin objects
1261
+ napkins napkins objects
1262
+ newspaper newspaper misc
1263
+ newspaper basket newspaper basket objects
1264
+ niche niche wall
1265
+ nighstand nightstand chest_of_drawers
1266
+ night lamp night lamp lighting
1267
+ night stand nightstand chest_of_drawers
1268
+ night table nightstand chest_of_drawers
1269
+ nightsand nightstand chest_of_drawers
1270
+ nightstand nightstand chest_of_drawers
1271
+ nigtstand nightstand chest_of_drawers
1272
+ note note objects
1273
+ notebook notebook objects
1274
+ notebooks notebooks objects
1275
+ notes notes objects
1276
+ noticeboard note board objects
1277
+ oar oar objects
1278
+ object object objects
1279
+ objects objects objects
1280
+ office chair office chair chair
1281
+ office wall office wall wall
1282
+ office drawer drawers chest_of_drawers
1283
+ office phone phone objects
1284
+ office stuff stationery objects
1285
+ office table office table table
1286
+ office utensils utensil objects
1287
+ oil lamp oil lamp lighting
1288
+ organizer drawers drawers chest_of_drawers
1289
+ ornament ornament objects
1290
+ ornament flower flower plant
1291
+ ornament plant plant plant
1292
+ ornamental plant plant plant
1293
+ ornamental plate plate objects
1294
+ ornaments ornament objects
1295
+ ottoman ottoman stool
1296
+ outflow unknown unlabeled
1297
+ outlet outlet objects
1298
+ outside unknown unlabeled
1299
+ oven oven appliances
1300
+ oven and stove oven and stove misc
1301
+ oven hood range hood objects
1302
+ oven vent oven vent misc
1303
+ overhang overhang misc
1304
+ overlay unknown unlabeled
1305
+ package package objects
1306
+ pad pad misc
1307
+ painiting painting objects
1308
+ paint unknown unlabeled
1309
+ painting painting objects
1310
+ painting frame painting frame misc
1311
+ painting /otherroom painting /otherroom objects
1312
+ painting frame painting frame misc
1313
+ painting roll painting roll objects
1314
+ painting rolls painting rolls objects
1315
+ painting stuff painting stuff objects
1316
+ painting tray painting tray objects
1317
+ paintng painting objects
1318
+ paintning painting objects
1319
+ pair of shoes shoes objects
1320
+ paiting painting objects
1321
+ pan pan objects
1322
+ panel panel board_panel
1323
+ panel screen panel screen misc
1324
+ panel wall wall panel wall
1325
+ paneling paneling wall
1326
+ pantry pantry misc
1327
+ paper paper objects
1328
+ paper holder paper holder objects
1329
+ paper shelf shelf shelving
1330
+ paper storage paper storage misc
1331
+ paper towel paper towel towel
1332
+ paper towel dispenser paper towel dispenser misc
1333
+ paper towel holder paper towel holder objects
1334
+ paper towels paper towels towel
1335
+ papers papers objects
1336
+ parapet parapet misc
1337
+ partial partial misc
1338
+ partition partition misc
1339
+ patio patio misc
1340
+ patio chair patio chair chair
1341
+ patio floor patio floor misc
1342
+ pavement pavement floor
1343
+ paving pavement floor
1344
+ payment terminal payment terminal objects
1345
+ pc mouse mouse objects
1346
+ pc tower pc tower misc
1347
+ pedestal pedestal misc
1348
+ peinting painting objects
1349
+ pen pen objects
1350
+ pen cup pen cup objects
1351
+ pencil pencil objects
1352
+ pencil case pencil case objects
1353
+ pencil holder pencil holder objects
1354
+ pendant pendant objects
1355
+ pendrive pen drive objects
1356
+ perfume perfume objects
1357
+ pet bed pet bed objects
1358
+ pet bowl pet bowl objects
1359
+ phillar pillar column
1360
+ phone phone objects
1361
+ photo photo picture
1362
+ photo mount photo mount misc
1363
+ photo mounts photo mounts misc
1364
+ photo stand photo stand objects
1365
+ photography photo picture
1366
+ photos photos picture
1367
+ piano piano objects
1368
+ piano bench piano bench seating
1369
+ piano lower part piano objects
1370
+ piano stool piano stool stool
1371
+ pictrure picture picture
1372
+ picture picture picture
1373
+ picture frame picture frame picture
1374
+ picture frame picture frame picture
1375
+ pictured frame picture frame picture
1376
+ pictures pictures picture
1377
+ picure picture picture
1378
+ piillow pillow cushion
1379
+ pile of boxes boxes objects
1380
+ pile of cups cups objects
1381
+ pile of clothes clothes clothes
1382
+ pile of magazines pile of magazines misc
1383
+ pile of papers papers objects
1384
+ pile of stuff unknown unlabeled
1385
+ pillar pillar column
1386
+ pilllow pillow cushion
1387
+ pillow pillow cushion
1388
+ pillow seat pillow seat seating
1389
+ pillows pillow cushion
1390
+ ping pong table ping pong table table
1391
+ pipe pipe misc
1392
+ pipe part pipe misc
1393
+ pipes pipe misc
1394
+ piping pipe misc
1395
+ pitcher pitcher objects
1396
+ pitchfork pitchfork objects
1397
+ place mat place mat objects
1398
+ plane plane misc
1399
+ plank plank misc
1400
+ planner planner misc
1401
+ plant plant plant
1402
+ plant ornament plant ornament misc
1403
+ planter pot objects
1404
+ plants plant plant
1405
+ plasma tv tv tv_monitor
1406
+ plastic bag plastic bag objects
1407
+ plate plate objects
1408
+ plate of food plate of food misc
1409
+ plates plates objects
1410
+ platform platform misc
1411
+ platter platter misc
1412
+ player music player objects
1413
+ playpen playpen misc
1414
+ plenum box plenum box misc
1415
+ pliers pliers objects
1416
+ plstes plates objects
1417
+ plug plug misc
1418
+ plunger plunger objects
1419
+ plush toy plush toy objects
1420
+ podest podium objects
1421
+ podium podium objects
1422
+ pole pole objects
1423
+ poles poles objects
1424
+ pomp pump objects
1425
+ pool pool misc
1426
+ pool stick pool stick objects
1427
+ pool table pool table table
1428
+ porcelain porcelain objects
1429
+ portrait portrait picture
1430
+ post post column
1431
+ poster poster picture
1432
+ poster figure poster picture
1433
+ pot pot objects
1434
+ pot lid pot lid objects
1435
+ poto mount photo mount misc
1436
+ pots pot objects
1437
+ potty potty objects
1438
+ pouches pouches objects
1439
+ poufe pouffe seating
1440
+ pouffe pouffe seating
1441
+ powder soap powder soap objects
1442
+ power breaker box power breaker box misc
1443
+ power cord power cord objects
1444
+ power outlet outlet objects
1445
+ power strip power strip objects
1446
+ prduct box product box objects
1447
+ press press objects
1448
+ pressure washer pressure washer objects
1449
+ price tag price tag objects
1450
+ printer printer objects
1451
+ product product objects
1452
+ product box product box objects
1453
+ product boxes product boxes objects
1454
+ products products objects
1455
+ projector projector objects
1456
+ projector screen projector screen misc
1457
+ prop prop misc
1458
+ psinting painting objects
1459
+ public tap drinking fountain misc
1460
+ pump pump objects
1461
+ punchbag punchbag objects
1462
+ puncher hole puncher objects
1463
+ puppet puppet objects
1464
+ puppet cat puppet objects
1465
+ purse purse objects
1466
+ purses purse objects
1467
+ rack rack shelving
1468
+ rack of weights rack of weights misc
1469
+ rack with shoes rack with shoes shelving
1470
+ radfiator radiator objects
1471
+ radiator radiator objects
1472
+ radio radio misc
1473
+ rafter rafter beam
1474
+ rag rag objects
1475
+ ragdoll ragdoll objects
1476
+ ragdoll cat ragdoll cat objects
1477
+ rail rail railing
1478
+ railing railing railing
1479
+ rain shower showerhead shower
1480
+ rake rake objects
1481
+ range hood range hood objects
1482
+ receipt printer receipt printer objects
1483
+ receipt spike receipt spike objects
1484
+ recessed cubby recessed cubby misc
1485
+ recessed shelving recessed shelving shelving
1486
+ recessed wall recessed wall misc
1487
+ recliner recliner chair
1488
+ record player record player objects
1489
+ records records objects
1490
+ recuperator recuperator objects
1491
+ recycle bin recycle bin misc
1492
+ refridgerator refrigerator appliances
1493
+ refrigearator refrigerator appliances
1494
+ refrigerator refrigerator appliances
1495
+ refrigerator cabinet refrigerator cabinet cabinet
1496
+ relief relief wall
1497
+ remote remote control objects
1498
+ remote control remote control objects
1499
+ remote controller remote control objects
1500
+ rice cooker rice cooker objects
1501
+ riser riser misc
1502
+ robe robe clothes
1503
+ rock rock objects
1504
+ rocking chair rocking chair chair
1505
+ rocking horse rocking horse objects
1506
+ rocks rock objects
1507
+ rod rod objects
1508
+ rods rods objects
1509
+ roll roll objects
1510
+ rolled carpet rolled carpet objects
1511
+ roller blind blinds blinds
1512
+ rolling pin rolling pin objects
1513
+ rolls of toilet paper toilet paper objects
1514
+ roof roof ceiling
1515
+ roomba roomba misc
1516
+ rope rope objects
1517
+ round chair round chair chair
1518
+ round cushion round cushion misc
1519
+ router router objects
1520
+ row of theater chairs row of theater chairs misc
1521
+ rug rug floor
1522
+ ruler ruler objects
1523
+ safe safe objects
1524
+ salt and pepper salt and pepper objects
1525
+ salt and pepper grinder salt and pepper grinder objects
1526
+ salt pepper salt and pepper objects
1527
+ salver salver objects
1528
+ sandals sandals objects
1529
+ saturator saturator appliances
1530
+ sauna sauna appliances
1531
+ sauna bowl sauna bowl objects
1532
+ sauna ceiling ceiling ceiling
1533
+ sauna floor floor floor
1534
+ sauna heat rocks sauna heat rocks misc
1535
+ sauna heater sauna heater misc
1536
+ sauna oven sauna oven appliances
1537
+ sauna seat sauna seat seating
1538
+ sauna support sauna support misc
1539
+ sauna wall wall wall
1540
+ saw saw objects
1541
+ saxophone saxophone objects
1542
+ scale scale objects
1543
+ scales scale objects
1544
+ scanner scanner objects
1545
+ scarf scarf clothes
1546
+ schedule schedule misc
1547
+ schoe shoe objects
1548
+ sconce sconce lighting
1549
+ scoop scoop objects
1550
+ screen screen curtain
1551
+ screen frame screen frame misc
1552
+ screw box screw box objects
1553
+ screwdriver screwdriver objects
1554
+ sculpture sculpture objects
1555
+ seat seat seating
1556
+ secretary secretary misc
1557
+ security camera security camera objects
1558
+ self-closing mechanism self-closing mechanism misc
1559
+ sensor sensor objects
1560
+ separator separator objects
1561
+ set of armchairs set of armchairs misc
1562
+ set of boxes set of boxes misc
1563
+ set of cosmetics set of cosmetics objects
1564
+ set of hangers set of hangers objects
1565
+ set of knives set of knives misc
1566
+ set of pictures set of pictures picture
1567
+ set of towels set of towels towel
1568
+ set of valves set of valves appliances
1569
+ sewing box sewing box appliances
1570
+ sewing machine sewing machine appliances
1571
+ sewing set sewing set appliances
1572
+ sewing tools sewing tools appliances
1573
+ shade shade objects
1574
+ shade rail shade rail misc
1575
+ shades shades blinds
1576
+ shalf shelf shelving
1577
+ shampoo shampoo objects
1578
+ shedule schedule misc
1579
+ sheet sheet misc
1580
+ sheet music sheet music objects
1581
+ sheet music stand sheet music stand objects
1582
+ sheets sheets misc
1583
+ sheets/ clothes sheets / clothes clothes
1584
+ shelf shelf shelving
1585
+ shelf /w art shelf with art shelving
1586
+ shelf /w clutter shelf with clutter shelving
1587
+ shelf clutter shelf clutter shelving
1588
+ shelf cubby shelf cubby misc
1589
+ shelf with cosmetics shelf with cosmetics shelving
1590
+ shelf with shoes shelf with shoes shelving
1591
+ shelf/cabinet shelf / cabinet misc
1592
+ shelve shelf shelving
1593
+ shelves shelving shelving
1594
+ shelving shelving shelving
1595
+ ship model ship model objects
1596
+ ship toy ship toy objects
1597
+ shirt shirt clothes
1598
+ shisha shisha appliances
1599
+ shlef shelf shelving
1600
+ shoe shoe objects
1601
+ shoe cabinet shoe cabinet cabinet
1602
+ shoe case shoe case cabinet
1603
+ shoe rack shoe rack shelving
1604
+ shoe shelf shoe shelf shelving
1605
+ shoehorn shoehorn objects
1606
+ shoes shoes objects
1607
+ shoes on shelf shoes on shelf shelving
1608
+ shoes rack shoes rack shelving
1609
+ shop shelf shop shelf shelving
1610
+ shoulder bag shoulder bag objects
1611
+ shovel shovel misc
1612
+ shower shower shower
1613
+ shower floor shower floor shower
1614
+ shower knob shower knob misc
1615
+ shower wall shower wall shower
1616
+ shower bar shower bar misc
1617
+ shower base shower base shower
1618
+ shower battery shower battery misc
1619
+ shower bench shower bench seating
1620
+ shower cabin shower cabin misc
1621
+ shower cabinet shower cabinet cabinet
1622
+ shower caddy shower caddy misc
1623
+ shower case shower case misc
1624
+ shower ceiling shower ceiling shower
1625
+ shower ceiling lamp shower ceiling lamp lighting
1626
+ shower cockpit shower cockpit shower
1627
+ shower cosmetics shower cosmetics objects
1628
+ shower curtain shower curtain curtain
1629
+ shower curtain bar shower curtain bar misc
1630
+ shower curtain rod shower curtain rod curtain
1631
+ shower dial shower dial misc
1632
+ shower door shower door shower
1633
+ shower door frame shower door frame shower
1634
+ shower door knob shower door knob misc
1635
+ shower floor shower floor shower
1636
+ shower frame shower frame shower
1637
+ shower glass shower glass misc
1638
+ shower grab bar shower grab bar misc
1639
+ shower handle shower handle misc
1640
+ shower handrail shower handrail railing
1641
+ shower hanger shower hanger misc
1642
+ shower head showerhead shower
1643
+ shower hose shower hose misc
1644
+ shower hose/head shower hose/head misc
1645
+ shower knob shower knob misc
1646
+ shower mat shower mat floor
1647
+ shower mirror shower mirror mirror
1648
+ shower pipe shower pipe misc
1649
+ shower rail shower rail misc
1650
+ shower rod shower rod curtain
1651
+ shower seat shower seat misc
1652
+ shower shelf shower shelf shelving
1653
+ shower soap shelf shower soap shelf misc
1654
+ shower stall shower stall misc
1655
+ shower step shower step shower
1656
+ shower tap shower tap misc
1657
+ shower tray shower tray shower
1658
+ shower tub shower tub bathtub
1659
+ shower utensill shower utensil objects
1660
+ shower valve shower valve misc
1661
+ shower wall shower wall shower
1662
+ shower wall cubby shower wall cubby shower
1663
+ shower window frame shower window frame window
1664
+ shower-bath cabinet shower-bath cabinet cabinet
1665
+ showerhead showerhead shower
1666
+ shredder shredder appliances
1667
+ shutter shutter blinds
1668
+ shutters shutters blinds
1669
+ side table side table table
1670
+ sideboard sideboard misc
1671
+ sign sign misc
1672
+ silicone gun silicone gun objects
1673
+ silicone tube silicone tube objects
1674
+ sink sink sink
1675
+ sink cabinet sink cabinet cabinet
1676
+ sink pipe sink pipe misc
1677
+ sink table sink table table
1678
+ sink tap sink tap misc
1679
+ sink/basin sink/basin misc
1680
+ sitting bench sitting bench seating
1681
+ skateboard skateboard objects
1682
+ skates skates objects
1683
+ ski ski objects
1684
+ skirting board skirting board wall
1685
+ sky sky misc
1686
+ skylight skylight window
1687
+ slab slab objects
1688
+ sled sled objects
1689
+ sledge sledge objects
1690
+ sleeping bag sleeping bag objects
1691
+ sliding door sliding door door
1692
+ sliding glass door sliding glass door misc
1693
+ slippers slippers objects
1694
+ small table table table
1695
+ small table/stand small table/stand misc
1696
+ smoke alarm smoke alarm objects
1697
+ smoke detector smoke detector misc
1698
+ snack snack objects
1699
+ soap soap objects
1700
+ soap bottle soap bottle objects
1701
+ soap dish soap dish objects
1702
+ soap dish cubby soap dish cubby misc
1703
+ soap dispenser soap dispenser objects
1704
+ soap dispenser shelf in shower soap dispenser shelf in shower misc
1705
+ soap tray soap tray objects
1706
+ soapbox soapbox misc
1707
+ socket socket objects
1708
+ socks socks clothes
1709
+ sofa sofa sofa
1710
+ sofa chair sofa chair chair
1711
+ sofa seat sofa seat misc
1712
+ sofa set sofa set sofa
1713
+ soft chair soft chair chair
1714
+ solarium solarium misc
1715
+ solarium door solarium door door
1716
+ sombrero sombrero objects
1717
+ sopp bottle soap bottle objects
1718
+ sound bar soundbar appliances
1719
+ soundbar soundbar appliances
1720
+ spa armchair spa armchair chair
1721
+ spa bathtub spa bathtub bathtub
1722
+ spa bench spa bench seating
1723
+ spatula spatula objects
1724
+ speaker speaker objects
1725
+ speaker stand speaker stand cabinet
1726
+ spice boxes spice boxes objects
1727
+ spice rack spice rack shelving
1728
+ spices spices objects
1729
+ spirit level spirit level objects
1730
+ sponge sponge objects
1731
+ spoon spoon objects
1732
+ spray spray objects
1733
+ spray can spray can objects
1734
+ sprinkler sprinkler objects
1735
+ square square objects
1736
+ stack stack misc
1737
+ stack of albums stack of albums objects
1738
+ stack of bags stack of bags objects
1739
+ stack of binders stack of binders objects
1740
+ stack of blankets stack of blankets objects
1741
+ stack of book stack of books objects
1742
+ stack of books stack of books objects
1743
+ stack of books/ papers stack of books / papers objects
1744
+ stack of boxes stack of boxes objects
1745
+ stack of cd's stack of cds objects
1746
+ stack of cds stack of cds objects
1747
+ stack of chairs stack of chairs chair
1748
+ stack of clothes stack of clothes clothes
1749
+ stack of files stack of files objects
1750
+ stack of jackets stack of jackets clothes
1751
+ stack of magazines stack of magazines objects
1752
+ stack of music stands stack of music stands misc
1753
+ stack of paper stack of papers misc
1754
+ stack of papers stack of papers misc
1755
+ stack of pillows stack of pillows cushion
1756
+ stack of plates stack of plates objects
1757
+ stack of pots stack of pots objects
1758
+ stack of product boxes stack of product boxes objects
1759
+ stack of shoes stack of shoes objects
1760
+ stack of stuff stack of stuff misc
1761
+ stack of t shirts stack of t-shirts clothes
1762
+ stack of towels stack of towels towel
1763
+ stack of trays stack of trays objects
1764
+ stack of yarns stack of yarns clothes
1765
+ stacked chair stacked chair chair
1766
+ stacked chairs stacked chair chair
1767
+ stage stage misc
1768
+ stained glass stained glass window
1769
+ stair stair stairs
1770
+ stair frame stair frame misc
1771
+ stair handle stair handle railing
1772
+ stair railing banister railing
1773
+ stair step stair step misc
1774
+ stair wall stair wall misc
1775
+ staircaise handrail staircase handrail misc
1776
+ staircase staircase stairs
1777
+ staircase handrail staircase handrail misc
1778
+ staircase handrair staircase handrail misc
1779
+ staircase trim staircase trim misc
1780
+ staircase wall staircase wall wall
1781
+ stairs stairs stairs
1782
+ stairs railing stairs railing misc
1783
+ stairs skirt stairs skirt misc
1784
+ stairs trim stairs trim stairs
1785
+ stairs wall stairs wall wall
1786
+ stairwell stairwell stairs
1787
+ stampler stapler objects
1788
+ stand stand table
1789
+ stand/small table stand/small table table
1790
+ stapler stapler objects
1791
+ star star objects
1792
+ stationary stationery objects
1793
+ stationery stationery objects
1794
+ statue statue objects
1795
+ statue/art statue/art misc
1796
+ steel plate steel plate objects
1797
+ step step stairs
1798
+ step stool step stool stool
1799
+ stereo stereo appliances
1800
+ stereo set stereo set appliances
1801
+ stick stick misc
1802
+ sticker book sticker book objects
1803
+ sticky notes sticky notes objects
1804
+ stoll stole clothes
1805
+ stone stone objects
1806
+ stone bench stone bench seating
1807
+ stone support structure stone support structure misc
1808
+ stones stones misc
1809
+ stonework stonework misc
1810
+ stoo stool stool
1811
+ stool stool stool
1812
+ stools stools stool
1813
+ storage storage objects
1814
+ storage bin storage bin misc
1815
+ storage box storage box misc
1816
+ storage cabinet storage cabinet cabinet
1817
+ storage shelving storage shelving shelving
1818
+ storage space storage space misc
1819
+ storage unit storage unit furniture
1820
+ stove stove appliances
1821
+ stove and oven oven appliances
1822
+ stove door door door
1823
+ stove utensil utensil objects
1824
+ stove utensils utensil objects
1825
+ stovetop stovetop misc
1826
+ strands strings misc
1827
+ stricker book book objects
1828
+ strings strings misc
1829
+ stripes decoration objects
1830
+ stroke stroke misc
1831
+ strongbox strongbox objects
1832
+ stuffed animal stuffed animal objects
1833
+ stuffed duck stuffed animal objects
1834
+ subwoofer subwoofer objects
1835
+ suitcase luggage objects
1836
+ sunbed sunbed furniture
1837
+ support support misc
1838
+ support beam support beam beam
1839
+ support stand stand table
1840
+ supporting beam support beam beam
1841
+ supporting structure support misc
1842
+ surface surface counter
1843
+ surfboard surfboard objects
1844
+ sweets bowl bowl objects
1845
+ swing swing objects
1846
+ switch switch objects
1847
+ switches switch objects
1848
+ swivel chair swivel chair chair
1849
+ t shirt t-shirt clothes
1850
+ table table table
1851
+ table /w books table table
1852
+ table chair chair chair
1853
+ table cloth table cloth table
1854
+ table clutter clutter misc
1855
+ table lamp table lamp lighting
1856
+ table on wheels table table
1857
+ table pad table pad table
1858
+ table plant plant plant
1859
+ table shelf shelf shelving
1860
+ table stand table stand misc
1861
+ table tennis table table tennis table table
1862
+ table top table table
1863
+ table tray tray objects
1864
+ table vase vase objects
1865
+ table border unknown unlabeled
1866
+ tablecloth table cloth table
1867
+ tablet tablet table
1868
+ tabletop table table
1869
+ tabletop box box objects
1870
+ tabletop games game objects
1871
+ tabletop trinket trinket objects
1872
+ tableware flatware objects
1873
+ tailet toilet toilet
1874
+ tambourine tambourine objects
1875
+ tank tank misc
1876
+ tap tap objects
1877
+ tap dial knob misc
1878
+ tap/ water source tap objects
1879
+ tapestry tapestry objects
1880
+ tea box box objects
1881
+ tea boxes boxes objects
1882
+ teapot teapot objects
1883
+ teaset tea set objects
1884
+ teddy bear stuffed animal objects
1885
+ teepee toilet paper objects
1886
+ telephone telephone objects
1887
+ telephpne telephone objects
1888
+ telescope telescope objects
1889
+ temperature control thermostat objects
1890
+ tennis racket tennis racket objects
1891
+ tent tent objects
1892
+ terrace terrace misc
1893
+ terrace door door door
1894
+ therapeutic chair chair chair
1895
+ thermal mug mug objects
1896
+ thermometer thermometer objects
1897
+ thermostat thermostat objects
1898
+ threadmill treadmill gym_equipment
1899
+ three three misc
1900
+ throw blanket throw blanket misc
1901
+ tile tile objects
1902
+ tiled floor floor floor
1903
+ tiled wall wall wall
1904
+ tiles tiles misc
1905
+ tiling tiles misc
1906
+ tin box objects
1907
+ tire tire objects
1908
+ tisse box tissue box objects
1909
+ tissue tissue misc
1910
+ tissue box tissue box objects
1911
+ tissues tissue misc
1912
+ title title misc
1913
+ toaster toaster appliances
1914
+ toaster oven toaster oven appliances
1915
+ toester toaster appliances
1916
+ toiled toilet toilet
1917
+ toiled paper toilet paper objects
1918
+ toilet toilet toilet
1919
+ toilet bin trash can objects
1920
+ toilet bowl brush holder toilet brush holder misc
1921
+ toilet brush toilet brush misc
1922
+ toilet brush holder toilet brush holder misc
1923
+ toilet cabinet cabinet cabinet
1924
+ toilet cleaner toilet cleaner misc
1925
+ toilet counter counter counter
1926
+ toilet flush handle objects
1927
+ toilet handle handle objects
1928
+ toilet holder toilet brush holder misc
1929
+ toilet paper toilet paper objects
1930
+ toilet paper dispenser toilet paper dispenser misc
1931
+ toilet paper holder toilet paper dispenser misc
1932
+ toilet paper stand toilet paper dispenser misc
1933
+ toilet plunger plunger objects
1934
+ toilet seat toilet seat toilet
1935
+ toilet sink sink sink
1936
+ toilet sliding door door door
1937
+ toilet utensil utensil objects
1938
+ toilete toilet toilet
1939
+ toilete brush toilet brush misc
1940
+ toilete paper holder toilet paper dispenser misc
1941
+ toiletry toiletry objects
1942
+ toiletry bag toiletry bag objects
1943
+ toliet toilet toilet
1944
+ tolilet toilet toilet
1945
+ tool tool objects
1946
+ tool board tool rack wall
1947
+ tool box tool box objects
1948
+ tool rack tool rack wall
1949
+ toolbox tool box objects
1950
+ tools tool objects
1951
+ toothbrush toothbrush objects
1952
+ toothbrush cup cup objects
1953
+ toothbrush holder cup objects
1954
+ toothpaste toothpaste objects
1955
+ torch torch objects
1956
+ toster toaster appliances
1957
+ towel towel towel
1958
+ towel bar towel bar objects
1959
+ towel bar shelf shelf shelving
1960
+ towel basket basket objects
1961
+ towel box box objects
1962
+ towel hang towel bar objects
1963
+ towel hanger towel bar objects
1964
+ towel holder towel bar objects
1965
+ towel paper dispenser toilet paper dispenser misc
1966
+ towel paper holder toilet paper dispenser misc
1967
+ towel rack towel bar objects
1968
+ towel rail towel bar objects
1969
+ towel ring towel ring objects
1970
+ towel rod towel bar objects
1971
+ towel shelf shelf shelving
1972
+ towell towel towel
1973
+ towels towel towel
1974
+ towels in a basket basket objects
1975
+ toy toy objects
1976
+ toy /otherroom toy objects
1977
+ toy mickey mouse toy objects
1978
+ toy airplane toy objects
1979
+ toy car toy objects
1980
+ toy cars toy objects
1981
+ toy duck toy objects
1982
+ toy trailer toy objects
1983
+ toy train toy objects
1984
+ toys toy objects
1985
+ track toy objects
1986
+ traffic cone traffic cone objects
1987
+ training bench weight bench gym_equipment
1988
+ training mat training mat gym_equipment
1989
+ trampoline trampoline gym_equipment
1990
+ trascan trash can objects
1991
+ trash trash can objects
1992
+ trash bin trash can objects
1993
+ trash bag trash bag misc
1994
+ trash bin trash can objects
1995
+ trash can trash can objects
1996
+ trashcan trash can objects
1997
+ tray tray objects
1998
+ treadmill treadmill gym_equipment
1999
+ tree tree plant
2000
+ tree branch tree branch objects
2001
+ trimmer trimmer objects
2002
+ trinket trinket objects
2003
+ tripod tripod shelving
2004
+ trofeum trophy misc
2005
+ troley trolley objects
2006
+ trolley trolley objects
2007
+ trombone trombone objects
2008
+ trophies trophy misc
2009
+ trophy trophy misc
2010
+ trough trough misc
2011
+ trumpet trumpet objects
2012
+ trumpet stand stand table
2013
+ tub bathtub bathtub
2014
+ tube tv tv_monitor
2015
+ tv tv tv_monitor
2016
+ tv cabinet tv stand furniture
2017
+ tv decoder tv remote objects
2018
+ tv led tv tv_monitor
2019
+ tv remote tv remote objects
2020
+ tv remote control tv remote objects
2021
+ tv stand tv stand furniture
2022
+ tv stand door door door
2023
+ tv table tv stand furniture
2024
+ twigs in vase plant plant
2025
+ typewriter typewriter objects
2026
+ ubnknown/ probably clothes clothes clothes
2027
+ uknknown unknown unlabeled
2028
+ uknown unknown unlabeled
2029
+ uknown device unknown unlabeled
2030
+ ukulele ukulele objects
2031
+ umbrella umbrella objects
2032
+ umbrella stand umbrella stand furniture
2033
+ umbrellas umbrella objects
2034
+ umknown unknown unlabeled
2035
+ unkknown unknown unlabeled
2036
+ unknaown unknown unlabeled
2037
+ unknnown unknown unlabeled
2038
+ unknon unknown unlabeled
2039
+ unknow unknown unlabeled
2040
+ unknowm unknown unlabeled
2041
+ unknown unknown unlabeled
2042
+ unknown /otherroom unknown unlabeled
2043
+ unknown /outside unknown unlabeled
2044
+ unknown /probably cup cup objects
2045
+ unknown cluter clutter misc
2046
+ unknown clutter clutter misc
2047
+ unknown countertop item unknown unlabeled
2048
+ unknown device unknown unlabeled
2049
+ unknown facility unknown unlabeled
2050
+ unknown kitchen appliance kitchen appliance appliances
2051
+ unknown kitchen stuff unknown unlabeled
2052
+ unknown machines appliance misc
2053
+ unknown office stuff unknown unlabeled
2054
+ unknown picture/window picture picture
2055
+ unknown stuff unknown unlabeled
2056
+ unknown wall wall wall
2057
+ unknown/ unknown unlabeled
2058
+ unknown/ a pie? food misc
2059
+ unknown/ bin? bin objects
2060
+ unknown/ pile of something clutter misc
2061
+ unknown/ proably cup cup objects
2062
+ unknown/ probably a book book objects
2063
+ unknown/ probably advertisement magazine objects
2064
+ unknown/ probably air refresher air freshener objects
2065
+ unknown/ probably air vent vent misc
2066
+ unknown/ probably air vent fan fan objects
2067
+ unknown/ probably albums album objects
2068
+ unknown/ probably attic entrance door door
2069
+ unknown/ probably bag bag objects
2070
+ unknown/ probably basket basket objects
2071
+ unknown/ probably bin bin objects
2072
+ unknown/ probably board board board_panel
2073
+ unknown/ probably board games game objects
2074
+ unknown/ probably boiler boiler misc
2075
+ unknown/ probably book book objects
2076
+ unknown/ probably books book objects
2077
+ unknown/ probably books or albums book objects
2078
+ unknown/ probably bottle bottle objects
2079
+ unknown/ probably bowl bowl objects
2080
+ unknown/ probably box box objects
2081
+ unknown/probably cabinet cabinet cabinet
2082
+ unknown/ probably cabinet cabinet cabinet
2083
+ unknown/ probably calculator calculator appliances
2084
+ unknown/ probably candle candle objects
2085
+ unknown/ probably candle stand candle objects
2086
+ unknown/ probably candles candle objects
2087
+ unknown/ probably canopy canopy misc
2088
+ unknown/ probably casket casket misc
2089
+ unknown/ probably charger charger objects
2090
+ unknown/ probably clock clock objects
2091
+ unknown/ probably clothes clothes clothes
2092
+ unknown/ probably coffee machine coffee machine appliances
2093
+ unknown/ probably computer computer objects
2094
+ unknown/ probably coocking books book objects
2095
+ unknown/ probably cookies food misc
2096
+ unknown/ probably cosmetic cosmetics objects
2097
+ unknown/ probably cosmetics cosmetics objects
2098
+ unknown/ probably cups cups objects
2099
+ unknown/ probably decoration decoration objects
2100
+ unknown/ probably decorative plant decorative plant plant
2101
+ unknown/ probably desk or box box objects
2102
+ unknown/ probably dinnerware flatware objects
2103
+ unknown/ probably dishwasher dishwasher appliances
2104
+ unknown/ probably drum drum objects
2105
+ unknown/ probably drums drum objects
2106
+ unknown/ probably fan vent fan objects
2107
+ unknown/ probably fire alarm fire alarm objects
2108
+ unknown/ probably fire detector smoke detector misc
2109
+ unknown/ probably fire sprinkler sprinkler objects
2110
+ unknown/ probably foam foam misc
2111
+ unknown/ probably folded table table table
2112
+ unknown/ probably frame frame misc
2113
+ unknown/ probably framed picture picture picture
2114
+ unknown/ probably fridge refrigerator appliances
2115
+ unknown/ probably glasses glasses objects
2116
+ unknown/ probably hand grab unknown unlabeled
2117
+ unknown/ probably hat hat clothes
2118
+ unknown/ probably heater heater objects
2119
+ unknown/ probably home theater player tv tv_monitor
2120
+ unknown/ probably jar jar objects
2121
+ unknown/ probably lamp lamp lighting
2122
+ unknown/ probably letters papers objects
2123
+ unknown/ probably liquid soap soap objects
2124
+ unknown/ probably magazine magazine objects
2125
+ unknown/ probably monitor monitor tv_monitor
2126
+ unknown/ probably napkins napkins objects
2127
+ unknown/ probably notebook notebook objects
2128
+ unknown/ probably paper towel dispenser paper towels towel
2129
+ unknown/ probably paper towel holder paper towels towel
2130
+ unknown/ probably pipe pipe misc
2131
+ unknown/ probably plate plate objects
2132
+ unknown/ probably pot pot objects
2133
+ unknown/ probably printer printer objects
2134
+ unknown/ probably radio radio misc
2135
+ unknown/ probably rug rug floor
2136
+ unknown/ probably scarf scarf clothes
2137
+ unknown/ probably security detector security detector misc
2138
+ unknown/ probably shampoo shampoo objects
2139
+ unknown/ probably sheet sheet misc
2140
+ unknown/ probably sheets sheet misc
2141
+ unknown/ probably shelf shelf shelving
2142
+ unknown/ probably shower cabine shower shower
2143
+ unknown/ probably shower gel shampoo objects
2144
+ unknown/ probably sitting pillow pillow cushion
2145
+ unknown/ probably soap soap objects
2146
+ unknown/ probably sofa sofa sofa
2147
+ unknown/ probably solarium chair chair chair
2148
+ unknown/ probably stand stand table
2149
+ unknown/ probably table lamp table lamp lighting
2150
+ unknown/ probably tank tank misc
2151
+ unknown/ probably telephone telephone objects
2152
+ unknown/ probably tissue dispenser tissue misc
2153
+ unknown/ probably wall lamp wall lamp lighting
2154
+ unknown/ probably washing powder washing powder misc
2155
+ unknown/ probably washing soap soap objects
2156
+ unknown/ probaby albums albums objects
2157
+ unknown// probably fire sprinkler fire sprinkler misc
2158
+ unknown/probably decoration decoration objects
2159
+ unknown/remove unknown/remove misc
2160
+ unknwn unknown unlabeled
2161
+ unknwom unknown unlabeled
2162
+ unknwon unknown unlabeled
2163
+ unkown unknown unlabeled
2164
+ unkown clutter clutter misc
2165
+ unknown" unknown unlabeled
2166
+ unknown unlabeled
2167
+ ups unknown unlabeled
2168
+ urinal urinal objects
2169
+ utensil utensil objects
2170
+ vaccum cleaner vacuum cleaner appliances
2171
+ vacuum cleaner vacuum cleaner appliances
2172
+ valve plumbing objects
2173
+ vanity vanity table
2174
+ vanity table vanity table
2175
+ vase vase objects
2176
+ vase with flower flower plant
2177
+ vegetables vegetables objects
2178
+ vegetation plant plant
2179
+ vent vent misc
2180
+ vent pipe pipe misc
2181
+ ventialtion ventilation objects
2182
+ ventilation ventilation objects
2183
+ ventilation hood ventilation hood misc
2184
+ ventilation pipe pipe misc
2185
+ ventilator ventilation objects
2186
+ vese vessel objects
2187
+ vessel vessel objects
2188
+ vessel sink vessel sink misc
2189
+ vice unknown unlabeled
2190
+ video cassette cassette objects
2191
+ vinyl records vinyl records objects
2192
+ violin violin objects
2193
+ violin case violin case objects
2194
+ wadrobe wardrobe furniture
2195
+ waffle iron waffle iron appliances
2196
+ wal wall wall
2197
+ wal hanging decoration decoration objects
2198
+ wall wall wall
2199
+ wall outside wall outside wall
2200
+ wall clock wall clock objects
2201
+ wall lamp wall lamp lighting
2202
+ wall /otheroom wall wall
2203
+ wall /outside wall /outside wall
2204
+ wall balk wall wall
2205
+ wall balks wall wall
2206
+ wall beam wall beam misc
2207
+ wall board wall board misc
2208
+ wall cabinet wall cabinet cabinet
2209
+ wall clock wall clock objects
2210
+ wall control wall control misc
2211
+ wall controll wall control misc
2212
+ wall controller wall control misc
2213
+ wall coping wall wall
2214
+ wall corridor wall wall
2215
+ wall cubby wall cubby misc
2216
+ wall decoration picture picture
2217
+ wall detail wall detail misc
2218
+ wall device wall electronics misc
2219
+ wall electronic wall electronics misc
2220
+ wall electronics wall electronics misc
2221
+ wall frame picture picture
2222
+ wall hanger wall hanger objects
2223
+ wall hanging decoration wall hanging decoration misc
2224
+ wall hanging organizer wall hanger objects
2225
+ wall indent wall indent misc
2226
+ wall lamp wall lamp lighting
2227
+ wall light wall lamp lighting
2228
+ wall lmap wall lamp lighting
2229
+ wall niche wall hanging decoration misc
2230
+ wall of green wall wall
2231
+ wall painting painting objects
2232
+ wall panel wall panel wall
2233
+ wall panel frame wall panel frame wall
2234
+ wall post wall post misc
2235
+ wall shelf wall shelf shelving
2236
+ wall sign wall sign misc
2237
+ wall sing wall sign misc
2238
+ wall soap shelf wall soap shelf misc
2239
+ wall statue wall statue misc
2240
+ wall sticker decoration objects
2241
+ wall toilet paper wall toilet paper misc
2242
+ wall top wall top misc
2243
+ wall tv wall tv misc
2244
+ wall vent vent misc
2245
+ wallkitchen island wall wall
2246
+ walll wall wall
2247
+ wardeobe wardrobe furniture
2248
+ wardeobe door wardrobe furniture
2249
+ warderobe wardrobe furniture
2250
+ wardobe wardrobe furniture
2251
+ wardrobe wardrobe furniture
2252
+ wardrobe accesories wardrobe furniture
2253
+ wardrobe door wardrobe furniture
2254
+ wardrobe door frame wardrobe furniture
2255
+ wardrobe drawer drawer chest_of_drawers
2256
+ wardrobe mirror mirror mirror
2257
+ wardrobe shelf shelf shelving
2258
+ wardrobe sliding door sliding door door
2259
+ wash basin washbasin sink
2260
+ wash basin cabinet wash cabinet cabinet
2261
+ wash cabinet wash cabinet cabinet
2262
+ washbasin washbasin sink
2263
+ washbasin cabinet wash cabinet cabinet
2264
+ washbasin counter washbasin counter counter
2265
+ washbasin countertop washbasin counter counter
2266
+ washbasin table washbasin counter counter
2267
+ washcloth washcloth objects
2268
+ washdisher machine dishwasher appliances
2269
+ washer dryer washer-dryer misc
2270
+ washer-dryer washer-dryer misc
2271
+ washing container container objects
2272
+ washing liquid detergent misc
2273
+ washing machine washing machine appliances
2274
+ washing machine and dryer washing machine and dryer appliances
2275
+ washing powder washing powder misc
2276
+ washing stuff washing stuff misc
2277
+ watch watch objects
2278
+ water basin water basin misc
2279
+ water bottle bottle objects
2280
+ water dispenser water dispenser objects
2281
+ water fountain water fountain misc
2282
+ water heater water heater objects
2283
+ water meter water meter objects
2284
+ water outlet water outlet objects
2285
+ water pump water pump objects
2286
+ water tank water tank objects
2287
+ watering can watering can objects
2288
+ weight weight gym_equipment
2289
+ weight bench weight bench gym_equipment
2290
+ weights weights gym_equipment
2291
+ wheel wheel objects
2292
+ wheelbarrow wheelbarrow objects
2293
+ whine shelf whine shelf misc
2294
+ whiteboard whiteboard tv_monitor
2295
+ widnow frame window frame window
2296
+ wifi repeater wifi router objects
2297
+ wifi router wifi router objects
2298
+ wifi transmitter wifi router objects
2299
+ wind chime decoration objects
2300
+ windo window window
2301
+ windoe frame window frame window
2302
+ windoow shade window shade blinds
2303
+ windor window window
2304
+ window window window
2305
+ window curtain window curtain curtain
2306
+ window frame window frame window
2307
+ window glass window glass objects
2308
+ window shade window shade blinds
2309
+ window /otherroom window /otherroom window
2310
+ window /outside window /outside window
2311
+ window /wall window window
2312
+ window bars window window
2313
+ window behind shutters window window
2314
+ window blinds blinds blinds
2315
+ window curtain window curtain curtain
2316
+ window curtain bar window curtain curtain
2317
+ window fame window frame window
2318
+ window fram window frame window
2319
+ window frame window frame window
2320
+ window frame /otherroom window frame /otherroom window
2321
+ window glass window glass objects
2322
+ window glass /outside window window
2323
+ window pane window window
2324
+ window panel window window
2325
+ window panes window window
2326
+ window place window window
2327
+ window rame window window
2328
+ window seat window seat seating
2329
+ window shade window shade blinds
2330
+ window shades window shade blinds
2331
+ window shutter window shutter objects
2332
+ window shutters window shutters objects
2333
+ window sill window frame window
2334
+ window valence window valence curtain
2335
+ window/door window/door misc
2336
+ windowframe window frame window
2337
+ windown frame window frame window
2338
+ windows window window
2339
+ windows frame window frame window
2340
+ windowsil window frame window
2341
+ windowsill window frame window
2342
+ windw frame window frame window
2343
+ wine wine misc
2344
+ wine bottle wine bottle objects
2345
+ wine cabinet wine cabinet cabinet
2346
+ wine rack wine rack misc
2347
+ wine refrigerator refrigerator appliances
2348
+ wine storage wine storage shelving
2349
+ winndow window window
2350
+ winndow frame window frame window
2351
+ wire wire misc
2352
+ wood wood misc
2353
+ wood burner fireplace fireplace
2354
+ wood frame frame misc
2355
+ wooden balk wood misc
2356
+ wooden panel panel board_panel
2357
+ wool cloth objects
2358
+ woor frame frame misc
2359
+ workout bike workout bike misc
2360
+ workout weight weights gym_equipment
2361
+ workstation workstation objects
2362
+ worktop worktop misc
2363
+ wreath wreath objects
2364
+ wrench wrench misc
2365
+ yoga mat yoga mat misc
2366
+ ~aper unknown unlabeled
2367
+ łamp lamp lighting
2368
+ śign sign misc
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/hm3d/preprocess_hm3d.py ADDED
@@ -0,0 +1,209 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Preprocessing Script for Habitat-Matterport 3D Dataset
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import os
9
+ import glob
10
+ import argparse
11
+ import numpy as np
12
+ import trimesh
13
+ from pathlib import Path
14
+ import multiprocessing as mp
15
+ from concurrent.futures import ProcessPoolExecutor
16
+ from itertools import repeat
17
+ from pathlib import Path
18
+
19
+ from hm3d_constants import CLASS_LABELS_40
20
+
21
+ MPCAT2INDEX = dict([(CLASS_LABELS_40[i], i) for i in range(40)])
22
+ MPCAT2INDEX["unlabeled"] = -1
23
+
24
+ CAT2INDEX = dict()
25
+ with open(Path(__file__).parent / "hm3dsem_category_mappings.tsv") as f:
26
+ f.readline() # raw_category category mpcat40
27
+ lines = f.readlines()
28
+ for line in lines:
29
+ line = line.strip().split("\t")
30
+ if len(line) == 2:
31
+ # L2164: "\tunknown\tunlabeled"
32
+ line.insert(0, "")
33
+ CAT2INDEX[line[1]] = MPCAT2INDEX[line[2]]
34
+
35
+
36
+ def uv_to_texture_color(uv, texture):
37
+ width, height = texture.size
38
+ u = int(uv[0] * width)
39
+ v = int((1 - uv[1]) * height) # Flip y-axis for image coordinates
40
+ if 0 <= u < width and 0 <= v < height:
41
+ return texture.getpixel((u, v))
42
+ else:
43
+ return 0, 0, 0
44
+
45
+
46
+ def load_hex_mapping(mapping_path):
47
+ hex2label = {}
48
+ with open(mapping_path) as f:
49
+ f.readline() # remove 'HM3D Semantic Annotations\n'
50
+ lines = f.readlines() # get the left
51
+ for line in lines:
52
+ line = line.strip().split(",")
53
+ cat = line[2].strip('"')
54
+ if cat == "trashcan":
55
+ cat = "trash can"
56
+ elif cat == "fridge":
57
+ cat = "refrigerator"
58
+ hex2label[line[1]] = dict(instance=line[0], segment=CAT2INDEX[cat])
59
+ return hex2label
60
+
61
+
62
+ def handle_process(
63
+ scene_path,
64
+ output_root,
65
+ density=0.02,
66
+ ):
67
+ scene_path = Path(scene_path)
68
+ scene_label_path = scene_path.with_suffix(".semantic.glb")
69
+ scene_mapping_path = scene_path.with_suffix(".semantic.txt")
70
+ scene_name = scene_path.parent.name.replace("-", "_")
71
+ scene_id = scene_name.split("_")[0]
72
+ labeled = True if scene_label_path.is_file() else False
73
+
74
+ # test split (900-1000) is reserved by official
75
+ if 0 <= int(scene_id) < 800:
76
+ split = "train"
77
+ else:
78
+ split = "val"
79
+ print(f"Parsing scene {scene_name} in {split} split...")
80
+
81
+ scene = trimesh.load(scene_path)
82
+ if labeled:
83
+ labeled_scene_ = trimesh.load(scene_label_path)
84
+ labeled_scene = trimesh.Scene()
85
+ for name, mesh in labeled_scene_.geometry.items():
86
+ # some case, name in labeled scene and scene is not matched, so only use chunk id
87
+ labeled_scene.add_geometry(mesh, geom_name=name.split("_")[0])
88
+ del labeled_scene_
89
+ hex2label = load_hex_mapping(scene_mapping_path)
90
+
91
+ room_dict = {}
92
+ for name, mesh in scene.geometry.items():
93
+ room_id = "_".join(name.split("_")[1:3]).replace("group", "").replace("sub", "")
94
+ if room_id not in room_dict.keys():
95
+ room_dict[room_id] = trimesh.Scene()
96
+ room_dict[room_id].add_geometry(mesh, geom_name=name.split("_")[0])
97
+ del scene
98
+
99
+ for room_id, scene in room_dict.items():
100
+ # seed by scene_id and room_id e.g. 00802-000-002 -> 802000002
101
+ np.random.seed(int(scene_id + room_id.replace("_", "")))
102
+ room_coord = []
103
+ room_color = []
104
+ room_normal = []
105
+ if labeled:
106
+ room_label_color = []
107
+
108
+ for name in scene.geometry.keys():
109
+ mesh = scene.geometry[name]
110
+ num_points = int(np.sum(mesh.area_faces) / density**2)
111
+ if num_points == 0:
112
+ continue
113
+ coords, face_indices = mesh.sample(num_points, return_index=True)
114
+ faces = mesh.faces[face_indices]
115
+ triangles = mesh.vertices[faces]
116
+ bary_coords = trimesh.triangles.points_to_barycentric(triangles, coords)
117
+ uv_coords = mesh.visual.uv[faces]
118
+ sampled_uvs = np.einsum("ijk,ij->ik", uv_coords, bary_coords)
119
+
120
+ pbr_material = mesh.visual.material
121
+ texture_image = pbr_material.baseColorTexture
122
+ if texture_image is None:
123
+ continue
124
+ colors = np.array(
125
+ [uv_to_texture_color(uv, texture_image) for uv in sampled_uvs]
126
+ )
127
+ normals = mesh.vertex_normals[faces]
128
+ normals = np.einsum("ijk,ij->ik", normals, bary_coords)
129
+ room_coord.append(coords)
130
+ room_color.append(colors)
131
+ room_normal.append(normals)
132
+
133
+ if labeled:
134
+ labeled_mash = labeled_scene.geometry[name]
135
+ label_texture_image = labeled_mash.visual.material.baseColorTexture
136
+ label_color = np.array(
137
+ [uv_to_texture_color(uv, label_texture_image) for uv in sampled_uvs]
138
+ )
139
+ room_label_color.append(label_color)
140
+
141
+ if len(room_coord) == 0:
142
+ continue
143
+
144
+ room_coord = np.concatenate(room_coord, axis=0).astype("float32")
145
+ room_color = np.concatenate(room_color, axis=0).astype("uint8")
146
+ room_normal = np.concatenate(room_normal, axis=0).astype("float32")
147
+ data_dict = dict(coord=room_coord, color=room_color, normal=room_normal)
148
+
149
+ if labeled:
150
+ room_label_color = np.concatenate(room_label_color, axis=0)
151
+ instance_label_color = np.unique(room_label_color, axis=0)
152
+ room_instance = -np.ones(len(room_label_color), dtype="int16")
153
+ room_segment = -np.ones(len(room_label_color), dtype="int16")
154
+ for i in range(len(instance_label_color)):
155
+ label_color = instance_label_color[i]
156
+ label_hex = "{c[0]:02x}{c[1]:02x}{c[2]:02x}".format(c=label_color)
157
+ mask = np.all(room_label_color == label_color, axis=-1)
158
+ room_instance[mask] = i
159
+ if label_hex.upper() in hex2label.keys():
160
+ room_segment[mask] = hex2label[label_hex.upper()]["segment"]
161
+ data_dict["instance"] = room_instance
162
+ data_dict["segment"] = room_segment
163
+ save_path = Path(output_root) / split / "_".join([scene_name, room_id])
164
+ os.makedirs(save_path, exist_ok=True)
165
+ for key, value in data_dict.items():
166
+ np.save(save_path / f"{key}.npy", value)
167
+
168
+
169
+ if __name__ == "__main__":
170
+ parser = argparse.ArgumentParser()
171
+ parser.add_argument(
172
+ "--dataset_root",
173
+ required=True,
174
+ help="Path to the Habitat-Matterport 3D dataset containing scene folders",
175
+ )
176
+ parser.add_argument(
177
+ "--output_root",
178
+ required=True,
179
+ help="Output path where train/val folders will be located",
180
+ )
181
+ parser.add_argument(
182
+ "--density",
183
+ default=0.02,
184
+ type=float,
185
+ help="Sampling density on mesh surface (m)",
186
+ )
187
+ parser.add_argument(
188
+ "--num_workers",
189
+ default=mp.cpu_count(),
190
+ type=int,
191
+ help="Num workers for preprocessing.",
192
+ )
193
+ args = parser.parse_args()
194
+
195
+ scene_list = glob.glob(os.path.join(args.dataset_root, "*", "*.glb"))
196
+ scene_list = [scene for scene in scene_list if not scene.endswith("semantic.glb")]
197
+ assert len(scene_list) == 900
198
+
199
+ # Preprocess data.
200
+ print("Processing scenes...")
201
+ pool = ProcessPoolExecutor(max_workers=args.num_workers)
202
+ _ = list(
203
+ pool.map(
204
+ handle_process,
205
+ scene_list,
206
+ repeat(args.output_root),
207
+ repeat(args.density),
208
+ )
209
+ )
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/meta_data/category_mapping.tsv ADDED
The diff for this file is too large to render. See raw diff
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/meta_data/scenes_test.txt ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2t7WUuJeko7
2
+ 5ZKStnWn8Zo
3
+ ARNzJeq3xxb
4
+ fzynW3qQPVF
5
+ jtcxE69GiFV
6
+ pa4otMbVnkk
7
+ q9vSo1VnCiC
8
+ rqfALeAoiTq
9
+ UwV83HsGsw3
10
+ wc2JMjhGNzB
11
+ WYY7iVyf5p8
12
+ YFuZgdQ5vWj
13
+ yqstnuAEVhm
14
+ YVUC4YcDtcY
15
+ gxdoqLR6rwA
16
+ gYvKGZ5eRqb
17
+ RPmz2sHmrrY
18
+ Vt2qJdWjCF2
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/meta_data/scenes_train.txt ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 17DRP5sb8fy
2
+ 1LXtFkjw3qL
3
+ 1pXnuDYAj8r
4
+ 29hnd4uzFmX
5
+ 5LpN3gDmAk7
6
+ 5q7pvUzZiYa
7
+ 759xd9YjKW5
8
+ 7y3sRwLe3Va
9
+ 82sE5b5pLXE
10
+ 8WUmhLawc2A
11
+ aayBHfsNo7d
12
+ ac26ZMwG7aT
13
+ B6ByNegPMKs
14
+ b8cTxDM8gDG
15
+ cV4RVeZvu5T
16
+ D7N2EKCX4Sj
17
+ e9zR4mvMWw7
18
+ EDJbREhghzL
19
+ GdvgFV5R1Z5
20
+ gTV8FGcVJC9
21
+ HxpKQynjfin
22
+ i5noydFURQK
23
+ JeFG25nYj2p
24
+ JF19kD82Mey
25
+ jh4fc5c5qoQ
26
+ kEZ7cmS4wCh
27
+ mJXqzFtmKg4
28
+ p5wJjkQkbXX
29
+ Pm6F8kyY3z2
30
+ pRbA3pwrgk9
31
+ PuKPg4mmafe
32
+ PX4nDJXEHrG
33
+ qoiz87JEwZ2
34
+ rPc6DW4iMge
35
+ s8pcmisQ38h
36
+ S9hNv5qa7GM
37
+ sKLMLpTHeUy
38
+ SN83YJsR3w2
39
+ sT4fr6TAbpF
40
+ ULsKaCPVFJR
41
+ uNb9QFRL6hY
42
+ Uxmj2M2itWa
43
+ V2XKFyX4ASd
44
+ VFuaQ6m2Qom
45
+ VVfe2KiqLaN
46
+ Vvot9Ly1tCj
47
+ vyrNrziPKCB
48
+ VzqfbhrpDEA
49
+ XcA2TqTSSAj
50
+ 2n8kARJN3HM
51
+ D7G3Y4RVNrH
52
+ dhjEzFoUFzH
53
+ E9uDoFAP3SH
54
+ gZ6f7yhEvPG
55
+ JmbYfDe2QKZ
56
+ r1Q1Z4BcV1o
57
+ r47D5H71a5s
58
+ ur6pFq6Qu1A
59
+ VLzqgDo317F
60
+ YmJkqBEsHnH
61
+ ZMojNkEp431
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/meta_data/scenes_val.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2azQ1b91cZZ
2
+ 8194nk5LbLH
3
+ EU6Fwq7SyZv
4
+ oLBMNvg9in8
5
+ QUCTc6BB5sX
6
+ TbHJrupSAjP
7
+ X7HyMhZNoso
8
+ pLe4wQe7qrG
9
+ x8F5xyUWy9e
10
+ Z6MFQCViBuw
11
+ zsNo4HB9uLZ
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/preprocess_matterport3d_mesh.py ADDED
@@ -0,0 +1,240 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Preprocessing Script for Matterport3D (Region Segmentation Meshes)
3
+ adapted from https://github.com/pengsongyou/openscene/blob/main/scripts/preprocess/preprocess_3d_matterport.py
4
+
5
+ Author: Chongjie Ye (chongjieye@link.cuhk.edu.cn)
6
+ Modified by: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
7
+ Please cite our work if the code is helpful to you.
8
+ """
9
+
10
+ import os
11
+ import argparse
12
+ import glob
13
+ import plyfile
14
+ import numpy as np
15
+ import pandas as pd
16
+ import multiprocessing as mp
17
+ from concurrent.futures import ProcessPoolExecutor
18
+ from itertools import repeat
19
+ from pathlib import Path
20
+ import torch
21
+
22
+ MATTERPORT_CLASS_REMAP = np.zeros(41)
23
+ MATTERPORT_CLASS_REMAP[1] = 1
24
+ MATTERPORT_CLASS_REMAP[2] = 2
25
+ MATTERPORT_CLASS_REMAP[3] = 3
26
+ MATTERPORT_CLASS_REMAP[4] = 4
27
+ MATTERPORT_CLASS_REMAP[5] = 5
28
+ MATTERPORT_CLASS_REMAP[6] = 6
29
+ MATTERPORT_CLASS_REMAP[7] = 7
30
+ MATTERPORT_CLASS_REMAP[8] = 8
31
+ MATTERPORT_CLASS_REMAP[9] = 9
32
+ MATTERPORT_CLASS_REMAP[10] = 10
33
+ MATTERPORT_CLASS_REMAP[11] = 11
34
+ MATTERPORT_CLASS_REMAP[12] = 12
35
+ MATTERPORT_CLASS_REMAP[14] = 13
36
+ MATTERPORT_CLASS_REMAP[16] = 14
37
+ MATTERPORT_CLASS_REMAP[22] = 21 # DIFFERENCE TO SCANNET!
38
+ MATTERPORT_CLASS_REMAP[24] = 15
39
+ MATTERPORT_CLASS_REMAP[28] = 16
40
+ MATTERPORT_CLASS_REMAP[33] = 17
41
+ MATTERPORT_CLASS_REMAP[34] = 18
42
+ MATTERPORT_CLASS_REMAP[36] = 19
43
+ MATTERPORT_CLASS_REMAP[39] = 20
44
+
45
+ MATTERPORT_LABELS_21 = (
46
+ "wall",
47
+ "floor",
48
+ "cabinet",
49
+ "bed",
50
+ "chair",
51
+ "sofa",
52
+ "table",
53
+ "door",
54
+ "window",
55
+ "bookshelf",
56
+ "picture",
57
+ "counter",
58
+ "desk",
59
+ "curtain",
60
+ "refrigerator",
61
+ "shower curtain",
62
+ "toilet",
63
+ "sink",
64
+ "bathtub",
65
+ "other",
66
+ "ceiling",
67
+ )
68
+ MATTERPORT_ALLOWED_NYU_CLASSES = [
69
+ 1,
70
+ 2,
71
+ 3,
72
+ 4,
73
+ 5,
74
+ 6,
75
+ 7,
76
+ 8,
77
+ 9,
78
+ 10,
79
+ 11,
80
+ 12,
81
+ 14,
82
+ 16,
83
+ 22,
84
+ 24,
85
+ 28,
86
+ 33,
87
+ 34,
88
+ 36,
89
+ 39,
90
+ ]
91
+
92
+
93
+ def handle_process(mesh_path, output_path, mapping, train_scenes, val_scenes):
94
+ # Get the scene id and region name from the mesh path
95
+ scene_id = Path(mesh_path).parent.parent.name
96
+ region_id = Path(mesh_path).stem.removeprefix("region")
97
+ data_name = f"{scene_id}_{int(region_id):02d}"
98
+
99
+ output_path = Path(output_path)
100
+ # Check which split the scene belongs to (train, val, or test)
101
+ if scene_id in train_scenes:
102
+ output_folder = output_path / "train" / data_name
103
+ split = "train"
104
+ elif scene_id in val_scenes:
105
+ output_folder = output_path / "val" / data_name
106
+ split = "val"
107
+ else:
108
+ output_folder = output_path / "test" / data_name
109
+ split = "test"
110
+
111
+ # Create the output directory if it doesn't exist
112
+ os.makedirs(output_folder, exist_ok=True)
113
+ print(f"Processing: {data_name} in {split}")
114
+
115
+ # Load the vertex data
116
+ with open(mesh_path, "rb") as f:
117
+ plydata = plyfile.PlyData.read(f)
118
+ vertex_data = plydata["vertex"].data
119
+
120
+ # Get the coordinates, colors, and normals from the vertex data
121
+ coords = np.vstack([vertex_data["x"], vertex_data["y"], vertex_data["z"]]).T
122
+ colors = np.vstack(
123
+ [vertex_data["red"], vertex_data["green"], vertex_data["blue"]]
124
+ ).T
125
+ normals = np.vstack([vertex_data["nx"], vertex_data["ny"], vertex_data["nz"]]).T
126
+
127
+ # Load the face data
128
+ face_data = plydata["face"].data
129
+ category_id = face_data["category_id"]
130
+
131
+ # Replace -1 with 0 in category_id
132
+ category_id[category_id == -1] = 0
133
+
134
+ # Map the labels according to NYU40ID
135
+ mapped_labels = mapping[category_id]
136
+
137
+ # Replace labels not in MATTERPORT_ALLOWED_NYU_CLASSES with 0
138
+ mapped_labels[
139
+ np.logical_not(np.isin(mapped_labels, MATTERPORT_ALLOWED_NYU_CLASSES))
140
+ ] = 0
141
+
142
+ # Remap the labels to ScanNet 20 categories + ceiling
143
+ remapped_labels = MATTERPORT_CLASS_REMAP[mapped_labels].astype(int)
144
+
145
+ # Calculate per-vertex labels
146
+ triangles = face_data["vertex_indices"]
147
+ vertex_labels = np.zeros((coords.shape[0], 22), dtype=np.int32)
148
+ # calculate per-vertex labels
149
+ for row_id in range(triangles.shape[0]):
150
+ for i in range(3):
151
+ vertex_labels[triangles[row_id][i], remapped_labels[row_id]] += 1
152
+
153
+ # Get the most frequent label for each vertex
154
+ vertex_labels = np.argmax(vertex_labels, axis=1)
155
+ vertex_labels -= 1
156
+
157
+ # Add the vertex labels to the data to be saved
158
+ # Prepare the data to be saved
159
+ data_dict = dict(
160
+ coord=coords.astype("float32"),
161
+ color=colors.astype("uint8"),
162
+ normal=normals.astype("float32"),
163
+ segment=vertex_labels.astype("int16"),
164
+ )
165
+
166
+ # Save processed data
167
+ for key in data_dict.keys():
168
+ np.save(output_folder / f"{key}.npy", data_dict[key])
169
+
170
+
171
+ if __name__ == "__main__":
172
+ parser = argparse.ArgumentParser()
173
+ parser.add_argument(
174
+ "--dataset_root",
175
+ required=True,
176
+ help="Path to the Matterport3D dataset containing scene folders",
177
+ )
178
+ parser.add_argument(
179
+ "--output_root",
180
+ required=True,
181
+ help="Output path where train/val folders will be located",
182
+ )
183
+ parser.add_argument(
184
+ "--num_workers",
185
+ default=mp.cpu_count(),
186
+ type=int,
187
+ help="Num workers for preprocessing.",
188
+ )
189
+ opt = parser.parse_args()
190
+ meta_root = Path(os.path.dirname(__file__)) / "meta_data"
191
+
192
+ # Load label map
193
+ category_mapping = pd.read_csv(
194
+ meta_root / "category_mapping.tsv",
195
+ sep="\t",
196
+ header=0,
197
+ )
198
+ mapping = np.insert(
199
+ category_mapping[["nyu40id"]].to_numpy().astype(int).flatten(), 0, 0, axis=0
200
+ )
201
+
202
+ # Load train/val splits
203
+ with open(meta_root / "scenes_train.txt") as train_file:
204
+ train_scenes = train_file.read().splitlines()
205
+ with open(meta_root / "scenes_val.txt") as val_file:
206
+ val_scenes = val_file.read().splitlines()
207
+ with open(meta_root / "scenes_test.txt") as test_file:
208
+ test_scenes = test_file.read().splitlines()
209
+
210
+ # Create output directories
211
+ os.makedirs(opt.output_root, exist_ok=True)
212
+ train_output_dir = os.path.join(opt.output_root, "train")
213
+ os.makedirs(train_output_dir, exist_ok=True)
214
+ val_output_dir = os.path.join(opt.output_root, "val")
215
+ os.makedirs(val_output_dir, exist_ok=True)
216
+ test_output_dir = os.path.join(opt.output_root, "test")
217
+ os.makedirs(test_output_dir, exist_ok=True)
218
+
219
+ # Load scene paths
220
+ scene_paths = sorted(
221
+ glob.glob(
222
+ os.path.join(
223
+ opt.dataset_root, "v1", "scans", "*", "region_segmentations", "*.ply"
224
+ )
225
+ )
226
+ )
227
+
228
+ # Preprocess data.
229
+ pool = ProcessPoolExecutor(max_workers=opt.num_workers)
230
+ print("Processing scenes...")
231
+ _ = list(
232
+ pool.map(
233
+ handle_process,
234
+ scene_paths,
235
+ repeat(opt.output_root),
236
+ repeat(mapping),
237
+ repeat(train_scenes),
238
+ repeat(val_scenes),
239
+ )
240
+ )
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/matterport3d/unzip_matterport3d_region_segmentation.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Preprocessing Script for Matterport3D (Unzipping)
3
+ modified from official preprocess code.
4
+
5
+ Author: Chongjie Ye (chongjieye@link.cuhk.edu.cn)
6
+ Modified by: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
7
+ Please cite our work if the code is helpful to you.
8
+ """
9
+
10
+ import argparse
11
+ import os
12
+ import zipfile
13
+ import glob
14
+ import multiprocessing as mp
15
+ from concurrent.futures import ProcessPoolExecutor
16
+ from itertools import repeat
17
+
18
+
19
+ def unzip_file(input_path, output_path):
20
+ print(f"Unzipping {input_path} ...")
21
+ os.makedirs(os.path.dirname(output_path), exist_ok=True)
22
+ with zipfile.ZipFile(input_path, "r") as zip_ref:
23
+ zip_ref.extractall(output_path)
24
+
25
+
26
+ if __name__ == "__main__":
27
+ parser = argparse.ArgumentParser(
28
+ description='Unzip all "region_segmentations.zip" files in a directory'
29
+ )
30
+ parser.add_argument(
31
+ "--dataset_root",
32
+ type=str,
33
+ help="Path to input directory containing ZIP files",
34
+ required=True,
35
+ )
36
+ parser.add_argument(
37
+ "--output_root",
38
+ type=str,
39
+ help="Path to output directory for extracted files",
40
+ default=None,
41
+ )
42
+ parser.add_argument(
43
+ "--num_workers",
44
+ default=mp.cpu_count(),
45
+ type=int,
46
+ help="Num workers for preprocessing.",
47
+ )
48
+ args = parser.parse_args()
49
+ if args.output_root is None:
50
+ args.output_root = args.dataset_root
51
+ args.output_root = os.path.join(args.output_root, "v1", "scans")
52
+
53
+ file_list = glob.glob(
54
+ os.path.join(args.dataset_root, "v1", "scans", "*", "region_segmentations.zip")
55
+ )
56
+
57
+ # Preprocess data.
58
+ print("Unzipping region_segmentations.zip in Matterport3D...")
59
+ pool = ProcessPoolExecutor(max_workers=args.num_workers)
60
+ _ = list(
61
+ pool.map(
62
+ unzip_file,
63
+ file_list,
64
+ repeat(args.output_root),
65
+ )
66
+ )
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/nuscenes/preprocess_nuscenes_info.py ADDED
@@ -0,0 +1,607 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Preprocessing Script for nuScenes Information
3
+ modified from OpenPCDet (https://github.com/open-mmlab/OpenPCDet)
4
+
5
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
6
+ Please cite our work if the code is helpful to you.
7
+ """
8
+
9
+ import os
10
+ from pathlib import Path
11
+ import numpy as np
12
+ import argparse
13
+ import tqdm
14
+ import pickle
15
+ from functools import reduce
16
+ from pyquaternion import Quaternion
17
+ from nuscenes.nuscenes import NuScenes
18
+ from nuscenes.utils import splits
19
+ from nuscenes.utils.geometry_utils import transform_matrix
20
+
21
+
22
+ map_name_from_general_to_detection = {
23
+ "human.pedestrian.adult": "pedestrian",
24
+ "human.pedestrian.child": "pedestrian",
25
+ "human.pedestrian.wheelchair": "ignore",
26
+ "human.pedestrian.stroller": "ignore",
27
+ "human.pedestrian.personal_mobility": "ignore",
28
+ "human.pedestrian.police_officer": "pedestrian",
29
+ "human.pedestrian.construction_worker": "pedestrian",
30
+ "animal": "ignore",
31
+ "vehicle.car": "car",
32
+ "vehicle.motorcycle": "motorcycle",
33
+ "vehicle.bicycle": "bicycle",
34
+ "vehicle.bus.bendy": "bus",
35
+ "vehicle.bus.rigid": "bus",
36
+ "vehicle.truck": "truck",
37
+ "vehicle.construction": "construction_vehicle",
38
+ "vehicle.emergency.ambulance": "ignore",
39
+ "vehicle.emergency.police": "ignore",
40
+ "vehicle.trailer": "trailer",
41
+ "movable_object.barrier": "barrier",
42
+ "movable_object.trafficcone": "traffic_cone",
43
+ "movable_object.pushable_pullable": "ignore",
44
+ "movable_object.debris": "ignore",
45
+ "static_object.bicycle_rack": "ignore",
46
+ }
47
+
48
+
49
+ cls_attr_dist = {
50
+ "barrier": {
51
+ "cycle.with_rider": 0,
52
+ "cycle.without_rider": 0,
53
+ "pedestrian.moving": 0,
54
+ "pedestrian.sitting_lying_down": 0,
55
+ "pedestrian.standing": 0,
56
+ "vehicle.moving": 0,
57
+ "vehicle.parked": 0,
58
+ "vehicle.stopped": 0,
59
+ },
60
+ "bicycle": {
61
+ "cycle.with_rider": 2791,
62
+ "cycle.without_rider": 8946,
63
+ "pedestrian.moving": 0,
64
+ "pedestrian.sitting_lying_down": 0,
65
+ "pedestrian.standing": 0,
66
+ "vehicle.moving": 0,
67
+ "vehicle.parked": 0,
68
+ "vehicle.stopped": 0,
69
+ },
70
+ "bus": {
71
+ "cycle.with_rider": 0,
72
+ "cycle.without_rider": 0,
73
+ "pedestrian.moving": 0,
74
+ "pedestrian.sitting_lying_down": 0,
75
+ "pedestrian.standing": 0,
76
+ "vehicle.moving": 9092,
77
+ "vehicle.parked": 3294,
78
+ "vehicle.stopped": 3881,
79
+ },
80
+ "car": {
81
+ "cycle.with_rider": 0,
82
+ "cycle.without_rider": 0,
83
+ "pedestrian.moving": 0,
84
+ "pedestrian.sitting_lying_down": 0,
85
+ "pedestrian.standing": 0,
86
+ "vehicle.moving": 114304,
87
+ "vehicle.parked": 330133,
88
+ "vehicle.stopped": 46898,
89
+ },
90
+ "construction_vehicle": {
91
+ "cycle.with_rider": 0,
92
+ "cycle.without_rider": 0,
93
+ "pedestrian.moving": 0,
94
+ "pedestrian.sitting_lying_down": 0,
95
+ "pedestrian.standing": 0,
96
+ "vehicle.moving": 882,
97
+ "vehicle.parked": 11549,
98
+ "vehicle.stopped": 2102,
99
+ },
100
+ "ignore": {
101
+ "cycle.with_rider": 307,
102
+ "cycle.without_rider": 73,
103
+ "pedestrian.moving": 0,
104
+ "pedestrian.sitting_lying_down": 0,
105
+ "pedestrian.standing": 0,
106
+ "vehicle.moving": 165,
107
+ "vehicle.parked": 400,
108
+ "vehicle.stopped": 102,
109
+ },
110
+ "motorcycle": {
111
+ "cycle.with_rider": 4233,
112
+ "cycle.without_rider": 8326,
113
+ "pedestrian.moving": 0,
114
+ "pedestrian.sitting_lying_down": 0,
115
+ "pedestrian.standing": 0,
116
+ "vehicle.moving": 0,
117
+ "vehicle.parked": 0,
118
+ "vehicle.stopped": 0,
119
+ },
120
+ "pedestrian": {
121
+ "cycle.with_rider": 0,
122
+ "cycle.without_rider": 0,
123
+ "pedestrian.moving": 157444,
124
+ "pedestrian.sitting_lying_down": 13939,
125
+ "pedestrian.standing": 46530,
126
+ "vehicle.moving": 0,
127
+ "vehicle.parked": 0,
128
+ "vehicle.stopped": 0,
129
+ },
130
+ "traffic_cone": {
131
+ "cycle.with_rider": 0,
132
+ "cycle.without_rider": 0,
133
+ "pedestrian.moving": 0,
134
+ "pedestrian.sitting_lying_down": 0,
135
+ "pedestrian.standing": 0,
136
+ "vehicle.moving": 0,
137
+ "vehicle.parked": 0,
138
+ "vehicle.stopped": 0,
139
+ },
140
+ "trailer": {
141
+ "cycle.with_rider": 0,
142
+ "cycle.without_rider": 0,
143
+ "pedestrian.moving": 0,
144
+ "pedestrian.sitting_lying_down": 0,
145
+ "pedestrian.standing": 0,
146
+ "vehicle.moving": 3421,
147
+ "vehicle.parked": 19224,
148
+ "vehicle.stopped": 1895,
149
+ },
150
+ "truck": {
151
+ "cycle.with_rider": 0,
152
+ "cycle.without_rider": 0,
153
+ "pedestrian.moving": 0,
154
+ "pedestrian.sitting_lying_down": 0,
155
+ "pedestrian.standing": 0,
156
+ "vehicle.moving": 21339,
157
+ "vehicle.parked": 55626,
158
+ "vehicle.stopped": 11097,
159
+ },
160
+ }
161
+
162
+
163
def get_available_scenes(nusc):
    """Return the scenes whose first LIDAR_TOP sample file exists on disk.

    Args:
        nusc: NuScenes dataset handle.

    Returns:
        list: Scene records from ``nusc.scene`` whose first lidar frame is
        present under the data root.
    """
    available_scenes = []
    for scene in nusc.scene:
        scene_token = scene["token"]
        scene_rec = nusc.get("scene", scene_token)
        sample_rec = nusc.get("sample", scene_rec["first_sample_token"])
        sd_rec = nusc.get("sample_data", sample_rec["data"]["LIDAR_TOP"])
        # The original `while has_more_frames` loop always broke after the
        # first iteration (both branches ended in `break`), so availability
        # reduces to checking the first lidar frame of the scene.
        lidar_path, boxes, _ = nusc.get_sample_data(sd_rec["token"])
        if Path(lidar_path).exists():
            available_scenes.append(scene)
    return available_scenes
183
+
184
+
185
def get_sample_data(nusc, sample_data_token, selected_anntokens=None):
    """Fetch the file path and annotation boxes for a sample_data record.

    Boxes are transformed from the global frame into the coordinate frame
    of the sensor that captured the sample.

    Args:
        nusc: NuScenes dataset handle.
        sample_data_token: Token of the sample_data record.
        selected_anntokens: Optional annotation tokens; when given, only
            these annotations are returned.

    Returns:
        Tuple ``(data_path, box_list, cam_intrinsic)`` where
        ``cam_intrinsic`` is None for non-camera sensors.
    """
    # Sensor, calibration and ego-pose records for this sample_data.
    sd_record = nusc.get("sample_data", sample_data_token)
    cs_record = nusc.get("calibrated_sensor", sd_record["calibrated_sensor_token"])
    sensor_record = nusc.get("sensor", cs_record["sensor_token"])
    pose_record = nusc.get("ego_pose", sd_record["ego_pose_token"])

    data_path = nusc.get_sample_data_path(sample_data_token)

    # Intrinsics only exist for camera sensors.
    cam_intrinsic = (
        np.array(cs_record["camera_intrinsic"])
        if sensor_record["modality"] == "camera"
        else None
    )

    # Either the requested annotations or every box of the sample.
    if selected_anntokens is None:
        boxes = nusc.get_boxes(sample_data_token)
    else:
        boxes = [nusc.get_box(token) for token in selected_anntokens]

    # Precompute the two frame changes applied to every box.
    ego_translation = -np.array(pose_record["translation"])
    ego_rotation = Quaternion(pose_record["rotation"]).inverse
    sensor_translation = -np.array(cs_record["translation"])
    sensor_rotation = Quaternion(cs_record["rotation"]).inverse

    box_list = []
    for box in boxes:
        box.velocity = nusc.box_velocity(box.token)
        # global -> ego vehicle frame
        box.translate(ego_translation)
        box.rotate(ego_rotation)
        # ego vehicle -> sensor frame
        box.translate(sensor_translation)
        box.rotate(sensor_rotation)
        box_list.append(box)

    return data_path, box_list, cam_intrinsic
231
+
232
+
233
def quaternion_yaw(q: Quaternion) -> float:
    """
    Calculate the yaw angle from a quaternion.
    Note that this only works for a quaternion that represents a box in lidar or global coordinate frame.
    It does not work for a box in the camera frame.
    :param q: Quaternion of interest.
    :return: Yaw angle in radians.
    """
    rot = q.rotation_matrix
    # The rotated x-axis, rot @ [1, 0, 0], is simply the first column of
    # rot; its xy heading is the yaw.
    return np.arctan2(rot[1, 0], rot[0, 0])
249
+
250
+
251
def obtain_sensor2top(
    nusc, sensor_token, l2e_t, l2e_r_mat, e2g_t, e2g_r_mat, sensor_type="lidar"
):
    """Obtain the info with RT matric from general sensor to Top LiDAR.

    Args:
        nusc (class): Dataset class in the nuScenes dataset.
        sensor_token (str): Sample data token corresponding to the
            specific sensor type.
        l2e_t (np.ndarray): Translation from lidar to ego in shape (1, 3).
        l2e_r_mat (np.ndarray): Rotation matrix from lidar to ego
            in shape (3, 3).
        e2g_t (np.ndarray): Translation from ego to global in shape (1, 3).
        e2g_r_mat (np.ndarray): Rotation matrix from ego to global
            in shape (3, 3).
        sensor_type (str): Sensor to calibrate. Default: "lidar".

    Returns:
        sweep (dict): Sweep information after transformation.
    """
    # Records for this sensor frame: raw sample_data, its calibration
    # (sensor->ego), and the ego pose (ego->global) at capture time.
    sd_rec = nusc.get("sample_data", sensor_token)
    cs_record = nusc.get("calibrated_sensor", sd_rec["calibrated_sensor_token"])
    pose_record = nusc.get("ego_pose", sd_rec["ego_pose_token"])
    data_path = str(nusc.get_sample_data_path(sd_rec["token"]))
    # if os.getcwd() in data_path: # path from lyftdataset is absolute path
    # data_path = data_path.split(f"{os.getcwd()}/")[-1] # relative path
    sweep = {
        "data_path": data_path,
        "type": sensor_type,
        "sample_data_token": sd_rec["token"],
        "sensor2ego_translation": cs_record["translation"],
        "sensor2ego_rotation": cs_record["rotation"],
        "ego2global_translation": pose_record["translation"],
        "ego2global_rotation": pose_record["rotation"],
        "timestamp": sd_rec["timestamp"],
    }
    # "_s" suffix: quantities of *this* (sweep) sensor; the unsuffixed
    # l2e_t / l2e_r_mat / e2g_t / e2g_r_mat arguments belong to the
    # reference Top LiDAR frame.
    l2e_r_s = sweep["sensor2ego_rotation"]
    l2e_t_s = sweep["sensor2ego_translation"]
    e2g_r_s = sweep["ego2global_rotation"]
    e2g_t_s = sweep["ego2global_translation"]

    # obtain the RT from sensor to Top LiDAR
    # sweep->ego->global->ego'->lidar
    l2e_r_s_mat = Quaternion(l2e_r_s).rotation_matrix
    e2g_r_s_mat = Quaternion(e2g_r_s).rotation_matrix
    R = (l2e_r_s_mat.T @ e2g_r_s_mat.T) @ (
        np.linalg.inv(e2g_r_mat).T @ np.linalg.inv(l2e_r_mat).T
    )
    T = (l2e_t_s @ e2g_r_s_mat.T + e2g_t_s) @ (
        np.linalg.inv(e2g_r_mat).T @ np.linalg.inv(l2e_r_mat).T
    )
    # NOTE(review): the .squeeze(0) below requires the subtrahend to be
    # 2-D, i.e. l2e_t to arrive with a leading axis of size 1 (the caller
    # passes a 1-tuple wrapping the translation list) — confirm intentional.
    T -= (
        e2g_t @ (np.linalg.inv(e2g_r_mat).T @ np.linalg.inv(l2e_r_mat).T)
        + l2e_t @ np.linalg.inv(l2e_r_mat).T
    ).squeeze(0)
    sweep["sensor2lidar_rotation"] = R.T  # points @ R.T + T
    sweep["sensor2lidar_translation"] = T
    return sweep
309
+
310
+
311
def fill_trainval_infos(
    data_path, nusc, train_scenes, test=False, max_sweeps=10, with_camera=False
):
    """Build per-sample info dicts for every sample in ``nusc``.

    For each keyframe, records the lidar path, reference transforms,
    up to ``max_sweeps - 1`` preceding lidar sweeps, optional per-camera
    calibration (``with_camera``) and, unless ``test`` is set, the ground
    truth boxes and lidarseg path. Samples are routed to the train or val
    list based on membership of their scene token in ``train_scenes``.

    Args:
        data_path: Dataset root; stored paths are made relative to it.
        nusc: NuScenes dataset handle.
        train_scenes: Set of scene tokens belonging to the train split.
        test (bool): When True, skip ground-truth processing.
        max_sweeps (int): Total sweeps per sample (keyframe + history).
        with_camera (bool): Also collect the six camera infos per frame.

    Returns:
        Tuple ``(train_nusc_infos, val_nusc_infos)`` of info-dict lists.
    """
    train_nusc_infos = []
    val_nusc_infos = []
    progress_bar = tqdm.tqdm(
        total=len(nusc.sample), desc="create_info", dynamic_ncols=True
    )

    ref_chan = "LIDAR_TOP"  # The radar channel from which we track back n sweeps to aggregate the point cloud.
    chan = "LIDAR_TOP"  # The reference channel of the current sample_rec that the point clouds are mapped to.

    for index, sample in enumerate(nusc.sample):
        progress_bar.update()

        # Reference lidar frame: records, calibration, pose and timestamp.
        ref_sd_token = sample["data"][ref_chan]
        ref_sd_rec = nusc.get("sample_data", ref_sd_token)
        ref_cs_rec = nusc.get(
            "calibrated_sensor", ref_sd_rec["calibrated_sensor_token"]
        )
        ref_pose_rec = nusc.get("ego_pose", ref_sd_rec["ego_pose_token"])
        ref_time = 1e-6 * ref_sd_rec["timestamp"]

        ref_lidar_path, ref_boxes, _ = get_sample_data(nusc, ref_sd_token)

        ref_cam_front_token = sample["data"]["CAM_FRONT"]
        ref_cam_path, _, ref_cam_intrinsic = nusc.get_sample_data(ref_cam_front_token)

        # Homogeneous transform from ego car frame to reference frame
        ref_from_car = transform_matrix(
            ref_cs_rec["translation"], Quaternion(ref_cs_rec["rotation"]), inverse=True
        )

        # Homogeneous transformation matrix from global to _current_ ego car frame
        car_from_global = transform_matrix(
            ref_pose_rec["translation"],
            Quaternion(ref_pose_rec["rotation"]),
            inverse=True,
        )
        info = {
            "lidar_path": Path(ref_lidar_path).relative_to(data_path).__str__(),
            "lidar_token": ref_sd_token,
            "cam_front_path": Path(ref_cam_path).relative_to(data_path).__str__(),
            "cam_intrinsic": ref_cam_intrinsic,
            "token": sample["token"],
            "sweeps": [],
            "ref_from_car": ref_from_car,
            "car_from_global": car_from_global,
            "timestamp": ref_time,
        }
        if with_camera:
            info["cams"] = dict()
            l2e_r = ref_cs_rec["rotation"]
            # NOTE(review): the trailing comma makes l2e_t a 1-tuple wrapping
            # the translation list; obtain_sensor2top's .squeeze(0) appears to
            # rely on the resulting (1, 3) shape — confirm intentional.
            l2e_t = (ref_cs_rec["translation"],)
            e2g_r = ref_pose_rec["rotation"]
            e2g_t = ref_pose_rec["translation"]
            l2e_r_mat = Quaternion(l2e_r).rotation_matrix
            e2g_r_mat = Quaternion(e2g_r).rotation_matrix

            # obtain 6 image's information per frame
            camera_types = [
                "CAM_FRONT",
                "CAM_FRONT_RIGHT",
                "CAM_FRONT_LEFT",
                "CAM_BACK",
                "CAM_BACK_LEFT",
                "CAM_BACK_RIGHT",
            ]
            for cam in camera_types:
                cam_token = sample["data"][cam]
                cam_path, _, camera_intrinsics = nusc.get_sample_data(cam_token)
                cam_info = obtain_sensor2top(
                    nusc, cam_token, l2e_t, l2e_r_mat, e2g_t, e2g_r_mat, cam
                )
                cam_info["data_path"] = (
                    Path(cam_info["data_path"]).relative_to(data_path).__str__()
                )
                cam_info.update(camera_intrinsics=camera_intrinsics)
                info["cams"].update({cam: cam_info})

        # Walk backwards through previous lidar frames to collect sweeps;
        # when history runs out, pad by repeating the last entry so every
        # sample ends up with exactly max_sweeps - 1 sweeps.
        sample_data_token = sample["data"][chan]
        curr_sd_rec = nusc.get("sample_data", sample_data_token)
        sweeps = []
        while len(sweeps) < max_sweeps - 1:
            if curr_sd_rec["prev"] == "":
                if len(sweeps) == 0:
                    # No history at all: use the reference frame itself.
                    sweep = {
                        "lidar_path": Path(ref_lidar_path)
                        .relative_to(data_path)
                        .__str__(),
                        "sample_data_token": curr_sd_rec["token"],
                        "transform_matrix": None,
                        "time_lag": curr_sd_rec["timestamp"] * 0,
                    }
                    sweeps.append(sweep)
                else:
                    sweeps.append(sweeps[-1])
            else:
                curr_sd_rec = nusc.get("sample_data", curr_sd_rec["prev"])

                # Get past pose
                current_pose_rec = nusc.get("ego_pose", curr_sd_rec["ego_pose_token"])
                global_from_car = transform_matrix(
                    current_pose_rec["translation"],
                    Quaternion(current_pose_rec["rotation"]),
                    inverse=False,
                )

                # Homogeneous transformation matrix from sensor coordinate frame to ego car frame.
                current_cs_rec = nusc.get(
                    "calibrated_sensor", curr_sd_rec["calibrated_sensor_token"]
                )
                car_from_current = transform_matrix(
                    current_cs_rec["translation"],
                    Quaternion(current_cs_rec["rotation"]),
                    inverse=False,
                )

                # Chain sweep-sensor -> past-ego -> global -> ref-ego -> ref-sensor.
                tm = reduce(
                    np.dot,
                    [ref_from_car, car_from_global, global_from_car, car_from_current],
                )

                lidar_path = nusc.get_sample_data_path(curr_sd_rec["token"])

                time_lag = ref_time - 1e-6 * curr_sd_rec["timestamp"]

                sweep = {
                    "lidar_path": Path(lidar_path).relative_to(data_path).__str__(),
                    "sample_data_token": curr_sd_rec["token"],
                    "transform_matrix": tm,
                    "global_from_car": global_from_car,
                    "car_from_current": car_from_current,
                    "time_lag": time_lag,
                }
                sweeps.append(sweep)

        info["sweeps"] = sweeps

        assert len(info["sweeps"]) == max_sweeps - 1, (
            f"sweep {curr_sd_rec['token']} only has {len(info['sweeps'])} sweeps, "
            f"you should duplicate to sweep num {max_sweeps - 1}"
        )

        if not test:
            # processing gt bbox
            annotations = [
                nusc.get("sample_annotation", token) for token in sample["anns"]
            ]

            # the filtering gives 0.5~1 map improvement
            num_lidar_pts = np.array([anno["num_lidar_pts"] for anno in annotations])
            num_radar_pts = np.array([anno["num_radar_pts"] for anno in annotations])
            mask = num_lidar_pts + num_radar_pts > 0

            locs = np.array([b.center for b in ref_boxes]).reshape(-1, 3)
            dims = np.array([b.wlh for b in ref_boxes]).reshape(-1, 3)[
                :, [1, 0, 2]
            ]  # wlh == > dxdydz (lwh)
            velocity = np.array([b.velocity for b in ref_boxes]).reshape(-1, 3)
            rots = np.array([quaternion_yaw(b.orientation) for b in ref_boxes]).reshape(
                -1, 1
            )
            names = np.array([b.name for b in ref_boxes])
            tokens = np.array([b.token for b in ref_boxes])
            # Box layout: x, y, z, dx, dy, dz, yaw, vx, vy (9 columns).
            gt_boxes = np.concatenate([locs, dims, rots, velocity[:, :2]], axis=1)

            assert len(annotations) == len(gt_boxes) == len(velocity)

            info["gt_boxes"] = gt_boxes[mask, :]
            info["gt_boxes_velocity"] = velocity[mask, :]
            info["gt_names"] = np.array(
                [map_name_from_general_to_detection[name] for name in names]
            )[mask]
            info["gt_boxes_token"] = tokens[mask]
            info["num_lidar_pts"] = num_lidar_pts[mask]
            info["num_radar_pts"] = num_radar_pts[mask]

            # processing gt segment
            segment_path = nusc.get("lidarseg", ref_sd_token)["filename"]
            info["gt_segment_path"] = segment_path

        if sample["scene_token"] in train_scenes:
            train_nusc_infos.append(info)
        else:
            val_nusc_infos.append(info)

    progress_bar.close()
    return train_nusc_infos, val_nusc_infos
500
+
501
+
502
if __name__ == "__main__":
    # CLI entry point: build nuScenes info pickles for train/val/test.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--dataset_root", required=True, help="Path to the nuScenes dataset."
    )
    parser.add_argument(
        "--output_root",
        required=True,
        help="Output path where processed information located.",
    )
    parser.add_argument(
        "--max_sweeps", default=10, type=int, help="Max number of sweeps. Default: 10."
    )
    parser.add_argument(
        "--with_camera",
        action="store_true",
        default=False,
        help="Whether use camera or not.",
    )
    config = parser.parse_args()

    # Load the trainval split and verify every expected scene is on disk.
    print(f"Loading nuScenes tables for version v1.0-trainval...")
    nusc_trainval = NuScenes(
        version="v1.0-trainval", dataroot=config.dataset_root, verbose=False
    )
    available_scenes_trainval = get_available_scenes(nusc_trainval)
    available_scene_names_trainval = [s["name"] for s in available_scenes_trainval]
    print("total scene num:", len(nusc_trainval.scene))
    print("exist scene num:", len(available_scenes_trainval))
    # 850 is the official v1.0-trainval scene count; abort on partial data.
    assert len(available_scenes_trainval) == len(nusc_trainval.scene) == 850

    # Same for the test split (officially 150 scenes).
    print(f"Loading nuScenes tables for version v1.0-test...")
    nusc_test = NuScenes(
        version="v1.0-test", dataroot=config.dataset_root, verbose=False
    )
    available_scenes_test = get_available_scenes(nusc_test)
    available_scene_names_test = [s["name"] for s in available_scenes_test]
    print("total scene num:", len(nusc_test.scene))
    print("exist scene num:", len(available_scenes_test))
    assert len(available_scenes_test) == len(nusc_test.scene) == 150

    # Map official split scene names to scene tokens.
    train_scenes = splits.train
    train_scenes = set(
        [
            available_scenes_trainval[available_scene_names_trainval.index(s)]["token"]
            for s in train_scenes
        ]
    )
    test_scenes = splits.test
    test_scenes = set(
        [
            available_scenes_test[available_scene_names_test.index(s)]["token"]
            for s in test_scenes
        ]
    )
    print(f"Filling trainval information...")
    train_nusc_infos, val_nusc_infos = fill_trainval_infos(
        config.dataset_root,
        nusc_trainval,
        train_scenes,
        test=False,
        max_sweeps=config.max_sweeps,
        with_camera=config.with_camera,
    )
    print(f"Filling test information...")
    test_nusc_infos, _ = fill_trainval_infos(
        config.dataset_root,
        nusc_test,
        test_scenes,
        test=True,
        max_sweeps=config.max_sweeps,
        with_camera=config.with_camera,
    )

    # Serialize the three splits under <output_root>/info/.
    print(f"Saving nuScenes information...")
    os.makedirs(os.path.join(config.output_root, "info"), exist_ok=True)
    print(
        f"train sample: {len(train_nusc_infos)}, val sample: {len(val_nusc_infos)}, test sample: {len(test_nusc_infos)}"
    )
    with open(
        os.path.join(
            config.output_root,
            "info",
            f"nuscenes_infos_{config.max_sweeps}sweeps_train.pkl",
        ),
        "wb",
    ) as f:
        pickle.dump(train_nusc_infos, f)
    with open(
        os.path.join(
            config.output_root,
            "info",
            f"nuscenes_infos_{config.max_sweeps}sweeps_val.pkl",
        ),
        "wb",
    ) as f:
        pickle.dump(val_nusc_infos, f)
    with open(
        os.path.join(
            config.output_root,
            "info",
            f"nuscenes_infos_{config.max_sweeps}sweeps_test.pkl",
        ),
        "wb",
    ) as f:
        pickle.dump(test_nusc_infos, f)
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/s3dis/preprocess_s3dis.py ADDED
@@ -0,0 +1,233 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Preprocessing Script for S3DIS
3
+ Parsing normal vectors has a large consumption of memory. Please reduce max_workers if memory is limited.
4
+
5
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
6
+ Please cite our work if the code is helpful to you.
7
+ """
8
+
9
+ import os
10
+ import argparse
11
+ import glob
12
+ import numpy as np
13
+
14
+ try:
15
+ import open3d
16
+ except ImportError:
17
+ import warnings
18
+
19
+ warnings.warn("Please install open3d for parsing normal")
20
+
21
+ try:
22
+ import trimesh
23
+ except ImportError:
24
+ import warnings
25
+
26
+ warnings.warn("Please install trimesh for parsing normal")
27
+
28
+ from concurrent.futures import ProcessPoolExecutor
29
+ from itertools import repeat
30
+
31
+ area_mesh_dict = {}
32
+
33
+
34
def parse_room(
    room, angle, dataset_root, output_root, align_angle=True, parse_normal=False
):
    """Convert one S3DIS room into coord/color/segment/instance .npy files.

    Reads every per-object annotation text file of the room, concatenates
    the points, optionally estimates per-point normals from the raw area
    mesh and optionally rotates the room by its alignment ``angle``.

    Args:
        room: Room path relative to ``dataset_root`` (e.g. "Area_1/office_1").
        angle: Alignment angle in degrees from the area alignment file.
        dataset_root: Root of Stanford3dDataset_v1.2.
        output_root: Root where the room folder of .npy files is written.
        align_angle: Rotate coords (and normals) by the alignment angle.
        parse_normal: Look up normals on the raw area mesh
            (requires ``area_mesh_dict`` to be pre-populated).
    """
    print("Parsing: {}".format(room))
    classes = [
        "ceiling",
        "floor",
        "wall",
        "beam",
        "column",
        "window",
        "door",
        "table",
        "chair",
        "sofa",
        "bookcase",
        "board",
        "clutter",
    ]
    class2label = {cls: i for i, cls in enumerate(classes)}
    source_dir = os.path.join(dataset_root, room)
    save_path = os.path.join(output_root, room)
    os.makedirs(save_path, exist_ok=True)
    object_path_list = sorted(glob.glob(os.path.join(source_dir, "Annotations/*.txt")))

    room_coords = []
    room_colors = []
    room_normals = []
    room_semantic_gt = []
    room_instance_gt = []

    # One annotation file per object; its index doubles as the instance id.
    for object_id, object_path in enumerate(object_path_list):
        object_name = os.path.basename(object_path).split("_")[0]
        obj = np.loadtxt(object_path)
        coords = obj[:, :3]
        colors = obj[:, 3:6]
        # note: in some room there is 'stairs' class
        class_name = object_name if object_name in classes else "clutter"
        semantic_gt = np.repeat(class2label[class_name], coords.shape[0])
        semantic_gt = semantic_gt.reshape([-1, 1])
        instance_gt = np.repeat(object_id, coords.shape[0])
        instance_gt = instance_gt.reshape([-1, 1])

        room_coords.append(coords)
        room_colors.append(colors)
        room_semantic_gt.append(semantic_gt)
        room_instance_gt.append(instance_gt)

    room_coords = np.ascontiguousarray(np.vstack(room_coords))

    if parse_normal:
        # NOTE(review): the swapped unpacking (x_min, z_max, y_min) and the
        # sign flips convert the point cloud's z-up bounds into the raw
        # mesh's coordinate convention (the transform below maps the mesh
        # back with (x, y, z) -> (x, -z, y)) — confirm against the raw data.
        x_min, z_max, y_min = np.min(room_coords, axis=0)
        x_max, z_min, y_max = np.max(room_coords, axis=0)
        z_max = -z_max
        z_min = -z_min
        # Pad the crop box by 10 cm so boundary faces are kept.
        max_bound = np.array([x_max, y_max, z_max]) + 0.1
        min_bound = np.array([x_min, y_min, z_min]) - 0.1
        bbox = open3d.geometry.AxisAlignedBoundingBox(
            min_bound=min_bound, max_bound=max_bound
        )
        # crop room
        room_mesh = (
            area_mesh_dict[os.path.dirname(room)]
            .crop(bbox)
            .transform(
                np.array([[1, 0, 0, 0], [0, 0, -1, 0], [0, 1, 0, 0], [0, 0, 0, 1]])
            )
        )
        # Re-wrap as trimesh to query nearest surface points; each point
        # inherits the normal of its closest face.
        vertices = np.array(room_mesh.vertices)
        faces = np.array(room_mesh.triangles)
        vertex_normals = np.array(room_mesh.vertex_normals)
        room_mesh = trimesh.Trimesh(
            vertices=vertices, faces=faces, vertex_normals=vertex_normals
        )
        (closest_points, distances, face_id) = room_mesh.nearest.on_surface(room_coords)
        room_normals = room_mesh.face_normals[face_id]

    if align_angle:
        # Rotate the room about its center so walls align with the axes.
        angle = (2 - angle / 180) * np.pi
        rot_cos, rot_sin = np.cos(angle), np.sin(angle)
        rot_t = np.array([[rot_cos, -rot_sin, 0], [rot_sin, rot_cos, 0], [0, 0, 1]])
        room_center = (np.max(room_coords, axis=0) + np.min(room_coords, axis=0)) / 2
        room_coords = (room_coords - room_center) @ np.transpose(rot_t) + room_center
        if parse_normal:
            room_normals = room_normals @ np.transpose(rot_t)

    room_colors = np.ascontiguousarray(np.vstack(room_colors))
    room_semantic_gt = np.ascontiguousarray(np.vstack(room_semantic_gt))
    room_instance_gt = np.ascontiguousarray(np.vstack(room_instance_gt))
    np.save(os.path.join(save_path, "coord.npy"), room_coords.astype(np.float32))
    np.save(os.path.join(save_path, "color.npy"), room_colors.astype(np.uint8))
    np.save(os.path.join(save_path, "segment.npy"), room_semantic_gt.astype(np.int16))
    np.save(os.path.join(save_path, "instance.npy"), room_instance_gt.astype(np.int16))

    if parse_normal:
        np.save(os.path.join(save_path, "normal.npy"), room_normals.astype(np.float32))
131
+
132
def main_process():
    """Entry point: parse CLI args and preprocess the selected S3DIS areas.

    Reads the per-room alignment angles of each requested area, optionally
    loads the raw Stanford2d3d meshes (needed for normal estimation), and
    dispatches ``parse_room`` over a process pool.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--splits",
        required=True,
        nargs="+",
        choices=["Area_1", "Area_2", "Area_3", "Area_4", "Area_5", "Area_6"],
        help="Splits need to process ([Area_1, Area_2, Area_3, Area_4, Area_5, Area_6]).",
    )
    parser.add_argument(
        "--dataset_root", required=True, help="Path to Stanford3dDataset_v1.2 dataset"
    )
    parser.add_argument(
        "--output_root",
        required=True,
        help="Output path where area folders will be located",
    )
    parser.add_argument(
        "--raw_root",
        default=None,
        help="Path to Stanford2d3dDataset_noXYZ dataset (optional)",
    )
    parser.add_argument(
        "--align_angle", action="store_true", help="Whether align room angles"
    )
    parser.add_argument(
        "--parse_normal", action="store_true", help="Whether process normal"
    )
    parser.add_argument(
        "--num_workers", default=1, type=int, help="Num workers for preprocessing."
    )
    args = parser.parse_args()

    if args.parse_normal:
        # Normal parsing needs the raw meshes.
        assert args.raw_root is not None

    room_list = []
    angle_list = []

    # Load room information
    print("Loading room information ...")
    for split in args.splits:
        area_info = np.loadtxt(
            os.path.join(
                args.dataset_root,
                split,
                f"{split}_alignmentAngle.txt",
            ),
            dtype=str,
        )
        room_list += [os.path.join(split, room_info[0]) for room_info in area_info]
        angle_list += [int(room_info[1]) for room_info in area_info]

    if args.parse_normal:
        # load raw mesh file to extract normal
        print("Loading raw mesh file ...")
        for split in args.splits:
            if split != "Area_5":
                mesh_dir = os.path.join(args.raw_root, split, "3d", "rgb.obj")
                mesh = open3d.io.read_triangle_mesh(mesh_dir)
                mesh.triangle_uvs.clear()
            else:
                # Area_5 is shipped as two halves (5a / 5b); transform 5b
                # into 5a's frame before merging.
                mesh_a_dir = os.path.join(args.raw_root, f"{split}a", "3d", "rgb.obj")
                mesh_b_dir = os.path.join(args.raw_root, f"{split}b", "3d", "rgb.obj")
                mesh_a = open3d.io.read_triangle_mesh(mesh_a_dir)
                mesh_a.triangle_uvs.clear()
                mesh_b = open3d.io.read_triangle_mesh(mesh_b_dir)
                mesh_b.triangle_uvs.clear()
                mesh_b = mesh_b.transform(
                    np.array(
                        [
                            [0, 0, -1, -4.09703582],
                            [0, 1, 0, 0],
                            [1, 0, 0, -6.22617759],
                            [0, 0, 0, 1],
                        ]
                    )
                )
                mesh = mesh_a + mesh_b
            area_mesh_dict[split] = mesh
            print(f"{split} mesh is loaded")

    # Preprocess data.
    print("Processing scenes...")
    # Fix: run the executor as a context manager so worker processes are
    # always shut down, even when a worker raises (the previous code never
    # called pool.shutdown()). peak 110G memory when parsing normal.
    with ProcessPoolExecutor(max_workers=args.num_workers) as pool:
        _ = list(
            pool.map(
                parse_room,
                room_list,
                angle_list,
                repeat(args.dataset_root),
                repeat(args.output_root),
                repeat(args.align_angle),
                repeat(args.parse_normal),
            )
        )
231
+
232
if __name__ == "__main__":
    # Script entry point; see main_process for the CLI.
    main_process()
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/sampling_chunking_data.py ADDED
@@ -0,0 +1,149 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Chunking Data
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import os
9
+ import argparse
10
+ import numpy as np
11
+ import multiprocessing as mp
12
+ from concurrent.futures import ProcessPoolExecutor
13
+ from itertools import repeat
14
+ from pathlib import Path
15
+
16
+
17
def chunking_scene(
    name,
    dataset_root,
    split,
    grid_size=None,
    chunk_range=(6, 6),
    chunk_stride=(3, 3),
    chunk_minimum_size=10000,
):
    """Split one scene into overlapping BEV chunks and save them to disk.

    Loads every ``*.npy`` asset of the scene, optionally grid-samples the
    points, slides a ``chunk_range`` window over the xy (bird's-eye-view)
    plane with ``chunk_stride``, and writes each chunk containing at least
    ``chunk_minimum_size`` points to
    ``dataset_root / <derived split name> / f"{name}_{idx}"``.

    Args:
        name (str): Scene folder name inside ``dataset_root / split``.
        dataset_root (str | Path): Root of the processed dataset.
        split (str): Split folder to read from (e.g. "train").
        grid_size (float | None): Optional voxel size for initial grid
            sampling; None disables sampling.
        chunk_range (tuple): (x, y) extent of each chunk.
        chunk_stride (tuple): (x, y) stride between chunk origins.
        chunk_minimum_size (int): Minimum point count for a chunk to be kept.
    """
    print(f"Chunking scene {name} in {split} split")
    dataset_root = Path(dataset_root)
    scene_path = dataset_root / split / name
    assets = os.listdir(scene_path)
    data_dict = dict()
    for asset in assets:
        if not asset.endswith(".npy"):
            continue
        data_dict[asset[:-4]] = np.load(scene_path / asset)
    # Shift coordinates so the scene starts at the origin.
    coord = data_dict["coord"] - data_dict["coord"].min(axis=0)

    if grid_size is not None:
        # Grid sampling: keep one point per voxel of size grid_size.
        grid_coord = np.floor(coord / grid_size).astype(int)
        _, idx = np.unique(grid_coord, axis=0, return_index=True)
        coord = coord[idx]
        for key in data_dict.keys():
            data_dict[key] = data_dict[key][idx]

    bev_range = coord.max(axis=0)[:2]
    # Chunk origins along x and y. BUG FIX: the y-axis arange previously
    # reused bev_range[0] / chunk_stride[0] / chunk_range[0], so the chunk
    # grid along y was derived from the x extent; use the y components.
    x, y = np.meshgrid(
        np.arange(0, bev_range[0] + chunk_stride[0] - chunk_range[0], chunk_stride[0]),
        np.arange(0, bev_range[1] + chunk_stride[1] - chunk_range[1], chunk_stride[1]),
        indexing="ij",
    )
    chunks = np.concatenate([x.reshape([-1, 1]), y.reshape([-1, 1])], axis=-1)
    chunk_idx = 0
    for chunk in chunks:
        # Points inside this chunk's xy window.
        mask = (
            (coord[:, 0] >= chunk[0])
            & (coord[:, 0] < chunk[0] + chunk_range[0])
            & (coord[:, 1] >= chunk[1])
            & (coord[:, 1] < chunk[1] + chunk_range[1])
        )
        if np.sum(mask) < chunk_minimum_size:
            continue

        chunk_data_name = f"{name}_{chunk_idx}"
        if grid_size is not None:
            chunk_split_name = (
                f"{split}_"
                f"grid{grid_size * 100:.0f}mm_"
                f"chunk{chunk_range[0]}x{chunk_range[1]}_"
                f"stride{chunk_stride[0]}x{chunk_stride[1]}"
            )
        else:
            chunk_split_name = (
                f"{split}_"
                f"chunk{chunk_range[0]}x{chunk_range[1]}_"
                f"stride{chunk_stride[0]}x{chunk_stride[1]}"
            )

        chunk_save_path = dataset_root / chunk_split_name / chunk_data_name
        chunk_save_path.mkdir(parents=True, exist_ok=True)
        for key in data_dict.keys():
            np.save(chunk_save_path / f"{key}.npy", data_dict[key][mask])
        chunk_idx += 1
82
+
83
+
84
if __name__ == "__main__":
    # CLI entry point: chunk every scene of a split in parallel.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--dataset_root",
        required=True,
        help="Path to the Pointcept processed ScanNet++ dataset.",
    )
    parser.add_argument(
        "--split",
        required=True,
        default="train",
        type=str,
        help="Split need to process.",
    )
    parser.add_argument(
        "--grid_size",
        default=None,
        type=float,
        help="Grid size for initial grid sampling",
    )
    parser.add_argument(
        "--chunk_range",
        default=[6, 6],
        type=int,
        nargs="+",
        help="Range of each chunk, e.g. --chunk_range 6 6",
    )
    parser.add_argument(
        "--chunk_stride",
        default=[3, 3],
        type=int,
        nargs="+",
        help="Stride of each chunk, e.g. --chunk_stride 3 3",
    )
    parser.add_argument(
        "--chunk_minimum_size",
        default=10000,
        type=int,
        help="Minimum number of points in each chunk",
    )
    parser.add_argument(
        "--num_workers",
        default=mp.cpu_count(),
        type=int,
        help="Num workers for preprocessing.",
    )

    config = parser.parse_args()
    config.dataset_root = Path(config.dataset_root)
    # Every scene folder under <dataset_root>/<split> is processed.
    data_list = os.listdir(config.dataset_root / config.split)

    # Fan chunking_scene out over a process pool; per-scene arguments are
    # broadcast with itertools.repeat.
    print("Processing scenes...")
    pool = ProcessPoolExecutor(max_workers=config.num_workers)
    _ = list(
        pool.map(
            chunking_scene,
            data_list,
            repeat(config.dataset_root),
            repeat(config.split),
            repeat(config.grid_size),
            repeat(config.chunk_range),
            repeat(config.chunk_stride),
            repeat(config.chunk_minimum_size),
        )
    )
    pool.shutdown()
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/dino/prepare_scene_list.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import argparse
3
+ from pathlib import Path
4
+
5
+ import numpy as np
6
+
7
if __name__ == "__main__":
    # Shard the ScanNet v2 train/val scene lists into round-robin sublists
    # (12 train shards, 3 val shards) for parallel processing.
    num_train_list = 12
    num_val_list = 3
    meta_root = Path(os.path.dirname(__file__)).parent / "meta_data"

    # Load train/val splits
    train_scenes = np.loadtxt(meta_root / "scannetv2_train.txt", dtype=str)
    val_scenes = np.loadtxt(meta_root / "scannetv2_val.txt", dtype=str)

    # Slice [i::k] deals scenes round-robin so shard sizes differ by at most 1.
    for i in range(num_train_list):
        np.savetxt(
            meta_root / f"scannetv2_train_{i}.txt",
            train_scenes[i::num_train_list],
            fmt="%s",
        )
    for i in range(num_val_list):
        np.savetxt(
            meta_root / f"scannetv2_val_{i}.txt",
            val_scenes[i::num_val_list],
            fmt="%s",
        )
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/dino/preprocess_dino_feature.py ADDED
@@ -0,0 +1,362 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import argparse
3
+ import einops
4
+ import torch
5
+ import torch.nn.functional as F
6
+ import torchvision
7
+ import tqdm
8
+ import cv2
9
+ import camtools as ct
10
+ import open3d as o3d
11
+ import zlib
12
+ import imageio
13
+ import struct
14
+ import numpy as np
15
+ import torch_scatter
16
+ from pathlib import Path
17
+
18
+
19
+ class RGBDFrame:
20
+ def __init__(self, file_handle):
21
+ self.camera_to_world = np.asarray(
22
+ struct.unpack("f" * 16, file_handle.read(16 * 4)), dtype=np.float32
23
+ ).reshape(4, 4)
24
+ self.timestamp_color = struct.unpack("Q", file_handle.read(8))[0]
25
+ self.timestamp_depth = struct.unpack("Q", file_handle.read(8))[0]
26
+ self.color_size_bytes = struct.unpack("Q", file_handle.read(8))[0]
27
+ self.depth_size_bytes = struct.unpack("Q", file_handle.read(8))[0]
28
+ self.color_data = b"".join(
29
+ struct.unpack(
30
+ "c" * self.color_size_bytes, file_handle.read(self.color_size_bytes)
31
+ )
32
+ )
33
+ self.depth_data = b"".join(
34
+ struct.unpack(
35
+ "c" * self.depth_size_bytes, file_handle.read(self.depth_size_bytes)
36
+ )
37
+ )
38
+
39
+ def decompress_depth(self, compression_type):
40
+ if compression_type == "zlib_ushort":
41
+ return self.decompress_depth_zlib()
42
+ else:
43
+ raise
44
+
45
+ def decompress_depth_zlib(self):
46
+ return zlib.decompress(self.depth_data)
47
+
48
+ def decompress_color(self, compression_type):
49
+ if compression_type == "jpeg":
50
+ return self.decompress_color_jpeg()
51
+ else:
52
+ raise
53
+
54
+ def decompress_color_jpeg(self):
55
+ return imageio.v2.imread(self.color_data)
56
+
57
+
58
+ class SensorData:
59
+ COMPRESSION_TYPE_COLOR = {
60
+ -1: "unknown",
61
+ 0: "raw",
62
+ 1: "png",
63
+ 2: "jpeg",
64
+ }
65
+ COMPRESSION_TYPE_DEPTH = {
66
+ -1: "unknown",
67
+ 0: "raw_ushort",
68
+ 1: "zlib_ushort",
69
+ 2: "occi_ushort",
70
+ }
71
+
72
+ def __init__(self, filename):
73
+ self.version = 4
74
+ f = open(filename, "rb")
75
+ version = struct.unpack("I", f.read(4))[0]
76
+ assert self.version == version
77
+ strlen = struct.unpack("Q", f.read(8))[0]
78
+ self.sensor_name = b"".join(struct.unpack("c" * strlen, f.read(strlen)))
79
+ self.intrinsic_color = np.asarray(
80
+ struct.unpack("f" * 16, f.read(16 * 4)), dtype=np.float32
81
+ ).reshape(4, 4)
82
+ self.extrinsic_color = np.asarray(
83
+ struct.unpack("f" * 16, f.read(16 * 4)), dtype=np.float32
84
+ ).reshape(4, 4)
85
+ self.intrinsic_depth = np.asarray(
86
+ struct.unpack("f" * 16, f.read(16 * 4)), dtype=np.float32
87
+ ).reshape(4, 4)
88
+ self.extrinsic_depth = np.asarray(
89
+ struct.unpack("f" * 16, f.read(16 * 4)), dtype=np.float32
90
+ ).reshape(4, 4)
91
+ self.color_compression_type = self.COMPRESSION_TYPE_COLOR[
92
+ struct.unpack("i", f.read(4))[0]
93
+ ]
94
+ self.depth_compression_type = self.COMPRESSION_TYPE_DEPTH[
95
+ struct.unpack("i", f.read(4))[0]
96
+ ]
97
+ self.color_width = struct.unpack("I", f.read(4))[0]
98
+ self.color_height = struct.unpack("I", f.read(4))[0]
99
+ self.depth_width = struct.unpack("I", f.read(4))[0]
100
+ self.depth_height = struct.unpack("I", f.read(4))[0]
101
+ self.depth_shift = struct.unpack("f", f.read(4))[0]
102
+ self.num_frames = struct.unpack("Q", f.read(8))[0]
103
+ self.file_handle = f
104
+
105
+ def export(
106
+ self,
107
+ frame_skip=20,
108
+ export_color=True,
109
+ export_depth=True,
110
+ export_pose=True,
111
+ ):
112
+ for i in range(self.num_frames):
113
+ if i % frame_skip != 0:
114
+ self.file_handle.seek(16 * 4 + 8 + 8, 1) # skip pose, timestamp
115
+ color_size_bytes = struct.unpack("Q", self.file_handle.read(8))[0]
116
+ depth_size_bytes = struct.unpack("Q", self.file_handle.read(8))[0]
117
+ self.file_handle.seek(color_size_bytes + depth_size_bytes, 1)
118
+ continue
119
+ else:
120
+ frame = RGBDFrame(self.file_handle)
121
+ data_dict = {}
122
+ if export_color:
123
+ color = frame.decompress_color(self.color_compression_type)
124
+ data_dict["color"] = color
125
+ if export_depth:
126
+ depth = frame.decompress_depth(self.depth_compression_type)
127
+ depth = np.frombuffer(depth, dtype=np.uint16).reshape(
128
+ self.depth_height, self.depth_width
129
+ )
130
+ data_dict["depth"] = depth
131
+ if export_pose:
132
+ pose = frame.camera_to_world
133
+ data_dict["pose"] = pose
134
+ yield data_dict
135
+
136
+ def __del__(self):
137
+ self.file_handle.close()
138
+
139
+
140
+ def ray_distance_to_z_depth(ray_depth, K):
141
+ height, width = ray_depth.shape
142
+
143
+ u = np.arange(width)
144
+ v = np.arange(height)
145
+ u_grid, v_grid = np.meshgrid(u, v)
146
+
147
+ fx = K[0, 0]
148
+ fy = K[1, 1]
149
+ cx = K[0, 2]
150
+ cy = K[1, 2]
151
+
152
+ u_norm = (u_grid - cx) / fx
153
+ v_norm = (v_grid - cy) / fy
154
+
155
+ norm_square = u_norm**2 + v_norm**2
156
+
157
+ z_depth = ray_depth / np.sqrt(norm_square + 1)
158
+ return z_depth
159
+
160
+
161
+ def center_crop(image, crop_ratio=1.0, patch_size=None):
162
+ if len(image.shape) == 2:
163
+ height, width = image.shape
164
+ elif len(image.shape) == 3:
165
+ height, width, _ = image.shape
166
+ else:
167
+ raise ValueError("Invalid image shape")
168
+ if patch_size is not None:
169
+ crop_h = int(height * crop_ratio // patch_size * patch_size)
170
+ crop_w = int(width * crop_ratio // patch_size * patch_size)
171
+ else:
172
+ crop_h = int(height * crop_ratio)
173
+ crop_w = int(width * crop_ratio)
174
+
175
+ # Calculate the cropping box
176
+ start_h = (height - crop_h) // 2
177
+ start_w = (width - crop_w) // 2
178
+
179
+ # Perform the center crop
180
+ cropped_image = image[start_h : start_h + crop_h, start_w : start_w + crop_w]
181
+
182
+ return cropped_image
183
+
184
+
185
+ def parsing_scene(
186
+ scene_path,
187
+ output_root,
188
+ split,
189
+ model,
190
+ frame_skip=20,
191
+ grid_size=0.08,
192
+ crop_ratio=0.95,
193
+ device="cuda",
194
+ ):
195
+ print(f"Parsing scene: {scene_path.name}")
196
+ device = torch.device(device)
197
+ scene_path = Path(scene_path)
198
+ sensor_reader = SensorData(scene_path / f"{scene_path.name}.sens")
199
+ mesh = o3d.io.read_triangle_mesh(
200
+ str(scene_path / f"{scene_path.name}_vh_clean_2.ply")
201
+ )
202
+ transform = torchvision.transforms.Compose(
203
+ [
204
+ torchvision.transforms.ToTensor(),
205
+ torchvision.transforms.Normalize(
206
+ mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)
207
+ ),
208
+ ]
209
+ )
210
+ scene_coord = []
211
+ scene_feat = []
212
+ scene_count = []
213
+ for data in tqdm.tqdm(
214
+ sensor_reader.export(frame_skip=frame_skip),
215
+ total=sensor_reader.num_frames // frame_skip,
216
+ ):
217
+ height, width = data["depth"].shape
218
+ K = sensor_reader.intrinsic_depth[:3, :3]
219
+ T = data["pose"]
220
+ if np.isnan(T).any() or np.isinf(T).any():
221
+ continue
222
+ depth = ct.raycast.mesh_to_depth(
223
+ mesh=mesh, K=K, T=np.linalg.inv(T), height=height, width=width
224
+ )
225
+ depth = ray_distance_to_z_depth(depth, K)
226
+ depth = center_crop(depth, crop_ratio, model.patch_size)
227
+ height_, width_ = depth.shape
228
+ pixel = np.transpose(np.indices((width_, height_)), (2, 1, 0))
229
+ pixel = pixel.reshape((-1, 2))
230
+ pixel = np.hstack((pixel, np.ones((pixel.shape[0], 1))))
231
+ depth = depth.reshape((-1, 1))
232
+ valid = ~np.isinf(depth).squeeze(-1)
233
+ coord = depth[valid] * (np.linalg.inv(K) @ pixel[valid].T).T # coord_camera
234
+ coord = coord @ T[:3, :3].T + T[:3, 3]
235
+
236
+ color = cv2.resize(
237
+ data["color"], (width, height), interpolation=cv2.INTER_LINEAR
238
+ )
239
+ color = center_crop(color, crop_ratio, model.patch_size)
240
+ with torch.inference_mode():
241
+ color_t = transform(color).unsqueeze(0).to(device)
242
+ feat_t = model.forward_features(color_t)["x_norm_patchtokens"]
243
+ feat_t = einops.rearrange(
244
+ feat_t, "1 (h w) c -> 1 c h w", w=width_ // model.patch_size
245
+ )
246
+ feat_t = F.interpolate(feat_t, (height_, width_), mode="bilinear")
247
+ feat_t = einops.rearrange(feat_t, "1 c h w -> (h w) c")[valid]
248
+ coord_t = torch.tensor(coord, dtype=torch.float32).to(device)
249
+ scene_coord.append(coord_t)
250
+ scene_feat.append(feat_t)
251
+ scene_count.append(
252
+ torch.ones(coord_t.shape[0], dtype=torch.long, device=device)
253
+ )
254
+ scene_coord = torch.concatenate(scene_coord, dim=0)
255
+ scene_feat = torch.concatenate(scene_feat, dim=0)
256
+ scene_count = torch.concatenate(scene_count, dim=0)
257
+
258
+ # grid sampling
259
+ grid_coord = torch.floor_divide(scene_coord, grid_size).to(torch.int32)
260
+ grid_coord, cluster = torch.unique(
261
+ grid_coord, sorted=True, return_inverse=True, dim=0
262
+ )
263
+ scene_coord = [
264
+ torch_scatter.scatter(scene_coord, cluster, reduce="mean", dim=0)
265
+ ]
266
+ scene_feat = [
267
+ torch_scatter.scatter(scene_feat, cluster, reduce="sum", dim=0)
268
+ ]
269
+ scene_count = [
270
+ torch_scatter.scatter(scene_count, cluster, reduce="sum", dim=0)
271
+ ]
272
+
273
+ # color = color.reshape((-1, 3))[valid]
274
+ # pcd = o3d.geometry.PointCloud()
275
+ # pcd.points = o3d.utility.Vector3dVector(coord)
276
+ # pcd.colors = o3d.utility.Vector3dVector(color / 255)
277
+ # o3d.visualization.draw_geometries([pcd])
278
+
279
+ scene_coord = scene_coord[0]
280
+ scene_feat = scene_feat[0] / scene_count[0].unsqueeze(-1)
281
+
282
+ scene_coord = scene_coord.half().cpu().numpy()
283
+ scene_feat = scene_feat.half().cpu().numpy()
284
+ np.savez(
285
+ Path(output_root) / split / f"{scene_path.name}.npz",
286
+ coord=scene_coord,
287
+ feat=scene_feat,
288
+ )
289
+
290
+
291
+ if __name__ == "__main__":
292
+ parser = argparse.ArgumentParser()
293
+ parser.add_argument(
294
+ "--dataset_root",
295
+ required=True,
296
+ help="Path to the ScanNet dataset containing scene folders",
297
+ )
298
+ parser.add_argument(
299
+ "--output_root",
300
+ required=True,
301
+ help="Output path where train/val folders will be located",
302
+ )
303
+ parser.add_argument(
304
+ "--scene_list",
305
+ required=True,
306
+ help="Path to scene list need to process",
307
+ )
308
+ parser.add_argument(
309
+ "--frame_skip",
310
+ default=10,
311
+ help="Frame skip for processing",
312
+ )
313
+ parser.add_argument(
314
+ "--grid_size",
315
+ default=0.08,
316
+ help="Grid size for sampling",
317
+ )
318
+ parser.add_argument(
319
+ "--crop_ratio",
320
+ default=0.95,
321
+ help="Crop ratio for center crop",
322
+ )
323
+
324
+ args = parser.parse_args()
325
+ scene_list = np.loadtxt(args.scene_list, dtype=str)
326
+ if "train" in args.scene_list:
327
+ split = "train"
328
+ folder = "scans"
329
+ elif "val" in args.scene_list:
330
+ split = "val"
331
+ folder = "scans"
332
+ else:
333
+ split = "test"
334
+ folder = "scans_test"
335
+
336
+ os.makedirs(Path(args.output_root) / split, exist_ok=True)
337
+
338
+ device = torch.device("cuda")
339
+ model = torch.hub.load("facebookresearch/dinov2", "dinov2_vitg14").to(device)
340
+ model.eval()
341
+ for scene in scene_list:
342
+ parsing_scene(
343
+ scene_path=Path(args.dataset_root) / folder / scene,
344
+ output_root=args.output_root,
345
+ split=split,
346
+ frame_skip=args.frame_skip,
347
+ grid_size=args.grid_size,
348
+ crop_ratio=args.crop_ratio,
349
+ model=model,
350
+ device="cuda",
351
+ )
352
+
353
+ # parsing_scene(
354
+ # scene_path=Path("/mnt/e/datasets/raw/scannet/scans/scene0230_00"),
355
+ # output_root=args.output_root,
356
+ # split=split,
357
+ # frame_skip=args.frame_skip,
358
+ # grid_size=args.grid_size,
359
+ # crop_ratio=args.crop_ratio,
360
+ # model=model,
361
+ # device="cuda",
362
+ # )
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/extract_partition.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import shutil
3
+ import argparse
4
+ import torch
5
+ import glob
6
+ import os.path
7
+
8
+ if __name__ == "__main__":
9
+ parser = argparse.ArgumentParser()
10
+ parser.add_argument(
11
+ "--dataset_root",
12
+ required=True,
13
+ help="Path to the ScanNet dataset containing scene folders",
14
+ )
15
+ parser.add_argument(
16
+ "--processed_root",
17
+ required=True,
18
+ help="Path to the processed ScanNet dataset, add partition to test data dict",
19
+ )
20
+ parser.add_argument(
21
+ "--segmentor_root",
22
+ required=True,
23
+ help="Path to Felzenswalb and Huttenlocher's Graph Based Image Segmentation binary",
24
+ )
25
+ parser.add_argument(
26
+ "--split",
27
+ default="test",
28
+ choices=["test", "val"],
29
+ help="Split to process. [test / val]",
30
+ )
31
+ config = parser.parse_args()
32
+ if config.split == "test":
33
+ raw_split = "scans_test"
34
+ else:
35
+ raw_split = "scans"
36
+
37
+ scene_list = glob.glob(os.path.join(config.processed_root, config.split, "*.pth"))
38
+ os.makedirs(os.path.join(config.processed_root, "tmp"), exist_ok=True)
39
+
40
+ for scene in scene_list:
41
+ scene_name = os.path.basename(scene).split(".")[0]
42
+ raw_scene = os.path.join(
43
+ config.dataset_root,
44
+ raw_split,
45
+ scene_name,
46
+ f"{scene_name}_vh_clean_2.ply",
47
+ )
48
+ tmp_scene = os.path.join(
49
+ config.processed_root,
50
+ "tmp",
51
+ f"{scene_name}_vh_clean_2.ply",
52
+ )
53
+ # copy original scene to tmp folder
54
+ shutil.copy(raw_scene, tmp_scene)
55
+ # run segmentor
56
+ process = os.popen(f"{config.segmentor_root} {tmp_scene}")
57
+ print(process.read())
58
+ process.close()
59
+ # load partition file
60
+ partition_file = tmp_scene.replace(".ply", ".0.010000.segs.json")
61
+ with open(partition_file) as f:
62
+ partition = json.load(f)["segIndices"]
63
+ data_dict = torch.load(scene)
64
+ data_dict["partition"] = partition
65
+ torch.save(data_dict, scene)
66
+ # clean tmp
67
+ os.remove(partition_file)
68
+ os.remove(tmp_scene)
69
+ print(f"Adding partition information to {scene_name}")
70
+
71
+ os.rmdir(os.path.join(config.processed_root, "tmp"))
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/meta_data/__pycache__/scannet200_constants.cpython-310.pyc ADDED
Binary file (12.5 kB). View file
 
gf_s3dis_ss_0.05/latent-fusion-r-0.5/code/pointcept/datasets/preprocessing/scannet/meta_data/classes_ObjClassification-ShapeNetCore55.txt ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 1 trash
2
+ 3 basket
3
+ 4 bathtub
4
+ 5 bed
5
+ 9 shelf
6
+ 13 cabinet
7
+ 18 chair
8
+ 20 keyboard
9
+ 22 tv
10
+ 30 lamp
11
+ 31 laptop
12
+ 35 microwave
13
+ 39 pillow
14
+ 42 printer
15
+ 47 sofa
16
+ 48 stove
17
+ 49 table