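# Scratch notes: three alternative (commented-out) approaches to downloading
# and loading .safetensors point-map shards from the MatchLab/PointMapVerse
# dataset repo on the Hugging Face Hub.
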
# from huggingface_hub import list_repo_files, hf_hub_download
# import os

# # Optional: choose your loader (the torch.load fallback is an assumption
# # for non-safetensors checkpoints)
# USE_SAFETENSORS = True
# if USE_SAFETENSORS:
#     from safetensors.torch import load_file as model_loader
# else:
#     import torch
#     model_loader = torch.load  # keeps model_loader defined either way

# # Step 1: List all files in the dataset repo
# repo_id = "MatchLab/PointMapVerse"
# all_files = list_repo_files(repo_id=repo_id, repo_type="dataset")

# # Step 2: Automatically detect all subfolders (first-level only)
# subfolders = set(f.split('/')[0] for f in all_files if '/' in f)
# print(f"Detected subfolders: {subfolders}")

# # Step 3: Collect target files (only .safetensors inside the chosen subfolders)
# target_files = [f for f in all_files
#                 if f.split('/')[0] in ['light_arkitscenes'] and f.endswith('.safetensors')]

# print(f"Found {len(target_files)} .safetensors files in subfolders.")


# for file_path in target_files:
#     print(f"Caching: {file_path}")
#     cached_file = hf_hub_download(
#         repo_id=repo_id,
#         filename=file_path,
#         repo_type="dataset",
#         local_files_only=False,
#         resume_download=True,
#     )
#     # Optional: Load into memory
#     data = model_loader(cached_file)
#     print(data['point_map'].shape)
#     print(f"Loaded: {file_path}, keys: {list(data.keys())}")
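
# # Optional sanity check (a sketch, not part of the original snippet): this
# # assumes every shard exposes a 'point_map' tensor, as the loop above
# # suggests, with (x, y, z) coordinates in the last dimension.
# shapes = set()
# for file_path in target_files:
#     cached = hf_hub_download(repo_id=repo_id, filename=file_path,
#                              repo_type="dataset")
#     t = model_loader(cached)["point_map"]
#     shapes.add((t.dtype, t.shape[-1]))
# print(f"Distinct (dtype, last-dim) pairs: {shapes}")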

# # import os
# # import glob
# # from safetensors.torch import load_file

# # repo_id = "MatchLab/PointMapVerse"

# # # Step 1: Download & cache the dataset snapshot
# # from huggingface_hub import snapshot_download
# # local_dir = snapshot_download(
# #     repo_id=repo_id,
# #     repo_type="dataset",
# #     allow_patterns=["light_scannet/*", "light_3rscan/*", "light_arkitscenes/*"],   # include just these subfolders
# # )

# # print(f"Local dataset directory: {local_dir}")


# # # Step 2: Find all .safetensors files inside the target subfolders
# # file_paths = glob.glob(os.path.join(local_dir, "light_*", "*.safetensors"))
# # print(f"Found {len(file_paths)} .safetensors files")
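
# # # Step 3 (a sketch, not in the original snippet; assumes the same
# # # 'point_map' key used in the blocks above): load one shard to verify.
# # if file_paths:
# #     sample = load_file(file_paths[0])
# #     print(f"keys={list(sample.keys())}, point_map shape={sample['point_map'].shape}")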


# import os
# from safetensors.torch import load_file
# from huggingface_hub import hf_hub_download, list_repo_files

# repo_id = "MatchLab/PointMapVerse"
# subfolders = ["light_scannet", "light_3rscan", "light_arkitscenes"]
# # Enumerate the repo once so we know which filenames live in each subfolder
# repo_files = list_repo_files(repo_id=repo_id, repo_type="dataset")

# all_files = []
# for sub in subfolders:
#     filenames = [f.split("/", 1)[1] for f in repo_files if f.startswith(f"{sub}/")]
#     for fname in filenames:
#         try:
#             cached_path = hf_hub_download(
#                 repo_id=repo_id,
#                 repo_type="dataset",
#                 filename=f"{sub}/{fname}",
#                 local_files_only=False,   # set True if you already downloaded and cached
#                 resume_download=True,
#             )
#             all_files.append(cached_path)
#         except Exception as e:
#             print(f"⚠️ Could not download {sub}/{fname}: {e}")

# print(f"Downloaded {len(all_files)} files")

# # Step 2: Load the files
# for path in all_files:
#     data = load_file(path)  # dict-like object
#     print(
#         f"Loaded {os.path.basename(path)}: "
#         f"keys={list(data.keys())}, "
#         f"point_map shape={data['point_map'].shape}"
#     )