Darioli commited on
Commit
cbb60e2
·
verified ·
1 Parent(s): 9ebfeea

Upload reader.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. reader.py +194 -0
reader.py ADDED
@@ -0,0 +1,194 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import h5py
2
+
3
class MicronsReader:
    """Read-only accessor for a MICrONS-style HDF5 file.

    Layout assumed by the accessors below (inferred from the paths used here
    — confirm against the file writer):
      videos/<encoded_hash>/clip              : stimulus movie dataset
      videos/<encoded_hash>/instances/*       : SoftLinks to trial groups
      sessions/<key>/meta/condition_hashes    : hashes shown in a session
      sessions/<key>/meta/area_indices/<area> : row indices per brain area
      types/<stim_type>/<encoded_hash>        : hashes grouped by stimulus type
      brain_areas/<area>                      : global list of recorded areas

    Hashes may contain '/', which is illegal in HDF5 keys, so they are stored
    percent-encoded ('%2F'); see _encode_hash/_decode_hash.
    """

    def __init__(self, file_path):
        """
        Initialize the reader.
        Opening in read-only mode ('r') is faster and prevents accidental corruption.
        """
        self.file_path = file_path
        self.f = h5py.File(self.file_path, 'r')

    def close(self):
        """Close the file handle manually."""
        self.f.close()

    def __enter__(self):
        """Support `with MicronsReader(path) as r:` usage."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Close the underlying HDF5 file on context exit."""
        self.close()

    def get_full_data_by_hash(self, condition_hash, brain_area=None):
        """
        Returns a dictionary with the clip and all trials (responses, behavior,
        pupil, times) associated with a hash.

        Args:
            condition_hash (str): The identifier for the video.
            brain_area (str, optional): Filter for neural responses.

        Returns:
            dict: {
                'clip': np.array,
                'stim_type': str,
                'trials': [
                    {'session': str, 'trial_idx': str, 'responses': np.array, ...}, ...
                ]
            }
            or None if the hash is not present in the file.
        """
        # 1. Reuse get_video_data for stimulus info.
        # NOTE: get_video_data() encodes the hash itself, so we pass the raw
        # hash here. (The original passed an already-encoded key; that only
        # worked because _encode_hash happens to be idempotent.)
        clip, stim_type = self.get_video_data(condition_hash)
        if clip is None:
            return None

        h_key = self._encode_hash(condition_hash)
        data_out = {
            'clip': clip,
            'stim_type': stim_type,
            'trials': []
        }

        # 2. Access instances (SoftLinks to the per-session trial groups).
        instances = self.f[f'videos/{h_key}']['instances']

        for instance_name in instances:
            trial_grp = instances[instance_name]

            # Instance names start with "<animal>_<session>_..."; the first
            # two underscore-separated tokens identify the parent session.
            session_key = "_".join(instance_name.split('_')[:2])

            # 3. Handle neural responses, optionally restricted to one area.
            if brain_area:
                area_path = f"sessions/{session_key}/meta/area_indices/{brain_area}"
                if area_path not in self.f:
                    continue  # Skip session if area not recorded
                indices = self.f[area_path][:]
                responses = trial_grp['responses'][indices, :]
            else:
                responses = trial_grp['responses'][:]

            # 4. Aggregate all datasets in the trial folder.
            data_out['trials'].append({
                'session': session_key,
                'trial_idx': trial_grp.name.split('/')[-1],
                'responses': responses,
                'behavior': trial_grp['behavior'][:],
                'pupil_center': trial_grp['pupil_center'][:],
            })

        return data_out

    def get_responses_by_hash(self, condition_hash, brain_area=None):
        """Retrieves only neural responses associated with a hash across sessions."""
        # Note: This is now essentially a subset of get_full_data_by_hash
        full_data = self.get_full_data_by_hash(condition_hash, brain_area=brain_area)
        if full_data is None:
            return []

        return [
            {
                'session': t['session'],
                'trial_idx': t['trial_idx'],
                'responses': t['responses']
            }
            for t in full_data['trials']
        ]

    def _encode_hash(self, h):
        """Helper to convert a real hash into an HDF5-safe key.

        Idempotent: encoding an already-encoded key is a no-op, since the
        replacement removes every '/' and introduces none.
        """
        return h.replace('/', '%2F')

    def _decode_hash(self, h):
        """Inverse of _encode_hash: recover the original hash string."""
        return h.replace('%2F', '/')

    def get_video_data(self, condition_hash):
        """Return (clip, stim_type) for a hash, or (None, None) if absent.

        Accepts the raw (unencoded) condition hash; encoding is done here.
        """
        h_key = self._encode_hash(condition_hash)
        video_path = f"videos/{h_key}"

        if video_path not in self.f:
            return None, None

        vid_grp = self.f[video_path]
        clip = vid_grp['clip'][:]
        # 'type' is stored as a group attribute; default keeps callers safe
        # when the attribute was never written.
        stim_type = vid_grp.attrs.get('type', 'Unknown')
        return clip, stim_type

    def get_hashes_by_session(self, session_key, return_unique=False):
        """Returns the condition hashes shown in a specific session.

        Args:
            session_key (str): Session identifier under 'sessions/'.
            return_unique (bool): If True, return a set instead of a list.

        Raises:
            ValueError: If the session is not present in the file.
        """
        if session_key not in self.f['sessions']:
            raise ValueError(f"Session {session_key} not found.")
        raw = self.f[f'sessions/{session_key}/meta/condition_hashes'][:]
        # Stored as bytes; decode to str and undo the '/' escaping once,
        # instead of duplicating the comprehension in both branches.
        decoded = [self._decode_hash(h.decode('utf-8')) for h in raw]
        return set(decoded) if return_unique else decoded

    def get_hashes_by_type(self, stim_type):
        """Returns hashes belonging to a specific type (e.g., 'Monet2')."""
        if stim_type not in self.f['types']:
            return []
        encoded_keys = list(self.f[f'types/{stim_type}'].keys())
        return [self._decode_hash(k) for k in encoded_keys]

    def get_available_brain_areas(self, session_key=None):
        """Returns a list of brain areas available in the file or a specific session."""
        if session_key:
            return list(self.f[f'sessions/{session_key}/meta/area_indices'].keys())
        return list(self.f['brain_areas'].keys())

    def print_structure(self, max_items=5, follow_links=False):
        """
        Prints a tree-like representation of the HDF5 database.

        Args:
            max_items (int): Max children to show per group.
            follow_links (bool): If True, recurses into SoftLinks (original behavior).
                                 If False, prints the link destination and stops.
        """
        print(f"\nStructure of: {self.file_path}")
        print("=" * 50)

        def _print_tree(name, obj, indent="", current_key=""):
            item_name = current_key if current_key else name

            # Dataset vs Group handling
            if isinstance(obj, h5py.Dataset):
                print(f"{indent}📄 {item_name:20} [Dataset: {obj.shape}, {obj.dtype}]")
                return

            # It's a Group
            attrs = dict(obj.attrs)
            attr_str = f" | Attributes: {attrs}" if attrs else ""
            print(f"{indent}📂 {item_name.upper()}/ {attr_str}")

            keys = sorted(obj.keys())
            num_keys = len(keys)
            display_keys = keys[:max_items]

            for key in display_keys:
                # Check link status without dereferencing the target
                link_obj = obj.get(key, getlink=True)

                if isinstance(link_obj, h5py.SoftLink):
                    if follow_links:
                        _print_tree(key, obj[key], indent + "    ", current_key=key)
                    else:
                        print(f"{indent}    🔗 {key:18} -> {link_obj.path}")
                else:
                    # It is a real Group or Dataset
                    _print_tree(key, obj[key], indent + "    ", current_key=key)

            if num_keys > max_items:
                print(f"{indent}    ... and {num_keys - max_items} more items")

        # Start recursion from the root-level keys, which are real objects.
        for key in sorted(self.f.keys()):
            _print_tree(key, self.f[key], current_key=key)