File size: 7,729 Bytes
cbb60e2 18f3dbc cbb60e2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 |
import h5py
class MicronsReader:
    """Read-only accessor for a MICrONS-style HDF5 database.

    Expected file layout (as used by the methods below):
        videos/<key>/clip                     -- stimulus movie dataset
        videos/<key>/instances/*              -- SoftLinks to trial groups
        sessions/<key>/meta/condition_hashes  -- bytes dataset of shown hashes
        sessions/<key>/meta/area_indices/<a>  -- neuron indices per brain area
        types/<stim_type>/<key>               -- hashes grouped by stimulus type
        brain_areas/*                         -- global list of recorded areas

    Condition hashes may contain '/', which is illegal in HDF5 keys, so they
    are stored percent-encoded ('/' -> '%2F'); see _encode_hash/_decode_hash.
    All public methods take and return *raw* (decoded) hashes.
    """

    def __init__(self, file_path):
        """
        Initialize the reader.
        Opening in read-only mode ('r') is faster and prevents accidental corruption.
        """
        self.file_path = file_path
        self.f = h5py.File(self.file_path, 'r')

    def close(self):
        """Close the file handle manually."""
        self.f.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        return False  # never suppress exceptions from the with-block

    def get_full_data_by_hash(self, condition_hash, brain_area=None):
        """
        Returns a dictionary with the clip and all trials (responses, behavior,
        pupil, times) associated with a hash.
        Args:
            condition_hash (str): The raw (decoded) identifier for the video.
            brain_area (str, optional): Filter for neural responses. Sessions
                that did not record this area are skipped entirely.
        Returns:
            dict or None: None if the hash is unknown, otherwise
            {
                'clip': np.array,
                'stim_type': str,
                'trials': [
                    {'session': str, 'trial_idx': str, 'responses': np.array, ...}, ...
                ]
            }
        """
        # 1. Reuse get_video_data for stimulus info.
        #    Pass the RAW hash: get_video_data encodes internally. (Passing a
        #    pre-encoded key would encode twice, which silently corrupts any
        #    hash that legitimately contains the literal substring '%2F'.)
        clip, stim_type = self.get_video_data(condition_hash)
        if clip is None:
            return None
        h_key = self._encode_hash(condition_hash)
        data_out = {
            'clip': clip,
            'stim_type': stim_type,
            'trials': []
        }
        # 2. Access instances (SoftLinks to trial groups)
        video_grp = self.f[f'videos/{h_key}']
        instances = video_grp['instances']
        for instance_name in instances:
            trial_grp = instances[instance_name]
            # Instance names start with '<animal>_<scan>'; the first two
            # underscore-separated fields identify the parent session, which
            # is where the brain-area index lookup lives.
            session_key = "_".join(instance_name.split('_')[:2])
            # 3. Handle neural responses, optionally restricted to one area
            if brain_area:
                area_path = f"sessions/{session_key}/meta/area_indices/{brain_area}"
                if area_path not in self.f:
                    continue  # Skip session if area not recorded
                indices = self.f[area_path][:]
                responses = trial_grp['responses'][indices, :]
            else:
                responses = trial_grp['responses'][:]
            # 4. Aggregate all datasets in the trial folder
            data_out['trials'].append({
                'session': session_key,
                'trial_idx': trial_grp.name.split('/')[-1],
                'responses': responses,
                'behavior': trial_grp['behavior'][:],
                'pupil_center': trial_grp['pupil_center'][:],
            })
        return data_out

    def get_responses_by_hash(self, condition_hash, brain_area=None):
        """Retrieves only neural responses associated with a hash across sessions.

        Thin projection of get_full_data_by_hash: same lookup, but each trial
        dict carries only 'session', 'trial_idx' and 'responses'. Returns []
        (not None) when the hash is unknown.
        """
        full_data = self.get_full_data_by_hash(condition_hash, brain_area=brain_area)
        if full_data is None:
            return []
        return [
            {
                'session': t['session'],
                'trial_idx': t['trial_idx'],
                'responses': t['responses']
            }
            for t in full_data['trials']
        ]

    def _encode_hash(self, h):
        """Convert a raw hash into an HDF5-safe key ('/' is a group separator)."""
        return h.replace('/', '%2F')

    def _decode_hash(self, h):
        """Inverse of _encode_hash."""
        return h.replace('%2F', '/')

    def get_video_data(self, condition_hash):
        """Return (clip, stim_type) for a raw hash, or (None, None) if unknown.

        stim_type falls back to 'Unknown' when the group has no 'type' attribute.
        """
        h_key = self._encode_hash(condition_hash)
        video_path = f"videos/{h_key}"
        if video_path not in self.f:
            return None, None
        vid_grp = self.f[video_path]
        clip = vid_grp['clip'][:]
        stim_type = vid_grp.attrs.get('type', 'Unknown')
        return clip, stim_type

    def get_hashes_by_session(self, session_key, return_unique=False):
        """Returns the condition hashes shown in a specific session.

        Args:
            session_key (str): Session identifier under 'sessions/'.
            return_unique (bool): If True, return a set (duplicates dropped);
                otherwise a list in presentation order.
        Raises:
            ValueError: If the session does not exist in the file.
        """
        if session_key not in self.f['sessions']:
            raise ValueError(f"Session {session_key} not found.")
        hashes = self.f[f'sessions/{session_key}/meta/condition_hashes'][:]
        # Stored as bytes and percent-encoded; decode once, branch after.
        decoded = [self._decode_hash(h.decode('utf-8')) for h in hashes]
        return set(decoded) if return_unique else decoded

    def get_hashes_by_type(self, stim_type):
        """Returns hashes belonging to a specific type (e.g., 'Monet2')."""
        if stim_type not in self.f['types']:
            return []
        encoded_keys = list(self.f[f'types/{stim_type}'].keys())
        return [self._decode_hash(k) for k in encoded_keys]

    def get_available_brain_areas(self, session_key=None):
        """Returns a list of brain areas available in the file or a specific session."""
        if session_key:
            return list(self.f[f'sessions/{session_key}/meta/area_indices'].keys())
        return list(self.f['brain_areas'].keys())

    def count_trials_per_hash(self):
        """Map each (encoded) video key to its number of trial instances."""
        return {k: len(v['instances']) for k, v in self.f['videos'].items()}

    def print_structure(self, max_items=5, follow_links=False):
        """
        Prints a tree-like representation of the HDF5 database.
        Args:
            max_items (int): Max children to show per group.
            follow_links (bool): If True, recurses into SoftLinks (original behavior).
                                 If False, prints the link destination and stops.
        """
        print(f"\nStructure of: {self.file_path}")
        print("=" * 50)

        def _print_tree(name, obj, indent="", current_key=""):
            item_name = current_key if current_key else name
            # Datasets are leaves: print shape/dtype and stop.
            if isinstance(obj, h5py.Dataset):
                print(f"{indent}📄 {item_name:20} [Dataset: {obj.shape}, {obj.dtype}]")
                return
            # It's a Group
            attrs = dict(obj.attrs)
            attr_str = f" | Attributes: {attrs}" if attrs else ""
            print(f"{indent}📂 {item_name.upper()}/ {attr_str}")
            keys = sorted(obj.keys())
            num_keys = len(keys)
            display_keys = keys[:max_items]
            for key in display_keys:
                # getlink=True returns the link object itself, letting us
                # detect SoftLinks without dereferencing them (which could
                # recurse into shared trial groups many times over).
                link_obj = obj.get(key, getlink=True)
                if isinstance(link_obj, h5py.SoftLink):
                    if follow_links:
                        _print_tree(key, obj[key], indent + "    ", current_key=key)
                    else:
                        print(f"{indent}  🔗 {key:18} -> {link_obj.path}")
                else:
                    # It is a real Group or Dataset
                    _print_tree(key, obj[key], indent + "    ", current_key=key)
            if num_keys > max_items:
                print(f"{indent}  ... and {num_keys - max_items} more items")

        # Root-level keys are always real objects; start recursion there.
        for key in sorted(self.f.keys()):
            _print_tree(key, self.f[key], current_key=key)