# MICrONS / reader.py
# Uploaded by Darioli via huggingface_hub (revision 18f3dbc, verified)
import h5py
class MicronsReader:
    """Read-only accessor for a MICrONS HDF5 database.

    File layout assumed by the methods below::

        videos/<encoded_hash>/clip                      (dataset)
        videos/<encoded_hash>/instances/<name>          (SoftLinks to trials)
        sessions/<session>/meta/condition_hashes        (dataset of bytes)
        sessions/<session>/meta/area_indices/<area>     (dataset of indices)
        types/<stim_type>/<encoded_hash>
        brain_areas/<area>

    Condition hashes may contain '/', which is the HDF5 path separator,
    so they are stored under percent-escaped keys (see _encode_hash).
    """

    def __init__(self, file_path):
        """Open the HDF5 file.

        Opening in read-only mode ('r') is faster and prevents accidental
        corruption.

        Args:
            file_path (str): Path to the HDF5 database.
        """
        self.file_path = file_path
        self.f = h5py.File(self.file_path, 'r')

    def close(self):
        """Close the file handle manually."""
        self.f.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always release the handle; returning None lets exceptions propagate.
        self.close()

    def get_full_data_by_hash(self, condition_hash, brain_area=None):
        """Return the clip and all trials associated with a hash.

        Args:
            condition_hash (str): The identifier for the video (raw,
                un-encoded form).
            brain_area (str, optional): Restrict neural responses to the
                units recorded in this area; sessions that lack the area
                are skipped entirely.

        Returns:
            dict or None: None if the hash is unknown, otherwise::

                {
                    'clip': np.array,
                    'stim_type': str,
                    'trials': [
                        {'session': str, 'trial_idx': str,
                         'responses': np.array, 'behavior': np.array,
                         'pupil_center': np.array}, ...
                    ]
                }
        """
        # 1. Reuse get_video_data for the stimulus info. It encodes the
        #    hash itself, so we pass the RAW hash here. (The previous
        #    version encoded first and then passed the encoded key in,
        #    which only worked because _encode_hash is idempotent.)
        clip, stim_type = self.get_video_data(condition_hash)
        if clip is None:
            return None

        h_key = self._encode_hash(condition_hash)
        data_out = {
            'clip': clip,
            'stim_type': stim_type,
            'trials': [],
        }

        # 2. 'instances' holds SoftLinks to the trial groups for every
        #    presentation of this video.
        video_grp = self.f[f'videos/{h_key}']
        instances = video_grp['instances']
        for instance_name in instances:
            trial_grp = instances[instance_name]
            # The first two underscore-separated tokens of the instance
            # name identify the parent session (needed for area lookups).
            session_key = "_".join(instance_name.split('_')[:2])

            # 3. Neural responses, optionally filtered by brain area.
            if brain_area:
                area_path = f"sessions/{session_key}/meta/area_indices/{brain_area}"
                if area_path not in self.f:
                    continue  # Skip session if area not recorded.
                indices = self.f[area_path][:]
                responses = trial_grp['responses'][indices, :]
            else:
                responses = trial_grp['responses'][:]

            # 4. Aggregate all datasets in the trial folder.
            data_out['trials'].append({
                'session': session_key,
                'trial_idx': trial_grp.name.split('/')[-1],
                'responses': responses,
                'behavior': trial_grp['behavior'][:],
                'pupil_center': trial_grp['pupil_center'][:],
            })
        return data_out

    def get_responses_by_hash(self, condition_hash, brain_area=None):
        """Retrieve only the neural responses associated with a hash.

        Thin projection over get_full_data_by_hash: same trial set, but
        each entry keeps only session, trial index, and responses.
        """
        full_data = self.get_full_data_by_hash(condition_hash, brain_area=brain_area)
        if full_data is None:
            return []
        return [
            {
                'session': t['session'],
                'trial_idx': t['trial_idx'],
                'responses': t['responses'],
            }
            for t in full_data['trials']
        ]

    def _encode_hash(self, h):
        """Convert a real hash into an HDF5-safe key.

        '/' is the HDF5 group separator, so it is percent-escaped.
        Idempotent: encoding an already-encoded key is a no-op.
        """
        return h.replace('/', '%2F')

    def _decode_hash(self, h):
        """Inverse of _encode_hash."""
        return h.replace('%2F', '/')

    def get_video_data(self, condition_hash):
        """Return (clip, stim_type) for a raw condition hash.

        Returns:
            tuple: (np.array, str), or (None, None) if the hash is not
            present in the file. stim_type falls back to 'Unknown' when
            the group carries no 'type' attribute.
        """
        h_key = self._encode_hash(condition_hash)
        video_path = f"videos/{h_key}"
        if video_path not in self.f:
            return None, None
        vid_grp = self.f[video_path]
        clip = vid_grp['clip'][:]
        stim_type = vid_grp.attrs.get('type', 'Unknown')
        return clip, stim_type

    def get_hashes_by_session(self, session_key, return_unique=False):
        """Return the condition hashes shown in a specific session.

        Args:
            session_key (str): Session identifier.
            return_unique (bool): If True, return a set of unique hashes;
                otherwise the full presentation-ordered list (which may
                contain repeats).

        Raises:
            ValueError: If the session does not exist.
        """
        if session_key not in self.f['sessions']:
            raise ValueError(f"Session {session_key} not found.")
        raw = self.f[f'sessions/{session_key}/meta/condition_hashes'][:]
        # Decode once instead of building the same list twice.
        decoded = [self._decode_hash(h.decode('utf-8')) for h in raw]
        return set(decoded) if return_unique else decoded

    def get_hashes_by_type(self, stim_type):
        """Return hashes belonging to a specific type (e.g., 'Monet2')."""
        if stim_type not in self.f['types']:
            return []
        encoded_keys = list(self.f[f'types/{stim_type}'].keys())
        return [self._decode_hash(k) for k in encoded_keys]

    def get_available_brain_areas(self, session_key=None):
        """Return the brain areas available in the file or one session."""
        if session_key:
            return list(self.f[f'sessions/{session_key}/meta/area_indices'].keys())
        return list(self.f['brain_areas'].keys())

    def count_trials_per_hash(self):
        """Map each encoded video key to its number of trial instances."""
        return {k: len(v['instances']) for k, v in self.f['videos'].items()}

    def print_structure(self, max_items=5, follow_links=False):
        """Print a tree-like representation of the HDF5 database.

        Args:
            max_items (int): Max children to show per group.
            follow_links (bool): If True, recurse into SoftLinks (original
                behavior). If False, print the link destination and stop.
        """
        print(f"\nStructure of: {self.file_path}")
        print("=" * 50)

        def _print_tree(name, obj, indent="", current_key=""):
            item_name = current_key if current_key else name
            # Datasets are leaves: print shape/dtype and stop.
            if isinstance(obj, h5py.Dataset):
                print(f"{indent}📄 {item_name:20} [Dataset: {obj.shape}, {obj.dtype}]")
                return
            # Otherwise it's a Group.
            attrs = dict(obj.attrs)
            attr_str = f" | Attributes: {attrs}" if attrs else ""
            print(f"{indent}📂 {item_name.upper()}/ {attr_str}")
            keys = sorted(obj.keys())
            num_keys = len(keys)
            for key in keys[:max_items]:
                # Inspect the link object itself (getlink=True) so we can
                # report SoftLinks without dereferencing them.
                link_obj = obj.get(key, getlink=True)
                if isinstance(link_obj, h5py.SoftLink) and not follow_links:
                    print(f"{indent}  🔗 {key:18} -> {link_obj.path}")
                else:
                    # A real Group/Dataset, or a SoftLink we were asked
                    # to follow.
                    _print_tree(key, obj[key], indent + "  ", current_key=key)
            if num_keys > max_items:
                print(f"{indent}  ... and {num_keys - max_items} more items")

        # Root-level keys are treated as real objects to start recursion.
        for key in sorted(self.f.keys()):
            _print_tree(key, self.f[key], current_key=key)