idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
23,500
def display_markers(self):
    """Draw all dataset markers on the overview scene.

    Previously drawn marker rectangles are removed first; new ones are
    only drawn when the dataset has markers and 'marker_show' is set.
    """
    for old_rect in self.idx_markers:
        self.scene.removeItem(old_rect)
    self.idx_markers = []

    markers = []
    if self.parent.info.markers is not None and self.parent.value('marker_show'):
        markers = self.parent.info.markers

    for marker in markers:
        rect = QGraphicsRectItem(marker['start'],
                                 BARS['markers']['pos0'],
                                 marker['end'] - marker['start'],
                                 BARS['markers']['pos1'])
        self.scene.addItem(rect)
        marker_color = QColor(self.parent.value('marker_color'))
        rect.setPen(QPen(marker_color))
        rect.setBrush(QBrush(marker_color))
        rect.setZValue(-5)  # keep markers behind other scene items
        self.idx_markers.append(rect)
Mark all the markers from the dataset .
23,501
def mark_stages(self, start_time, length, stage_name):
    """Mark stages, only adding the new one.

    Parameters
    ----------
    start_time : int or float
        start time (in s) of the scored epoch
    length : int or float
        duration (in s) of the scored epoch
    stage_name : str
        one of the stages in the global STAGES dict; unknown names fall
        back to STAGES['Unknown']
    """
    y_pos = BARS['stage']['pos0']
    current_stage = STAGES.get(stage_name, STAGES['Unknown'])
    # probe the middle of the epoch, just inside the bottom edge of this
    # stage's band, to find a previously drawn score rectangle
    old_score = self.scene.itemAt(start_time + length / 2,
                                  y_pos + current_stage['pos0'] +
                                  current_stage['pos1'] - 1,
                                  self.transform())
    # only score rectangles are drawn with NoPen, so this check avoids
    # removing unrelated scene items
    if old_score is not None and old_score.pen() == NoPen:
        lg.debug('Removing old score at {}'.format(start_time))
        self.scene.removeItem(old_score)
        self.idx_annot.remove(old_score)
    rect = QGraphicsRectItem(start_time,
                             y_pos + current_stage['pos0'],
                             length,
                             current_stage['pos1'])
    rect.setPen(NoPen)
    rect.setBrush(current_stage['color'])
    self.scene.addItem(rect)
    self.idx_annot.append(rect)
Mark stages only add the new ones .
23,502
def mark_quality(self, start_time, length, qual_name):
    """Mark signal quality, only adding the new one.

    A black bar is drawn for 'Poor' epochs; any previous score
    rectangle at the same position is removed first.
    """
    y_pos = BARS['quality']['pos0']
    height = 10

    # probe the middle of the epoch, just inside the quality band
    probe_x = start_time + length / 2
    probe_y = y_pos + height - 1
    previous = self.scene.itemAt(probe_x, probe_y, self.transform())

    # only our own score items are drawn without a pen
    if previous is not None and previous.pen() == NoPen:
        lg.debug('Removing old score at {}'.format(start_time))
        self.scene.removeItem(previous)
        self.idx_annot.remove(previous)

    if qual_name == 'Poor':
        bar = QGraphicsRectItem(start_time, y_pos, length, height)
        bar.setPen(NoPen)
        bar.setBrush(Qt.black)
        self.scene.addItem(bar)
        self.idx_annot.append(bar)
Mark signal quality only add the new ones .
23,503
def mark_cycles(self, start_time, length, end=False):
    """Mark a cycle bound, only adding the new one.

    Parameters
    ----------
    start_time : int or float
        start time (in s) of the cycle marker
    length : int or float
        duration (in s) used to probe for an existing marker
    end : bool
        if True, draw the end-of-cycle "kink" (two thin horizontal bars)
        shifted 120 s to the left
    """
    y_pos = STAGES['cycle']['pos0']
    height = STAGES['cycle']['pos1']
    color = STAGES['cycle']['color']
    # probe the middle of the span for a previously drawn marker
    old_rect = self.scene.itemAt(start_time + length / 2,
                                 y_pos + height - 1,
                                 self.transform())
    # only our own markers are drawn with NoPen
    if old_rect is not None and old_rect.pen() == NoPen:
        lg.debug('Removing old score at {}'.format(start_time))
        self.scene.removeItem(old_rect)
        self.idx_annot.remove(old_rect)
    # fixed-width (30) vertical tick at the cycle bound
    rect = QGraphicsRectItem(start_time, y_pos, 30, height)
    rect.setPen(NoPen)
    rect.setBrush(color)
    self.scene.addItem(rect)
    self.idx_annot.append(rect)

    if end:
        # end marker: two 150-wide, 1-high bars bracketing the band
        start_time -= 120
        kink_hi = QGraphicsRectItem(start_time, y_pos, 150, 1)
        kink_hi.setPen(NoPen)
        kink_hi.setBrush(color)
        self.scene.addItem(kink_hi)
        self.idx_annot.append(kink_hi)
        kink_lo = QGraphicsRectItem(start_time, y_pos + height, 150, 1)
        kink_lo.setPen(NoPen)
        kink_lo.setBrush(color)
        self.scene.addItem(kink_lo)
        self.idx_annot.append(kink_lo)
Mark cycle bound only add the new one .
23,504
def mousePressEvent(self, event):
    """Jump to the window the user clicked on in the overview."""
    if self.scene is None:
        return

    scene_x = self.mapToScene(event.pos()).x()
    win_len = self.parent.value('window_length')
    # snap the click position to the start of its enclosing window
    win_start = int(floor(scene_x / win_len) * win_len)

    annot = self.parent.notes.annot
    if annot is not None:
        # align to annotation epochs when a score file is loaded
        win_start = annot.get_epoch_start(win_start)

    self.update_position(win_start)
Jump to window when user clicks on overview .
23,505
def reset(self):
    """Reset the widget to its initial state and clear the scene."""
    self.minimum = None
    self.maximum = None
    self.start_time = None
    self.idx_current = None
    self.idx_markers = []
    self.idx_annot = []

    if self.scene is None:
        return
    self.scene.clear()
    self.scene = None
Reset the widget and clear the scene .
23,506
def _prepare_colors(color, values, limits_c, colormap, alpha, chan=None):
    """Return colors for all the channels based on various inputs.

    Priority: per-item `values` mapped through `colormap`, then a
    uniform `color`, then one default ('hsl') color per channel group.
    Returns the ColorArray and the color limits actually used.
    """
    if values is not None:
        # default to symmetric limits around zero
        if limits_c is None:
            limits_c = array([-1, 1]) * nanmax(abs(values))
        norm_values = normalize(values, *limits_c)
        colors = get_colormap(colormap)[norm_values]
    elif color is not None:
        colors = ColorArray(color)
    else:
        group_idx = _chan_groups_to_index(chan)
        colors = get_colormap('hsl')[group_idx]

    if alpha is not None:
        colors.alpha = alpha
    return colors, limits_c
Return colors for all the channels based on various inputs .
23,507
def add_surf(self, surf, color=SKIN_COLOR, vertex_colors=None, values=None,
             limits_c=None, colormap=COLORMAP, alpha=1, colorbar=False):
    """Add surfaces to the visualization.

    Parameters
    ----------
    surf : instance of Surf
        surface with vert (vertices), tri (triangles) and n_vert
    color : tuple
        uniform color for the whole surface
    vertex_colors : ndarray, optional
        NOTE(review): this argument is immediately overwritten below and
        so currently has no effect — confirm whether it should take
        precedence over the computed colors
    values : ndarray, optional
        one value per vertex, mapped through colormap
    limits_c : tuple, optional
        color limits (min, max) for values
    colormap : str
        colormap name used when values is given
    alpha : float
        transparency (0 transparent - 1 opaque)
    colorbar : bool
        whether to add a colorbar for the surface values
    """
    colors, limits = _prepare_colors(color=color, values=values,
                                     limits_c=limits_c, colormap=colormap,
                                     alpha=alpha)
    vertex_colors = colors.rgba
    if vertex_colors.shape[0] == 1:
        # a single color: repeat it for every vertex
        vertex_colors = tile(vertex_colors, (surf.n_vert, 1))

    meshdata = MeshData(vertices=surf.vert, faces=surf.tri,
                        vertex_colors=vertex_colors)
    mesh = SurfaceMesh(meshdata)
    self._add_mesh(mesh)

    # look at the surface from the side it lies on (x < 0 means left)
    surf_center = mean(surf.vert, axis=0)
    if surf_center[0] < 0:
        azimuth = 270
    else:
        azimuth = 90
    self._view.camera.azimuth = azimuth
    self._view.camera.center = surf_center

    self._surf.append(mesh)
    if colorbar:
        self._view.add(_colorbar_for_surf(colormap, limits))
Add surfaces to the visualization .
23,508
def add_chan(self, chan, color=None, values=None, limits_c=None,
             colormap=CHAN_COLORMAP, alpha=None, colorbar=False):
    """Add channels to the visualization as colored markers.

    Colors come from `values` mapped through `colormap` (limits are
    shared across calls via self._chan_limits), from an explicit
    `color`, or from per-group default colors.
    """
    # reuse the color limits from a previous call unless given
    if limits_c is None and self._chan_limits is not None:
        limits_c = self._chan_limits

    face_colors, limits = _prepare_colors(color=color, values=values,
                                          limits_c=limits_c,
                                          colormap=colormap, alpha=alpha,
                                          chan=chan)
    self._chan_limits = limits

    xyz = chan.return_xyz()
    marker = Markers()
    marker.set_data(pos=xyz, size=CHAN_SIZE, face_color=face_colors)
    self._add_mesh(marker)

    if colorbar:
        self._view.add(_colorbar_for_surf(colormap, limits))
Add channels to the visualization.
23,509
def select(data, trial=None, invert=False, **axes_to_select):
    """Define the selection of trials, using ranges or actual values.

    Parameters
    ----------
    data : instance of Data
        data to select from
    trial : iterable of int, optional
        indices of the trials to keep (all trials if None)
    invert : bool
        take the inverse of the selection (applied to trials and to
        numeric axis selections)
    axes_to_select : dict
        axis name -> iterable of values; an empty iterable selects
        nothing, strings select by label, and a two-element numeric
        (low, high) pair selects the half-open interval [low, high)
        with None on either side leaving that side unbounded

    Returns
    -------
    instance, same class as input
        data with the selected dimensions.

    Raises
    ------
    TypeError
        if trial or any axis selection is not iterable (strings are
        rejected as axis selections).
    """
    if trial is not None and not isinstance(trial, Iterable):
        raise TypeError('Trial needs to be iterable.')
    for axis_to_select, values_to_select in axes_to_select.items():
        if (not isinstance(values_to_select, Iterable) or
                isinstance(values_to_select, str)):
            raise TypeError(axis_to_select + ' needs to be iterable.')

    if trial is None:
        trial = range(data.number_of('trial'))
    else:
        trial = trial  # no-op kept from the original
        if invert:
            trial = setdiff1d(range(data.number_of('trial')), trial)

    # one object-array entry per selected trial
    output = data._copy(axis=False)
    for one_axis in output.axis:
        output.axis[one_axis] = empty(len(trial), dtype='O')
    output.data = empty(len(trial), dtype='O')

    to_select = {}
    for cnt, i in enumerate(trial):
        lg.debug('Selection on trial {0: 6}'.format(i))
        for one_axis in output.axis:
            values = data.axis[one_axis][i]

            if one_axis in axes_to_select.keys():
                values_to_select = axes_to_select[one_axis]

                if len(values_to_select) == 0:
                    selected_values = ()

                elif isinstance(values_to_select[0], str):
                    # selection by label
                    selected_values = asarray(values_to_select, dtype='U')

                else:
                    # numeric interval [low, high); None = unbounded
                    if (values_to_select[0] is None and
                            values_to_select[1] is None):
                        bool_values = ones(len(values), dtype=bool)
                    elif values_to_select[0] is None:
                        bool_values = values < values_to_select[1]
                    elif values_to_select[1] is None:
                        bool_values = values_to_select[0] <= values
                    else:
                        bool_values = ((values_to_select[0] <= values) &
                                       (values < values_to_select[1]))
                    selected_values = values[bool_values]

                    if invert:
                        selected_values = setdiff1d(values, selected_values)

                lg.debug('In axis {0}, selecting {1: 6} '
                         'values'.format(one_axis, len(selected_values)))
                to_select[one_axis] = selected_values

            else:
                lg.debug('In axis ' + one_axis + ', selecting all the '
                         'values')
                selected_values = data.axis[one_axis][i]

            output.axis[one_axis][cnt] = selected_values

        output.data[cnt] = data(trial=i, **to_select)
    return output
Define the selection of trials using ranges or actual values .
23,510
def resample(data, s_freq=None, axis='time', ftype='fir', n=None):
    """Downsample the data after applying an anti-aliasing filter.

    Parameters
    ----------
    data : instance of Data
        data to downsample; data.s_freq should be an integer multiple of
        s_freq, since the ratio is truncated with int()
    s_freq : int or float
        desired (lower) sampling frequency
    axis : str
        axis along which to downsample (default 'time')
    ftype : str
        NOTE(review): accepted but never forwarded to decimate, which
        therefore uses scipy's default filter type — confirm intended
    n : int, optional
        NOTE(review): accepted but never forwarded to decimate — confirm

    Returns
    -------
    instance of Data
        downsampled copy with updated time axis and s_freq.
    """
    output = data._copy()
    ratio = int(data.s_freq / s_freq)
    for i in range(data.number_of('trial')):
        output.data[i] = decimate(data.data[i], ratio,
                                  axis=data.index_of(axis),
                                  zero_phase=True)
        n_samples = output.data[i].shape[data.index_of(axis)]
        # rebuild the axis; the endpoint extends one original sample past
        # the last timestamp — presumably so the new samples span the full
        # original interval (TODO confirm exact spacing intent)
        output.axis[axis][i] = linspace(data.axis[axis][i][0],
                                        data.axis[axis][i][-1] +
                                        1 / data.s_freq,
                                        n_samples)
    output.s_freq = s_freq
    return output
Downsample the data after applying a filter .
23,511
def fetch(dataset, annot, cat=(0, 0, 0, 0), evt_type=None, stage=None,
          cycle=None, chan_full=None, epoch=None, epoch_dur=30,
          epoch_overlap=0, epoch_step=None, reject_epoch=False,
          reject_artf=False, min_dur=0, buffer=0):
    """Create an instance of Segments for analysis, complete with info
    about stage, cycle, channel and event type.

    Segments contain only metadata until .read_data is called.

    Parameters
    ----------
    dataset : instance of Dataset
        dataset from which the segments will eventually be read
    annot : instance of Annotations
        annotations used to find the times of interest
    cat : tuple of int
        concatenation flags: (cycle, stage, discontinuous, event type)
    evt_type : list of str, optional
        event types to include
    stage : list of str, optional
        stages to include
    cycle : list, optional
        cycles to include
    chan_full : list of str, optional
        channel names (with group suffix) to restrict to
    epoch : str or None
        'locked' keeps one segment per subsegment, 'unlocked' slides
        fixed windows over the concatenated times, anything falsy just
        concatenates
    epoch_dur : float
        duration (s) of unlocked epochs
    epoch_overlap : float
        fraction of overlap between consecutive unlocked epochs
    epoch_step : float, optional
        step (s) between unlocked epochs; overrides epoch_overlap
    reject_epoch : bool
        only keep epochs of 'Good' quality
    reject_artf : bool
        remove times overlapping artefact events
    min_dur : float
        minimum total duration (s) a segment must have to be kept
    buffer : float
        padding (s) added around each time of interest

    Returns
    -------
    instance of Segments
        segments (metadata only) ready for read_data.
    """
    bundles = get_times(annot, evt_type=evt_type, stage=stage, cycle=cycle,
                        chan=chan_full, exclude=reject_epoch, buffer=buffer)
    if reject_artf and bundles:
        for bund in bundles:
            # NOTE(review): min_dur=0 is hard-coded here rather than
            # forwarding this function's min_dur argument — confirm intended
            bund['times'] = remove_artf_evts(bund['times'], annot,
                                             bund['chan'], min_dur=0)
    if bundles:
        if 'locked' == epoch:
            bundles = _divide_bundles(bundles)
        elif 'unlocked' == epoch:
            if epoch_step is not None:
                step = epoch_step
            else:
                step = epoch_dur - (epoch_dur * epoch_overlap)
            bundles = _concat(bundles, cat)
            bundles = _find_intervals(bundles, epoch_dur, step)
        elif not epoch:
            bundles = _concat(bundles, cat)
    bundles = _longer_than(bundles, min_dur)

    segments = Segments(dataset)
    segments.segments = bundles
    return segments
Create an instance of Segments for analysis, complete with info about stage, cycle, channel and event type. Segments contain only metadata until .read_data is called.
23,512
def get_times(annot, evt_type=None, stage=None, cycle=None, chan=None,
              exclude=False, buffer=0):
    """Get start and end times for selected segments of data, bundled
    together with info.

    Parameters
    ----------
    annot : instance of Annotations
        annotation file with epochs and events
    evt_type : list of str, optional
        event types to look up; if None, whole epochs are used instead
    stage : list of str, optional
        stages to restrict to
    cycle : list, optional
        cycles to restrict to
    chan : list of str, optional
        channels to restrict to.
        NOTE(review): when evt_type is given, '' is appended to this
        list in place, mutating the caller's list — confirm intended
    exclude : bool
        only keep epochs with quality 'Good'
    buffer : float
        seconds added before/after each time, clipped to [0, last_second]

    Returns
    -------
    list of dict
        one bundle per (event type, channel, cycle, stage) combination
        that has matches; each bundle has 'times' (sorted list of
        (start, end)), 'stage', 'cycle', 'chan' and 'name'.
    """
    getter = annot.get_epochs
    last = annot.last_second
    if stage is None:
        stage = (None,)
    if cycle is None:
        cycle = (None,)
    if chan is None:
        chan = (None,)
    if evt_type is None:
        evt_type = (None,)
    elif isinstance(evt_type[0], str):
        # fetch events instead of epochs; '' also matches events that
        # have no channel attached
        getter = annot.get_events
        if chan != (None,):
            chan.append('')
    else:
        lg.error('Event type must be list/tuple of str or None')

    qual = None
    if exclude:
        qual = 'Good'

    bundles = []
    for et in evt_type:
        for ch in chan:
            for cyc in cycle:
                for ss in stage:
                    st_input = ss
                    if ss is not None:
                        st_input = (ss,)
                    evochs = getter(name=et, time=cyc, chan=(ch,),
                                    stage=st_input, qual=qual)
                    if evochs:
                        times = [(max(e['start'] - buffer, 0),
                                  min(e['end'] + buffer, last))
                                 for e in evochs]
                        times = sorted(times, key=lambda x: x[0])
                        one_bundle = {'times': times,
                                      'stage': ss,
                                      'cycle': cyc,
                                      'chan': ch,
                                      'name': et}
                        bundles.append(one_bundle)

    return bundles
Get start and end times for selected segments of data bundled together with info .
23,513
def _longer_than ( segments , min_dur ) : if min_dur <= 0. : return segments long_enough = [ ] for seg in segments : if sum ( [ t [ 1 ] - t [ 0 ] for t in seg [ 'times' ] ] ) >= min_dur : long_enough . append ( seg ) return long_enough
Remove segments shorter than min_dur.
23,514
def _concat(bundles, cat=(0, 0, 0, 0)):
    """Prepare event or epoch start and end times for concatenation.

    Parameters
    ----------
    bundles : list of dict
        each dict has 'times' (list of (start, end)), 'chan', 'cycle',
        'stage' and 'name'
    cat : tuple of int
        concatenation flags: (cycle, stage, discontinuous, event type).
        Channels are never concatenated.

    Returns
    -------
    list of dict
        bundles regrouped according to cat, with empty ones dropped.
    """
    chan = sorted(set([x['chan'] for x in bundles]))
    cycle = sorted(set([x['cycle'] for x in bundles]))
    stage = sorted(set([x['stage'] for x in bundles]))
    evt_type = sorted(set([x['name'] for x in bundles]))

    # labels used when a dimension is collapsed into a single group
    all_cycle = None
    all_stage = None
    all_evt_type = None
    if cycle[0] is not None:
        all_cycle = ', '.join([str(c) for c in cycle])
    if stage[0] is not None:
        all_stage = ', '.join(stage)
    if evt_type[0] is not None:
        all_evt_type = ', '.join(evt_type)

    if cat[0]:
        cycle = [all_cycle]
    if cat[1]:
        stage = [all_stage]
    if cat[3]:
        evt_type = [all_evt_type]

    # regroup times under every remaining (chan, cycle, stage, event
    # type) combination; collapsed dimensions match via the all_* label
    to_concat = []
    for ch in chan:
        for cyc in cycle:
            for st in stage:
                for et in evt_type:
                    new_times = []
                    for bund in bundles:
                        chan_cond = ch == bund['chan']
                        cyc_cond = cyc in (bund['cycle'], all_cycle)
                        st_cond = st in (bund['stage'], all_stage)
                        et_cond = et in (bund['name'], all_evt_type)
                        if chan_cond and cyc_cond and st_cond and et_cond:
                            new_times.extend(bund['times'])
                    new_times = sorted(new_times, key=lambda x: x[0])
                    new_bund = {'times': new_times,
                                'chan': ch,
                                'cycle': cyc,
                                'stage': st,
                                'name': et}
                    to_concat.append(new_bund)

    if not cat[2]:
        # keep discontinuous stretches apart: split each bundle wherever
        # consecutive times are not contiguous (within 10 ms)
        to_concat_new = []
        for bund in to_concat:
            last = None
            bund['times'].append((inf, inf))  # sentinel flushes the last run
            start = 0
            for i, j in enumerate(bund['times']):
                if last is not None:
                    if not isclose(j[0], last, abs_tol=0.01):
                        new_times = bund['times'][start:i]
                        new_bund = bund.copy()
                        new_bund['times'] = new_times
                        to_concat_new.append(new_bund)
                        start = i
                last = j[1]
        to_concat = to_concat_new

    # drop combinations that collected no times at all
    to_concat = [x for x in to_concat if x['times']]
    return to_concat
Prepare event or epoch start and end times for concatenation .
23,515
def _divide_bundles ( bundles ) : divided = [ ] for bund in bundles : for t in bund [ 'times' ] : new_bund = bund . copy ( ) new_bund [ 'times' ] = [ t ] divided . append ( new_bund ) return divided
Take each subsegment inside a bundle and put it in its own bundle copying the bundle metadata .
23,516
def _find_intervals ( bundles , duration , step ) : segments = [ ] for bund in bundles : beg , end = bund [ 'times' ] [ 0 ] [ 0 ] , bund [ 'times' ] [ - 1 ] [ 1 ] if end - beg >= duration : new_begs = arange ( beg , end - duration , step ) for t in new_begs : seg = bund . copy ( ) seg [ 'times' ] = [ ( t , t + duration ) ] segments . append ( seg ) return segments
Divide bundles into segments of a certain duration and a certain step discarding any remainder .
23,517
def _create_data(data, active_chan, ref_chan=[], grp_name=None):
    """Create data after montage (re-referencing).

    Parameters
    ----------
    data : instance of ChanTime
        raw data to pick channels from
    active_chan : list of str
        channels to keep in the output
    ref_chan : list of str
        reference channels for the montage (mutable default is read-only
        here, but callers should still pass their own list)
    grp_name : str, optional
        group name appended to each channel label as ' (grp_name)'

    Returns
    -------
    instance of ChanTime
        single trial with the re-referenced active channels.
    """
    output = ChanTime()
    output.s_freq = data.s_freq
    output.start_time = data.start_time
    output.axis['time'] = data.axis['time']
    output.axis['chan'] = empty(1, dtype='O')
    output.data = empty(1, dtype='O')
    output.data[0] = empty((len(active_chan), data.number_of('time')[0]),
                           dtype='f')

    # reference channels must also be selected for montage to use them
    sel_data = _select_channels(data, active_chan + ref_chan)
    data1 = montage(sel_data, ref_chan=ref_chan)
    # replace NaN so downstream numeric code does not propagate it
    data1.data[0] = nan_to_num(data1.data[0])

    all_chan_grp_name = []
    for i, chan in enumerate(active_chan):
        chan_grp_name = chan
        if grp_name:
            chan_grp_name = chan + ' (' + grp_name + ')'
        all_chan_grp_name.append(chan_grp_name)
        dat = data1(chan=chan, trial=0)
        output.data[0][i, :] = dat

    output.axis['chan'][0] = asarray(all_chan_grp_name, dtype='U')
    return output
Create data after montage .
23,518
def _select_channels(data, channels):
    """Select specific channels (in the given order) from trial 0 of the
    data, returning a copy."""
    output = data._copy()
    available = list(data.axis['chan'][0])
    # raises ValueError if a requested channel is not present
    picks = [available.index(one_chan) for one_chan in channels]
    output.data[0] = data.data[0][picks, :]
    output.axis['chan'][0] = asarray(channels)
    return output
Select channels .
23,519
def create_widgets(MAIN):
    """Create all the widgets and dockwidgets.

    It also creates actions to toggle the visibility of each dockwidget,
    collected in MAIN.action['dockwidgets'].
    """
    MAIN.labels = Labels(MAIN)
    MAIN.channels = Channels(MAIN)
    MAIN.notes = Notes(MAIN)
    MAIN.merge_dialog = MergeDialog(MAIN)
    MAIN.export_events_dialog = ExportEventsDialog(MAIN)
    MAIN.export_dataset_dialog = ExportDatasetDialog(MAIN)
    MAIN.spindle_dialog = SpindleDialog(MAIN)
    MAIN.slow_wave_dialog = SWDialog(MAIN)
    MAIN.analysis_dialog = AnalysisDialog(MAIN)
    MAIN.overview = Overview(MAIN)
    MAIN.spectrum = Spectrum(MAIN)
    MAIN.traces = Traces(MAIN)
    MAIN.video = Video(MAIN)
    MAIN.settings = Settings(MAIN)
    MAIN.info = Info(MAIN)

    MAIN.setCentralWidget(MAIN.traces)

    # main_area is where each dock starts; extra_area is the only other
    # area it may be dragged to
    new_docks = [{'name': 'Information',
                  'widget': MAIN.info,
                  'main_area': Qt.LeftDockWidgetArea,
                  'extra_area': Qt.RightDockWidgetArea,
                  },
                 {'name': 'Labels',
                  'widget': MAIN.labels,
                  'main_area': Qt.RightDockWidgetArea,
                  'extra_area': Qt.LeftDockWidgetArea,
                  },
                 {'name': 'Channels',
                  'widget': MAIN.channels,
                  'main_area': Qt.RightDockWidgetArea,
                  'extra_area': Qt.LeftDockWidgetArea,
                  },
                 {'name': 'Spectrum',
                  'widget': MAIN.spectrum,
                  'main_area': Qt.RightDockWidgetArea,
                  'extra_area': Qt.LeftDockWidgetArea,
                  },
                 {'name': 'Annotations',
                  'widget': MAIN.notes,
                  'main_area': Qt.LeftDockWidgetArea,
                  'extra_area': Qt.RightDockWidgetArea,
                  },
                 {'name': 'Video',
                  'widget': MAIN.video,
                  'main_area': Qt.LeftDockWidgetArea,
                  'extra_area': Qt.RightDockWidgetArea,
                  },
                 {'name': 'Overview',
                  'widget': MAIN.overview,
                  'main_area': Qt.BottomDockWidgetArea,
                  'extra_area': Qt.TopDockWidgetArea,
                  },
                 ]

    idx_docks = {}
    actions = MAIN.action
    actions['dockwidgets'] = []
    for dock in new_docks:
        dockwidget = QDockWidget(dock['name'], MAIN)
        dockwidget.setWidget(dock['widget'])
        dockwidget.setAllowedAreas(dock['main_area'] | dock['extra_area'])
        # an objectName is required for Qt to save/restore dock state
        dockwidget.setObjectName(dock['name'])
        idx_docks[dock['name']] = dockwidget
        MAIN.addDockWidget(dock['main_area'], dockwidget)
        dockwidget_action = dockwidget.toggleViewAction()
        dockwidget_action.setIcon(QIcon(ICON['widget']))
        actions['dockwidgets'].append(dockwidget_action)

    # group related docks into tabs, with Information shown on top
    MAIN.tabifyDockWidget(idx_docks['Information'], idx_docks['Video'])
    MAIN.tabifyDockWidget(idx_docks['Channels'], idx_docks['Labels'])
    idx_docks['Information'].raise_()
Create all the widgets and dockwidgets . It also creates actions to toggle views of dockwidgets in dockwidgets .
23,520
def create_actions(MAIN):
    """Create all the possible actions (settings, quit, about dialogs)."""
    actions = MAIN.action

    settings_act = QAction(QIcon(ICON['settings']), 'Settings', MAIN)
    settings_act.triggered.connect(MAIN.show_settings)
    actions['open_settings'] = settings_act

    quit_act = QAction(QIcon(ICON['quit']), 'Quit', MAIN)
    quit_act.triggered.connect(MAIN.close)
    actions['close_wndw'] = quit_act

    about_act = QAction('About WONAMBI', MAIN)
    about_act.triggered.connect(MAIN.about)
    actions['about'] = about_act

    aboutqt_act = QAction('About Qt', MAIN)
    aboutqt_act.triggered.connect(lambda: QMessageBox.aboutQt(MAIN))
    actions['aboutqt'] = aboutqt_act
Create all the possible actions .
23,521
def create_toolbar(MAIN):
    """Create the various toolbars (file management, scrolling and
    annotations).

    Each toolbar gets an objectName so Qt can save/restore its state.
    """
    actions = MAIN.action

    toolbar = MAIN.addToolBar('File Management')
    toolbar.setObjectName('File Management')
    toolbar.addAction(MAIN.info.action['open_dataset'])
    toolbar.addSeparator()
    toolbar.addAction(MAIN.channels.action['load_channels'])
    toolbar.addAction(MAIN.channels.action['save_channels'])
    toolbar.addSeparator()
    toolbar.addAction(MAIN.notes.action['new_annot'])
    toolbar.addAction(MAIN.notes.action['load_annot'])

    # scrolling and scaling actions live on the traces widget
    actions = MAIN.traces.action
    toolbar = MAIN.addToolBar('Scroll')
    toolbar.setObjectName('Scroll')
    toolbar.addAction(actions['step_prev'])
    toolbar.addAction(actions['step_next'])
    toolbar.addAction(actions['page_prev'])
    toolbar.addAction(actions['page_next'])
    toolbar.addSeparator()
    toolbar.addAction(actions['X_more'])
    toolbar.addAction(actions['X_less'])
    toolbar.addSeparator()
    toolbar.addAction(actions['Y_less'])
    toolbar.addAction(actions['Y_more'])
    toolbar.addAction(actions['Y_wider'])
    toolbar.addAction(actions['Y_tighter'])

    # annotation actions live on the notes widget
    actions = MAIN.notes.action
    toolbar = MAIN.addToolBar('Annotations')
    toolbar.setObjectName('Annotations')
    toolbar.addAction(actions['new_bookmark'])
    toolbar.addSeparator()
    toolbar.addAction(actions['new_event'])
    toolbar.addWidget(MAIN.notes.idx_eventtype)
    toolbar.addSeparator()
    toolbar.addWidget(MAIN.notes.idx_stage)
    toolbar.addWidget(MAIN.notes.idx_quality)
Create the various toolbars .
23,522
def update_evt_types(self):
    """Refresh the event-type lists when the dialog is opened."""
    self.event_types = self.parent.notes.annot.event_types

    combos = (self.idx_evt_type, self.frequency['norm_evt_type'])
    for combo in combos:
        combo.clear()
    for evt_type in self.event_types:
        for combo in combos:
            combo.addItem(evt_type)
Update the event types list when dialog is opened .
23,523
def toggle_concatenate(self):
    """Enable and disable concatenation options.

    A concatenation checkbox is only enabled when more than one item is
    selected in the corresponding list; event-type concatenation also
    requires 'event' chunking, and channel concatenation requires
    discontinuous-signal concatenation. Finishes by refreshing the
    segment count.
    """
    # locked-to-staging epochs cannot be concatenated at all
    if not (self.chunk['epoch'].isChecked() and
            self.lock_to_staging.get_value()):
        for i, j in zip([self.idx_chan, self.idx_cycle,
                         self.idx_stage, self.idx_evt_type],
                        [self.cat['chan'], self.cat['cycle'],
                         self.cat['stage'], self.cat['evt_type']]):
            if len(i.selectedItems()) > 1:
                j.setEnabled(True)
            else:
                j.setEnabled(False)
                j.setChecked(False)

    if not self.chunk['event'].isChecked():
        self.cat['evt_type'].setEnabled(False)

    if not self.cat['discontinuous'].get_value():
        self.cat['chan'].setEnabled(False)
        self.cat['chan'].setChecked(False)

    self.update_nseg()
Enable and disable concatenation options .
23,524
def toggle_pac(self):
    """Enable and disable PAC (phase-amplitude coupling) options.

    Requires the optional tensorpac package (Pac); PAC widgets are left
    untouched when it is not installed. When the ndPac metric is
    selected, surrogate options are forced off and only the p-value
    option stays enabled.
    """
    if Pac is not None:
        pac_on = self.pac['pac_on'].get_value()
        self.pac['prep'].setEnabled(pac_on)
        self.pac['box_metric'].setEnabled(pac_on)
        self.pac['box_complex'].setEnabled(pac_on)
        self.pac['box_surro'].setEnabled(pac_on)
        self.pac['box_opts'].setEnabled(pac_on)
        if not pac_on:
            self.pac['prep'].set_value(False)

    if Pac is not None and pac_on:
        pac = self.pac
        hilb_on = pac['hilbert_on'].isChecked()
        wav_on = pac['wavelet_on'].isChecked()
        for button in pac['hilbert'].values():
            button[0].setEnabled(hilb_on)
            if button[1] is not None:
                button[1].setEnabled(hilb_on)
        pac['wav_width'][0].setEnabled(wav_on)
        pac['wav_width'][1].setEnabled(wav_on)

        # the bin count only applies to binned metrics
        if pac['metric'].get_value() in ['Kullback-Leibler Distance',
                                         'Heights ratio']:
            pac['nbin'][0].setEnabled(True)
            pac['nbin'][1].setEnabled(True)
        else:
            pac['nbin'][0].setEnabled(False)
            pac['nbin'][1].setEnabled(False)

        # BUG FIX: the original compared the widget object itself to the
        # string (pac['metric'] == 'ndPac'), which is always False, so
        # this branch never ran; compare the widget's value instead, as
        # is done for ndpac_on below.
        if pac['metric'].get_value() == 'ndPac':
            for button in pac['surro'].values():
                button[0].setEnabled(False)
                if button[1] is not None:
                    button[1].setEnabled(False)
            pac['surro']['pval'][0].setEnabled(True)

        ndpac_on = pac['metric'].get_value() == 'ndPac'
        surro_on = logical_and(pac['surro_method'].get_value() !=
                               'No surrogates', not ndpac_on)
        norm_on = pac['surro_norm'].get_value() != 'No normalization'
        blocks_on = 'across time' in pac['surro_method'].get_value()
        pac['surro_method'].setEnabled(not ndpac_on)
        for button in pac['surro'].values():
            button[0].setEnabled(surro_on and norm_on)
            if button[1] is not None:
                button[1].setEnabled(surro_on and norm_on)
        pac['surro']['nblocks'][0].setEnabled(blocks_on)
        pac['surro']['nblocks'][1].setEnabled(blocks_on)
        if ndpac_on:
            pac['surro_method'].set_value('No surrogates')
            pac['surro']['pval'][0].setEnabled(True)
Enable and disable PAC options .
23,525
def update_nseg(self):
    """Update the number of segments displayed in the dialog.

    Also marks the segment times on the overview, and refreshes the
    frequency options afterwards.
    """
    self.nseg = 0
    if self.one_grp:
        segments = self.get_segments()
        if segments is not None:
            self.nseg = len(segments)
            self.show_nseg.setText('Number of segments: ' + str(self.nseg))
            # flatten all segment spans to highlight them on the overview
            times = [t for seg in segments for t in seg['times']]
            self.parent.overview.mark_poi(times)
        else:
            self.show_nseg.setText('No valid segments')
    self.toggle_freq()
Update the number of segments displayed in the dialog .
23,526
def check_all_local(self):
    """Check or uncheck every local event parameter at once."""
    target_state = self.event['global']['all_local'].isChecked()
    for buttons in self.event['local'].values():
        buttons[0].setChecked(target_state)
        # the prep box is only meaningful when its parameter is on
        buttons[1].setEnabled(buttons[0].isChecked())
Check or uncheck all local event parameters .
23,527
def check_all_local_prep(self):
    """Check or uncheck every enabled event pre-processing box."""
    target_state = self.event['global']['all_local_prep'].isChecked()
    for buttons in self.event['local'].values():
        # disabled prep boxes (their parameter is off) stay untouched
        if buttons[1].isEnabled():
            buttons[1].setChecked(target_state)
Check or uncheck all enabled event pre - processing .
23,528
def uncheck_all_local(self):
    """Uncheck the 'all local' master boxes when any local event
    parameter (or enabled prep box) is unchecked."""
    master = self.event['global']
    for buttons in self.event['local'].values():
        param_box = buttons[0]
        prep_box = buttons[1]
        if not param_box.get_value():
            master['all_local'].setChecked(False)
        if prep_box.isEnabled() and not prep_box.get_value():
            master['all_local_prep'].setChecked(False)
Uncheck all local box when a local event is unchecked .
23,529
def get_segments(self):
    """Get segments for analysis; creates an instance of trans.Segments.

    Returns
    -------
    instance of Segments, or None
        None when no channels are selected, or when event chunking is
        requested without any event type selected.
    """
    # which chunking mode is selected (epoch / event / continuous)
    chunk = {k: v.isChecked() for k, v in self.chunk.items()}
    lock_to_staging = self.lock_to_staging.get_value()
    epoch_dur = self.epoch_param['dur'].get_value()
    epoch_overlap = self.epoch_param['overlap_val'].value()
    epoch_step = None
    epoch = None
    if chunk['epoch']:
        if lock_to_staging:
            epoch = 'locked'
        else:
            epoch = 'unlocked'
            if self.epoch_param['step'].isChecked():
                epoch_step = self.epoch_param['step_val'].get_value()
                # guard against a zero/negative step
                if epoch_step <= 0:
                    epoch_step = 0.1

    self.chan = self.get_channels()
    if not self.chan:
        return

    chan_full = None
    evt_type = None
    if chunk['event']:
        if self.evt_chan_only.get_value():
            # append the group name, matching annotation channel labels
            chan_full = [i + ' (' + self.idx_group.currentText() + ')'
                         for i in self.chan]
        evt_type = self.idx_evt_type.selectedItems()
        if not evt_type:
            return
        else:
            evt_type = [x.text() for x in evt_type]

    cycle = self.cycle = self.get_cycles()
    stage = self.idx_stage.selectedItems()
    if not stage:
        stage = self.stage = None
    else:
        stage = self.stage = [x.text() for x in
                              self.idx_stage.selectedItems()]

    # concatenation flags in fetch's expected order
    cat = {k: v.get_value() for k, v in self.cat.items()}
    cat = (int(cat['cycle']),
           int(cat['stage']),
           int(cat['discontinuous']),
           int(cat['evt_type']))

    reject_event = self.reject_event.get_value()
    if reject_event == 'channel-specific':
        chan_full = [i + ' (' + self.idx_group.currentText() + ')'
                     for i in self.chan]
        reject_artf = True
    elif reject_event == 'from any channel':
        reject_artf = True
    else:
        reject_artf = False

    min_dur = self.min_dur.get_value()
    reject_epoch = self.reject_epoch.get_value()

    self.title = self.make_title(chan_full, cycle, stage, evt_type)

    segments = fetch(self.parent.info.dataset, self.parent.notes.annot,
                     cat=cat, evt_type=evt_type, stage=stage, cycle=cycle,
                     chan_full=chan_full, epoch=epoch, epoch_dur=epoch_dur,
                     epoch_overlap=epoch_overlap, epoch_step=epoch_step,
                     reject_epoch=reject_epoch, reject_artf=reject_artf,
                     min_dur=min_dur)

    return segments
Get segments for analysis . Creates instance of trans . Segments .
23,530
def transform_data(self, data):
    """Apply pre-processing transformations to each segment and store
    the result under the 'trans_data' key.

    Transformations, in order: time derivative, band-pass filter, and
    up to two notch (band-stop) filters, each implemented as a high-pass
    above plus a low-pass below the notch centre frequency.

    Parameters
    ----------
    data : iterable of dict
        segments; each dict must have a 'data' entry and gains a
        'trans_data' entry (mutated in place)

    Returns
    -------
    same iterable
        the input, with 'trans_data' set on every segment.
    """
    trans = self.trans
    differ = trans['diff'].get_value()
    bandpass = trans['bandpass'].get_value()
    notch1 = trans['notch1'].get_value()
    notch2 = trans['notch2'].get_value()

    for seg in data:
        dat = seg['data']

        if differ:
            dat = math(dat, operator=diff, axis='time')

        if bandpass != 'none':
            order = trans['bp']['order'][1].get_value()
            f1 = trans['bp']['f1'][1].get_value()
            f2 = trans['bp']['f2'][1].get_value()
            # empty fields mean an unbounded cut-off
            if f1 == '':
                f1 = None
            if f2 == '':
                f2 = None
            dat = filter_(dat, low_cut=f1, high_cut=f2, order=order,
                          ftype=bandpass)

        if notch1 != 'none':
            order = trans['n1']['order'][1].get_value()
            cf = trans['n1']['cf'][1].get_value()
            hbw = trans['n1']['bw'][1].get_value() / 2.0
            lo_pass = cf - hbw
            hi_pass = cf + hbw
            dat = filter_(dat, low_cut=hi_pass, order=order, ftype=notch1)
            dat = filter_(dat, high_cut=lo_pass, order=order, ftype=notch1)

        if notch2 != 'none':
            order = trans['n2']['order'][1].get_value()
            cf = trans['n2']['cf'][1].get_value()
            hbw = trans['n2']['bw'][1].get_value() / 2.0
            lo_pass = cf - hbw
            hi_pass = cf + hbw
            # BUG FIX: the original passed ftype=notch1 here, applying
            # the first notch's filter type to the second notch's
            # parameters; use notch2's own type.
            dat = filter_(dat, low_cut=hi_pass, order=order, ftype=notch2)
            dat = filter_(dat, high_cut=lo_pass, order=order, ftype=notch2)

        seg['trans_data'] = dat

    return data
Apply pre - processing transformation to data and add it to data dict .
23,531
def save_as(self):
    """Dialog for getting name and location of the data export file."""
    suggested = splitext(self.parent.notes.annot.xml_file)[0] + '_data'
    filename, _ = QFileDialog.getSaveFileName(self, 'Export analysis data',
                                              suggested, 'CSV (*.csv)')
    if not filename:
        return  # user cancelled the dialog

    self.filename = filename
    self.idx_filename.setText(short_strings(basename(filename)))
Dialog for getting the name and location of the data export file.
23,532
def plot_freq(self, x, y, title='', ylabel=None, scale='semilogy'):
    """Plot mean frequency spectrum and display it in a dialog.

    When no ylabel is given, a default is derived from the current
    scaling ('power' / 'energy') or complex-output setting.
    """
    freq = self.frequency
    scaling = freq['scaling'].get_value()
    if ylabel is None:
        if freq['complex'].get_value():
            ylabel = 'Amplitude (uV)'
        elif scaling == 'power':
            ylabel = 'Power spectral density (uV ** 2 / Hz)'
        elif scaling == 'energy':
            ylabel = 'Energy spectral density (uV ** 2)'

    dialog = PlotDialog(self.parent)
    self.parent.plot_dialog = dialog
    dialog.canvas.plot(x, y, title, ylabel, scale=scale)
    self.parent.show_plot_dialog()
Plot mean frequency spectrum and display in dialog .
23,533
def export_pac(self, xpac, fpha, famp, desc):
    """Write phase-amplitude coupling (PAC) analysis data to CSV.

    Parameters
    ----------
    xpac : dict of dict
        One entry per channel, each with 'data', 'times', 'duration',
        'n_stitch', 'stage', 'cycle', 'name' and optionally 'pval'.
    fpha : list of tuple of float
        Phase frequency bands.
    famp : list of tuple of float
        Amplitude frequency bands.
    desc : dict
        Descriptives with 'mean', 'sd', 'mean_log' and 'sd_log'.
    """
    filename = splitext(self.filename)[0] + '_pac.csv'

    heading_row_1 = ['Segment index',
                     'Start time',
                     'End time',
                     'Duration',
                     'Stitch',
                     'Stage',
                     'Cycle',
                     'Event type',
                     'Channel',
                     ]
    spacer = [''] * (len(heading_row_1) - 1)

    # one column per (phase band x amplitude band) pair
    heading_row_2 = []
    for fp in fpha:
        fp_str = str(fp[0]) + '-' + str(fp[1])
        for fa in famp:
            fa_str = str(fa[0]) + '-' + str(fa[1])
            heading_row_2.append(fp_str + '_' + fa_str + '_pac')

    # p-value columns mirror the pac columns, when surrogates were computed
    if 'pval' in xpac[list(xpac.keys())[0]].keys():
        heading_row_3 = [x[:-4] + '_pval' for x in heading_row_2]
        heading_row_2.extend(heading_row_3)

    with open(filename, 'w', newline='') as f:
        lg.info('Writing to ' + str(filename))
        csv_file = writer(f)
        csv_file.writerow(['Wonambi v{}'.format(__version__)])
        csv_file.writerow(heading_row_1 + heading_row_2)
        # summary statistics precede the per-segment rows
        csv_file.writerow(['Mean'] + spacer + list(desc['mean']))
        csv_file.writerow(['SD'] + spacer + list(desc['sd']))
        csv_file.writerow(['Mean of ln'] + spacer + list(desc['mean_log']))
        csv_file.writerow(['SD of ln'] + spacer + list(desc['sd_log']))

        idx = 0
        for chan in xpac.keys():
            for i, j in enumerate(xpac[chan]['times']):
                idx += 1
                cyc = None
                if xpac[chan]['cycle'][i] is not None:
                    cyc = xpac[chan]['cycle'][i][2]
                # flatten the (fpha x famp) matrix into one CSV row
                data_row = list(ravel(xpac[chan]['data'][i, :, :]))
                pval_row = []
                if 'pval' in xpac[chan]:
                    pval_row = list(ravel(xpac[chan]['pval'][i, :, :]))
                csv_file.writerow([idx,
                                   j[0],
                                   j[1],
                                   xpac[chan]['duration'][i],
                                   xpac[chan]['n_stitch'][i],
                                   xpac[chan]['stage'][i],
                                   cyc,
                                   xpac[chan]['name'][i],
                                   chan,
                                   ] + data_row + pval_row)
Write PAC analysis data to CSV .
23,534
def compute_evt_params(self):
    """Compute event parameters from the dialog settings.

    Reads global and per-parameter options from self.event, runs
    event_params on the selected data, and optionally computes event
    count and density (events per scoring period).

    Returns
    -------
    tuple
        (event parameter data, count or None, density or None)
    """
    ev = self.event
    glob = {k: v.get_value() for k, v in ev['global'].items()}
    params = {k: v[0].get_value() for k, v in ev['local'].items()}
    prep = {k: v[1].get_value() for k, v in ev['local'].items()}
    slopes = {k: v.get_value() for k, v in ev['sw'].items()}

    f1 = ev['f1'].get_value()
    f2 = ev['f2'].get_value()
    if not f2:
        f2 = None
    band = (f1, f2)

    # skip slope computation entirely when neither option is selected
    if not (slopes['avg_slope'] or slopes['max_slope']):
        slopes = None

    evt_dat = event_params(self.data, params, band=band, slopes=slopes,
                           prep=prep, parent=self)

    count = None
    density = None
    if glob['count']:
        count = len(self.data)
    if glob['density']:
        epoch_dur = glob['density_per']
        # total scored time of interest, excluding rejected signal
        poi = get_times(self.parent.notes.annot, stage=self.stage,
                        cycle=self.cycle, exclude=True)
        total_dur = sum([x[1] - x[0] for y in poi for x in y['times']])
        density = len(self.data) / (total_dur / epoch_dur)

    return evt_dat, count, density
Compute event parameters .
23,535
def make_title(self, chan, cycle, stage, evt_type):
    """Build a human-readable title from the analysis selection.

    Each non-None group (channels, cycles, stages, event types) is
    joined internally with ' + '; the groups are then joined with ', '.
    The first cycle label is prefixed with 'cycle '.
    """
    if cycle is None:
        cycle_labels = None
    else:
        cycle_labels = [str(one_cyc[2]) for one_cyc in cycle]
        cycle_labels[0] = 'cycle ' + cycle_labels[0]

    parts = []
    for group in (chan, cycle_labels, stage, evt_type):
        if group is not None:
            parts.append(' + '.join(str(item) for item in group))

    return ', '.join(parts)
Make a title for plots etc .
23,536
def plot(self, x, y, title, ylabel, scale='semilogy', idx_lim=(1, -1)):
    """Plot the data on the dialog's canvas.

    Parameters
    ----------
    x : ndarray
        X-axis values (frequencies).
    y : ndarray
        Y-axis values.
    title : str
        Title of the plot.
    ylabel : str
        Label for the y-axis.
    scale : str
        'semilogy', 'loglog' or 'linear'.
    idx_lim : tuple of int
        Slice applied to both arrays; the default drops the first and
        last values (e.g. DC component and the last bin).
    """
    x = x[slice(*idx_lim)]
    y = y[slice(*idx_lim)]
    ax = self.figure.add_subplot(111)
    ax.set_title(title)
    ax.set_xlabel('Frequency (Hz)')
    ax.set_ylabel(ylabel)

    if 'semilogy' == scale:
        ax.semilogy(x, y, 'r-')
    elif 'loglog' == scale:
        ax.loglog(x, y, 'r-')
    elif 'linear' == scale:
        ax.plot(x, y, 'r-')
Plot the data .
23,537
def create_dialog(self):
    """Create the basic dialog: toolbar, canvas and a Close button."""
    self.bbox = QDialogButtonBox(QDialogButtonBox.Close)
    self.idx_close = self.bbox.button(QDialogButtonBox.Close)
    self.idx_close.pressed.connect(self.reject)
    # button row, right-aligned
    btnlayout = QHBoxLayout()
    btnlayout.addStretch(1)
    btnlayout.addWidget(self.bbox)

    layout = QVBoxLayout()
    layout.addWidget(self.toolbar)
    layout.addWidget(self.canvas)
    layout.addLayout(btnlayout)
    layout.addStretch(1)
    self.setLayout(layout)
Create the basic dialog .
23,538
def make_arousals(events, time, s_freq):
    """Convert sample-index events into arousal dicts.

    Parameters
    ----------
    events : sequence of (beg, end)
        Sample indices of each arousal (end is exclusive).
    time : sequence of float
        Time points, indexed by sample.
    s_freq : float
        Sampling frequency, in Hz.

    Returns
    -------
    list of dict
        One dict per event with 'start' and 'end' (time values) and
        'dur' (duration in seconds).
    """
    return [{'start': time[beg],
             'end': time[end - 1],
             'dur': (end - beg) / s_freq}
            for beg, end in events]
Create a dict for each arousal, based on events given as time-point indices.
23,539
def _convert_time_to_sample ( abs_time , dataset ) : if isinstance ( abs_time , datetime ) : abs_time = abs_time - dataset . header [ 'start_time' ] if not isinstance ( abs_time , timedelta ) : try : abs_time = timedelta ( seconds = float ( abs_time ) ) except TypeError as err : if isinstance ( abs_time , int64 ) : abs_time = timedelta ( seconds = int ( abs_time ) ) else : raise err sample = int ( ceil ( abs_time . total_seconds ( ) * dataset . header [ 's_freq' ] ) ) return sample
Convert absolute time into samples .
23,540
def detect_format(filename):
    """Detect the file format of a recording.

    Parameters
    ----------
    filename : path-like
        Path to a file or directory containing the recording.

    Returns
    -------
    class
        The IO class able to read this recording.

    Raises
    ------
    UnrecognizedFormat
        When the format cannot be determined.
    """
    filename = Path(filename)

    if filename.is_dir():
        # directory-based formats, identified by their marker files
        if list(filename.glob('*.stc')) and list(filename.glob('*.erd')):
            return Ktlx
        elif (filename / 'patient.info').exists():
            return Moberg
        elif (filename / 'info.xml').exists():
            return EgiMff
        elif list(filename.glob('*.openephys')):
            return OpenEphys
        elif list(filename.glob('*.txt')):
            return Text
        else:
            raise UnrecognizedFormat('Unrecognized format for directory ' +
                                     str(filename))
    else:
        # single-file formats, identified by extension first
        if filename.suffix == '.won':
            return Wonambi
        if filename.suffix.lower() == '.trc':
            return Micromed
        if filename.suffix == '.set':
            return EEGLAB
        if filename.suffix == '.edf':
            return Edf
        if filename.suffix == '.abf':
            return Abf
        if filename.suffix == '.vhdr' or filename.suffix == '.eeg':
            return BrainVision
        if filename.suffix == '.dat':
            # BCI2000 .dat files start with a parseable header length
            try:
                _read_header_length(filename)
            except (AttributeError, ValueError):
                pass
            else:
                return BCI2000

        # fall back to sniffing the first bytes of the file
        with filename.open('rb') as f:
            file_header = f.read(8)
        if file_header in (b'NEURALCD', b'NEURALSG', b'NEURALEV'):
            return BlackRock
        elif file_header[:6] == b'MATLAB':
            return FieldTrip

        if filename.suffix.lower() == '.txt':
            # Lyon RRI text files reference a '.rr' file on the first line
            with filename.open('rt') as f:
                first_line = f.readline()
            if '.rr' in first_line[-4:]:
                return LyonRRI
        else:
            # NOTE(review): a .txt file whose first line lacks '.rr' falls
            # through here and returns None instead of raising — confirm
            # this is intended
            raise UnrecognizedFormat('Unrecognized format for file ' +
                                     str(filename))
Detect file format .
23,541
def read_videos(self, begtime=None, endtime=None):
    """Return list of videos (with start/end times) covering a period.

    Parameters
    ----------
    begtime, endtime : datetime, timedelta or number, optional
        Period of interest; datetimes are taken relative to the start
        time of the recording, timedeltas are converted to seconds.

    Returns
    -------
    list
        Videos covering the period, as returned by the dataset.
    """
    bounds = []
    for bound in (begtime, endtime):
        if isinstance(bound, datetime):
            bound = bound - self.header['start_time']
        if isinstance(bound, timedelta):
            bound = bound.total_seconds()
        bounds.append(bound)

    return self.dataset.return_videos(bounds[0], bounds[1])
Return list of videos with start and end times for a period .
23,542
def read_data(self, chan=None, begtime=None, endtime=None, begsam=None,
              endsam=None, s_freq=None):
    """Read the data and create a ChanTime instance.

    Parameters
    ----------
    chan : list of str, optional
        Channels to read (all channels when None); the special name
        '_REF' adds a flat zero reference channel.
    begtime, endtime : (list of) datetime, timedelta or number, optional
        Start/end of each trial, as absolute or relative times.
    begsam, endsam : (list of) int, optional
        Start/end of each trial, in samples (used when no times given).
    s_freq : float, optional
        Sampling frequency to assume (default: from the header).

    Returns
    -------
    instance of ChanTime
        Data with one entry per trial.

    Raises
    ------
    TypeError
        When chan is not a list or tuple.
    ValueError
        When the numbers of start and end points differ.
    """
    data = ChanTime()
    data.start_time = self.header['start_time']
    data.s_freq = s_freq = s_freq if s_freq else self.header['s_freq']

    if chan is None:
        chan = self.header['chan_name']
    if not (isinstance(chan, list) or isinstance(chan, tuple)):
        raise TypeError('Parameter "chan" should be a list')

    # '_REF' is handled here, not by the dataset: it is removed from the
    # requested channels and appended later as a row of zeros
    add_ref = False
    if '_REF' in chan:
        add_ref = True
        chan[:] = [x for x in chan if x != '_REF']

    idx_chan = [self.header['chan_name'].index(x) for x in chan]

    if begtime is None and begsam is None:
        begsam = 0
    if endtime is None and endsam is None:
        endsam = self.header['n_samples']

    # times take precedence over samples; each entry becomes one trial
    if begtime is not None:
        if not isinstance(begtime, list):
            begtime = [begtime]
        begsam = []
        for one_begtime in begtime:
            begsam.append(_convert_time_to_sample(one_begtime, self))
    if endtime is not None:
        if not isinstance(endtime, list):
            endtime = [endtime]
        endsam = []
        for one_endtime in endtime:
            endsam.append(_convert_time_to_sample(one_endtime, self))

    if not isinstance(begsam, list):
        begsam = [begsam]
    if not isinstance(endsam, list):
        endsam = [endsam]
    if len(begsam) != len(endsam):
        raise ValueError('There should be the same number of start and ' +
                         'end point')

    n_trl = len(begsam)
    data.axis['chan'] = empty(n_trl, dtype='O')
    data.axis['time'] = empty(n_trl, dtype='O')
    data.data = empty(n_trl, dtype='O')

    for i, one_begsam, one_endsam in zip(range(n_trl), begsam, endsam):
        dataset = self.dataset
        lg.debug('begsam {0: 6}, endsam {1: 6}'.format(one_begsam,
                                                       one_endsam))
        dat = dataset.return_dat(idx_chan, one_begsam, one_endsam)

        chan_in_dat = chan
        if add_ref:
            # flat reference channel, all zeros
            zero_ref = zeros((1, one_endsam - one_begsam))
            dat = concatenate((dat, zero_ref), axis=0)
            chan_in_dat.append('_REF')

        data.data[i] = dat
        data.axis['chan'][i] = asarray(chan_in_dat, dtype='U')
        data.axis['time'][i] = (arange(one_begsam, one_endsam) / s_freq)

    return data
Read the data and create a ChanTime instance.
23,543
def _read_dat(x):
    """Convert raw little-endian signed binary samples to a numpy array.

    Parameters
    ----------
    x : bytes
        Raw binary data; its length must be a multiple of
        DATA_PRECISION (bytes per sample, e.g. 3 for 24-bit data).

    Returns
    -------
    ndarray
        One value per sample.
    """
    n_smp = len(x) // DATA_PRECISION
    dat = zeros(n_smp)

    for smp, i0 in enumerate(range(0, n_smp * DATA_PRECISION,
                                   DATA_PRECISION)):
        dat[smp] = int.from_bytes(x[i0:i0 + DATA_PRECISION],
                                  byteorder='little', signed=True)

    return dat
Read 24-bit binary data and convert it to a numpy array.
23,544
def _read_chan_name ( orig ) : sensors = orig [ 'sensorLayout' ] [ 1 ] eeg_chan = [ ] for one_sensor in sensors : if one_sensor [ 'type' ] in ( '0' , '1' ) : if one_sensor [ 'name' ] is not None : eeg_chan . append ( one_sensor [ 'name' ] ) else : eeg_chan . append ( one_sensor [ 'number' ] ) pns_chan = [ ] if 'pnsSet' in orig : pnsSet = orig [ 'pnsSet' ] [ 1 ] for one_sensor in pnsSet : pns_chan . append ( one_sensor [ 'name' ] ) return eeg_chan + pns_chan , len ( eeg_chan )
Read channel labels which can be across xml files .
23,545
def write_wonambi(data, filename, subj_id='', dtype='float64'):
    """Write data in the simple Wonambi format (.won header + .dat memmap).

    Parameters
    ----------
    data : instance of ChanTime
        Data to write (only the first trial is used).
    filename : path-like
        Base path; the '.won' and '.dat' suffixes are applied to it.
    subj_id : str
        Subject identifier stored in the header.
    dtype : str
        Numpy dtype of the binary data file.
    """
    filename = Path(filename)
    json_file = filename.with_suffix('.won')
    memmap_file = filename.with_suffix('.dat')

    # header start time is the time of the first sample
    first_sample_time = data.start_time + timedelta(
        seconds=data.axis['time'][0][0])

    hdr = {
        'subj_id': subj_id,
        'start_time': first_sample_time.strftime('%Y-%m-%d %H:%M:%S.%f'),
        's_freq': data.s_freq,
        'chan_name': list(data.axis['chan'][0]),
        'n_samples': int(data.number_of('time')[0]),
        'dtype': dtype,
    }
    with json_file.open('w') as f:
        dump(hdr, f, sort_keys=True, indent=4)

    # binary data: channels x samples, Fortran order
    shape = (len(hdr['chan_name']), hdr['n_samples'])
    mem = memmap(str(memmap_file), dtype, mode='w+', shape=shape, order='F')
    mem[:, :] = data.data[0]
    mem.flush()
Write file in simple Wonambi format .
23,546
def _read_geometry ( surf_file ) : with open ( surf_file , 'rb' ) as f : filebytes = f . read ( ) assert filebytes [ : 3 ] == b'\xff\xff\xfe' i0 = filebytes . index ( b'\x0A\x0A' ) + 2 i1 = i0 + 4 vnum = unpack ( '>i' , filebytes [ i0 : i1 ] ) [ 0 ] i0 = i1 i1 += 4 fnum = unpack ( '>i' , filebytes [ i0 : i1 ] ) [ 0 ] i0 = i1 i1 += 4 * vnum * 3 verts = unpack ( '>' + 'f' * vnum * 3 , filebytes [ i0 : i1 ] ) i0 = i1 i1 += 4 * fnum * 3 faces = unpack ( '>' + 'i' * fnum * 3 , filebytes [ i0 : i1 ] ) verts = asarray ( verts ) . reshape ( vnum , 3 ) faces = asarray ( faces ) . reshape ( fnum , 3 ) return verts , faces
Read a triangular format Freesurfer surface mesh .
23,547
def import_freesurfer_LUT(fs_lut=None):
    """Import a look-up table with colors and labels for brain regions.

    Parameters
    ----------
    fs_lut : path-like, optional
        Path to the LUT file; when None, FreeSurferColorLUT.txt inside
        $FREESURFER_HOME is used.

    Returns
    -------
    tuple of (list of int, list of str, ndarray)
        Region indices, region labels, and an (n, 4) RGBA array.

    Raises
    ------
    OSError
        When no path is given and FREESURFER_HOME is not set.
    """
    if fs_lut is not None:
        lg.info('Reading user-specified lookuptable {}'.format(fs_lut))
        fs_lut = Path(fs_lut)
    else:
        try:
            fs_home = environ['FREESURFER_HOME']
        except KeyError:
            raise OSError('Freesurfer is not installed or FREESURFER_HOME is '
                          'not defined as environmental variable')
        else:
            fs_lut = Path(fs_home) / 'FreeSurferColorLUT.txt'
            lg.info('Reading lookuptable in FREESURFER_HOME {}'.format(fs_lut))

    idx = []
    label = []
    rgba = empty((0, 4))
    converters = (int, str, int, int, int, int)

    with fs_lut.open('r') as f:
        for line in f:
            # skip blank lines and comments
            if len(line) <= 1 or line[0] == '#' or line[0] == '\r':
                continue
            region_id, name, r, g, b, a = [conv(tok) for conv, tok
                                           in zip(converters, line.split())]
            idx.append(region_id)
            label.append(name)
            rgba = vstack((rgba, array([r, g, b, a])))

    return idx, label, rgba
Import Look - up Table with colors and labels for anatomical regions .
23,548
def find_brain_region(self, abs_pos, parc_type='aparc', max_approx=None,
                      exclude_regions=None):
    """Find the name of the brain region at an electrode position.

    Parameters
    ----------
    abs_pos : ndarray of 3 floats
        Position of the electrode in subject space.
    parc_type : str
        Parcellation type, e.g. 'aparc'.
    max_approx : int, optional
        Maximum neighborhood size (in voxels) to search around the
        position; defaults to 3.
    exclude_regions : list of str, optional
        Region names to ignore during the lookup.

    Returns
    -------
    tuple of (str, int)
        Most common region name among the neighboring voxels (or
        '--not found--') and the approximation level actually used.
    """
    # convert the position from subject space to voxel indices
    pos = around(dot(FS_AFFINE, append(abs_pos, 1)))[:3].astype(int)
    lg.debug('Position in the MRI matrix: {}'.format(pos))
    mri_dat, _ = self.read_seg(parc_type)

    if max_approx is None:
        max_approx = 3

    # widen the neighborhood until at least one region is found
    for approx in range(max_approx + 1):
        lg.debug('Trying approx {} out of {}'.format(approx, max_approx))
        regions = _find_neighboring_regions(pos, mri_dat, self.lookuptable,
                                            approx, exclude_regions)
        if regions:
            break

    if regions:
        c_regions = Counter(regions)
        return c_regions.most_common(1)[0][0], approx
    else:
        return '--not found--', approx
Find the name of the brain region in which an electrode is located .
23,549
def read_seg(self, parc_type='aparc'):
    """Read the MRI segmentation volume for this subject.

    Parameters
    ----------
    parc_type : str
        Parcellation type, e.g. 'aparc' or 'aparc.a2009s'.

    Returns
    -------
    tuple of (ndarray, ndarray)
        Segmentation data and its 4x4 affine transform.
    """
    seg_file = self.dir / 'mri' / (parc_type + '+aseg.mgz')
    seg_mri = load(seg_file)
    return seg_mri.get_data(), seg_mri.affine
Read the MRI segmentation .
23,550
def concatenate(data, axis):
    """Concatenate multiple trials into one trial, along any dimension.

    Parameters
    ----------
    data : instance of Data
        Data with multiple trials.
    axis : str
        Axis to concatenate along; the special value 'trial' stacks the
        trials into a new last dimension instead.

    Returns
    -------
    same type as input
        Data with a single trial.
    """
    output = data._copy(axis=False)

    for dataaxis in data.axis:
        output.axis[dataaxis] = empty(1, dtype='O')
        if dataaxis == axis:
            output.axis[dataaxis][0] = cat(data.axis[dataaxis])
        else:
            # non-concatenated axes are assumed identical across trials;
            # the first trial's values are kept
            output.axis[dataaxis][0] = data.axis[dataaxis][0]

        # duplicated values are legal but make selection by value ambiguous
        if len(unique(output.axis[dataaxis][0])) != len(output.axis[dataaxis][0]):
            lg.warning('Axis ' + dataaxis + ' does not have unique values')

    output.data = empty(1, dtype='O')
    if axis == 'trial':
        # stack trials into an extra (last) dimension with its own axis
        new_axis = empty(1, dtype='O')
        n_trial = data.number_of('trial')
        trial_name = ['trial{0:06}'.format(x) for x in range(n_trial)]
        new_axis[0] = asarray(trial_name, dtype='U')
        output.axis['trial_axis'] = new_axis

        all_trial = []
        for one_trial in data.data:
            all_trial.append(expand_dims(one_trial, -1))
        output.data[0] = cat(all_trial, axis=-1)
    else:
        output.data[0] = cat(data.data, axis=output.index_of(axis))

    return output
Concatenate multiple trials into one trials according to any dimension .
23,551
def return_rri(self, begsam, endsam):
    """Return raw, irregularly-timed RRI values.

    Parameters
    ----------
    begsam : int
        Index of the first value to read (0-based, counted after the
        12 header lines).
    endsam : int
        Index after the last value to read.

    Returns
    -------
    ndarray
        Values from the first tab-separated column, one per sample.
    """
    n_out = endsam - begsam
    dat = empty(n_out)
    filled = 0

    with open(self.filename, 'rt') as f:
        # skip the 12 header lines
        for _ in range(12):
            next(f)

        for row, line in enumerate(f):
            if begsam <= row < endsam:
                dat[filled] = float64(line[:line.index('\t')])
                filled += 1
                if filled == n_out:
                    break

    return dat
Return raw irregularly - timed RRI .
23,552
def update(self, checked=False, labels=None, custom_labels=None):
    """Refresh the channel-label table.

    Call this when the list of labels changes or a new dataset is
    loaded.

    Parameters
    ----------
    checked : bool
        Unused; present for Qt signal compatibility.
    labels : list of str, optional
        Original channel labels from the dataset; when None, nothing
        happens.
    custom_labels : list of str, optional
        User-defined replacement labels, aligned with labels; empty or
        missing entries fall back to the original label.
    """
    if labels is not None:
        self.setEnabled(True)
        self.chan_name = labels

        # suspend itemChanged signals while the table is rebuilt
        self.table.blockSignals(True)
        self.table.clearContents()
        self.table.setRowCount(len(self.chan_name))

        for i, label in enumerate(self.chan_name):
            old_label = QTableWidgetItem(label)
            # original labels are selectable but not editable
            old_label.setFlags(Qt.ItemIsSelectable | Qt.ItemIsEnabled)

            if custom_labels is not None and i < len(custom_labels) and custom_labels[i]:
                label_txt = custom_labels[i]
            else:
                label_txt = label
            new_label = QTableWidgetItem(label_txt)

            self.table.setItem(i, 0, old_label)
            self.table.setItem(i, 1, new_label)
        self.table.blockSignals(False)
Use this function when we make changes to the list of labels or when we load a new dataset .
23,553
def peaks(data, method='max', axis='time', limits=None):
    """Return the axis values where the data is at its max or min.

    Parameters
    ----------
    data : instance of Data
        Data with at least the axis to search along.
    method : str
        'max' or 'min'.
    axis : str
        Name of the axis along which to find the extremum.
    limits : tuple of two floats, optional
        Only consider axis values within (low, high).

    Returns
    -------
    instance of Data
        Same as input but without the searched axis; the values are the
        axis coordinates at the extremum.
    """
    idx_axis = data.index_of(axis)
    output = data._copy()
    output.axis.pop(axis)

    for trl in range(data.number_of('trial')):
        values = data.axis[axis][trl]
        dat = data(trial=trl)

        if limits is not None:
            # BUG FIX: previously the 'limits' parameter itself was
            # overwritten with a boolean mask, so from the second trial
            # on, the comparison used a mask instead of the bounds
            outside = (values < limits[0]) | (values > limits[1])
            idx = [slice(None)] * len(data.list_of_axes)
            idx[idx_axis] = outside
            # tuple, not list: list-based fancy indexing is an error in
            # modern numpy
            dat[tuple(idx)] = nan

        if method == 'max':
            peak_val = nanargmax(dat, axis=idx_axis)
        elif method == 'min':
            peak_val = nanargmin(dat, axis=idx_axis)

        output.data[trl] = values[peak_val]

    return output
Return the values of an index where the data is at max or min
23,554
def export_freq(xfreq, filename, desc=None):
    """Write frequency analysis data to CSV.

    Parameters
    ----------
    xfreq : list of dict
        One dict per segment, with 'data' (frequency data) plus the
        metadata 'start', 'end', 'duration', 'n_stitch', 'stage',
        'cycle' and 'name'.
    filename : str
        Output CSV path.
    desc : dict, optional
        Descriptives ('mean', 'sd', 'mean_log', 'sd_log') written as
        summary rows before the data.
    """
    heading_row_1 = ['Segment index',
                     'Start time',
                     'End time',
                     'Duration',
                     'Stitches',
                     'Stage',
                     'Cycle',
                     'Event type',
                     'Channel',
                     ]
    spacer = [''] * (len(heading_row_1) - 1)
    # frequency bins, taken from the first segment
    freq = list(xfreq[0]['data'].axis['freq'][0])

    with open(filename, 'w', newline='') as f:
        lg.info('Writing to ' + str(filename))
        csv_file = writer(f)
        csv_file.writerow(['Wonambi v{}'.format(__version__)])
        csv_file.writerow(heading_row_1 + freq)

        if desc:
            csv_file.writerow(['Mean'] + spacer + list(desc['mean']))
            csv_file.writerow(['SD'] + spacer + list(desc['sd']))
            csv_file.writerow(['Mean of ln'] + spacer + list(desc['mean_log']))
            csv_file.writerow(['SD of ln'] + spacer + list(desc['sd_log']))

        idx = 0
        for seg in xfreq:
            # one row per (segment, channel) pair
            for chan in seg['data'].axis['chan'][0]:
                idx += 1
                cyc = None
                if seg['cycle'] is not None:
                    cyc = seg['cycle'][2]
                data_row = list(seg['data'](chan=chan)[0])
                csv_file.writerow([idx,
                                   seg['start'],
                                   seg['end'],
                                   seg['duration'],
                                   seg['n_stitch'],
                                   seg['stage'],
                                   cyc,
                                   seg['name'],
                                   chan,
                                   ] + data_row)
Write frequency analysis data to CSV .
23,555
def export_freq_band(xfreq, bands, filename):
    """Write frequency analysis data to CSV, summed into pre-defined bands.

    Parameters
    ----------
    xfreq : list of dict
        One dict per segment, with 'data' (frequency data) plus
        metadata ('start', 'end', 'duration', 'n_stitch', 'stage',
        'cycle', 'name').
    bands : list of tuple of float
        (low, high) limits of each frequency band.
    filename : str
        Output CSV path.
    """
    heading_row_1 = ['Segment index',
                     'Start time',
                     'End time',
                     'Duration',
                     'Stitches',
                     'Stage',
                     'Cycle',
                     'Event type',
                     'Channel',
                     ]
    spacer = [''] * (len(heading_row_1) - 1)
    band_hdr = [str(b1) + '-' + str(b2) for b1, b2 in bands]

    xband = xfreq.copy()
    # per segment, compute the power in each band (one dict per channel)
    for seg in xband:
        bandlist = []
        for i, b in enumerate(bands):
            pwr, _ = band_power(seg['data'], b)
            bandlist.append(pwr)
        seg['band'] = bandlist

    # one row per (segment, channel) for the descriptive statistics
    as_matrix = asarray([
        [x['band'][y][chan] for y in range(len(x['band']))]
        for x in xband for chan in x['band'][0].keys()])
    desc = get_descriptives(as_matrix)

    with open(filename, 'w', newline='') as f:
        lg.info('Writing to ' + str(filename))
        csv_file = writer(f)
        csv_file.writerow(['Wonambi v{}'.format(__version__)])
        csv_file.writerow(heading_row_1 + band_hdr)
        csv_file.writerow(['Mean'] + spacer + list(desc['mean']))
        csv_file.writerow(['SD'] + spacer + list(desc['sd']))
        csv_file.writerow(['Mean of ln'] + spacer + list(desc['mean_log']))
        csv_file.writerow(['SD of ln'] + spacer + list(desc['sd_log']))

        idx = 0
        for seg in xband:
            for chan in seg['band'][0].keys():
                idx += 1
                cyc = None
                if seg['cycle'] is not None:
                    cyc = seg['cycle'][2]
                data_row = list([seg['band'][x][chan]
                                 for x in range(len(seg['band']))])
                csv_file.writerow([idx,
                                   seg['start'],
                                   seg['end'],
                                   seg['duration'],
                                   seg['n_stitch'],
                                   seg['stage'],
                                   cyc,
                                   seg['name'],
                                   chan,
                                   ] + data_row)
Write frequency analysis data to CSV by pre - defined band .
23,556
def create_empty_annotations(xml_file, dataset):
    """Create an empty annotation file for a dataset.

    Parameters
    ----------
    xml_file : path-like
        Annotation file to create.
    dataset : instance of Dataset
        Dataset the annotations refer to; provides filename, start
        time, number of samples and sampling frequency.

    Notes
    -----
    The start time is made time-zone naive.
    """
    hdr = dataset.header
    start_time = hdr['start_time'].replace(tzinfo=None)
    last_sec = int(hdr['n_samples'] / hdr['s_freq'])

    root = Element('annotations')
    root.set('version', VERSION)
    info = SubElement(root, 'dataset')

    fields = (
        ('filename', str(dataset.filename)),
        ('path', str(dataset.filename)),
        ('start_time', start_time.isoformat()),
        ('first_second', str(0)),
        ('last_second', str(last_sec)),
    )
    for tag, text in fields:
        child = SubElement(info, tag)
        child.text = text

    pretty = parseString(tostring(root))
    with Path(xml_file).open('w') as f:
        f.write(pretty.toxml())
Create an empty annotation file .
23,557
def create_annotation(xml_file, from_fasst):
    """Create annotations by importing from a FASST sleep-scoring file.

    Parameters
    ----------
    xml_file : path-like
        Annotation file to create.
    from_fasst : path-like
        FASST .mat file containing the 'D' struct with the scoring.

    Returns
    -------
    instance of Annotations
        The newly created annotations.

    Raises
    ------
    UnrecognizedFormat
        When the .mat file does not contain the expected FASST struct.
    """
    xml_file = Path(xml_file)
    try:
        mat = loadmat(str(from_fasst), variable_names='D',
                      struct_as_record=False, squeeze_me=True)
    except ValueError:
        raise UnrecognizedFormat(str(from_fasst) +
                                 ' does not look like a FASST .mat file')

    D = mat['D']
    info = D.other.info
    score = D.other.CRC.score

    # FASST stores the start hour as [hour, minute, second.fraction]
    microsecond, second = modf(info.hour[2])
    start_time = datetime(*info.date, int(info.hour[0]), int(info.hour[1]),
                          int(second), int(microsecond * 1e6))
    # score rows: 0 = stages, 1 = rater name, 2 = epoch length,
    # 3 = first second
    first_sec = score[3, 0][0]
    last_sec = score[0, 0].shape[0] * score[2, 0]

    root = Element('annotations')
    root.set('version', VERSION)

    info = SubElement(root, 'dataset')
    x = SubElement(info, 'filename')
    x.text = D.other.info.fname
    x = SubElement(info, 'path')
    x.text = D.other.info.fname
    x = SubElement(info, 'start_time')
    x.text = start_time.isoformat()

    x = SubElement(info, 'first_second')
    x.text = str(int(first_sec))
    x = SubElement(info, 'last_second')
    x.text = str(int(last_sec))

    xml = parseString(tostring(root))
    with xml_file.open('w') as f:
        f.write(xml.toxml())

    annot = Annotations(xml_file)

    # one rater per FASST scorer, each with its own epoch length
    n_raters = score.shape[1]
    for i_rater in range(n_raters):
        rater_name = score[1, i_rater]
        epoch_length = int(score[2, i_rater])
        annot.add_rater(rater_name, epoch_length=epoch_length)

        for epoch_start, epoch in enumerate(score[0, i_rater]):
            # NaN marks unscored epochs
            if isnan(epoch):
                continue
            annot.set_stage_for_epoch(epoch_start * epoch_length,
                                      FASST_STAGE_KEY[int(epoch)],
                                      save=False)

    annot.save()
    return annot
Create annotations by importing from FASST sleep scoring file .
23,558
def update_annotation_version(xml_file):
    """Upgrade an annotation file in place to the current format version.

    Parameters
    ----------
    xml_file : path to file
        Annotation file to upgrade (always rewritten).

    Notes
    -----
    Version 4 split the single <time> of a marker into separate start
    and end times plus a channel field; version 5 renamed markers to
    bookmarks.
    """
    with open(xml_file, 'r') as f:
        content = f.read()

    found = search('<annotations version="([0-9]*)">', content)
    version = int(found.groups()[0])

    if version < 4:
        content = sub(
            '<marker><name>(.*?)</name><time>(.*?)</time></marker>',
            r'<marker><marker_name>\g<1></marker_name><marker_start>\g<2></marker_start><marker_end>\g<2></marker_end><marker_chan/></marker>',
            content)

    if version < 5:
        content = content.replace('marker', 'bookmark')
        content = sub('<annotations version="[0-9]*">',
                      '<annotations version="5">', content)

    with open(xml_file, 'w') as f:
        f.write(content)
Update the fields that have changed over different versions .
23,559
def load(self):
    """Load the annotation xml from file and return its root element."""
    lg.info('Loading ' + str(self.xml_file))
    # make sure the file is in the current format before parsing
    update_annotation_version(self.xml_file)
    return parse(self.xml_file).getroot()
Load xml from file .
23,560
def save(self):
    """Write the annotations to the xml file.

    The current rater, if any, is stamped with the modification time.
    """
    rater = self.rater
    if rater is not None:
        rater.set('modified', datetime.now().isoformat())

    pretty = parseString(tostring(self.root))
    with open(self.xml_file, 'w') as f:
        f.write(pretty.toxml())
Save xml to file .
23,561
def add_bookmark(self, name, time, chan=''):
    """Add a new bookmark.

    Parameters
    ----------
    name : str
        Name of the bookmark.
    time : tuple or list of two floats
        Start and end times, in s from the start of the recording.
    chan : str or list of str
        Channel(s) the bookmark refers to (lists are joined with ', ').

    Raises
    ------
    IndexError
        When there is no rater.
    """
    try:
        bookmarks = self.rater.find('bookmarks')
    except AttributeError:
        raise IndexError('You need to have at least one rater')

    new_bookmark = SubElement(bookmarks, 'bookmark')

    name_elem = SubElement(new_bookmark, 'bookmark_name')
    name_elem.text = name

    start_elem = SubElement(new_bookmark, 'bookmark_start')
    start_elem.text = str(time[0])
    end_elem = SubElement(new_bookmark, 'bookmark_end')
    end_elem.text = str(time[1])

    if isinstance(chan, (tuple, list)):
        chan = ', '.join(chan)
    chan_elem = SubElement(new_bookmark, 'bookmark_chan')
    chan_elem.text = chan

    self.save()
Add a new bookmark
23,562
def remove_bookmark(self, name=None, time=None, chan=None):
    """Remove bookmarks matching the given criteria.

    Called without arguments, it removes ALL the bookmarks.

    Parameters
    ----------
    name : str, optional
        Only remove bookmarks with this name.
    time : tuple of two floats, optional
        Only remove bookmarks overlapping this (start, end) window.
    chan : str, optional
        Only remove bookmarks with this channel string.
    """
    bookmarks = self.rater.find('bookmarks')

    # BUG FIX: iterate over a snapshot; removing children from an
    # Element while iterating it skips the sibling after each removal
    for m in list(bookmarks):
        bookmark_name = m.find('bookmark_name').text
        bookmark_start = float(m.find('bookmark_start').text)
        bookmark_end = float(m.find('bookmark_end').text)
        bookmark_chan = m.find('bookmark_chan').text
        if bookmark_chan is None:  # xml stores an empty string as None
            bookmark_chan = ''

        name_cond = name is None or bookmark_name == name
        chan_cond = chan is None or bookmark_chan == chan
        if time is None:
            time_cond = True
        else:
            time_cond = (time[0] <= bookmark_end and
                         time[1] >= bookmark_start)

        if name_cond and time_cond and chan_cond:
            bookmarks.remove(m)

    self.save()
If called without arguments, it removes ALL the bookmarks.
23,563
def remove_event_type(self, name):
    """Remove an event type (and all its events) based on its name.

    Parameters
    ----------
    name : str
        Name of the event type to remove.
    """
    if name not in self.event_types:
        lg.info('Event type ' + name + ' was not found.')

    events = self.rater.find('events')
    # iterate over a snapshot, since 'events' is modified in the loop
    for e_type in list(events):
        if e_type.get('type') == name:
            events.remove(e_type)

    self.save()
Remove event type based on name .
23,564
def remove_event(self, name=None, time=None, chan=None):
    """Remove events matching the given criteria.

    Parameters
    ----------
    name : str, optional
        Only remove events of this type.
    time : tuple of two floats, optional
        Only remove events whose (start, end) matches this, within
        floating-point tolerance.
    chan : str or list of str, optional
        Only remove events on this channel (lists are joined with ', ').
    """
    events = self.rater.find('events')
    if name is not None:
        pattern = "event_type[@type='" + name + "']"
    else:
        pattern = "event_type"

    if chan is not None:
        if isinstance(chan, (tuple, list)):
            chan = ', '.join(chan)

    for e_type in list(events.iterfind(pattern)):
        # BUG FIX: iterate over a snapshot of the children; removing
        # from e_type while iterating it skips the event after each
        # removed one
        for e in list(e_type):
            event_start = float(e.find('event_start').text)
            event_end = float(e.find('event_end').text)
            event_chan = e.find('event_chan').text

            if time is None:
                time_cond = True
            else:
                time_cond = (allclose(time[0], event_start) and
                             allclose(time[1], event_end))

            chan_cond = chan is None or event_chan == chan

            if time_cond and chan_cond:
                e_type.remove(e)

    self.save()
Remove events matching the given name, time window and/or channel.
23,565
def epochs(self):
    """Yield each scored epoch as a dict.

    Yields
    ------
    dict
        With 'start' and 'end' (int, in s), 'stage' and 'quality'.

    Raises
    ------
    IndexError
        When there is no rater.
    """
    if self.rater is None:
        raise IndexError('You need to have at least one rater')

    for one_epoch in self.rater.iterfind('stages/epoch'):
        yield {
            'start': int(one_epoch.find('epoch_start').text),
            'end': int(one_epoch.find('epoch_end').text),
            'stage': one_epoch.find('stage').text,
            'quality': one_epoch.find('quality').text,
        }
Get epochs as generator
23,566
def get_stage_for_epoch(self, epoch_start, window_length=None, attr='stage'):
    """Return the stage (or quality) of one specific epoch.

    Parameters
    ----------
    epoch_start : int
        Start time of the epoch of interest, in s.
    window_length : int, optional
        When given and shorter than the scored epochs, also match the
        epoch that merely contains epoch_start.
    attr : str
        'stage' or 'quality'.

    Returns
    -------
    str or None
        Value of the requested attribute, or None when no epoch matches.
    """
    for epoch in self.epochs:
        if epoch['start'] == epoch_start:
            return epoch[attr]

        if window_length is not None:
            epoch_length = epoch['end'] - epoch['start']
            offset = epoch_start - epoch['start']
            if window_length < epoch_length and 0 <= offset < epoch_length:
                return epoch[attr]
Return stage for one specific epoch .
23,567
def set_stage_for_epoch(self, epoch_start, name, attr='stage', save=True):
    """Change the stage (or quality) of one specific epoch.

    Parameters
    ----------
    epoch_start : int
        Start time of the epoch, in s.
    name : str
        New value for the attribute.
    attr : str
        'stage' or 'quality'.
    save : bool
        Whether to write the annotations to file immediately.

    Raises
    ------
    IndexError
        When there is no rater.
    KeyError
        When no epoch starts at epoch_start.
    """
    if self.rater is None:
        raise IndexError('You need to have at least one rater')

    for one_epoch in self.rater.iterfind('stages/epoch'):
        if int(one_epoch.find('epoch_start').text) != epoch_start:
            continue
        one_epoch.find(attr).text = name
        if save:
            self.save()
        return

    raise KeyError('epoch starting at ' + str(epoch_start) + ' not found')
Change the stage for one specific epoch .
23,568
def set_cycle_mrkr(self, epoch_start, end=False):
    """Mark an epoch start as a sleep-cycle start (or end) marker.

    Parameters
    ----------
    epoch_start : int
        Start time of the epoch, in s.
    end : bool
        When True, mark a cycle end instead of a cycle start.

    Raises
    ------
    IndexError
        When there is no rater.
    KeyError
        When no epoch starts at epoch_start.
    """
    if self.rater is None:
        raise IndexError('You need to have at least one rater')

    tag = 'cyc_end' if end else 'cyc_start'

    for one_epoch in self.rater.iterfind('stages/epoch'):
        if int(one_epoch.find('epoch_start').text) == epoch_start:
            cycles = self.rater.find('cycles')
            marker = SubElement(cycles, tag)
            marker.text = str(int(epoch_start))
            self.save()
            return

    raise KeyError('epoch starting at ' + str(epoch_start) + ' not found')
Mark epoch start as cycle start or end .
23,569
def remove_cycle_mrkr(self, epoch_start):
    """Remove the cycle marker (start or end) at epoch_start.

    Parameters
    ----------
    epoch_start : int
        Start time, in s, of the epoch whose marker is removed.

    Raises
    ------
    IndexError
        When there is no rater.
    KeyError
        When no cycle marker exists at epoch_start.
    """
    if self.rater is None:
        raise IndexError('You need to have at least one rater')

    cycles = self.rater.find('cycles')
    # check start markers first, then end markers; remove only the
    # first match
    for tag in ('cyc_start', 'cyc_end'):
        for marker in cycles.iterfind(tag):
            if int(marker.text) == epoch_start:
                cycles.remove(marker)
                self.save()
                return

    raise KeyError('cycle marker at ' + str(epoch_start) + ' not found')
Remove cycle marker at epoch_start .
23,570
def clear_cycles(self):
    """Remove all cycle markers for the current rater.

    Raises
    ------
    IndexError
        When there is no rater.
    """
    if self.rater is None:
        raise IndexError('You need to have at least one rater')

    cycles = self.rater.find('cycles')
    # iterate over a snapshot, since the element is modified in the loop
    for marker in list(cycles):
        cycles.remove(marker)

    self.save()
Remove all cycle markers in current rater .
23,571
def get_cycles(self):
    """Return the sleep cycle start and end times.

    Returns
    -------
    list of tuple or None
        One (start, end, index) tuple per cycle (times in s, index is
        1-based), or None when no cycle markers are defined.

    Raises
    ------
    ValueError
        When the first cycle has no start marker, when one cycle has
        more than one end marker, or when the last cycle has no end.
    """
    cycles = self.rater.find('cycles')
    # NOTE(review): Element truthiness is False when the element has no
    # children, so an empty <cycles> is also treated as "no cycles"
    if not cycles:
        return None

    starts = sorted([float(mrkr.text) for mrkr in cycles.findall('cyc_start')])
    ends = sorted([float(mrkr.text) for mrkr in cycles.findall('cyc_end')])
    cyc_list = []

    if not starts or not ends:
        return None

    if all(i < starts[0] for i in ends):
        raise ValueError('First cycle has no start.')

    # pair each start with the single end before the next start; a start
    # without its own end borrows the next start as its implicit end
    for (this_start, next_start) in zip(starts, starts[1:] + [inf]):
        end_between_starts = [end for end in ends
                              if this_start < end <= next_start]
        if len(end_between_starts) > 1:
            raise ValueError('Found more than one cycle end for same '
                             'cycle')
        if end_between_starts:
            one_cycle = (this_start, end_between_starts[0])
        else:
            one_cycle = (this_start, next_start)
        # only possible for the last start, which has no real end
        if one_cycle[1] == inf:
            raise ValueError('Last cycle has no end.')
        cyc_list.append(one_cycle)

    output = []
    for i, j in enumerate(cyc_list):
        cyc = j[0], j[1], i + 1
        output.append(cyc)

    return output
Return the cycle start and end times .
23,572
def switch(self, time=None):
    """Compute the switch parameter, i.e. the number of stage shifts.

    Parameters
    ----------
    time : tuple of two floats, optional
        Restrict the computation to epochs within this window.

    Returns
    -------
    int
        Number of transitions between different stages; epochs with
        other stages (e.g. Unknown) are ignored.
    """
    stage_code = {'NREM1': 1, 'NREM2': 2, 'NREM3': 3, 'REM': 5, 'Wake': 0}
    hypno = [stage_code[ep['stage']]
             for ep in self.get_epochs(time=time)
             if ep['stage'] in stage_code]

    # every non-zero first difference is one stage shift
    return sum(asarray(diff(hypno), dtype=bool))
Obtain the switch parameter, i.e. the number of times the stage shifts.
23,573
def slp_frag(self, time=None):
    """Compute sleep fragmentation: number of shifts to a lighter stage.

    Transitions from NREM3 to REM are not counted as fragmentation.

    Parameters
    ----------
    time : tuple of two floats, optional
        Restrict the computation to epochs within this window.

    Returns
    -------
    int
        Number of shifts to a lighter stage.
    """
    # depth of each stage; REM is treated as depth 2 (like NREM2)
    depth = {'Wake': 0, 'NREM1': 1, 'NREM2': 2, 'NREM3': 3, 'REM': 2}
    stages = [ep['stage'] for ep in self.get_epochs(time=time)
              if ep['stage'] in depth]
    levels = [depth[s] for s in stages]

    # count every decrease in depth
    frag = sum(asarray(clip(diff(levels), a_min=None, a_max=0), dtype=bool))

    # NREM3 -> REM looks like a decrease (3 -> 2) but is not fragmentation
    n3_to_rem = sum(1 for prev, nxt in zip(stages[:-1], stages[1:])
                    if prev == 'NREM3' and nxt == 'REM')

    return frag - n3_to_rem
Obtain the sleep fragmentation parameter, i.e. the number of stage shifts to a lighter stage.
23,574
def export(self, file_to_export, xformat='csv'):
    """Export epochwise annotations to file.

    Parameters
    ----------
    file_to_export : path to file
        destination file
    xformat : str
        output format: 'csv' (default), 'remlogic' or 'remlogic_fr'
        (RemLogic export with French column headers)
    """
    if 'csv' == xformat:
        # newline='' lets the csv module control line endings itself
        with open(file_to_export, 'w', newline='') as f:
            csv_file = writer(f)
            csv_file.writerow(['Wonambi v{}'.format(__version__)])
            csv_file.writerow(('clock start time', 'start', 'end',
                               'stage'))
            for epoch in self.epochs:
                epoch_time = (self.start_time +
                              timedelta(seconds=epoch['start']))
                csv_file.writerow((epoch_time.strftime('%H:%M:%S'),
                                   epoch['start'],
                                   epoch['end'],
                                   epoch['stage']))
    if 'remlogic' in xformat:
        columns = 'Time [hh:mm:ss]\tEvent\tDuration[s]\n'
        if 'remlogic_fr' == xformat:
            columns = 'Heure [hh:mm:ss]\tEvénement\tDurée[s]\n'
        patient_id = splitext(basename(self.dataset))[0]
        rec_date = self.start_time.strftime('%d/%m/%Y')
        # invert the mapping so internal stage names index the RemLogic
        # labels; stages RemLogic cannot score become SLEEP-UNSCORED
        stkey = {v: k for k, v in REMLOGIC_STAGE_KEY.items()}
        stkey['Artefact'] = 'SLEEP-UNSCORED'
        stkey['Unknown'] = 'SLEEP-UNSCORED'
        stkey['Movement'] = 'SLEEP-UNSCORED'
        with open(file_to_export, 'w') as f:
            f.write('RemLogic Event Export\n')
            f.write('Patient:\t' + patient_id + '\n')
            f.write('Patient ID:\t' + patient_id + '\n')
            f.write('Recording Date:\t' + rec_date + '\n')
            f.write('\n')
            f.write('Events Included:\n')
            # list each exported label that actually occurs, once, sorted
            for i in sorted(set([stkey[x['stage']] for x in self.epochs])):
                f.write(i + '\n')
            f.write('\n')
            f.write(columns)
            for epoch in self.epochs:
                epoch_time = (self.start_time +
                              timedelta(seconds=epoch['start']))
                f.write((epoch_time.strftime('%Y-%m-%dT%H:%M:%S.000000') +
                         '\t' + stkey[epoch['stage']] +
                         '\t' + str(self.epoch_length) + '\n'))
Export epochwise annotations to csv file .
23,575
def create(self):
    """Create the widget layout with all the information.

    Builds two group boxes: 'Dataset' with static facts about the open
    recording (filename button, sampling frequency, channel count,
    start/end times) and 'View' with the current display settings
    (window start/length, scaling, distance between traces).  The
    widgets are kept as idx_* attributes so other methods can update
    their text later.
    """
    b0 = QGroupBox('Dataset')
    form = QFormLayout()
    b0.setLayout(form)
    # the "filename" row doubles as the button that opens a new dataset
    open_rec = QPushButton('Open Dataset...')
    open_rec.clicked.connect(self.open_dataset)
    open_rec.setToolTip('Click here to open a new recording')
    self.idx_filename = open_rec
    self.idx_s_freq = QLabel('')
    self.idx_n_chan = QLabel('')
    self.idx_start_time = QLabel('')
    self.idx_end_time = QLabel('')
    form.addRow('Filename:', self.idx_filename)
    form.addRow('Sampl. Freq:', self.idx_s_freq)
    form.addRow('N. Channels:', self.idx_n_chan)
    form.addRow('Start Time: ', self.idx_start_time)
    form.addRow('End Time: ', self.idx_end_time)
    b1 = QGroupBox('View')
    form = QFormLayout()
    b1.setLayout(form)
    self.idx_start = QLabel('')
    self.idx_start.setToolTip('Start time in seconds from the beginning of'
                              ' the recordings')
    self.idx_length = QLabel('')
    self.idx_length.setToolTip('Duration of the time window in seconds')
    self.idx_scaling = QLabel('')
    self.idx_scaling.setToolTip('Global scaling for all the channels')
    self.idx_distance = QLabel('')
    self.idx_distance.setToolTip('Visual distances between the traces of '
                                 'individual channels')
    form.addRow('Start Time:', self.idx_start)
    form.addRow('Length:', self.idx_length)
    form.addRow('Scaling:', self.idx_scaling)
    form.addRow('Distance:', self.idx_distance)
    layout = QVBoxLayout()
    layout.addWidget(b0)
    layout.addWidget(b1)
    self.setLayout(layout)
Create the widget layout with all the information .
23,576
def open_dataset(self, recent=None, debug_filename=None, bids=False):
    """Open a new dataset.

    Parameters
    ----------
    recent : path to file, optional
        recent file to load directly, skipping the file dialog
    debug_filename : path to file, optional
        file to open directly, used for debugging/testing
    bids : bool
        not used in this body -- presumably kept for caller
        compatibility; confirm against callers
    """
    if recent:
        filename = recent
    elif debug_filename is not None:
        filename = debug_filename
    else:
        # start the dialog in the folder of the current dataset, falling
        # back to the configured recording directory
        try:
            dir_name = dirname(self.filename)
        except (AttributeError, TypeError):
            dir_name = self.parent.value('recording_dir')
        # some formats are folders, others single files: ask first
        file_or_dir = choose_file_or_dir()
        if file_or_dir == 'dir':
            filename = QFileDialog.getExistingDirectory(self,
                                                        'Open directory',
                                                        dir_name)
        elif file_or_dir == 'file':
            filename, _ = QFileDialog.getOpenFileName(self, 'Open file',
                                                      dir_name)
        elif file_or_dir == 'abort':
            return
    # empty string means the user cancelled the dialog
    if filename == '':
        return
    # opening over an existing dataset resets the whole interface first
    if self.dataset is not None:
        self.parent.reset()
    self.parent.statusBar().showMessage('Reading dataset: ' +
                                        basename(filename))
    lg.info('Reading dataset: ' + str(filename))
    self.filename = filename
    self.dataset = Dataset(filename)
    self.action['export'].setEnabled(True)
    self.parent.statusBar().showMessage('')
    self.parent.update()
Open a new dataset .
23,577
def display_dataset(self):
    """Update the widget with information about the dataset.

    Fills the window title and the idx_* labels from the header of the
    currently open dataset.
    """
    header = self.dataset.header
    filename = basename(self.filename)
    self.parent.setWindowTitle(filename)
    # shorten very long names so they fit in the label
    self.idx_filename.setText(short_strings(filename))
    self.idx_s_freq.setText(str(header['s_freq']))
    self.idx_n_chan.setText(str(len(header['chan_name'])))
    begin = header['start_time']
    duration = timedelta(seconds=header['n_samples'] / header['s_freq'])
    self.idx_start_time.setText(begin.strftime('%b-%d %H:%M:%S'))
    self.idx_end_time.setText((begin + duration).strftime('%b-%d %H:%M:%S'))
Update the widget with information about the dataset .
23,578
def display_view(self):
    """Update information about the size of the traces.

    Copies the current view settings from the parent into the idx_*
    labels.
    """
    value = self.parent.value
    self.idx_start.setText(str(value('window_start')))
    self.idx_length.setText(str(value('window_length')))
    self.idx_scaling.setText(str(value('y_scale')))
    self.idx_distance.setText(str(value('y_distance')))
Update information about the size of the traces .
23,579
def reset(self):
    """Reset widget to original state.

    Clears the stored dataset/filename and blanks every label; the
    filename button reverts to its open-a-recording prompt.
    """
    self.filename = None
    self.dataset = None
    self.idx_filename.setText('Open Recordings...')
    for label in (self.idx_s_freq, self.idx_n_chan, self.idx_start_time,
                  self.idx_end_time, self.idx_scaling, self.idx_distance,
                  self.idx_length, self.idx_start):
        label.setText('')
Reset widget to original state .
23,580
def update(self):
    """Get info from dataset before opening dialog.

    Stores the filename and channel names of the open dataset and fills
    the channel selector with one item per channel.
    """
    dataset = self.parent.info.dataset
    self.filename = dataset.filename
    self.chan = dataset.header['chan_name']
    for one_chan in self.chan:
        self.idx_chan.addItem(one_chan)
Get info from dataset before opening dialog .
23,581
def create_channels(chan_name=None, n_chan=None):
    """Create an instance of Channels with random xyz coordinates.

    Parameters
    ----------
    chan_name : list of str, optional
        names of the channels; when given, n_chan is derived from it
    n_chan : int, optional
        number of channels; names are generated when chan_name is absent

    Returns
    -------
    instance of Channels
        channels with coordinates drawn from a normal distribution
        (scaled by 10, rounded to two decimals)

    Raises
    ------
    TypeError
        when neither chan_name nor n_chan is specified
    """
    if chan_name is None and n_chan is None:
        raise TypeError('You need to specify either the channel names (chan_name) or the number of channels (n_chan)')
    if chan_name is None:
        chan_name = _make_chan_name(n_chan)
    else:
        n_chan = len(chan_name)
    xyz = round(random.randn(n_chan, 3) * 10, decimals=2)
    return Channels(chan_name, xyz)
Create instance of Channels with random xyz coordinates
23,582
def _color_noise(x, s_freq, coef=0):
    """Add some color to the noise by changing the power spectrum.

    Parameters
    ----------
    x : ndarray
        1d vector of (white) noise samples
    s_freq : int
        sampling frequency
    coef : float
        coefficient passed to the frequency-weighting helper ``f`` --
        semantics depend on that external helper; confirm against its
        definition

    Returns
    -------
    ndarray
        noise with the rescaled power spectrum
    """
    # decompose into magnitude and phase; only the magnitude is rescaled
    y = fft(x)
    ph = angle(y)
    m = abs(y)
    # positive frequency axis; drop DC and the top bin
    freq = linspace(0, s_freq / 2, int(len(m) / 2) + 1)
    freq = freq[1:-1]
    m1 = zeros(len(m))
    # weight the positive-frequency magnitudes by f(freq, coef)
    m1[1:int(len(m) / 2)] = m[1:int(len(m) / 2)] * f(freq, coef)
    # mirror onto the negative frequencies so the output stays real
    m1[int(len(m1) / 2 + 1):] = m1[1:int(len(m1) / 2)][::-1]
    y1 = m1 * exp(1j * ph)
    # any imaginary residue after ifft is numerical noise only
    return real(ifft(y1))
Add some color to the noise by changing the power spectrum .
23,583
def _read_openephys ( openephys_file ) : root = ElementTree . parse ( openephys_file ) . getroot ( ) channels = [ ] for recording in root : s_freq = float ( recording . attrib [ 'samplerate' ] ) for processor in recording : for channel in processor : channels . append ( channel . attrib ) return s_freq , channels
Read the channel labels and their respective files from the Continuous_Data.openephys file.
23,584
def _read_date ( settings_file ) : root = ElementTree . parse ( settings_file ) . getroot ( ) for e0 in root : if e0 . tag == 'INFO' : for e1 in e0 : if e1 . tag == 'DATE' : break return datetime . strptime ( e1 . text , '%d %b %Y %H:%M:%S' )
Get the data from the settings . xml file
23,585
def _read_n_samples(channel_file):
    """Calculate the number of samples based on the file size.

    Parameters
    ----------
    channel_file : Path
        one .continuous file

    Returns
    -------
    int
        number of complete data blocks in the file
    int
        number of samples (blocks times samples per block)
    """
    # payload size = file size minus the fixed text header
    payload = channel_file.stat().st_size - HDR_LENGTH
    n_blocks = int(payload / BLK_SIZE)
    n_samples = n_blocks * BLK_LENGTH
    return n_blocks, n_samples
Calculate the number of samples based on the file size
23,586
def _read_header(filename):
    """Read the text header of one data file.

    Parameters
    ----------
    filename : Path
        file whose first HDR_LENGTH bytes hold the text header

    Returns
    -------
    dict
        key/value pairs from the header
    """
    with filename.open('rb') as f:
        text = f.read(HDR_LENGTH).decode()
    header = {}
    for line in text.split('\n'):
        if '=' not in line:
            continue
        key, value = line.split(' = ')
        # lines look like "header.key = value;": drop the "header."
        # prefix from the key and the trailing semicolon from the value
        header[key.strip()[7:]] = value.strip()[:-1]
    return header
Read the text header for each file
23,587
def _check_header(channel_file, s_freq):
    """Make sure one file's header is consistent with the session info.

    Parameters
    ----------
    channel_file : Path
        one .continuous file
    s_freq : float
        sampling frequency from the session table of contents

    Returns
    -------
    float
        scaling factor (bitVolts) for this channel
    """
    hdr = _read_header(channel_file)
    # every file must agree with the format constants and session rate
    for key, expected in (('header_bytes', HDR_LENGTH),
                          ('blockLength', BLK_LENGTH),
                          ('sampleRate', s_freq)):
        assert int(hdr[key]) == expected
    return float(hdr['bitVolts'])
For each file make sure that the header is consistent with the information in the text file .
23,588
def all_to_annot(self, annot, names=None):
    """Write all detection-vs-standard events to the annotation file.

    Convenience wrapper around to_annot for the four categories: true
    positives (detection), true positives (standard), false positives
    and false negatives.

    Parameters
    ----------
    annot : instance of Annotations
        annotation file to which the events are written
    names : list of str, optional
        event type names, in order: TP (detection), TP (standard), FP,
        FN; defaults to ['TPd', 'TPs', 'FP', 'FN']

    Notes
    -----
    The original signature used a mutable default (``names=[...]``);
    it was never mutated, but a None sentinel avoids the shared-default
    pitfall while keeping the same default labels.
    """
    if names is None:
        names = ['TPd', 'TPs', 'FP', 'FN']
    self.to_annot(annot, 'tp_det', names[0])
    self.to_annot(annot, 'tp_std', names[1])
    self.to_annot(annot, 'fp', names[2])
    self.to_annot(annot, 'fn', names[3])
Convenience function to write all events to XML by category showing overlapping TP detection and TP standard .
23,589
def convert_sample_to_video_time(sample, orig_s_freq, sampleStamp,
                                 sampleTime):
    """Convert a sample number to video time using snc information.

    Parameters
    ----------
    sample : int
        sample number of interest
    orig_s_freq : float
        nominal sampling frequency, used outside the synced interval
    sampleStamp : list of int
        sample numbers of the sync points (ascending)
    sampleTime : list of datetime
        video times of the sync points

    Returns
    -------
    datetime
        video time corresponding to the sample
    """
    stamps = asarray(sampleStamp)
    if sample < sampleStamp[0]:
        # before the first sync point: extrapolate at the nominal rate
        anchor = 0
        rate = orig_s_freq
    elif sample > sampleStamp[-1]:
        # after the last sync point: extrapolate at the nominal rate
        anchor = len(sampleStamp) - 1
        rate = orig_s_freq
    else:
        # interpolate between the two sync points bracketing the sample
        anchor = where(stamps <= sample)[0][-1]
        after = where(stamps >= sample)[0][0]
        if anchor == after:
            # the sample falls exactly on a sync point
            return sampleTime[anchor]
        span = (sampleTime[after] - sampleTime[anchor]).total_seconds()
        rate = (sampleStamp[after] - sampleStamp[anchor]) / span
    offset = timedelta(seconds=(sample - sampleStamp[anchor]) / rate)
    return sampleTime[anchor] + offset
Convert sample number to video time using snc information .
23,590
def _find_channels ( note ) : id_ch = note . index ( 'ChanNames' ) chan_beg = note . index ( '(' , id_ch ) chan_end = note . index ( ')' , chan_beg ) note_with_chan = note [ chan_beg + 1 : chan_end ] return [ x . strip ( '" ' ) for x in note_with_chan . split ( ',' ) ]
Find the channel names within a string .
23,591
def _find_start_time(hdr, s_freq):
    """Find the start time, usually in STC, falling back to ERD.

    Parameters
    ----------
    hdr : dict
        header with 'stc' (creation_time), 'stamps' and 'erd' entries
    s_freq : float
        sampling frequency, used to convert the stamp offset to seconds

    Returns
    -------
    datetime
        start time of the recording (ERD-derived time when it disagrees
        with STC by more than START_TIME_TOL seconds)

    Notes
    -----
    Fixed use of the deprecated ``Logger.warn`` alias -> ``Logger.warning``.
    """
    start_time = hdr['stc']['creation_time']
    # locate the stamp belonging to the first ERD file to get its offset
    # NOTE(review): if no stamp matches the ERD filename, `offset` stays
    # unbound and this raises NameError -- presumably a match always
    # exists in valid recordings; confirm against the file format
    for one_stamp in hdr['stamps']:
        if one_stamp['segment_name'].decode() == hdr['erd']['filename']:
            offset = one_stamp['start_stamp']
            break
    # ERD creation time minus the stamp offset gives the recording start
    erd_time = (hdr['erd']['creation_time'] -
                timedelta(seconds=offset / s_freq)).replace(microsecond=0)
    stc_erd_diff = (start_time - erd_time).total_seconds()
    if stc_erd_diff > START_TIME_TOL:
        lg.warning('Time difference between ERD and STC is {} s so using '
                   'ERD time at {}'.format(stc_erd_diff, erd_time))
        start_time = erd_time
    return start_time
Find the start time, usually in STC; if that is not correct, use ERD.
23,592
def _read_ent(ent_file):
    """Read the notes stored in a .ent file.

    Parameters
    ----------
    ent_file : path to file
        .ent file with the annotations

    Returns
    -------
    list of dict
        one dict per note, with 'type', 'length', 'prev_length',
        'unused' and 'value' (the parsed note content, or the rewritten
        string when parsing fails)

    Notes
    -----
    Each note is a 16-byte binary header followed by a text payload in a
    Lisp-like notation; the payload is massaged by string replacements
    and regexes into (approximately) a Python literal and then eval'd.
    """
    with ent_file.open('rb') as f:
        f.seek(352)  # skip the generic file header
        note_hdr_length = 16
        allnote = []
        while True:
            note = {}
            note['type'], = unpack('<i', f.read(4))
            note['length'], = unpack('<i', f.read(4))
            note['prev_length'], = unpack('<i', f.read(4))
            note['unused'], = unpack('<i', f.read(4))
            # a zero type marks the end of the note list
            if not note['type']:
                break
            s = f.read(note['length'] - note_hdr_length)
            s = s[:-2]  # drop the trailing terminator bytes
            s = s.decode('utf-8', errors='replace')
            s1 = s.replace('\n', ' ')
            s1 = s1.replace('\\xd ', '')
            # rewrite the "(." record syntax into dict/list literals
            s1 = s1.replace('(.', '{')
            s1 = sub(r'\(([A-Za-z0-9," ]*)\)', r'[\1]', s1)
            s1 = s1.replace(')', '}')
            s1 = sub(r'(\{[\w"]*),', r'\1 :', s1)
            s1 = s1.replace('{"', '"')
            s1 = s1.replace('},', ',')
            s1 = s1.replace('}}', '}')
            s1 = sub(r'\(([0-9 ,-\.]*)\}', r'[\1]', s1)
            # SECURITY NOTE(review): eval on file content can execute
            # arbitrary code from a malicious .ent file; ast.literal_eval
            # would be safer but the rewritten notation is not guaranteed
            # to be a pure literal -- flagged, not changed
            try:
                note['value'] = eval(s1)
            except:
                # keep the rewritten string when parsing fails
                note['value'] = s1
            allnote.append(note)
    return allnote
Read notes stored in . ent file .
23,593
def _read_packet(f, pos, n_smp, n_allchan, abs_delta):
    """Read one packet of delta-compressed data from an .erd file.

    Parameters
    ----------
    f : file object
        open .erd file
    pos : int
        byte offset of the packet in the file
    n_smp : int
        number of samples to read
    n_allchan : int
        total number of channels in the recording
    abs_delta : bytes
        marker value (1 or 2 bytes) that flags an absolute sample
        following the delta stream

    Returns
    -------
    ndarray
        (n_allchan, n_smp) int32 matrix of raw values, not yet converted
        to volts

    Notes
    -----
    Fixed ``ndarray.tostring()`` (removed in NumPy 1.23) -> ``tobytes()``
    and the legacy ``'a1'`` dtype alias -> ``'S1'``; both are documented
    equivalents.  NOTE(review): for the first sample the delta branch
    reads ``dat[:, -1]`` of an uninitialized array -- presumably valid
    packets always start with absolute values; confirm against the format.
    """
    if len(abs_delta) == 1:
        abs_delta = unpack('b', abs_delta)[0]
    else:
        abs_delta = unpack('h', abs_delta)[0]
    # one mask bit per channel, padded up to whole bytes
    l_deltamask = int(ceil(n_allchan / BITS_IN_BYTE))
    dat = empty((n_allchan, n_smp), dtype=int32)
    f.seek(pos)
    for i_smp in range(n_smp):
        eventbite = f.read(1)
        try:
            assert eventbite in (b'\x00', b'\x01')
        except:
            raise Exception('at pos ' + str(i_smp) +
                            ', eventbite (should be x00 or x01): ' +
                            str(eventbite))
        # the delta mask says, per channel, whether the delta is stored
        # as a 2-byte short (bit set) or a 1-byte char (bit clear)
        byte_deltamask = unpack('<' + 'B' * l_deltamask,
                                f.read(l_deltamask))
        deltamask = unpackbits(array(byte_deltamask[::-1], dtype='uint8'))
        deltamask = deltamask[:-n_allchan - 1:-1]
        n_bytes = int(deltamask.sum()) + deltamask.shape[0]
        deltamask = deltamask.astype('bool')
        delta_dtype = empty(n_allchan, dtype='S1')
        delta_dtype[deltamask] = 'h'
        delta_dtype[~deltamask] = 'b'
        # build a struct format string from the per-channel codes
        relval = array(unpack('<' + delta_dtype.tobytes().decode(),
                              f.read(n_bytes)))
        # a short delta equal to the marker means "absolute value follows"
        read_abs = (delta_dtype == b'h') & (relval == abs_delta)
        dat[~read_abs, i_smp] = dat[~read_abs, i_smp - 1] + relval[~read_abs]
        dat[read_abs, i_smp] = fromfile(f, 'i', count=read_abs.sum())
    return dat
Read a packet of compressed data
23,594
def _read_erd(erd_file, begsam, endsam):
    """Read the raw data and return a matrix converted to microvolts.

    Parameters
    ----------
    erd_file : Path
        one .erd file with the compressed recordings
    begsam : int
        first sample to read
    endsam : int
        one past the last sample to read

    Returns
    -------
    ndarray
        (n_channels, endsam - begsam) matrix; samples outside the
        recorded packets stay NaN

    Raises
    ------
    NotImplementedError
        for shorted channels or file schema 7 (untested formats)
    """
    hdr = _read_hdr_file(erd_file)
    n_allchan = hdr['num_channels']
    shorted = hdr['shorted']
    n_shorted = sum(shorted)
    if n_shorted > 0:
        raise NotImplementedError('shorted channels not tested yet')
    if hdr['file_schema'] in (7, ):
        # schema 7 marks absolute values with a single 0x80 byte
        abs_delta = b'\x80'
        raise NotImplementedError('schema 7 not tested yet')
    if hdr['file_schema'] in (8, 9):
        # schemas 8/9 mark absolute values with a two-byte 0xffff marker
        abs_delta = b'\xff\xff'
    n_smp = endsam - begsam
    data = empty((n_allchan, n_smp))
    data.fill(NaN)
    # the .etc table of contents gives, per packet, the file offset and
    # the span of samples it contains
    etc = _read_etc(erd_file.with_suffix('.etc'))
    all_beg = etc['samplestamp']
    all_end = etc['samplestamp'] + etc['sample_span'] - 1
    try:
        begrec = where((all_end >= begsam))[0][0]
        endrec = where((all_beg < endsam))[0][-1]
    except IndexError:
        # requested interval lies entirely outside the recording
        return data
    with erd_file.open('rb') as f:
        for rec in range(begrec, endrec + 1):
            # clip the requested interval to this packet's samples
            begpos_rec = begsam - all_beg[rec]
            endpos_rec = endsam - all_beg[rec]
            begpos_rec = max(begpos_rec, 0)
            endpos_rec = min(endpos_rec, all_end[rec] - all_beg[rec] + 1)
            # destination columns in the output matrix
            d1 = begpos_rec + all_beg[rec] - begsam
            d2 = endpos_rec + all_beg[rec] - begsam
            dat = _read_packet(f, etc['offset'][rec], endpos_rec, n_allchan,
                               abs_delta)
            data[:, d1:d2] = dat[:, begpos_rec:endpos_rec]
    if n_shorted > 0:
        # currently unreachable: shorted recordings raise above
        full_channels = where(asarray([x == 0 for x in shorted]))[0]
        output = empty((n_allchan, n_smp))
        output.fill(NaN)
        output[full_channels, :] = data
    else:
        output = data
    # per-channel conversion factor to physical units
    factor = _calculate_conversion(hdr)
    return expand_dims(factor, 1) * output
Read the raw data and return a matrix converted to microvolts .
23,595
def _read_etc ( etc_file ) : etc_type = dtype ( [ ( 'offset' , '<i' ) , ( 'samplestamp' , '<i' ) , ( 'sample_num' , '<i' ) , ( 'sample_span' , '<h' ) , ( 'unknown' , '<h' ) ] ) with etc_file . open ( 'rb' ) as f : f . seek ( 352 ) etc = fromfile ( f , dtype = etc_type ) return etc
Return information about table of content for each erd .
23,596
def _read_snc(snc_file):
    """Read the synchronization file and return sample stamps and times.

    Parameters
    ----------
    snc_file : Path
        the .snc file

    Returns
    -------
    ndarray
        sample stamps at the sync points
    ndarray of datetime
        corresponding times, converted from Windows FILETIME
    """
    snc_raw_dtype = dtype([('sampleStamp', '<i'), ('sampleTime', '<q')])
    with snc_file.open('rb') as f:
        f.seek(352)  # skip the generic file header
        snc_raw = fromfile(f, dtype=snc_raw_dtype)
    stamps = snc_raw['sampleStamp']
    times = asarray([_filetime_to_dt(t) for t in snc_raw['sampleTime']])
    return stamps, times
Read Synchronization File and return sample stamp and time
23,597
def _read_stc(stc_file):
    """Read the Segment Table of Contents file.

    Parameters
    ----------
    stc_file : Path
        the .stc file

    Returns
    -------
    dict
        generic file header plus 'next_segment', 'final' and 'padding'
    ndarray
        structured array with one record per segment ('segment_name',
        'start_stamp', 'end_stamp', 'sample_num', 'sample_span')
    """
    hdr = _read_hdr_file(stc_file)
    stc_dtype = dtype([('segment_name', 'a256'),
                       ('start_stamp', '<i'),
                       ('end_stamp', '<i'),
                       ('sample_num', '<i'),
                       ('sample_span', '<i')])
    with stc_file.open('rb') as f:
        f.seek(352)  # skip the generic file header
        hdr['next_segment'] = unpack('<i', f.read(4))[0]
        hdr['final'] = unpack('<i', f.read(4))[0]
        hdr['padding'] = unpack('<' + 'i' * 12, f.read(48))
        stamps = fromfile(f, dtype=stc_dtype)
    return hdr, stamps
Read Segment Table of Contents file .
23,598
def _read_vtc(vtc_file):
    """Read the VTC (video table of contents) file.

    Parameters
    ----------
    vtc_file : Path
        the .vtc file

    Returns
    -------
    list of str
        names of the .mpg video files
    list of datetime
        start time of each video file
    list of datetime
        end time of each video file
    """
    with vtc_file.open('rb') as f:
        filebytes = f.read()
    hdr = {}
    hdr['file_guid'] = hexlify(filebytes[:16])
    # each entry: 261-byte file name, 16-byte location GUID, then two
    # 8-byte FILETIME stamps (start, end)
    i = 20
    mpg_file = []
    start_time = []
    end_time = []
    while i < len(filebytes):
        mpg_file.append(_make_str(unpack('c' * 261, filebytes[i:i + 261])))
        i += 261
        Location = filebytes[i:i + 16]
        # the location GUID is a fixed magic value; fail loudly when the
        # layout is not the expected one
        correct = b'\xff\xfe\xf8^\xfc\xdc\xe5D\x8f\xae\x19\xf5\xd6"\xb6\xd4'
        assert Location == correct
        i += 16
        start_time.append(_filetime_to_dt(unpack('<q',
                                                 filebytes[i:(i + 8)])[0]))
        i += 8
        end_time.append(_filetime_to_dt(unpack('<q',
                                               filebytes[i:(i + 8)])[0]))
        i += 8
    return mpg_file, start_time, end_time
Read the VTC file .
23,599
def _read_hdr_dir(self):
    """Read the header for basic information.

    Locates the .stc file inside the dataset folder (preferring one
    named after the folder itself, otherwise the single *.stc present)
    and stores the common file stem in self._filename; then reads one
    .erd header and the .stc table of contents.

    Returns
    -------
    dict
        header with 'erd' (first readable .erd header, plus its
        'filename'), 'stc' and 'stamps' entries

    Raises
    ------
    FileNotFoundError
        when the folder contains no .stc file
    OSError
        when the folder contains more than one candidate .stc file
    """
    foldername = Path(self.filename)
    stc_file = foldername / (foldername.stem + '.stc')
    if stc_file.exists():
        self._filename = stc_file.with_suffix('')
    else:
        # fall back to any single .stc file in the folder
        stc_file = list(foldername.glob('*.stc'))
        if len(stc_file) == 1:
            self._filename = foldername / stc_file[0].stem
        elif len(stc_file) == 0:
            raise FileNotFoundError('Could not find any .stc file.')
        else:
            raise OSError('Found too many .stc files: ' +
                          '\n'.join(str(x) for x in stc_file))
    hdr = {}
    # use the first .erd file whose header can actually be read
    for erd_file in foldername.glob(self._filename.stem + '_*.erd'):
        try:
            hdr['erd'] = _read_hdr_file(erd_file)
            hdr['erd'].update({'filename': erd_file.stem})
            break
        except (FileNotFoundError, PermissionError):
            pass
    stc = _read_stc(self._filename.with_suffix('.stc'))
    hdr['stc'], hdr['stamps'] = stc
    return hdr
Read the header for basic information .