idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
239,700
def split(self, spike_ids=None, spike_clusters_rel=0):
    """Split the selected spikes into a new cluster.

    If no spike ids are given, request them via the `request_split` event.
    """
    if spike_ids is None:
        spike_ids = self.emit('request_split', single=True)
    spike_ids = np.asarray(spike_ids, dtype=np.int64)
    assert spike_ids.dtype == np.int64
    assert spike_ids.ndim == 1
    if not len(spike_ids):
        # Nothing selected: tell the user how to select spikes first.
        msg = ("You first need to select spikes in the feature "
               "view with a few Ctrl+Click around the spikes "
               "that you want to split.")
        self.emit('error', msg)
        return
    self.clustering.split(spike_ids,
                          spike_clusters_rel=spike_clusters_rel)
    self._global_history.action(self.clustering)
Split the selected spikes .
182
5
239,701
def get_labels(self, field):
    """Return a {cluster_id: label} dict for a given metadata field."""
    cluster_ids = self.clustering.cluster_ids
    return {cluster: self.cluster_meta.get(field, cluster)
            for cluster in cluster_ids}
Return the labels of all clusters for a given field .
39
11
239,702
def label(self, name, value, cluster_ids=None):
    """Assign a label to clusters (defaults to the current selection)."""
    if cluster_ids is None:
        cluster_ids = self.cluster_view.selected
    # Accept a scalar cluster id as well as a sequence.
    if not hasattr(cluster_ids, '__len__'):
        cluster_ids = [cluster_ids]
    if not cluster_ids:
        return
    self.cluster_meta.set(name, cluster_ids, value)
    self._global_history.action(self.cluster_meta)
Assign a label to clusters .
101
7
239,703
def move(self, group, cluster_ids=None):
    """Assign a group to some clusters.

    `cluster_ids` must be a sequence of integers (or None to use the
    current selection); a string is rejected with a warning.
    """
    if isinstance(cluster_ids, string_types):
        # `Logger.warn` is a deprecated alias; use `warning`.
        logger.warning("The list of clusters should be a list of integers, "
                       "not a string.")
        return
    self.label('group', group, cluster_ids=cluster_ids)
Assign a group to some clusters .
69
8
239,704
def next(self):
    """Select the next cluster.

    Advance the similarity view if there is a selection, otherwise
    advance the cluster view.
    """
    view = self.similarity_view if self.selected else self.cluster_view
    view.next()
Select the next cluster .
32
5
239,705
def save(self):
    """Save the manual clustering back to disk.

    Emits `request_save` with the spike clusters, the cluster groups,
    and the per-field cluster labels, then caches the
    spikes-per-cluster array.
    """
    spike_clusters = self.clustering.spike_clusters
    groups = {c: self.cluster_meta.get('group', c) or 'unsorted'
              for c in self.clustering.cluster_ids}
    # List of tuples (field_name, dictionary).
    # BUG FIX: `('next_cluster')` is just a string, so the original test
    # was a substring check; a one-element tuple is intended.
    labels = [(field, self.get_labels(field))
              for field in self.cluster_meta.fields
              if field not in ('next_cluster',)]
    # TODO: add option in add_field to declare a field unsavable.
    self.emit('request_save', spike_clusters, groups, *labels)
    # Cache the spikes_per_cluster array.
    self._save_spikes_per_cluster()
Save the manual clustering back to disk .
170
9
239,706
def create_cluster_meta(cluster_groups):
    """Return a ClusterMeta instance with cluster group support.

    `cluster_groups` maps cluster ids to group names (may be None).
    """
    meta = ClusterMeta()
    meta.add_field('group')
    data = {c: {'group': v} for c, v in (cluster_groups or {}).items()}
    meta.from_dict(data)
    return meta
Return a ClusterMeta instance with cluster group support .
74
10
239,707
def add_field(self, name, default_value=None):
    """Add a metadata field with an optional default value.

    Also exposes `self.<name>(cluster)` as a shortcut for
    `self.get(name, cluster)`.
    """
    self._fields[name] = default_value

    def accessor(cluster):
        return self.get(name, cluster)

    setattr(self, name, accessor)
Add a field with an optional default value .
51
9
239,708
def set(self, field, clusters, value, add_to_stack=True):
    """Set the value of a field for one or several clusters.

    Returns the emitted UpdateInfo instance.
    """
    # Add the field on the fly if it doesn't exist yet.
    if field not in self._fields:
        self.add_field(field)
    assert field in self._fields
    clusters = _as_list(clusters)
    for cluster in clusters:
        self._data.setdefault(cluster, {})[field] = value
    up = UpdateInfo(description='metadata_' + field,
                    metadata_changed=clusters,
                    metadata_value=value,
                    )
    undo_state = self.emit('request_undo_state', up)
    if add_to_stack:
        self._undo_stack.add((clusters, field, value, up, undo_state))
    self.emit('cluster', up)
    return up
Set the value of one of several clusters .
187
9
239,709
def get(self, field, cluster):
    """Retrieve the value of one cluster (or a list of clusters)."""
    if _is_list(cluster):
        return [self.get(field, c) for c in cluster]
    assert field in self._fields
    default = self._fields[field]
    return self._data.get(cluster, {}).get(field, default)
Retrieve the value of one cluster .
70
8
239,710
def set_from_descendants(self, descendants):
    """Update metadata of new clusters from the metadata of their parents.

    `descendants` is a sequence of (old_cluster, new_cluster) pairs.
    A new cluster inherits a field value only when all of its parents
    agree on a single, non-default value.
    """
    for field in self.fields:
        # Collect, for every new cluster, the set of values of all of
        # its parents.
        candidates = defaultdict(set)
        for old, new in descendants:
            candidates[new].add(self.get(field, old))
        # Loop over all new clusters.
        for new, vals in candidates.items():
            vals = list(vals)
            default = self._fields[field]
            # If all the parents have the same value, assign it to
            # the new cluster if it is not the default.
            if len(vals) == 1 and vals[0] != default:
                self.set(field, new, vals[0])
Update metadata of some clusters given the metadata of their ascendants .
159
13
239,711
def undo(self):
    """Undo the last metadata change.

    Returns the UpdateInfo instance of the undone action, or None if
    there is nothing to undo.
    """
    args = self._undo_stack.back()
    if args is None:
        return
    # Replay the remaining history from the base state.
    self._data = deepcopy(self._data_base)
    for clusters, field, value, up, undo_state in self._undo_stack:
        if clusters is not None:
            self.set(field, clusters, value, add_to_stack=False)
    # Return the UpdateInfo instance of the undo action.
    up, undo_state = args[-2:]
    up.history = 'undo'
    up.undo_state = undo_state
    self.emit('cluster', up)
    return up
Undo the last metadata change .
135
7
239,712
def redo(self):
    """Redo the next metadata change.

    Returns the UpdateInfo instance of the redone action, or None if
    there is nothing to redo.
    """
    args = self._undo_stack.forward()
    if args is None:
        return
    clusters, field, value, up, undo_state = args
    self.set(field, clusters, value, add_to_stack=False)
    # Return the UpdateInfo instance of the redo action.
    up.history = 'redo'
    self.emit('cluster', up)
    return up
Redo the next metadata change .
88
7
239,713
def _get_boxes(pos, size=None, margin=0, keep_aspect_ratio=True):
    """Generate non-overlapping boxes in NDC from a set of positions.

    Returns an (n, 4) array of (x0, y0, x1, y1) box coordinates
    normalized to [-1, 1].
    """
    # Get x, y as column vectors.
    pos = np.asarray(pos, dtype=np.float64)
    x, y = pos.T
    x = x[:, np.newaxis]
    y = y[:, np.newaxis]
    # Box half-size: explicit, or derived from the point spacing.
    w, h = size if size is not None else _get_box_size(x, y, margin=margin)
    x0, y0 = x - w, y - h
    x1, y1 = x + w, y + h
    # Renormalize the whole thing by keeping the aspect ratio.
    x0min, y0min, x1max, y1max = x0.min(), y0.min(), x1.max(), y1.max()
    if not keep_aspect_ratio:
        b = (x0min, y0min, x1max, y1max)
    else:
        dx = x1max - x0min
        dy = y1max - y0min
        if dx > dy:
            # Expand vertically around the vertical center.
            cy = (y1max + y0min) / 2.
            b = (x0min, cy - dx / 2., x1max, cy + dx / 2.)
        else:
            # Expand horizontally around the horizontal center.
            cx = (x1max + x0min) / 2.
            b = (cx - dy / 2., y0min, cx + dy / 2., y1max)
    r = Range(from_bounds=b, to_bounds=(-1, -1, 1, 1))
    return np.c_[r.apply(np.c_[x0, y0]), r.apply(np.c_[x1, y1])]
Generate non - overlapping boxes in NDC from a set of positions .
405
15
239,714
def _get_texture ( arr , default , n_items , from_bounds ) : if not hasattr ( default , '__len__' ) : # pragma: no cover default = [ default ] n_cols = len ( default ) if arr is None : # pragma: no cover arr = np . tile ( default , ( n_items , 1 ) ) assert arr . shape == ( n_items , n_cols ) # Convert to 3D texture. arr = arr [ np . newaxis , ... ] . astype ( np . float64 ) assert arr . shape == ( 1 , n_items , n_cols ) # NOTE: we need to cast the texture to [0., 1.] (float texture). # This is easy as soon as we assume that the signal bounds are in # [-1, 1]. assert len ( from_bounds ) == 2 m , M = map ( float , from_bounds ) assert np . all ( arr >= m ) assert np . all ( arr <= M ) arr = ( arr - m ) / ( M - m ) assert np . all ( arr >= 0 ) assert np . all ( arr <= 1. ) return arr
Prepare data to be uploaded as a texture .
255
10
239,715
def _get_array ( val , shape , default = None , dtype = np . float64 ) : assert val is not None or default is not None if hasattr ( val , '__len__' ) and len ( val ) == 0 : # pragma: no cover val = None # Do nothing if the array is already correct. if ( isinstance ( val , np . ndarray ) and val . shape == shape and val . dtype == dtype ) : return val out = np . zeros ( shape , dtype = dtype ) # This solves `ValueError: could not broadcast input array from shape (n) # into shape (n, 1)`. if val is not None and isinstance ( val , np . ndarray ) : if val . size == out . size : val = val . reshape ( out . shape ) out . flat [ : ] = val if val is not None else default assert out . shape == shape return out
Ensure an object is an array with the specified shape .
205
12
239,716
def _get_index ( n_items , item_size , n ) : index = np . arange ( n_items ) index = np . repeat ( index , item_size ) index = index . astype ( np . float64 ) assert index . shape == ( n , ) return index
Prepare an index attribute for GPU uploading .
63
9
239,717
def _load_shader(filename):
    """Load a GLSL shader file from the `glsl` directory next to this module."""
    curdir = op.dirname(op.realpath(__file__))
    path = op.join(curdir, 'glsl', filename)
    with open(path, 'r') as f:
        return f.read()
Load a shader file .
76
5
239,718
def _random_color(h_range=(0., 1.),
                  s_range=(.5, 1.),
                  v_range=(.5, 1.),
                  ):
    """Generate a random RGB color by sampling HSV uniformly in the given ranges."""
    h = uniform(*h_range)
    s = uniform(*s_range)
    v = uniform(*v_range)
    r, g, b = hsv_to_rgb(np.array([[[h, s, v]]])).flat
    return r, g, b
Generate a random RGB color .
112
7
239,719
def _is_bright ( rgb ) : r , g , b = rgb gray = 0.299 * r + 0.587 * g + 0.114 * b return gray >= .5
Return whether a RGB color is bright or not .
40
10
239,720
def _bunchify(b):
    """Convert a dict (and its first-level dict values) to Bunch instances."""
    assert isinstance(b, dict)
    b = Bunch(b)
    for key in b:
        if isinstance(b[key], dict):
            b[key] = Bunch(b[key])
    return b
Ensure all dict elements are Bunch .
56
9
239,721
def _as_list(obj):
    """Ensure an object is a list (None stays None, strings/scalars are wrapped)."""
    if obj is None:
        return None
    if isinstance(obj, string_types):
        return [obj]
    if isinstance(obj, tuple):
        return list(obj)
    if not hasattr(obj, '__len__'):
        return [obj]
    return obj
Ensure an object is a list .
70
8
239,722
def _as_array(arr, dtype=None):
    """Convert an object to a numerical NumPy array.

    Avoids a copy when possible; raises ValueError on unsupported dtypes.
    """
    if arr is None:
        return None
    if isinstance(arr, np.ndarray) and dtype is None:
        return arr
    # Wrap scalars into a one-element list.
    if isinstance(arr, integer_types + (float,)):
        arr = [arr]
    out = np.asarray(arr)
    if dtype is not None and out.dtype != dtype:
        out = out.astype(dtype)
    if out.dtype not in _ACCEPTED_ARRAY_DTYPES:
        raise ValueError("'arr' seems to have an invalid dtype: "
                         "{0:s}".format(str(out.dtype)))
    return out
Convert an object to a numerical NumPy array .
152
11
239,723
def _glslify(r):
    """Transform a string or an n-tuple (2 <= n <= 4) to a valid GLSL expression."""
    if isinstance(r, string_types):
        return r
    assert 2 <= len(r) <= 4
    return 'vec{}({})'.format(len(r), ', '.join(map(str, r)))
Transform a string or a n - tuple to a valid GLSL expression .
62
16
239,724
def get(self, class_name):
    """Get a transform in the chain from its class name (None if absent)."""
    for t in self.cpu_transforms + self.gpu_transforms:
        if t.__class__.__name__ == class_name:
            return t
Get a transform in the chain from its name .
45
10
239,725
def remove(self, name):
    """Return a new TransformChain with the named transform removed."""
    cpu = self._remove_transform(self.cpu_transforms, name)
    gpu = self._remove_transform(self.gpu_transforms, name)
    return TransformChain().add_on_cpu(cpu).add_on_gpu(gpu)
Remove a transform in the chain .
84
7
239,726
def apply(self, arr):
    """Apply all CPU transforms on an array, in order."""
    for transform in self.cpu_transforms:
        arr = transform.apply(arr)
    return arr
Apply all CPU transforms on an array .
28
8
239,727
def inverse(self):
    """Return the inverse chain of transforms (all on CPU, reversed order)."""
    transforms = self.cpu_transforms + self.gpu_transforms
    inverted = [t.inverse() for t in transforms[::-1]]
    return TransformChain().add_on_cpu(inverted)
Return the inverse chain of transforms .
61
7
239,728
def _create_emitter ( self , event ) : if not hasattr ( self , event ) : setattr ( self , event , lambda * args , * * kwargs : self . emit ( event , * args , * * kwargs ) )
Create a method that emits an event of the same name .
55
12
239,729
def connect(self, func=None, event=None, set_method=False):
    """Register a callback function to a given event.

    Can be used as a decorator (with or without arguments). Returns the
    callback so the decorated name stays bound.
    """
    if func is None:
        # Called with arguments only: return a decorator.
        return partial(self.connect, set_method=set_method)
    # Get the event name from the function.
    if event is None:
        event = self._get_on_name(func)
    # We register the callback function.
    self._callbacks[event].append(func)
    # A new method self.event() emitting the event is created.
    if set_method:
        self._create_emitter(event)
    return func
Register a callback function to a given event .
118
9
239,730
def unconnect(self, *funcs):
    """Unconnect the specified callback functions from every event."""
    for func in funcs:
        for callbacks in self._callbacks.values():
            if func in callbacks:
                callbacks.remove(func)
Unconnect specified callback functions .
44
6
239,731
def emit(self, event, *args, **kwargs):
    """Call all callback functions registered with an event.

    With `single=True`, only the last registered callback is called and
    its return value is returned; otherwise a list of all outputs.
    """
    callbacks = self._callbacks.get(event, [])
    # Call the last callback if this is a single event.
    single = kwargs.pop('single', None)
    if single and callbacks:
        return callbacks[-1](*args, **kwargs)
    # Otherwise, return the list of callback outputs.
    return [callback(*args, **kwargs) for callback in callbacks]
Call all callback functions registered with an event .
120
9
239,732
def set_progress_message(self, message, line_break=False):
    """Register a progress callback that prints `message` on each update."""
    end = '\r' if not line_break else None

    @self.connect
    def on_progress(value, value_max, **kwargs):
        # Force a line break on the final value.
        kwargs['end'] = None if value == value_max else end
        _default_on_progress(message, value, value_max, **kwargs)
Set a progress message .
92
5
239,733
def set_complete_message(self, message):
    """Register a completion callback that prints `message` when done."""

    @self.connect
    def on_complete(**kwargs):
        _default_on_complete(message, **kwargs)
Set a complete message .
43
5
239,734
def get_plugin(name):
    """Get a registered plugin class whose name contains `name`.

    Raises ValueError if no plugin matches.
    """
    for plugin in IPluginRegistry.plugins:
        if name in plugin.__name__:
            return plugin
    raise ValueError("The plugin %s cannot be found." % name)
Get a plugin class from its name .
45
8
239,735
def discover_plugins(dirs):
    """Discover the plugin classes contained in Python files under `dirs`.

    Importing a plugin module registers it in IPluginRegistry.
    """
    # Scan all subdirectories recursively.
    for path in _iter_plugin_files(dirs):
        filename = op.basename(path)
        subdir = op.dirname(path)
        modname, ext = op.splitext(filename)
        # NOTE(review): `imp` is deprecated in Python 3 (importlib is the
        # modern replacement); the original loading semantics are kept.
        file, path, descr = imp.find_module(modname, [subdir])
        if file:
            # Loading the module registers the plugin in
            # IPluginRegistry.
            try:
                mod = imp.load_module(modname, file, path, descr)  # noqa
            except Exception as e:  # pragma: no cover
                logger.exception(e)
            finally:
                file.close()
    return IPluginRegistry.plugins
Discover the plugin classes contained in Python files .
157
9
239,736
def add_view(self, view, name=None, position=None,
             closable=False, floatable=True, floating=None):
    """Add a widget to the main window as a dock widget.

    Returns the created dock widget; emits `add_view` (and later
    `close_view` when the dock is closed).
    """
    # Set the name in the view.
    view.view_index = self._get_view_index(view)
    # The view name is `<class_name><view_index>`, e.g. `MyView0`.
    view.name = name or view.__class__.__name__ + str(view.view_index)
    # Get the Qt canvas for VisPy and matplotlib views.
    widget = _try_get_vispy_canvas(view)
    widget = _try_get_matplotlib_canvas(widget)
    dock_widget = _create_dock_widget(widget, view.name,
                                      closable=closable,
                                      floatable=floatable,
                                      )
    self.addDockWidget(_get_dock_position(position), dock_widget)
    if floating is not None:
        dock_widget.setFloating(floating)
    dock_widget.view = view
    # Emit the close_view event when the dock widget is closed.
    @dock_widget.connect_
    def on_close_widget():
        self.emit('close_view', view)
    dock_widget.show()
    self.emit('add_view', view)
    logger.log(5, "Add %s to GUI.", view.name)
    return dock_widget
Add a widget to the main window .
311
8
239,737
def list_views(self, name='', is_visible=True):
    """List all views whose name starts with a given string.

    Tiny (<10px) docks are ignored.
    """
    children = self.findChildren(QWidget)
    return [child.view for child in children
            if isinstance(child, QDockWidget) and
            child.view.name.startswith(name) and
            (child.isVisible() if is_visible else True) and
            child.width() >= 10 and
            child.height() >= 10
            ]
List all views which name start with a given string .
94
11
239,738
def get_view(self, name, is_visible=True):
    """Return the first view matching a name, or None."""
    views = self.list_views(name, is_visible=is_visible)
    return views[0] if views else None
Return a view from its name .
43
7
239,739
def view_count(self):
    """Return a {view_name: count} dict of opened views."""
    counts = defaultdict(lambda: 0)
    for view in self.list_views():
        counts[view.name] += 1
    return dict(counts)
Return the number of opened views .
44
7
239,740
def get_menu(self, name):
    """Return the menu with that name, creating it if needed."""
    if name not in self._menus:
        self._menus[name] = self.menuBar().addMenu(name)
    return self._menus[name]
Return or create a menu .
50
6
239,741
def restore_geometry_state(self, gs):
    """Restore the position of the main window and the docks from `gs`.

    `gs` is a dict with optional `geometry` and `state` entries; a falsy
    value is a no-op.
    """
    if not gs:
        return
    if gs.get('geometry', None):
        self.restoreGeometry((gs['geometry']))
    if gs.get('state', None):
        self.restoreState((gs['state']))
Restore the position of the main window and the docks .
79
12
239,742
def update_view_state(self, view, state):
    """Update the stored state of a view, creating its entry if needed."""
    if view.name not in self:
        self[view.name] = Bunch()
    self[view.name].update(state)
Update the state of a view .
44
7
239,743
def load(self):
    """Load the GUI state from the JSON file in the config dir."""
    if not op.exists(self.path):
        logger.debug("The GUI state file `%s` doesn't exist.", self.path)
        # TODO: create the default state.
        return
    assert op.exists(self.path)
    logger.debug("Load the GUI state from `%s`.", self.path)
    self.update(_bunchify(_load_json(self.path)))
Load the state from the JSON file in the config dir .
98
12
239,744
def save(self):
    """Save the GUI state to the JSON file in the config dir.

    The `config_dir` and `name` entries are not persisted.
    """
    logger.debug("Save the GUI state to `%s`.", self.path)
    payload = {k: v for k, v in self.items()
               if k not in ('config_dir', 'name')}
    _save_json(self.path, payload)
Save the state to the JSON file in the config dir .
66
12
239,745
def cache(self, f):
    """Cache a function on disk using the context's cache directory.

    Returns `f` unchanged when joblib is unavailable.
    """
    if self._memory is None:  # pragma: no cover
        logger.debug("Joblib is not installed: skipping cacheing.")
        return f
    assert f
    # NOTE: discard self in instance methods.
    # `inspect.getargspec` was removed in Python 3.11; `getfullargspec`
    # exposes the same `.args` attribute.
    if 'self' in inspect.getfullargspec(f).args:
        ignore = ['self']
    else:
        ignore = None
    disk_cached = self._memory.cache(f, ignore=ignore)
    return disk_cached
Cache a function using the context's cache directory .
102
10
239,746
def memcache(self, f):
    """Cache a function in memory using an internal dictionary."""
    name = _fullname(f)
    cache = self.load_memcache(name)

    @wraps(f)
    def memcached(*args):
        """Cache the function in memory."""
        # The arguments need to be hashable. Much faster than using hash().
        h = args
        out = cache.get(h, None)
        if out is None:
            out = f(*args)
            cache[h] = out
        return out

    return memcached
Cache a function in memory using an internal dictionary .
102
10
239,747
def save(self, name, data, location='local', kind='json'):
    """Save a dictionary as JSON (or pickle) within the cache directory."""
    file_ext = '.json' if kind == 'json' else '.pkl'
    path = self._get_path(name, location, file_ext=file_ext)
    _ensure_dir_exists(op.dirname(path))
    logger.debug("Save data to `%s`.", path)
    if kind == 'json':
        _save_json(path, data)
    else:
        _save_pickle(path, data)
Save a dictionary in a JSON file within the cache directory .
122
12
239,748
def load(self, name, location='local'):
    """Load saved data from the cache directory (JSON first, then pickle).

    Returns an empty dict when neither file exists.
    """
    path = self._get_path(name, location, file_ext='.json')
    if op.exists(path):
        return _load_json(path)
    path = self._get_path(name, location, file_ext='.pkl')
    if op.exists(path):
        return _load_pickle(path)
    logger.debug("The file `%s` doesn't exist.", path)
    return {}
Load saved data from the cache directory .
111
8
239,749
def _ensure_dir_exists ( path ) : if not op . exists ( path ) : os . makedirs ( path ) assert op . exists ( path ) and op . isdir ( path )
Ensure a directory exists .
45
6
239,750
def load_config(path=None):
    """Load a Python or JSON config file.

    Returns a default `Config()` when the path is missing or has an
    unrecognized extension (the original code raised UnboundLocalError
    in the latter case).
    """
    if not path or not op.exists(path):
        return Config()
    path = op.realpath(path)
    dirpath, filename = op.split(path)
    file_ext = op.splitext(path)[1]
    logger.debug("Load config file `%s`.", path)
    # Fall back to an empty config for unknown extensions.
    config = Config()
    if file_ext == '.py':
        config = PyFileConfigLoader(filename, dirpath,
                                    log=logger).load_config()
    elif file_ext == '.json':
        config = JSONFileConfigLoader(filename, dirpath,
                                      log=logger).load_config()
    return config
Load a Python or JSON config file .
142
8
239,751
def save_config(path, config):
    """Save a config object to a JSON file, stamping it with a version.

    NOTE: the `version` key is written into the caller's mapping.
    """
    import json
    config['version'] = 1
    with open(path, 'w') as f:
        json.dump(config, f)
Save a config object to a JSON file .
40
9
239,752
def _edges_to_adjacency_list ( edges ) : adj = { } for i , j in edges : if i in adj : # pragma: no cover ni = adj [ i ] else : ni = adj [ i ] = set ( ) if j in adj : nj = adj [ j ] else : nj = adj [ j ] = set ( ) ni . add ( j ) nj . add ( i ) return adj
Convert a list of edges into an adjacency list .
96
13
239,753
def _probe_positions(probe, group):
    """Return the electrode positions of a probe channel group as an array."""
    geometry = probe['channel_groups'][group]['geometry']
    channels = _probe_channels(probe, group)
    return np.array([geometry[channel] for channel in channels])
Return the positions of a probe channel group .
61
9
239,754
def _probe_adjacency_list(probe):
    """Return an adjacency list built from all channel-group graphs of a probe."""
    groups = probe['channel_groups'].values()
    edges = list(itertools.chain(*(cg['graph'] for cg in groups)))
    return _edges_to_adjacency_list(edges)
Return an adjacency list of a whole probe .
86
11
239,755
def load_probe(name):
    """Load a probe from a PRB file path or a built-in probe name.

    Raises IOError when no matching probe file is found.
    """
    if op.exists(name):
        # The argument can be either a path to a PRB file.
        path = name
    else:
        # Or the name of a built-in probe.
        curdir = op.realpath(op.dirname(__file__))
        path = op.join(curdir, 'probes/{}.prb'.format(name))
    if not op.exists(path):
        raise IOError("The probe `{}` cannot be found.".format(name))
    return MEA(probe=_read_python(path))
Load one of the built - in probes .
133
9
239,756
def list_probes():
    """Return the names of the built-in probes (PRB files, without extension)."""
    curdir = op.realpath(op.dirname(__file__))
    probes_dir = op.join(curdir, 'probes')
    return [op.splitext(fn)[0]
            for fn in os.listdir(probes_dir)
            if fn.endswith('.prb')]
Return the list of built - in probes .
73
9
239,757
def linear_positions(n_channels):
    """Return linear channel positions along the vertical axis, as (n, 2)."""
    ys = np.linspace(0., 1., n_channels)
    return np.c_[np.zeros(n_channels), ys]
Linear channel positions along the vertical axis .
47
9
239,758
def staggered_positions(n_channels):
    """Generate (n_channels, 2) channel positions for a staggered probe.

    Channels alternate left/right of the vertical axis, ending at the
    origin.
    """
    i = np.arange(n_channels - 1)
    # Alternate sides horizontally, step upward vertically.
    x = (-1) ** i * (5 + i)
    y = 10 * (i + 1)
    pos = np.flipud(np.r_[np.zeros((1, 2)), np.c_[x, y]])
    return pos
Generate channel positions for a staggered probe .
87
9
239,759
def change_channel_group(self, group):
    """Change the current channel group and refresh channels/positions."""
    assert self._probe is not None
    self._channels = _probe_channels(self._probe, group)
    self._positions = _probe_positions(self._probe, group)
Change the current channel group .
63
6
239,760
def build(self):
    """Build the full HTML source, waiting for the page to finish loading."""
    if self.is_built():  # pragma: no cover
        return
    with _wait_signal(self.loadFinished, 20):
        self.rebuild()
    self._built = True
Build the full HTML source .
49
6
239,761
def add_to_js(self, name, var):
    """Expose a Python object to Javascript under `name`."""
    main_frame = self.page().mainFrame()
    main_frame.addToJavaScriptWindowObject(name, var)
Add an object to Javascript .
39
6
239,762
def eval_js(self, expr):
    """Evaluate a Javascript expression; queue it if the page isn't built yet."""
    if not self.is_built():
        self._pending_js_eval.append(expr)
        return
    logger.log(5, "Evaluate Javascript: `%s`.", expr)
    out = self.page().mainFrame().evaluateJavaScript(expr)
    return _to_py(out)
Evaluate a Javascript expression .
82
7
239,763
def add_column(self, func, name=None, show=True):
    """Add a column function which takes an id and returns a value.

    Usable as a decorator; the column name defaults to the function
    name (lambdas are rejected).
    """
    assert func
    name = name or func.__name__
    if name == '<lambda>':
        raise ValueError("Please provide a valid name for " + name)
    self._columns[name] = {'func': func, 'show': show}
    # Update the headers in the widget.
    data = _create_json_dict(cols=self.column_names)
    self.eval_js('table.setHeaders({});'.format(data))
    return func
Add a column function which takes an id as argument and returns a value .
129
15
239,764
def column_names(self):
    """Return the list of shown column names."""
    return [name for name, d in self._columns.items()
            if d.get('show', True)]
List of column names .
39
5
239,765
def _get_row ( self , id ) : return { name : d [ 'func' ] ( id ) for ( name , d ) in self . _columns . items ( ) }
Create a row dictionary for a given object id .
41
10
239,766
def set_rows(self, ids):
    """Set the rows of the table from a list of integer ids."""
    # NOTE: make sure we have integers and not np.generic objects.
    assert all(isinstance(i, int) for i in ids)
    # Determine the sort column and dir to set after the rows.
    sort_col, sort_dir = self.current_sort
    default_sort_col, default_sort_dir = self.default_sort
    sort_col = sort_col or default_sort_col
    sort_dir = sort_dir or default_sort_dir or 'desc'
    # Set the rows.
    logger.log(5, "Set %d rows in the table.", len(ids))
    items = [self._get_row(id) for id in ids]
    # The rows are sorted by the widget itself after insertion (see the
    # sort_by call below), not pre-sorted here.
    data = _create_json_dict(items=items, cols=self.column_names)
    self.eval_js('table.setData({});'.format(data))
    # Sort.
    if sort_col:
        self.sort_by(sort_col, sort_dir)
Set the rows of the table .
271
7
239,767
def sort_by(self, name, sort_dir='asc'):
    """Sort the table by a given column, ascending or descending."""
    logger.log(5, "Sort by `%s` %s.", name, sort_dir)
    self.eval_js('table.sortBy("{}", "{}");'.format(name, sort_dir))
Sort by a given variable .
70
6
239,768
def select(self, ids, do_emit=True, **kwargs):
    """Select some rows in the table.

    The Javascript-side selection never emits; the Python `select`
    event is emitted here when `do_emit` is True.
    """
    # Select the rows without emiting the event.
    self.eval_js('table.select({}, false);'.format(dumps(ids)))
    if do_emit:
        # Emit the event manually if needed.
        self.emit('select', ids, **kwargs)
Select some rows in the table .
89
7
239,769
def set_interval(self, interval=None, change_status=True,
                 force_update=False):
    """Display the traces and spikes in a given time interval."""
    if interval is None:
        interval = self._interval
    interval = self._restrict_interval(interval)
    # Nothing to do if the interval did not change.
    if not force_update and interval == self._interval:
        return
    self._interval = interval
    start, end = interval
    self.clear()
    # Set the status message.
    if change_status:
        self.set_status('Interval: {:.3f} s - {:.3f} s'.format(start, end))
    # Load the traces.
    traces = self.traces(interval)
    # Find the data bounds.
    ymin, ymax = traces.data.min(), traces.data.max()
    data_bounds = (start, ymin, end, ymax)
    # Used for spike click.
    self._data_bounds = data_bounds
    self._waveform_times = []
    # Plot the traces.
    self._plot_traces(traces.data,
                      color=traces.get('color', None),
                      data_bounds=data_bounds,
                      )
    # Plot the spikes.
    waveforms = traces.waveforms
    assert isinstance(waveforms, list)
    for w in waveforms:
        self._plot_waveforms(waveforms=w.data,
                             color=w.color,
                             channel_ids=w.get('channel_ids', None),
                             start_time=w.start_time,
                             data_bounds=data_bounds,
                             )
        self._waveform_times.append((w.start_time,
                                     w.spike_id,
                                     w.spike_cluster,
                                     w.get('channel_ids', None),
                                     ))
    # Plot the labels.
    if self.do_show_labels:
        self._plot_labels(traces.data, data_bounds=data_bounds)
    self.build()
    self.update()
Display the traces and spikes in a given interval .
428
10
239,770
def half_duration(self):
    """Return half of the duration of the current interval."""
    if self._interval is None:
        return self.interval_duration * .5
    start, end = self._interval
    return (end - start) * .5
Half of the duration of the current interval .
47
9
239,771
def go_right(self):
    """Shift the interval to the right by 20% of its duration."""
    start, end = self._interval
    self.shift((end - start) * .2)
Go to right .
33
4
239,772
def go_left(self):
    """Shift the interval to the left by 20% of its duration."""
    start, end = self._interval
    self.shift(-(end - start) * .2)
Go to left .
34
4
239,773
def widen(self):
    """Increase the interval size around the current time."""
    t = self.time
    h = self.half_duration * self.scaling_coeff_x
    self.set_interval((t - h, t + h))
Increase the interval size .
47
5
239,774
def narrow(self):
    """Decrease the interval size around the current time."""
    t = self.time
    h = self.half_duration / self.scaling_coeff_x
    self.set_interval((t - h, t + h))
Decrease the interval size .
47
6
239,775
def auth_from_hass_config(path=None, **kwargs):
    """Initialize auth from a HASS config directory (auto-detected if None)."""
    if path is None:
        path = config.find_hass_config()
    return Auth(os.path.join(path, ".storage/auth"), **kwargs)
Initialize auth from HASS config .
62
8
239,776
def user_name(self, user_id):
    """Return the display name for a user id, or a placeholder if unknown."""
    user = self.users.get(user_id)
    if user is None:
        return "Unknown user ({})".format(user_id)
    return user["name"]
Return name for user .
50
5
239,777
def default_hass_config_dir():
    """Put together the default configuration directory based on the OS."""
    if os.name == "nt":
        data_dir = os.getenv("APPDATA")
    else:
        data_dir = os.path.expanduser("~")
    return os.path.join(data_dir, ".homeassistant")
Put together the default configuration directory based on the OS .
64
11
239,778
def find_hass_config():
    """Try to find the HASS config directory.

    Inside a Hass.io environment the config lives at /config; otherwise
    fall back to the default directory. Raises ValueError when nothing
    is found.
    """
    if "HASSIO_TOKEN" in os.environ:
        return "/config"
    config_dir = default_hass_config_dir()
    if os.path.isdir(config_dir):
        return config_dir
    raise ValueError(
        "Unable to automatically find the location of Home Assistant "
        "config. Please pass it in."
    )
Try to find HASS config .
85
7
239,779
def _secret_yaml(loader, node):
    """Load secrets.yaml (next to the loaded file) and resolve a !secret tag."""
    fname = os.path.join(os.path.dirname(loader.name), "secrets.yaml")
    try:
        with open(fname, encoding="utf-8") as secret_file:
            secrets = YAML(typ="safe").load(secret_file)
    except FileNotFoundError:
        raise ValueError("Secrets file {} not found".format(fname)) from None
    try:
        return secrets[node.value]
    except KeyError:
        raise ValueError("Secret {} not found".format(node.value)) from None
Load secrets and embed it into the configuration YAML .
141
12
239,780
def _include_yaml(loader, node):
    """Load another YAML file (relative to the current one) for the !include tag."""
    included = os.path.join(os.path.dirname(loader.name), node.value)
    return load_yaml(included)
Load another YAML file and embeds it using the !include tag .
41
16
239,781
def _stub_tag ( constructor , node ) : seen = getattr ( constructor , "_stub_seen" , None ) if seen is None : seen = constructor . _stub_seen = set ( ) if node . tag not in seen : print ( "YAML tag {} is not supported" . format ( node . tag ) ) seen . add ( node . tag ) return { }
Stub a constructor with a dictionary .
85
8
239,782
def load_yaml(fname):
    """Load a YAML file with the HASS-compatible constructors."""
    yaml = YAML(typ="safe")
    # Compat with HASS
    yaml.allow_duplicate_keys = True
    # Stub HASS constructors
    HassSafeConstructor.name = fname
    yaml.Constructor = HassSafeConstructor
    with open(fname, encoding="utf-8") as conf_file:
        # If configuration file is empty YAML returns None
        # We convert that to an empty dict
        return yaml.load(conf_file) or {}
Load a YAML file .
117
7
239,783
def db_url_from_hass_config(path):
    """Find the recorder database url from a HASS config dir.

    Uses the `recorder.db_url` option when present, otherwise the
    default SQLite file; raises ValueError when neither exists.
    """
    config = load_hass_config(path)
    default_path = os.path.join(path, "home-assistant_v2.db")
    default_url = "sqlite:///{}".format(default_path)
    recorder = config.get("recorder")
    if recorder:
        db_url = recorder.get("db_url")
        if db_url is not None:
            return db_url
    if not os.path.isfile(default_path):
        raise ValueError(
            "Unable to determine DB url from hass config at {}".format(path))
    return default_url
Find the recorder database url from a HASS config dir .
150
12
239,784
def localize(dt):
    """Localize a UTC datetime to naive local time; other datetimes pass through."""
    if dt.tzinfo is UTC:
        return (dt + LOCAL_UTC_OFFSET).replace(tzinfo=None)
    # No TZ info so not going to assume anything, return as-is.
    return dt
Localize a datetime object to local time .
60
10
239,785
def sqlalch_datetime(dt):
    """Convert a SQLAlchemy datetime string or object to a UTC-aware datetime."""
    if isinstance(dt, str):
        return datetime.strptime(dt, "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=UTC)
    if dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None:
        # Already timezone-aware: convert to UTC.
        return dt.astimezone(UTC)
    # Naive datetime: assume it is UTC.
    return dt.replace(tzinfo=UTC)
Convert a SQLAlchemy datetime string to a datetime object .
115
15
239,786
def db_from_hass_config(path=None, **kwargs):
    """Initialize a HassDatabase from a HASS config dir (auto-detected if None)."""
    if path is None:
        path = config.find_hass_config()
    url = config.db_url_from_hass_config(path)
    return HassDatabase(url, **kwargs)
Initialize a database from HASS config .
67
9
239,787
def stripped_db_url(url):
    """Return a version of the DB url with the password replaced by ***."""
    parsed = urlparse(url)
    if parsed.password is None:
        return url
    netloc = "{}:***@{}".format(parsed.username, parsed.hostname)
    return parsed._replace(netloc=netloc).geturl()
Return a version of the DB url with the password stripped out .
59
13
239,788
def perform_query(self, query, **params):
    """Perform a query, where query is a string.

    Keyword arguments are passed to the engine as bound parameters.
    Errors are reported and then re-raised for the caller to handle.
    """
    try:
        return self.engine.execute(query, params)
    except Exception:
        # A bare ``except:`` would also trap KeyboardInterrupt/SystemExit;
        # only report genuine errors, then re-raise.
        print("Error with query: {}".format(query))
        raise
Perform a query where query is a string .
45
10
239,789
def fetch_entities(self):
    """Fetch entities for which we have data."""
    query = text("""
        SELECT entity_id
        FROM states
        GROUP BY entity_id
        """)
    response = self.perform_query(query)

    # Group the entity ids by their domain (the part before the ".").
    entities = {}
    domains = set()
    for (entity,) in response:
        domain = entity.split(".")[0]
        domains.add(domain)
        entities.setdefault(domain, []).append(entity)

    self._domains = list(domains)
    self._entities = entities
    print("There are {} entities with data".format(len(entities)))
Fetch entities for which we have data .
131
9
239,790
def fetch_all_data(self, limit=50000):
    """Fetch data for all entities.

    Parameters
    ----------
    limit : int
        Maximum number of state rows to fetch, newest first.

    Raises
    ------
    ValueError
        If the database query fails; the original exception is chained.
    """
    # Query text
    query = text("""
        SELECT domain, entity_id, state, last_changed
        FROM states
        WHERE state NOT IN ('unknown', 'unavailable')
        ORDER BY last_changed DESC
        LIMIT :limit
        """)
    try:
        print("Querying the database, this could take a while")
        response = self.perform_query(query, limit=limit)
        master_df = pd.DataFrame(response.fetchall())
        print("master_df created successfully.")
        self._master_df = master_df.copy()
        self.parse_all_data()
    except Exception as err:
        # Chain the cause so the root error is not lost; a bare
        # ``except:`` would also have swallowed KeyboardInterrupt.
        raise ValueError("Error querying the database.") from err
Fetch data for all entities .
157
7
239,791
def parse_all_data(self):
    """Parses the master df."""
    df = self._master_df
    df.columns = ["domain", "entity", "state", "last_changed"]

    # Check if state is float and store in numericals category.
    df["numerical"] = df["state"].apply(functions.isfloat)

    # Multiindexing
    df.set_index(
        ["domain", "entity", "numerical", "last_changed"], inplace=True
    )
Parses the master df .
132
7
239,792
def correlations ( self ) : corr_df = self . _sensors_num_df . corr ( ) corr_names = [ ] corrs = [ ] for i in range ( len ( corr_df . index ) ) : for j in range ( len ( corr_df . index ) ) : c_name = corr_df . index [ i ] r_name = corr_df . columns [ j ] corr_names . append ( "%s-%s" % ( c_name , r_name ) ) corrs . append ( corr_df . ix [ i , j ] ) corrs_all = pd . DataFrame ( index = corr_names ) corrs_all [ "value" ] = corrs corrs_all = corrs_all . dropna ( ) . drop ( corrs_all [ ( corrs_all [ "value" ] == float ( 1 ) ) ] . index ) corrs_all = corrs_all . drop ( corrs_all [ corrs_all [ "value" ] == float ( - 1 ) ] . index ) corrs_all = corrs_all . sort_values ( "value" , ascending = False ) corrs_all = corrs_all . drop_duplicates ( ) return corrs_all
Calculate the correlation coefficients .
290
7
239,793
def plot(self, entities: List[str]):
    """Basic plot of a numerical sensor data."""
    axis = self._sensors_num_df[entities].plot(figsize=[12, 6])
    # Park the legend outside the axes so it never covers the traces.
    axis.legend(loc="center left", bbox_to_anchor=(1, 0.5))
    axis.set_xlabel("Date")
    axis.set_ylabel("Reading")
    return
Basic plot of a numerical sensor data .
90
8
239,794
def plot(self, entity):
    """Basic plot of a single binary sensor data."""
    # Sample at seconds and ffill
    resampled = self._binary_df[[entity]].resample("s").ffill()
    resampled.columns = ["value"]

    fig, axis = plt.subplots(1, 1, figsize=(16, 2))
    # Paint the whole strip as "off", then overlay "on" intervals.
    axis.fill_between(
        resampled.index, y1=0, y2=1, facecolor="royalblue", label="off"
    )
    axis.fill_between(
        resampled.index,
        y1=0,
        y2=1,
        where=(resampled["value"] > 0),
        facecolor="red",
        label="on",
    )
    axis.set_title(entity)
    axis.set_xlabel("Date")
    axis.set_frame_on(False)
    axis.set_yticks([])
    plt.legend(loc=(1.01, 0.7))
    plt.show()
    return
Basic plot of a single binary sensor data .
232
9
239,795
def is_sf_database(db, model=None):
    """The alias is a Salesforce database."""
    from django.db import connections

    if db is None:
        # With no alias, fall back to whether the model itself is an SF model.
        return getattr(model, '_salesforce_object', False)
    connection = connections[db]
    if connection.settings_dict['ENGINE'] == 'salesforce.backend':
        return True
    return connection.vendor == 'salesforce'
The alias is a Salesforce database .
83
8
239,796
def allow_migrate(self, db, app_label, model_name=None, **hints):
    """Don't attempt to sync SF models to non SF databases and vice versa.

    Returning False blocks the migration on that alias; falling through
    (implicit None) means this router has no opinion, so Django consults
    the next router.
    """
    if model_name:
        model = apps.get_model(app_label, model_name)
    else:
        # hints are used with less priority, because many hints are dynamic
        # models made by migrations on a '__fake__' module which are not
        # SalesforceModels
        model = hints.get('model')
    if hasattr(model, '_salesforce_object'):
        # SF models can be migrated if SALESFORCE_DB_ALIAS is e.g.
        # a sqlite3 database or any non-SF database.
        if not (is_sf_database(db) or db == self.sf_alias):
            return False
    else:
        # Note the operator precedence: "A or (B and C)" — a non-SF model
        # is blocked on an SF database, or on the SF alias when that alias
        # is not 'default'.
        if is_sf_database(db) or self.sf_alias != 'default' and db == self.sf_alias:
            return False

    # TODO: It is usual that "migrate" is currently disallowed for SF.
    # In the future it can be implemented to do a deep check by
    # introspection of compatibily between Django models and SF database.
    if hasattr(model, '_salesforce_object'):
        # return False
        pass
Don't attempt to sync SF models to non-SF databases and vice versa.
268
15
239,797
def update(self, **kwargs):
    """Customize the lazy field and return it for chaining."""
    # Once the real field has been created, further customization is a bug.
    assert not self.called
    for key, value in kwargs.items():
        self.kw[key] = value
    return self
Customize the lazy field
30
5
239,798
def create(self):
    """Create a normal field from the lazy field."""
    # Creating twice (or after customization is frozen) is a bug.
    assert not self.called
    field_class = self.klass
    return field_class(*self.args, **self.kw)
Create a normal field from the lazy field
29
8
239,799
def get_queryset(self):
    """Returns a QuerySet which access remote SF objects."""
    if not router.is_sf_database(self.db):
        # Non-SF aliases use the standard Django queryset machinery.
        return super(SalesforceManager, self).get_queryset()
    sf_query = models_sql_query.SalesforceQuery(
        self.model, where=compiler.SalesforceWhereNode
    )
    return query.SalesforceQuerySet(self.model, query=sf_query, using=self.db)
Returns a QuerySet which access remote SF objects .
88
10