idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
35,100
def add_global_handler(self, event, handler, priority=0):
    """Register *handler* for *event*, keeping handlers sorted by priority.

    The callable is wrapped in a PrioritizedHandler and inserted into the
    per-event list while the reactor mutex is held.
    """
    entry = PrioritizedHandler(priority, handler)
    with self.mutex:
        bisect.insort(self.handlers.setdefault(event, []), entry)
Adds a global handler function for a specific event type .
35,101
def remove_global_handler(self, event, handler):
    """Remove the first registered handler whose callback is *handler*.

    Returns 1 when a handler was removed and 0 otherwise.  Fix: the
    original fell off the end and implicitly returned None when no
    callback matched; returning 0 keeps the int contract and stays
    falsy for existing callers.
    """
    with self.mutex:
        if event not in self.handlers:
            return 0
        for h in self.handlers[event]:
            if handler == h.callback:
                self.handlers[event].remove(h)
                return 1
        return 0
Removes a global handler function .
35,102
def dcc(self, dcctype="chat"):
    """Create a DCCConnection of *dcctype*, track it, and return it."""
    with self.mutex:
        connection = DCCConnection(self, dcctype)
        self.connections.append(connection)
        return connection
Creates and returns a DCCConnection object .
35,103
def _handle_event ( self , connection , event ) : with self . mutex : matching_handlers = sorted ( self . handlers . get ( "all_events" , [ ] ) + self . handlers . get ( event . type , [ ] ) ) for handler in matching_handlers : result = handler . callback ( connection , event ) if result == "NO MORE" : return
Handle an Event event incoming on ServerConnection connection .
35,104
def disconnect ( self , message = "" ) : try : del self . connected except AttributeError : return try : self . socket . shutdown ( socket . SHUT_WR ) self . socket . close ( ) except socket . error : pass del self . socket self . reactor . _handle_event ( self , Event ( "dcc_disconnect" , self . peeraddress , "" , [ m...
Hang up the connection and close the object .
35,105
def privmsg(self, text):
    """Send *text* to the DCC peer (chat sessions are newline-terminated)."""
    payload = text + '\n' if self.dcctype == 'chat' else text
    return self.send_bytes(self.encode(payload))
Send text to DCC peer .
35,106
def send_bytes(self, bytes):
    """Transmit raw *bytes* to the DCC peer; a socket error disconnects.

    NOTE(review): the parameter shadows the builtin ``bytes`` — kept for
    interface compatibility with existing keyword callers.
    """
    try:
        self.socket.send(bytes)
        log.debug("TO PEER: %r\n", bytes)
    except socket.error:
        self.disconnect("Connection reset by peer.")
Send data to DCC peer .
35,107
def dcc(self, *args, **kwargs):
    """Create a DCCConnection via the reactor and remember it locally."""
    connection = self.reactor.dcc(*args, **kwargs)
    self.dcc_connections.append(connection)
    return connection
Create and associate a new DCCConnection object .
35,108
def dcc_connect(self, address, port, dcctype="chat"):
    """Deprecated: connect to a DCC peer; prefer self.dcc(type).connect()."""
    warnings.warn("Use self.dcc(type).connect()", DeprecationWarning)
    connection = self.dcc(dcctype)
    return connection.connect(address, port)
Connect to a DCC peer .
35,109
def dcc_listen(self, dcctype="chat"):
    """Deprecated: listen for a DCC peer; prefer self.dcc(type).listen()."""
    warnings.warn("Use self.dcc(type).listen()", DeprecationWarning)
    connection = self.dcc(dcctype)
    return connection.listen()
Listen for connections from a DCC peer .
35,110
def dequote(message):
    """Dequote *message* according to the CTCP specification.

    Low-level quoting is undone first; when the CTCP delimiter is absent
    the whole message is returned as a one-element list, otherwise the
    delimiter-separated chunks are expanded into messages.
    """
    message = low_level_regexp.sub(_low_level_replace, message)
    if DELIMITER not in message:
        return [message]
    return list(_gen_messages(message.split(DELIMITER)))
Dequote a message according to CTCP specifications .
35,111
def connect ( server , password = None , factory_class = VNCDoToolFactory , proxy = ThreadedVNCClientProxy , timeout = None ) : if not reactor . running : global _THREAD _THREAD = threading . Thread ( target = reactor . run , name = 'Twisted' , kwargs = { 'installSignalHandlers' : False } ) _THREAD . daemon = True _THR...
Connect to a VNCServer and return a Client instance that is usable in the main thread of non - Twisted Python Applications EXPERIMENTAL .
35,112
def keyPress(self, key):
    """Send a complete key press (down then up) for *key*; returns self."""
    log.debug('keyPress %s', key)
    self.keyDown(key)
    self.keyUp(key)
    return self
Send a key press to the server
35,113
def mousePress(self, button):
    """Click *button* (down then up) at the last pointer position.

    Returns self for chaining.  Fix: removed the dead local ``buttons``
    the original computed but never used — mouseDown() already updates
    self.buttons itself.
    """
    log.debug('mousePress %s', button)
    self.mouseDown(button)
    self.mouseUp(button)
    return self
Send a mouse click at the last set position
35,114
def mouseDown(self, button):
    """Press *button* at the last pointer position; returns self."""
    log.debug('mouseDown %s', button)
    mask = 1 << (button - 1)
    self.buttons |= mask
    self.pointerEvent(self.x, self.y, buttonmask=self.buttons)
    return self
Send a mouse button down at the last set position
35,115
def captureRegion(self, filename, x, y, w, h):
    """Save the (x, y, w, h) region of the current display to *filename*."""
    log.debug('captureRegion %s', filename)
    right, bottom = x + w, y + h
    return self._capture(filename, x, y, right, bottom)
Save a region of the current display to filename
35,116
def expectScreen(self, filename, maxrms=0):
    """Wait until the whole display matches the target image *filename*."""
    log.debug('expectScreen %s', filename)
    return self._expectFramebuffer(filename, 0, 0, maxrms)
Wait until the display matches a target image
35,117
def expectRegion(self, filename, x, y, maxrms=0):
    """Wait until the screen portion at (x, y) matches the target image."""
    log.debug('expectRegion %s (%s, %s)', filename, x, y)
    return self._expectFramebuffer(filename, x, y, maxrms)
Wait until a portion of the screen matches the target image
35,118
def setImageMode ( self ) : if self . _version_server == 3.889 : self . setPixelFormat ( bpp = 16 , depth = 16 , bigendian = 0 , truecolor = 1 , redmax = 31 , greenmax = 63 , bluemax = 31 , redshift = 11 , greenshift = 5 , blueshift = 0 ) self . image_mode = "BGR;16" elif ( self . truecolor and ( not self . bigendian )...
Extracts color ordering and 24 vs . 32 bpp info out of the pixel format information
35,119
def _handleDecodeHextileRAW ( self , block , bg , color , x , y , width , height , tx , ty , tw , th ) : self . updateRectangle ( tx , ty , tw , th , block ) self . _doNextHextileSubrect ( bg , color , x , y , width , height , tx , ty )
the tile is in raw encoding
35,120
def _handleDecodeHextileSubrectsColoured ( self , block , bg , color , subrects , x , y , width , height , tx , ty , tw , th ) : sz = self . bypp + 2 pos = 0 end = len ( block ) while pos < end : pos2 = pos + self . bypp color = block [ pos : pos2 ] xy = ord ( block [ pos2 ] ) wh = ord ( block [ pos2 + 1 ] ) sx = xy >>...
subrects with their own color
35,121
def fillRectangle(self, x, y, width, height, color):
    """Fill the area with *color*, a string in the configured pixel format."""
    tile = color * (width * height)
    self.updateRectangle(x, y, width, height, tile)
fill the area with the color . the color is a string in the pixel format set up earlier
35,122
def setKey(self, key):
    """Set the DES key after mirroring the bit order of every key byte.

    RFB authentication encrypts the server challenge with the password
    as DES key, but each byte's bits must be reversed first.  Fix:
    iterate the key directly instead of the range(len(key)) anti-idiom.
    """
    newkey = []
    for char in key:
        src = ord(char)
        mirrored = 0
        for bit in range(8):
            if src & (1 << bit):
                mirrored |= 1 << (7 - bit)
        newkey.append(chr(mirrored))
    super(RFBDes, self).setKey(newkey)
RFB protocol for authentication requires client to encrypt challenge sent by server with password using DES method . However bits in each byte of the password are put in reverse order before using it as encryption key .
35,123
def not_played(self) -> int:
    """Number of promo games the player has not yet played."""
    total = len(self._data[MiniSeriesData].progress)
    return total - len(self.progress)
The number of games in the player's promos that they haven't played yet .
35,124
def _rgetattr ( obj , key ) : for k in key . split ( "." ) : obj = getattr ( obj , k ) return obj
Recursive getattr for handling dots in keys .
35,125
def draw ( self ) : self . screen . clear ( ) x , y = 1 , 1 max_y , max_x = self . screen . getmaxyx ( ) max_rows = max_y - y lines , current_line = self . get_lines ( ) scroll_top = getattr ( self , 'scroll_top' , 0 ) if current_line <= scroll_top : scroll_top = 0 elif current_line - scroll_top > max_rows : scroll_top...
draw the curses ui on the screen handle scroll if needed
35,126
def get_file ( file_id : str , download ) : data = current_app . config [ "data" ] dao = data . get_files_dao ( ) file_fp , filename , upload_date = dao . get ( file_id ) if download : mime = mimetypes . guess_type ( filename ) [ 0 ] if mime is None : mime = "binary/octet-stream" basename = os . path . basename ( filen...
Get a specific file from GridFS .
35,127
def get_files_zip ( run_id : int , filetype : _FileType ) : data = current_app . config [ "data" ] dao_runs = data . get_run_dao ( ) dao_files = data . get_files_dao ( ) run = dao_runs . get ( run_id ) if filetype == _FileType . ARTIFACT : target_files = run [ 'artifacts' ] elif filetype == _FileType . SOURCE : target_...
Send all artifacts or sources of a run as ZIP .
35,128
def get ( self , file_id : Union [ str , bson . ObjectId ] ) -> [ typing . BinaryIO , str , datetime . datetime ] : if isinstance ( file_id , str ) : file_id = bson . ObjectId ( file_id ) file = self . _fs . get ( file_id ) return file , file . filename , file . upload_date
Return the file identified by a file_id string .
35,129
def find_record(self, collection_name, query):
    """Return the first record matching *query* in the collection, or None.

    Fix: take the first cursor element with next() and a default rather
    than the original one-iteration for loop.
    """
    cursor = self._get_collection(collection_name).find(query)
    return next(iter(cursor), None)
Return the first record matching the given Mongo query .
35,130
def find_records ( self , collection_name , query = { } , sort_by = None , sort_direction = None , start = 0 , limit = None ) : cursor = self . _get_collection ( collection_name ) . find ( query ) if sort_by is not None : cursor = self . _apply_sort ( cursor , sort_by , sort_direction ) cursor = cursor . skip ( start )...
Return a cursor of records from the given MongoDB collection .
35,131
def _get_database(self, database_name):
    """Return the PyMongo database object named *database_name*.

    Raises DataSourceError when the name is invalid.  Fix: the error
    message now reports the requested *database_name*; the original
    formatted the unrelated self._database attribute.
    """
    try:
        return self._client[database_name]
    except InvalidName as ex:
        raise DataSourceError("Cannot connect to database %s!" % database_name) from ex
Get PyMongo client pointing to the current database .
35,132
def _get_collection ( self , collection_name ) : try : return self . _database [ collection_name ] except InvalidName as ex : raise DataSourceError ( "Cannot access MongoDB collection %s!" % collection_name ) from ex except Exception as ex : raise DataSourceError ( "Unexpected error when accessing MongoDB" "collection ...
Get PyMongo client pointing to the current DB and the given collection .
35,133
def get(self, run_id):
    """Fetch a single run by id; raise NotFoundError when it is absent."""
    parsed = self._parse_id(run_id)
    record = self.generic_dao.find_record(self.collection_name, {"_id": parsed})
    if record is None:
        raise NotFoundError("Run %s not found." % run_id)
    return record
Get a single run from the database .
35,134
def _apply_sort(cursor, sort_by, sort_direction):
    """Sort *cursor* by *sort_by*; "desc" (any case) descends, else ascends."""
    descending = sort_direction is not None and sort_direction.lower() == "desc"
    direction = pymongo.DESCENDING if descending else pymongo.ASCENDING
    return cursor.sort(sort_by, direction)
Apply sort to a cursor .
35,135
def _to_mongo_query ( query ) : mongo_query = [ ] for clause in query [ "filters" ] : if clause . get ( "type" ) is None : mongo_clause = MongoRunDAO . _simple_clause_to_query ( clause ) else : mongo_clause = MongoRunDAO . _to_mongo_query ( clause ) mongo_query . append ( mongo_clause ) if len ( mongo_query ) == 0 : re...
Convert the query received by the Sacred Web API to a MongoDB query .
35,136
def _simple_clause_to_query ( clause ) : mongo_clause = { } value = clause [ "value" ] if clause [ "field" ] == "status" and clause [ "value" ] in [ "DEAD" , "RUNNING" ] : return MongoRunDAO . _status_filter_to_query ( clause ) if clause [ "operator" ] == "==" : mongo_clause [ clause [ "field" ] ] = value elif clause [...
Convert a clause from the Sacred Web API format to the MongoDB format .
35,137
def _status_filter_to_query(clause):
    """Translate a RUNNING/DEAD status clause into a MongoDB clause.

    A "!=" operator wraps the clause in "$not".
    NOTE(review): assumes clause["value"] is "RUNNING" or "DEAD"; any
    other value raises UnboundLocalError, exactly as in the original.
    """
    if clause["value"] == "RUNNING":
        mongo_clause = MongoRunDAO.RUNNING_NOT_DEAD_CLAUSE
    elif clause["value"] == "DEAD":
        mongo_clause = MongoRunDAO.RUNNING_DEAD_RUN_CLAUSE
    if clause["operator"] == "!=":
        mongo_clause = {"$not": mongo_clause}
    return mongo_clause
Convert a clause querying for an experiment state RUNNING or DEAD .
35,138
def delete(self, run_id):
    """Delete the run with *run_id* from the backing store."""
    parsed = self._parse_id(run_id)
    return self.generic_dao.delete_record(self.collection_name, parsed)
Delete run with the given id from the backend .
35,139
def add_mongo_config ( app , simple_connection_string , mongo_uri , collection_name ) : if mongo_uri != ( None , None ) : add_mongo_config_with_uri ( app , mongo_uri [ 0 ] , mongo_uri [ 1 ] , collection_name ) if simple_connection_string is not None : print ( "Ignoring the -m option. Overridden by " "a more specific op...
Configure the application to use MongoDB .
35,140
def add_mongo_config_simple ( app , connection_string , collection_name ) : split_string = connection_string . split ( ":" ) config = { "host" : "localhost" , "port" : 27017 , "db" : "sacred" } if len ( split_string ) > 0 and len ( split_string [ - 1 ] ) > 0 : config [ "db" ] = split_string [ - 1 ] if len ( split_strin...
Configure the app to use MongoDB .
35,141
def add_mongo_config_with_uri(app, connection_string_uri, database_name, collection_name):
    """Point the application's data access at MongoDB via a connection URI."""
    access = PyMongoDataAccess.build_data_access_with_uri(
        connection_string_uri, database_name, collection_name)
    app.config["data"] = access
Configure PyMongo with a MongoDB connection string .
35,142
def stop_all_tensorboards():
    """Terminate every tracked Process whose command is ``tensorboard``."""
    for proc in Process.instances:
        print("Process '%s', running %d" % (proc.command[0], proc.is_running()))
        if proc.is_running() and proc.command[0] == "tensorboard":
            proc.terminate()
Terminate all TensorBoard instances .
35,143
def run_tensorboard ( logdir , listen_on = "0.0.0.0" , port = 0 , tensorboard_args = None , timeout = 10 ) : if tensorboard_args is None : tensorboard_args = [ ] tensorboard_instance = Process . create_process ( TENSORBOARD_BINARY . split ( " " ) + [ "--logdir" , logdir , "--host" , listen_on , "--port" , str ( port ) ...
Launch a new TensorBoard instance .
35,144
def parse_port_from_tensorboard_output ( tensorboard_output : str ) -> int : search = re . search ( "at http://[^:]+:([0-9]+)" , tensorboard_output ) if search is not None : port = search . group ( 1 ) return int ( port ) else : raise UnexpectedOutputError ( tensorboard_output , "Address and port where Tensorboard has ...
Parse tensorboard port from its outputted message .
35,145
def connect(self):
    """Open the MongoDB client and bind the database and generic DAO."""
    self._client = self._create_client()
    self._db = getattr(self._client, self._db_name)
    self._generic_dao = GenericDAO(self._client, self._db_name)
Initialize the database connection .
35,146
def build_data_access(host, port, database_name, collection_name):
    """Construct a PyMongoDataAccess gateway from host/port details."""
    uri = "mongodb://%s:%d" % (host, port)
    return PyMongoDataAccess(uri, database_name, collection_name)
Create data access gateway .
35,147
def run_tensorboard ( run_id , tflog_id ) : data = current_app . config [ "data" ] run = data . get_run_dao ( ) . get ( run_id ) base_dir = Path ( run [ "experiment" ] [ "base_dir" ] ) log_dir = Path ( run [ "info" ] [ "tensorflow" ] [ "logdirs" ] [ tflog_id ] ) if log_dir . is_absolute ( ) : path_to_log_dir = log_dir ...
Launch TensorBoard for a given run ID and log ID of that run .
35,148
def get(self, run_id, metric_id):
    """Read one metric of (run_id, metric_id) and shape it for the API."""
    parsed_run = self._parse_run_id(run_id)
    query = self._build_query(parsed_run, metric_id)
    row = self._read_metric_from_db(metric_id, parsed_run, query)
    return self._to_intermediary_object(row)
Read a metric of the given id and run .
35,149
def delete(self, run_id):
    """Remove every metric record belonging to *run_id*."""
    query = {"run_id": self._parse_run_id(run_id)}
    self.generic_dao.delete_record(self.metrics_collection_name, query)
Delete all metrics belonging to the given run .
35,150
def delete_run(self, run_id):
    """Delete a run and its metrics from the data store."""
    store = self.datastorage
    store.get_metrics_dao().delete(run_id)
    store.get_run_dao().delete(run_id)
Delete run of the given run_id .
35,151
def get_runs ( self , sort_by = None , sort_direction = None , start = 0 , limit = None , query = { "type" : "and" , "filters" : [ ] } ) : all_run_ids = os . listdir ( self . directory ) def run_iterator ( ) : blacklist = set ( [ "_sources" ] ) for id in all_run_ids : if id in blacklist : continue try : yield self . ge...
Return all runs in the file store .
35,152
def get(self, run_id):
    """Load a run from the file store, tolerating a missing info file."""
    config = _read_json(_path_to_config(self.directory, run_id))
    run = _read_json(_path_to_run(self.directory, run_id))
    try:
        info = _read_json(_path_to_info(self.directory, run_id))
    except IOError:
        info = {}
    return _create_run(run_id, run, config, info)
Return the run associated with a particular run_id .
35,153
def get_metric ( run_id , metric_id ) : data = current_app . config [ "data" ] dao = data . get_metrics_dao ( ) metric = dao . get ( run_id , metric_id ) return Response ( render_template ( "api/metric.js" , run_id = metric [ "run_id" ] , metric_id = metric [ "metric_id" ] , name = metric [ "name" ] , steps = metric [ ...
Get a specific Sacred metric from the database .
35,154
def initialize ( self , app : Flask , app_config ) : debug = app_config [ "debug" ] port = app_config [ "http.port" ] if debug : self . started_on_port = port app . run ( host = "0.0.0.0" , debug = True , port = port ) else : for port in range ( port , port + 50 ) : self . http_server = WSGIServer ( ( '0.0.0.0' , port ...
Prepare the server to run and determine the port .
35,155
def api_run_delete(run_id):
    """Delete the run and everything hanging off it; confirm as plain text."""
    data = current_app.config["data"]
    RunFacade(data).delete_run(run_id)
    return "DELETED run %s" % run_id
Delete the given run and corresponding entities .
35,156
def api_run_get ( run_id ) : data = current_app . config [ "data" ] run = data . get_run_dao ( ) . get ( run_id ) records_total = 1 if run is not None else 0 if records_total == 0 : return Response ( render_template ( "api/error.js" , error_code = 404 , error_message = "Run %s not found." % run_id ) , status = 404 , mi...
Return a single run as a JSON object .
35,157
def parse_int_arg(name, default):
    """Return URL parameter *name* as an int, or *default* when absent.

    Fix: look the parameter up once instead of twice.
    """
    raw = request.args.get(name)
    return default if raw is None else int(raw)
Return a given URL parameter as int or return the default value .
35,158
def parse_query_filter():
    """Parse the run query filter from the ``queryFilter`` URL parameter.

    Returns an empty "and" filter when the parameter is missing.
    Fix: validate with isinstance + raise instead of ``assert`` —
    asserts are stripped under ``python -O``, silently skipping the
    validation.  (This changes the failure type from AssertionError to
    ValueError for malformed input.)
    """
    query_string = request.args.get("queryFilter")
    if query_string is None:
        return {"type": "and", "filters": []}
    query = json.loads(query_string)
    if not isinstance(query, dict) or not isinstance(query.get("type"), str):
        raise ValueError("queryFilter must be a JSON object with a string 'type'")
    return query
Parse the Run query filter from the URL as a dictionary .
35,159
def get_runs ( ) : data = current_app . config [ "data" ] draw = parse_int_arg ( "draw" , 1 ) start = parse_int_arg ( "start" , 0 ) length = parse_int_arg ( "length" , - 1 ) length = length if length >= 0 else None order_column = request . args . get ( "order[0][column]" ) order_dir = request . args . get ( "order[0][d...
Get all runs sort it and return a response .
35,160
def get(self, file_id: str) -> [typing.BinaryIO, str, datetime.datetime]:
    """Return the file for *file_id* with its name and upload date.

    Not implemented for the FileStore backend.  Fix: de-garbled the
    original message ("Downloading files for downloading files ...").
    """
    raise NotImplementedError(
        "Downloading files from the FileStore has not been implemented yet.")
Return the file identified by a file_id string its file name and upload date .
35,161
def timediff(time):
    """Seconds elapsed between now (naive UTC) and the given datetime."""
    return (datetime.datetime.utcnow() - time).total_seconds()
Return the difference in seconds between now and the given time .
35,162
def last_line(text):
    """Return the last non-empty line of *text* ("" when none exists)."""
    result = ""
    while not result and text:
        cut = max(0, text.rfind("\n"))
        result = text[cut:].strip("\r\n ")
        text = text[:cut]
    return result
Get the last meaningful line of the text that is the last non - empty line .
35,163
def dump_json(obj):
    """Serialize *obj* to a JSON string, tolerating NaN and BSON types."""
    return simplejson.dumps(obj, ignore_nan=True, default=json_util.default)
Dump Python object as JSON string .
35,164
def terminate(self, wait=False):
    """Stop the child process, closing its stdout; optionally wait for exit."""
    if self.proc is None:
        return
    self.proc.stdout.close()
    try:
        self.proc.terminate()
    except ProcessLookupError:
        pass
    if wait:
        self.proc.wait()
Terminate the process .
35,165
def terminate_all(wait=False):
    """Terminate every Process instance that is still running."""
    for instance in Process.instances:
        if not instance.is_running():
            continue
        instance.terminate(wait)
Terminate all processes .
35,166
def calc_worklog(stdout=Ellipsis, stderr=Ellipsis, verbose=False):
    """Construct the pimms worklog from stdout/stderr/verbose options.

    Column width comes from the COLUMNS environment variable, falling
    back to 80.  Fix: catch only the exceptions the lookup/parse can
    raise (KeyError, ValueError) instead of a blanket Exception.
    """
    try:
        cols = int(os.environ['COLUMNS'])
    except (KeyError, ValueError):
        cols = 80
    return pimms.worklog(columns=cols, stdout=stdout, stderr=stderr, verbose=verbose)
calc_worklog constructs the worklog from the stdout stderr stdin and verbose arguments .
35,167
def calc_subject ( argv , worklog ) : if len ( argv ) == 0 : raise ValueError ( 'No subject-id given' ) elif len ( argv ) > 1 : worklog . warn ( 'WARNING: Unused subject arguments: %s' % ( argv [ 1 : ] , ) ) subject_id = argv [ 0 ] try : sub = freesurfer_subject ( subject_id ) if sub is not None : worklog ( 'Using Free...
calc_subject converts a subject_id into a subject object .
35,168
def calc_atlases ( worklog , atlas_subject_id = 'fsaverage' ) : try : sub = freesurfer_subject ( atlas_subject_id ) except Exception : sub = None if sub is None : try : sub = hcp_subject ( atlas_subject_id ) except Exception : sub = None if sub is None : raise ValueError ( 'Could not load atlas subject %s' % atlas_subj...
calc_atlases finds all available atlases in the possible subject directories of the given atlas subject .
35,169
def calc_filemap ( atlas_properties , subject , atlas_version_tags , worklog , output_path = None , overwrite = False , output_format = 'mgz' , create_directory = False ) : if output_path is None : output_path = os . path . join ( subject . path , 'surf' ) if not os . path . isdir ( output_path ) : output_path = subjec...
calc_filemap is a calculator that converts the atlas properties nested - map into a single - depth map whose keys are filenames and whose values are the interpolated property data .
35,170
def parse_type ( self , hdat , dataobj = None ) : try : dataobj = dataobj . dataobj except Exception : pass dtype = np . asarray ( dataobj ) . dtype if dataobj else self . default_type ( ) if hdat and 'type' in hdat : dtype = np . dtype ( hdat [ 'type' ] ) elif hdat and 'dtype' in hdat : dtype = np . dtype ( hdat [ 'dt...
Parses the dtype out of the header data or the array depending on which is given ; if both then the header - data overrides the array ; if neither then np . float32 .
35,171
def parse_affine(self, hdat, dataobj=None):
    """Return the affine from header data *hdat*, else the default affine.

    NOTE(review): *dataobj* is accepted but unused — presumably for
    signature parity with the other parse_* methods; confirm.
    """
    source = hdat['affine'] if 'affine' in hdat else self.default_affine()
    return to_affine(source)
Parses the affine out of the given header data and yields it .
35,172
def _parse_field_arguments ( arg , faces , edges , coords ) : if not hasattr ( arg , '__iter__' ) : raise RuntimeError ( 'field argument must be a list-like collection of instructions' ) pot = [ _parse_field_argument ( instruct , faces , edges , coords ) for instruct in arg ] if len ( pot ) <= 1 : return pot [ 0 ] else...
See mesh_register .
35,173
def retino_colors ( vcolorfn , * args , ** kwargs ) : 'See eccen_colors, angle_colors, sigma_colors, and varea_colors.' if len ( args ) == 0 : def _retino_color_pass ( * args , ** new_kwargs ) : return retino_colors ( vcolorfn , * args , ** { k : ( new_kwargs [ k ] if k in new_kwargs else kwargs [ k ] ) for k in set ( ...
See eccen_colors angle_colors sigma_colors and varea_colors .
35,174
def _load_fsLR_atlasroi ( filename , data ) : ( fdir , fnm ) = os . path . split ( filename ) fparts = fnm . split ( '.' ) atl = fparts [ - 3 ] if atl in _load_fsLR_atlasroi . atlases : return _load_fsLR_atlasroi . atlases [ atl ] sid = data [ 'id' ] fnm = [ os . path . join ( fdir , '%d.%s.atlasroi.%s.shape.gii' % ( s...
Loads the appropriate atlas for the given data ; data may point to a cifti file whose atlas is needed or to an atlas file .
35,175
def _load_fsLR_atlasroi_for_size ( size , sid = 100610 ) : from . core import subject fls = _load_fsLR_atlasroi_for_size . sizes if size not in fls : raise ValueError ( 'unknown fs_LR atlas size: %s' % size ) ( n , fls ) = _load_fsLR_atlasroi_for_size . sizes [ size ] fl = os . path . join ( subject ( sid ) . path , 'M...
Loads the appropriate atlas for the given size of data ; size should be the number of stored vertices and sub - corticel voxels stored in the cifti file .
35,176
def calc_arguments ( args ) : ( args , opts ) = _retinotopy_parser ( args ) if opts [ 'help' ] : print ( info , file = sys . stdout ) sys . exit ( 1 ) verbose = opts [ 'verbose' ] def note ( s ) : if verbose : print ( s , file = sys . stdout ) sys . stdout . flush ( ) return verbose def error ( s ) : print ( s , file =...
calc_arguments is a calculator that parses the command - line arguments for the registration command and produces the subject the model the log function and the additional options .
35,177
def calc_retinotopy ( note , error , subject , clean , run_lh , run_rh , invert_rh_angle , max_in_eccen , min_in_eccen , angle_lh_file , theta_lh_file , eccen_lh_file , rho_lh_file , weight_lh_file , radius_lh_file , angle_rh_file , theta_rh_file , eccen_rh_file , rho_rh_file , weight_rh_file , radius_rh_file ) : ctcs ...
calc_retinotopy extracts the retinotopy options from the command line loads the relevant files and stores them as properties on the subject s lh and rh cortices .
35,178
def calc_registrations ( note , error , cortices , model , model_sym , weight_min , scale , prior , max_out_eccen , max_steps , max_step_size , radius_weight , field_sign_weight , resample , invert_rh_angle , part_vol_correct ) : rsamp = ( 'fsaverage_sym' if model_sym else 'fsaverage' ) if resample else False res = { }...
calc_registrations is the calculator that performs the registrations for the left and right hemisphere ; these are returned as the immutable maps yielded from the register_retinotopy command .
35,179
def save_surface_files ( note , error , registrations , subject , no_surf_export , no_reg_export , surface_format , surface_path , angle_tag , eccen_tag , label_tag , radius_tag , registration_name ) : if no_surf_export : return { 'surface_files' : ( ) } surface_format = surface_format . lower ( ) if surface_format in ...
save_surface_files is the calculator that saves the registration data out as surface files which are put back in the registration as the value surface_files .
35,180
def save_volume_files ( note , error , registrations , subject , no_vol_export , volume_format , volume_path , angle_tag , eccen_tag , label_tag , radius_tag ) : if no_vol_export : return { 'volume_files' : ( ) } volume_format = volume_format . lower ( ) if volume_format in [ 'mgh' , 'mgz' , 'auto' , 'automatic' , 'def...
save_volume_files is the calculator that saves the registration data out as volume files which are put back in the registration as the value volume_files .
35,181
def calc_empirical_retinotopy ( cortex , polar_angle = None , eccentricity = None , pRF_radius = None , weight = None , eccentricity_range = None , weight_min = 0 , invert_rh_angle = False , partial_voluming_correction = False ) : data = { } n = cortex . vertex_count ( emin , emax ) = ( - np . inf , np . inf ) if eccen...
calc_empirical_retinotopy computes the value empirical_retinotopy which is an itable object storing the retinotopy data for the registration .
35,182
def calc_model ( cortex , model_argument , model_hemi = Ellipsis , radius = np . pi / 3 ) : if pimms . is_str ( model_argument ) : h = cortex . chirality if model_hemi is Ellipsis else None if model_hemi is None else model_hemi model = retinotopy_model ( model_argument , hemi = h , radius = radius ) else : model = mode...
calc_model loads the appropriate model object given the model argument which may given the name of the model or a model object itself .
35,183
def calc_anchors ( preregistration_map , model , model_hemi , scale = 1 , sigma = Ellipsis , radius_weight = 0 , field_sign_weight = 0 , invert_rh_field_sign = False ) : wgts = preregistration_map . prop ( 'weight' ) rads = preregistration_map . prop ( 'radius' ) if np . isclose ( radius_weight , 0 ) : radius_weight = ...
calc_anchors is a calculator that creates a set of anchor instructions for a registration .
35,184
def calc_registration ( preregistration_map , anchors , max_steps = 2000 , max_step_size = 0.05 , method = 'random' ) : pmap = preregistration_map if is_tuple ( max_steps ) or is_list ( max_steps ) : ( max_steps , stride ) = max_steps traj = [ preregistration_map . coordinates ] x = preregistration_map . coordinates fo...
calc_registration is a calculator that creates the registration coordinates .
35,185
def calc_prediction ( registered_map , preregistration_mesh , native_mesh , model ) : coords3d = np . array ( preregistration_mesh . coordinates ) idcs = registered_map . labels coords3d [ : , idcs ] = registered_map . meta ( 'projection' ) . inverse ( registered_map . coordinates ) rmesh = preregistration_mesh . copy ...
calc_registration_prediction is a pimms calculator that creates the both the prediction and the registration_prediction both of which are pimms itables including the fields polar_angle eccentricity and visual_area . The registration_prediction data describe the vertices for the registered_map not necessarily of the nat...
35,186
def ticker(self, currency="", **kwargs):
    """Fetch cryptocurrency ticker data ordered by rank (max 100 results).

    Extra keyword arguments (e.g. ``start``/``limit`` for pagination)
    are forwarded as request parameters.
    """
    params = dict(kwargs)
    if currency:
        currency = str(currency) + '/'
    return self.__request('ticker/' + currency, params)
This endpoint displays cryptocurrency ticker data in order of rank . The maximum number of results per call is 100 . Pagination is possible by using the start and limit parameters .
35,187
def _surpress_formatting_errors ( fn ) : @ wraps ( fn ) def inner ( * args , ** kwargs ) : try : return fn ( * args , ** kwargs ) except ValueError : return "" return inner
I know this is dangerous and the wrong way to solve the problem but when using both row and column summaries it's easier to just swallow errors so users can format their tables how they need .
35,188
def _format_numer(number_format, prefix='', suffix=''):
    """Return a formatter rendering numbers as prefix + format + suffix.

    Non-numeric values raise TypeError (which the suppression decorator
    does NOT swallow — it only catches ValueError).  Fix: corrected the
    typo in the error message ("Numberic" -> "Numeric").
    """
    @_surpress_formatting_errors
    def inner(v):
        if isinstance(v, Number):
            return ("{{}}{{:{}}}{{}}"
                    .format(number_format)
                    .format(prefix, v, suffix))
        raise TypeError("Numeric type required.")
    return inner
Format a number to a string .
35,189
def as_percent(precision=2, **kwargs):
    """Return a formatter converting numbers to percentage strings.

    NOTE(review): extra **kwargs are accepted but unused — presumably
    for interface parity with the other formatter factories; confirm.
    """
    if not isinstance(precision, Integral):
        raise TypeError("Precision must be an integer.")
    fmt = ".{}%".format(precision)
    return _surpress_formatting_errors(_format_numer(fmt))
Convert number to percentage string .
35,190
def as_unit ( unit , precision = 2 , location = 'suffix' ) : if not isinstance ( precision , Integral ) : raise TypeError ( "Precision must be an integer." ) if location == 'prefix' : formatter = partial ( _format_numer , prefix = unit ) elif location == 'suffix' : formatter = partial ( _format_numer , suffix = unit ) ...
Convert value to unit .
35,191
def apply(self, df):
    """Aggregate *df* with this summary's function along its axis.

    An optional subset restricts the rows/columns considered first.
    """
    if self.subset:
        if _axis_is_rows(self.axis):
            df = df[self.subset]
        if _axis_is_cols(self.axis):
            df = df.loc[self.subset]
    aggregated = df.agg(self.func, axis=self.axis, *self.args, **self.kwargs)
    aggregated.name = self.title
    return aggregated
Compute aggregate over DataFrame
35,192
def apply(self, styler):
    """Apply this formatter to a pandas Styler via Styler.format."""
    return styler.format(self.formatter, *self.args, **self.kwargs)
Apply Summary over Pandas Styler
35,193
def _apply_summaries ( self ) : def as_frame ( r ) : if isinstance ( r , pd . Series ) : return r . to_frame ( ) else : return r df = self . data if df . index . nlevels > 1 : raise ValueError ( "You cannot currently have both summary rows and columns on a " "MultiIndex." ) _df = df if self . summary_rows : rows = pd ....
Add all summary rows and columns .
35,194
def style ( self ) : row_titles = [ a . title for a in self . _cleaned_summary_rows ] col_titles = [ a . title for a in self . _cleaned_summary_cols ] row_ix = pd . IndexSlice [ row_titles , : ] col_ix = pd . IndexSlice [ : , col_titles ] def handle_na ( df ) : df . loc [ col_ix ] = df . loc [ col_ix ] . fillna ( '' ) ...
Add summaries and convert to Pandas Styler
35,195
def summary ( self , func = methodcaller ( 'sum' ) , title = 'Total' , axis = 0 , subset = None , * args , ** kwargs ) : if axis is None : return ( self . summary ( func = func , title = title , axis = 0 , subset = subset , * args , ** kwargs ) . summary ( func = func , title = title , axis = 1 , subset = subset , * ar...
Add multiple summary rows or columns to the dataframe .
35,196
def as_percent(self, precision=2, *args, **kwargs):
    """Format the current subset as percentages with *precision* decimals."""
    formatter = Formatter(as_percent(precision), args, kwargs)
    return self._add_formatter(formatter)
Format subset as percentages
35,197
def as_currency(self, currency='USD', locale=LOCALE_OBJ, *args, **kwargs):
    """Format the current subset as currency in the given locale."""
    formatter = Formatter(as_currency(currency=currency, locale=locale), args, kwargs)
    return self._add_formatter(formatter)
Format subset as currency
35,198
def as_unit(self, unit, location='suffix', *args, **kwargs):
    """Format the current subset with *unit* attached as prefix or suffix."""
    formatter = Formatter(as_unit(unit, location=location), args, kwargs)
    return self._add_formatter(formatter)
Format subset as with units
35,199
def validate_onset ( reference_event , estimated_event , t_collar = 0.200 ) : if 'event_onset' in reference_event and 'event_onset' in estimated_event : return math . fabs ( reference_event [ 'event_onset' ] - estimated_event [ 'event_onset' ] ) <= t_collar elif 'onset' in reference_event and 'onset' in estimated_event...
Validate estimated event based on event onset