idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
10,300
def register(name, func=None):
    """Function or decorator which registers a given function as a
    recognized control command.
    """
    def decorator(fn):
        # Record the command under the given name.
        ControlDaemon._register(name, fn)
        return fn

    # Called directly with a function: apply the decorator now.
    # Called with only a name: hand back the decorator for later use.
    return decorator(func) if func else decorator
Function or decorator which registers a given function as a recognized control command .
66
15
10,301
def ping ( daemon , channel , data = None ) : if not channel : # No place to reply to return # Get our configured node name node_name = daemon . config [ 'control' ] . get ( 'node_name' ) # Format the response reply = [ 'pong' ] if node_name or data : reply . append ( node_name or '' ) if data : reply . append ( data ) # And send it with utils . ignore_except ( ) : daemon . db . publish ( channel , ':' . join ( reply ) )
Process the ping control message .
118
6
10,302
def reload ( daemon , load_type = None , spread = None ) : # Figure out what type of reload this needs to be if load_type == 'immediate' : spread = None elif load_type == 'spread' : try : spread = float ( spread ) except ( TypeError , ValueError ) : # Not a valid float; use the configured spread value load_type = None else : load_type = None if load_type is None : # Use configured set-up; see if we have a spread # configured try : spread = float ( daemon . config [ 'control' ] [ 'reload_spread' ] ) except ( TypeError , ValueError , KeyError ) : # No valid configuration spread = None if spread : # Apply a randomization to spread the load around eventlet . spawn_after ( random . random ( ) * spread , daemon . reload ) else : # Spawn in immediate mode eventlet . spawn_n ( daemon . reload )
Process the reload control message .
204
6
10,303
def set_limits ( self , limits ) : # First task, build the checksum of the new limits chksum = hashlib . md5 ( ) # sufficient for our purposes for lim in limits : chksum . update ( lim ) new_sum = chksum . hexdigest ( ) # Now install it with self . limit_lock : if self . limit_sum == new_sum : # No changes return self . limit_data = [ msgpack . loads ( lim ) for lim in limits ] self . limit_sum = new_sum
Set the limit data to the given list of limits . Limits are specified as the raw msgpack string representing the limit . Computes the checksum of the limits ; if the checksum is identical to the current one no action is taken .
118
48
10,304
def start ( self ) : # Spawn the listening thread self . listen_thread = eventlet . spawn_n ( self . listen ) # Now do the initial load self . reload ( )
Starts the ControlDaemon by launching the listening thread and triggering the initial limits load .
39
18
10,305
def listen ( self ) : # Use a specific database handle, with override. This allows # the long-lived listen thread to be configured to use a # different database or different database options. db = self . config . get_database ( 'control' ) # Need a pub-sub object kwargs = { } if 'shard_hint' in self . config [ 'control' ] : kwargs [ 'shard_hint' ] = self . config [ 'control' ] [ 'shard_hint' ] pubsub = db . pubsub ( * * kwargs ) # Subscribe to the right channel(s)... channel = self . config [ 'control' ] . get ( 'channel' , 'control' ) pubsub . subscribe ( channel ) # Now we listen... for msg in pubsub . listen ( ) : # Only interested in messages to our reload channel if ( msg [ 'type' ] in ( 'pmessage' , 'message' ) and msg [ 'channel' ] == channel ) : # Figure out what kind of message this is command , _sep , args = msg [ 'data' ] . partition ( ':' ) # We must have some command... if not command : continue # Don't do anything with internal commands if command [ 0 ] == '_' : LOG . error ( "Cannot call internal command %r" % command ) continue # Look up the command if command in self . _commands : func = self . _commands [ command ] else : # Try an entrypoint func = utils . find_entrypoint ( 'turnstile.command' , command , compat = False ) self . _commands [ command ] = func # Don't do anything with missing commands if not func : LOG . error ( "No such command %r" % command ) continue # Execute the desired command arglist = args . split ( ':' ) if args else [ ] try : func ( self , * arglist ) except Exception : LOG . exception ( "Failed to execute command %r arguments %r" % ( command , arglist ) ) continue
Listen for incoming control messages .
446
6
10,306
def reload ( self ) : # Acquire the pending semaphore. If we fail, exit--someone # else is already doing the reload if not self . pending . acquire ( False ) : return # Do the remaining steps in a try/finally block so we make # sure to release the semaphore control_args = self . config [ 'control' ] try : # Load all the limits key = control_args . get ( 'limits_key' , 'limits' ) self . limits . set_limits ( self . db . zrange ( key , 0 , - 1 ) ) except Exception : # Log an error LOG . exception ( "Could not load limits" ) # Get our error set and publish channel error_key = control_args . get ( 'errors_key' , 'errors' ) error_channel = control_args . get ( 'errors_channel' , 'errors' ) # Get an informative message msg = "Failed to load limits: " + traceback . format_exc ( ) # Store the message into the error set. We use a set here # because it's likely that more than one node will # generate the same message if there is an error, and this # avoids an explosion in the size of the set. with utils . ignore_except ( ) : self . db . sadd ( error_key , msg ) # Publish the message to a channel with utils . ignore_except ( ) : self . db . publish ( error_channel , msg ) finally : self . pending . release ( )
Reloads the limits configuration from the database .
324
10
10,307
def enforce_policy(rule):
    """Enforce a policy rule on an API endpoint.

    :param rule: name of the policy rule to check against the
                 credentials stored in ``g.cred``.
    """
    def wrapper(func):
        """Decorator used for wrap API."""
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            allowed = enforcer.enforce(rule, {}, g.cred)
            if not allowed:
                # NOTE(review): a denied request silently returns None
                # rather than raising/aborting -- confirm this is intended.
                return None
            return func(*args, **kwargs)
        return wrapped
    return wrapper
Enforce a policy to a API .
78
8
10,308
def initialize ( config ) : # Determine the client class to use if 'redis_client' in config : client = utils . find_entrypoint ( 'turnstile.redis_client' , config [ 'redis_client' ] , required = True ) else : client = redis . StrictRedis # Extract relevant connection information from the configuration kwargs = { } for cfg_var , type_ in REDIS_CONFIGS . items ( ) : if cfg_var in config : kwargs [ cfg_var ] = type_ ( config [ cfg_var ] ) # Make sure we have at a minimum the hostname if 'host' not in kwargs and 'unix_socket_path' not in kwargs : raise redis . ConnectionError ( "No host specified for redis database" ) # Look up the connection pool configuration cpool_class = None cpool = { } extra_kwargs = { } for key , value in config . items ( ) : if key . startswith ( 'connection_pool.' ) : _dummy , _sep , varname = key . partition ( '.' ) if varname == 'connection_class' : cpool [ varname ] = utils . find_entrypoint ( 'turnstile.connection_class' , value , required = True ) elif varname == 'max_connections' : cpool [ varname ] = int ( value ) elif varname == 'parser_class' : cpool [ varname ] = utils . find_entrypoint ( 'turnstile.parser_class' , value , required = True ) else : cpool [ varname ] = value elif key not in REDIS_CONFIGS and key not in REDIS_EXCLUDES : extra_kwargs [ key ] = value if cpool : cpool_class = redis . ConnectionPool # Use custom connection pool class if requested... if 'connection_pool' in config : cpool_class = utils . find_entrypoint ( 'turnstile.connection_pool' , config [ 'connection_pool' ] , required = True ) # If we're using a connection pool, we'll need to pass the keyword # arguments to that instead of to redis if cpool_class : cpool . update ( kwargs ) # Use a custom connection class? 
if 'connection_class' not in cpool : if 'unix_socket_path' in cpool : if 'host' in cpool : del cpool [ 'host' ] if 'port' in cpool : del cpool [ 'port' ] cpool [ 'path' ] = cpool [ 'unix_socket_path' ] del cpool [ 'unix_socket_path' ] cpool [ 'connection_class' ] = redis . UnixDomainSocketConnection else : cpool [ 'connection_class' ] = redis . Connection # Build the connection pool to use and set up to pass it into # the redis constructor... kwargs = dict ( connection_pool = cpool_class ( * * cpool ) ) # Build and return the database kwargs . update ( extra_kwargs ) return client ( * * kwargs )
Initialize a connection to the Redis database .
704
10
10,309
def limits_hydrate(db, lims):
    """Helper function to hydrate a list of limits.

    :param db: database handle passed through to ``Limit.hydrate``.
    :param lims: iterable of dehydrated limit representations.
    """
    return [limits.Limit.hydrate(db, entry) for entry in lims]
Helper function to hydrate a list of limits .
31
10
10,310
def limit_update ( db , key , limits ) : # Start by dehydrating all the limits desired = [ msgpack . dumps ( l . dehydrate ( ) ) for l in limits ] desired_set = set ( desired ) # Now, let's update the limits with db . pipeline ( ) as pipe : while True : try : # Watch for changes to the key pipe . watch ( key ) # Look up the existing limits existing = set ( pipe . zrange ( key , 0 , - 1 ) ) # Start the transaction... pipe . multi ( ) # Remove limits we no longer have for lim in existing - desired_set : pipe . zrem ( key , lim ) # Update or add all our desired limits for idx , lim in enumerate ( desired ) : pipe . zadd ( key , ( idx + 1 ) * 10 , lim ) # Execute the transaction pipe . execute ( ) except redis . WatchError : # Try again... continue else : # We're all done! break
Safely updates the list of limits in the database .
210
11
10,311
def command ( db , channel , command , * args ) : # Build the command we're sending cmd = [ command ] cmd . extend ( str ( a ) for a in args ) # Send it out db . publish ( channel , ':' . join ( cmd ) )
Utility function to issue a command to all Turnstile instances .
56
14
10,312
def _tokenize_latex ( self , exp ) : tokens = [ ] prevexp = "" while exp : t , exp = self . _get_next_token ( exp ) if t . strip ( ) != "" : tokens . append ( t ) if prevexp == exp : break prevexp = exp return tokens
Internal method to tokenize latex
70
6
10,313
def _convert_query ( self , query ) : query = self . dictionary . doc2bow ( self . _tokenize_latex ( query ) ) sims = self . index [ query ] neighbors = sorted ( sims , key = lambda item : - item [ 1 ] ) neighbors = { "neighbors" : [ { self . columns [ 0 ] : { "data" : self . docs [ n [ 0 ] ] , "fmt" : "math" } , self . columns [ 1 ] : { "data" : float ( n [ 1 ] ) } } for n in neighbors ] } if neighbors else { "neighbors" : [ ] } return neighbors
Convert query into an indexable string .
146
9
10,314
def join(path1, path2):
    """Nicely join two path elements together with exactly one slash."""
    left_slash = path1.endswith('/')
    right_slash = path2.startswith('/')
    if left_slash and right_slash:
        # Both sides contribute a slash: drop one of them.
        return path1 + path2[1:]
    if left_slash or right_slash:
        return path1 + path2
    # Neither side has a slash: insert one.
    return path1 + '/' + path2
nicely join two path elements together
105
7
10,315
def print_math(math_expression_lst, name="math.html", out='html',
               formatter=lambda x: x):
    """Converts LaTeX math expressions into an html layout.

    Creates an html file (``name``) in the current directory by default;
    displays the math inline when ``out == "notebook"`` (Jupyter).

    :param math_expression_lst: list of LaTeX math expression strings.
    :param name: output file name for ``out == "html"``.
    :param out: either ``"html"`` or ``"notebook"``.
    :param formatter: kept for interface compatibility (currently unused).
    :raises ValueError: for an unrecognized ``out`` mode (previously this
        crashed with ``TypeError`` on ``open(None)``).
    """
    # Remove any stale copy of the viz directory; ignore "not there"
    # (the original bare `except: pass` swallowed every exception).
    try:
        shutil.rmtree('viz')
    except OSError:
        pass
    pth = get_cur_path() + print_math_template_path
    shutil.copytree(pth, 'viz')
    if out == "html":
        html_loc = pth + "standalone_index.html"
    elif out == "notebook":
        from IPython.display import display, HTML
        html_loc = pth + "notebook_index.html"
    else:
        raise ValueError("Unknown output mode: %r" % (out,))
    # Read the template with a context manager so the handle is closed.
    with open(html_loc) as template:
        html = template.read()
    html = html.replace("__MATH_LIST__", json.dumps(math_expression_lst))
    if out == "notebook":
        display(HTML(html))
    elif out == "html":
        with open(name, "w+") as out_f:
            out_f.write(html)
Converts LaTeX math expressions into an html layout . Creates a html file in the directory where print_math is called by default . Displays math to jupyter notebook if notebook argument is specified .
243
43
10,316
def log_error(self, msg, *args):
    """Log an error, or print to stdout when no logger is configured."""
    logger = self._logger
    if logger is None:
        # No logger available; fall back to %-formatting onto stdout.
        print(msg % args)
    else:
        logger.error(msg, *args)
Log an error or print in stdout if no logger .
44
12
10,317
def _get_value_opc_attr ( self , attr_name , prec_decimals = 2 ) : try : value = getattr ( self , attr_name ) if value is not None : return round ( value , prec_decimals ) except I2cVariableNotImplemented : pass return None
Return the sensor attribute rounded to the given precision, or None if not present .
71
12
10,318
def current_state_str(self):
    """Return a string representation of the current state of the sensor.

    Builds a comma-separated summary of whichever of temperature,
    humidity, pressure and light level are present; returns
    ``"Bad sample"`` when the last sample was not valid.
    """
    if not self.sample_ok:
        return "Bad sample"
    msg = ''
    temperature = self._get_value_opc_attr('temperature')
    if temperature is not None:
        # BUG FIX: original read "msg += 'Temp: %s ºC, ' emperature",
        # which dropped the '%' interpolation and was a syntax error.
        msg += 'Temp: %s ºC, ' % temperature
    humidity = self._get_value_opc_attr('humidity')
    if humidity is not None:
        msg += 'Humid: %s %%, ' % humidity
    pressure = self._get_value_opc_attr('pressure')
    if pressure is not None:
        msg += 'Press: %s mb, ' % pressure
    light_level = self._get_value_opc_attr('light_level')
    if light_level is not None:
        msg += 'Light: %s lux, ' % light_level
    # Strip the trailing ", " separator.
    return msg[:-2]
Return string representation of the current state of the sensor .
192
11
10,319
def _applyMultichan ( samples , func ) : # type: (np.ndarray, Callable[[np.ndarray], np.ndarray]) -> np.ndarray if len ( samples . shape ) == 1 or samples . shape [ 1 ] == 1 : newsamples = func ( samples ) else : y = np . array ( [ ] ) for i in range ( samples . shape [ 1 ] ) : y = np . concatenate ( ( y , func ( samples [ : , i ] ) ) ) newsamples = y . reshape ( samples . shape [ 1 ] , - 1 ) . T return newsamples
Apply func to each channel of audio data in samples
134
10
10,320
def _resample_obspy ( samples , sr , newsr , window = 'hanning' , lowpass = True ) : # type: (np.ndarray, int, int, str, bool) -> np.ndarray from scipy . signal import resample from math import ceil factor = sr / float ( newsr ) if newsr < sr and lowpass : # be sure filter still behaves good if factor > 16 : logger . info ( "Automatic filter design is unstable for resampling " "factors (current sampling rate/new sampling rate) " "above 16. Manual resampling is necessary." ) freq = min ( sr , newsr ) * 0.5 / float ( factor ) logger . debug ( f"resample_obspy: lowpass {freq}" ) samples = lowpass_cheby2 ( samples , freq = freq , sr = sr , maxorder = 12 ) num = int ( ceil ( len ( samples ) / factor ) ) return _applyMultichan ( samples , lambda S : resample ( S , num , window = window ) )
Resample using Fourier method . The same as resample_scipy but with low - pass filtering for upsampling
240
26
10,321
def resample ( samples , oldsr , newsr ) : # type: (np.ndarray, int, int) -> np.ndarray backends = [ _resample_samplerate , # turns the samples into float32, which is ok for audio _resample_scikits , _resample_nnresample , # very good results, follows libsamplerate closely _resample_obspy , # these last two introduce some error at the first samples _resample_scipy ] # type: List[Callable[[np.ndarray, int, int], Opt[np.ndarray]]] for backend in backends : newsamples = backend ( samples , oldsr , newsr ) if newsamples is not None : return newsamples
Resample samples with given samplerate sr to new samplerate newsr
164
16
10,322
def get_package_version():
    """Return the package version without importing the package.

    Scans ``policy/__init__.py`` line by line and returns the first
    group of the module-level ``version`` regex when it matches.
    """
    base = os.path.abspath(os.path.dirname(__file__))
    init_path = os.path.join(base, 'policy', '__init__.py')
    with open(init_path, mode='rt', encoding='utf-8') as initf:
        for line in initf:
            match = version.match(line.strip())
            if match:
                # First matching line wins.
                return match.groups()[0]
return package version without importing it
104
6
10,323
def get_long_description():
    """Return the package's long description, read from README.md."""
    base = os.path.abspath(os.path.dirname(__file__))
    readme_path = os.path.join(base, 'README.md')
    with open(readme_path, mode='rt', encoding='utf-8') as readme:
        return readme.read()
return the package's long description
83
5
10,324
def get_install_requires():
    """Return the install requirements listed in requirements.txt.

    Returns an empty list when no requirements file is shipped.
    """
    base = os.path.abspath(os.path.dirname(__file__))
    req_path = os.path.join(base, 'requirements.txt')
    if not os.path.exists(req_path):
        # Nothing to install.
        return []
    with open(req_path, mode='rt', encoding='utf-8') as f:
        return f.read().splitlines()
return the package's install requirements
100
5
10,325
def main ( flags ) : dl = SheetDownloader ( flags ) dl . init ( ) for file_info in settings . GOOGLE_SHEET_SYNC [ 'files' ] : print ( 'Downloading {}' . format ( file_info [ 'path' ] ) ) dl . download_sheet ( file_info [ 'path' ] , file_info [ 'sheet' ] , file_info [ 'range' ] , )
Download all sheets as configured .
100
6
10,326
def download_sheet ( self , file_path , sheet_id , cell_range ) : result = self . service . spreadsheets ( ) . values ( ) . get ( spreadsheetId = sheet_id , range = cell_range , ) . execute ( ) values = result . get ( 'values' , [ ] ) with open ( file_path , newline = '' , encoding = 'utf-8' , mode = 'w' ) as f : writer = csv . writer ( f , lineterminator = '\n' ) for row in values : writer . writerow ( row )
Download the cell range from the sheet and store it as CSV in the file_path file .
127
19
10,327
def getFriendlyString(self):
    """Return the version printed in a friendly way.

    Trailing zero components are dropped (e.g. 1.2.0.0 -> "1.2");
    the result is cached on the instance after the first call.
    """
    if self._friendlyString is not None:
        return self._friendlyString
    parts = [self.getIntMajor(), self.getIntMinor(),
             self.getIntBuild(), self.getIntRevision()]
    # Trim zeros from the right, stopping at the first non-zero.
    while parts and parts[-1] == 0:
        parts.pop()
    result = ".".join(str(p) for p in parts)
    self._friendlyString = result
    return result
Returns the version printed in a friendly way .
129
9
10,328
def getVersions ( self ) : if not os . path . exists ( self . _path ) : return [ ] result = [ ] for entryName in os . listdir ( self . _path ) : try : entryVersion = Version ( entryName ) result . append ( entryVersion ) except InvalidVersionException : continue return result
Returns the versions of the suitable entries available in the directory - an empty list if no such entry is available
68
21
10,329
def setUsers ( self , * args , * * kwargs ) : try : usrs = [ us for us in self . mambuusersclass ( branchId = self [ 'id' ] , * args , * * kwargs ) if us [ 'userState' ] == "ACTIVE" ] except AttributeError as ae : from . mambuuser import MambuUsers self . mambuusersclass = MambuUsers usrs = [ us for us in self . mambuusersclass ( branchId = self [ 'id' ] , * args , * * kwargs ) if us [ 'userState' ] == "ACTIVE" ] self [ 'users' ] = usrs return 1
Adds the active users for this branch to a users field .
158
12
10,330
def unindent ( self ) : _logger ( ) . debug ( 'unindent' ) cursor = self . editor . textCursor ( ) _logger ( ) . debug ( 'cursor has selection %r' , cursor . hasSelection ( ) ) if cursor . hasSelection ( ) : cursor . beginEditBlock ( ) self . unindent_selection ( cursor ) cursor . endEditBlock ( ) self . editor . setTextCursor ( cursor ) else : tab_len = self . editor . tab_length indentation = cursor . positionInBlock ( ) indentation -= self . min_column if indentation == 0 : return max_spaces = indentation % tab_len if max_spaces == 0 : max_spaces = tab_len spaces = self . count_deletable_spaces ( cursor , max_spaces ) _logger ( ) . info ( 'deleting %d space before cursor' % spaces ) cursor . beginEditBlock ( ) for _ in range ( spaces ) : cursor . deletePreviousChar ( ) cursor . endEditBlock ( ) self . editor . setTextCursor ( cursor ) _logger ( ) . debug ( cursor . block ( ) . text ( ) )
Un - indents text at cursor position .
266
9
10,331
def with_name(cls, name, id_user=0, **extra_data):
    """Instantiate a WorkflowEngine given a name.

    :param name: name of the workflow to instantiate.
    :param id_user: id of the owning user (default 0).
    :param extra_data: extra keyword arguments forwarded to the class.
    """
    # BUG FIX: the original hard-coded ``id_user=0`` in the call,
    # silently ignoring the caller's ``id_user`` argument.
    return cls(name=name, id_user=id_user, **extra_data)
Instantiate a WorkflowEngine given a name or UUID .
43
13
10,332
def from_uuid ( cls , uuid , * * extra_data ) : model = Workflow . query . get ( uuid ) if model is None : raise LookupError ( "No workflow with UUID {} was found" . format ( uuid ) ) instance = cls ( model = model , * * extra_data ) instance . objects = WorkflowObjectModel . query . filter ( WorkflowObjectModel . id_workflow == uuid , WorkflowObjectModel . id_parent == None , # noqa ) . all ( ) return instance
Load an existing workflow from the database given a UUID .
120
12
10,333
def continue_object ( self , workflow_object , restart_point = 'restart_task' , task_offset = 1 , stop_on_halt = False ) : translate = { 'restart_task' : 'current' , 'continue_next' : 'next' , 'restart_prev' : 'prev' , } self . state . callback_pos = workflow_object . callback_pos or [ 0 ] self . restart ( task = translate [ restart_point ] , obj = 'first' , objects = [ workflow_object ] , stop_on_halt = stop_on_halt )
Continue workflow for one given object from restart_point .
134
11
10,334
def has_completed ( self ) : objects_in_db = WorkflowObjectModel . query . filter ( WorkflowObjectModel . id_workflow == self . uuid , WorkflowObjectModel . id_parent == None , # noqa ) . filter ( WorkflowObjectModel . status . in_ ( [ workflow_object_class . known_statuses . COMPLETED ] ) ) . count ( ) return objects_in_db == len ( list ( self . objects ) )
Return True if workflow is fully completed .
104
8
10,335
def set_workflow_by_name ( self , workflow_name ) : from . proxies import workflows if workflow_name not in workflows : # No workflow with that name exists raise WorkflowDefinitionError ( "Workflow '%s' does not exist" % ( workflow_name , ) , workflow_name = workflow_name ) self . workflow_definition = workflows [ workflow_name ] self . callbacks . replace ( self . workflow_definition . workflow )
Configure the workflow to run by the name of this one .
100
13
10,336
def after_each_callback ( eng , callback_func , obj ) : obj . callback_pos = eng . state . callback_pos obj . extra_data [ "_last_task_name" ] = callback_func . __name__ task_history = get_task_history ( callback_func ) if "_task_history" not in obj . extra_data : obj . extra_data [ "_task_history" ] = [ task_history ] else : obj . extra_data [ "_task_history" ] . append ( task_history )
Take action after every WF callback .
118
8
10,337
def before_object ( eng , objects , obj ) : super ( InvenioProcessingFactory , InvenioProcessingFactory ) . before_object ( eng , objects , obj ) if "_error_msg" in obj . extra_data : del obj . extra_data [ "_error_msg" ] db . session . commit ( )
Take action before the processing of an object begins .
72
10
10,338
def after_object ( eng , objects , obj ) : # We save each object once it is fully run through super ( InvenioProcessingFactory , InvenioProcessingFactory ) . after_object ( eng , objects , obj ) obj . save ( status = obj . known_statuses . COMPLETED , id_workflow = eng . model . uuid ) db . session . commit ( )
Take action once the processing of an object completes .
85
12
10,339
def before_processing ( eng , objects ) : super ( InvenioProcessingFactory , InvenioProcessingFactory ) . before_processing ( eng , objects ) eng . save ( WorkflowStatus . RUNNING ) db . session . commit ( )
Execute before processing the workflow .
53
7
10,340
def after_processing ( eng , objects ) : super ( InvenioProcessingFactory , InvenioProcessingFactory ) . after_processing ( eng , objects ) if eng . has_completed : eng . save ( WorkflowStatus . COMPLETED ) else : eng . save ( WorkflowStatus . HALTED ) db . session . commit ( )
Process to update status .
75
5
10,341
def Exception ( obj , eng , callbacks , exc_info ) : exception_repr = '' . join ( traceback . format_exception ( * exc_info ) ) msg = "Error:\n%s" % ( exception_repr ) eng . log . error ( msg ) if obj : # Sets an error message as a tuple (title, details) obj . extra_data [ '_error_msg' ] = exception_repr obj . save ( status = obj . known_statuses . ERROR , callback_pos = eng . state . callback_pos , id_workflow = eng . uuid ) eng . save ( WorkflowStatus . ERROR ) db . session . commit ( ) # Call super which will reraise super ( InvenioTransitionAction , InvenioTransitionAction ) . Exception ( obj , eng , callbacks , exc_info )
Handle general exceptions in workflow saving states .
187
8
10,342
def WaitProcessing ( obj , eng , callbacks , exc_info ) : e = exc_info [ 1 ] obj . set_action ( e . action , e . message ) obj . save ( status = eng . object_status . WAITING , callback_pos = eng . state . callback_pos , id_workflow = eng . uuid ) eng . save ( WorkflowStatus . HALTED ) eng . log . warning ( "Workflow '%s' waiting at task %s with message: %s" , eng . name , eng . current_taskname or "Unknown" , e . message ) db . session . commit ( ) # Call super which will reraise TransitionActions . HaltProcessing ( obj , eng , callbacks , exc_info )
Take actions when WaitProcessing is raised .
167
9
10,343
def StopProcessing ( obj , eng , callbacks , exc_info ) : e = exc_info [ 1 ] obj . save ( status = eng . object_status . COMPLETED , id_workflow = eng . uuid ) eng . save ( WorkflowStatus . COMPLETED ) obj . log . warning ( "Workflow '%s' stopped at task %s with message: %s" , eng . name , eng . current_taskname or "Unknown" , e . message ) db . session . commit ( ) super ( InvenioTransitionAction , InvenioTransitionAction ) . StopProcessing ( obj , eng , callbacks , exc_info )
Stop the engine and mark the workflow as completed .
144
11
10,344
def SkipToken ( obj , eng , callbacks , exc_info ) : msg = "Skipped running this object: {0}" . format ( obj . id ) eng . log . debug ( msg ) raise Continue
Take action when SkipToken is raised .
45
8
10,345
def AbortProcessing ( obj , eng , callbacks , exc_info ) : msg = "Processing was aborted for object: {0}" . format ( obj . id ) eng . log . debug ( msg ) raise Break
Take action when AbortProcessing is raised .
48
10
10,346
def edits1(word):
    """All edits that are one edit away from ``word``.

    Returns deletes + transposes + replaces + inserts, in that order
    (the list may contain duplicates).
    """
    letters = 'qwertyuiopasdfghjklzxcvbnm'
    # Every way to split the word into a (left, right) pair.
    splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]
    deletes = [L + R[1:] for L, R in splits if R]
    transposes = [L + R[1] + R[0] + R[2:] for L, R in splits if len(R) > 1]
    replaces = [L + c + R[1:] for L, R in splits if R for c in letters]
    inserts = [L + c + R for L, R in splits for c in letters]
    # NOTE: the original printed every intermediate list; those debug
    # prints have been removed.
    return deletes + transposes + replaces + inserts
All edits that are one edit away from word .
246
10
10,347
def to_locus ( variant_or_locus ) : if isinstance ( variant_or_locus , Locus ) : return variant_or_locus try : return variant_or_locus . locus except AttributeError : # IMPORTANT: if varcode someday changes from inclusive to interbase # coordinates, this will need to be updated. return Locus . from_inclusive_coordinates ( variant_or_locus . contig , variant_or_locus . start , variant_or_locus . end )
Return a Locus object for a Variant instance .
118
10
10,348
def pileup(self, locus):
    """Return the Pileup at the given single-base locus.

    :raises ValueError: when the locus covers more than one base.
    """
    single = to_locus(locus)
    if len(single.positions) != 1:
        raise ValueError("Not a single-base locus: %s" % single)
    return self.pileups[single]
Given a 1 - base locus return the Pileup at that locus .
61
17
10,349
def at ( self , * loci ) : loci = [ to_locus ( obj ) for obj in loci ] single_position_loci = [ ] for locus in loci : for position in locus . positions : single_position_loci . append ( Locus . from_interbase_coordinates ( locus . contig , position ) ) pileups = dict ( ( locus , self . pileups [ locus ] ) for locus in single_position_loci ) return PileupCollection ( pileups , self )
Return a new PileupCollection instance including only pileups for the specified loci .
120
18
10,350
def reads ( self ) : # TODO: Optimize this. def alignment_precedence ( pysam_alignment_record ) : return pysam_alignment_record . mapping_quality result = { } for pileup in self . pileups . values ( ) : for e in pileup . elements : key = read_key ( e . alignment ) if key not in result or ( alignment_precedence ( e . alignment ) > alignment_precedence ( result [ key ] ) ) : result [ key ] = e . alignment return list ( result . values ( ) )
The reads in this PileupCollection . All reads will have an alignment that overlaps at least one of the included loci .
127
27
10,351
def read_attributes ( self , attributes = None ) : def include ( attribute ) : return attributes is None or attribute in attributes reads = self . reads ( ) possible_column_names = list ( PileupCollection . _READ_ATTRIBUTE_NAMES ) result = OrderedDict ( ( name , [ getattr ( read , name ) for read in reads ] ) for name in PileupCollection . _READ_ATTRIBUTE_NAMES if include ( name ) ) # Add tag columns. if reads : tag_dicts = [ dict ( x . get_tags ( ) ) for x in reads ] tag_keys = set . union ( * [ set ( item . keys ( ) ) for item in tag_dicts ] ) for tag_key in sorted ( tag_keys ) : column_name = "TAG_%s" % tag_key possible_column_names . append ( column_name ) if include ( column_name ) : result [ column_name ] = [ d . get ( tag_key ) for d in tag_dicts ] # Lastly, we include the underlying pysam alignment record. possible_column_names . append ( "pysam_alignment_record" ) if include ( "pysam_alignment_record" ) : result [ "pysam_alignment_record" ] = reads # If particular attributes were requested, check that they're here. if attributes is not None : for attribute in attributes : if attribute not in result : raise ValueError ( "No such attribute: %s. Valid attributes are: %s" % ( attribute , " " . join ( possible_column_names ) ) ) assert set ( attributes ) == set ( result ) return pandas . DataFrame ( result )
Collect read attributes across reads in this PileupCollection into a pandas . DataFrame .
379
19
10,352
def group_by_allele ( self , locus ) : locus = to_locus ( locus ) read_to_allele = None loci = [ ] if locus . positions : # Our locus includes at least one reference base. for position in locus . positions : base_position = Locus . from_interbase_coordinates ( locus . contig , position ) loci . append ( base_position ) new_read_to_allele = { } for element in self . pileups [ base_position ] : allele_prefix = "" key = alignment_key ( element . alignment ) if read_to_allele is not None : try : allele_prefix = read_to_allele [ key ] except KeyError : continue allele = allele_prefix + element . bases new_read_to_allele [ key ] = allele read_to_allele = new_read_to_allele else : # Our locus is between reference bases. position_before = Locus . from_interbase_coordinates ( locus . contig , locus . start ) loci . append ( position_before ) read_to_allele = { } for element in self . pileups [ position_before ] : allele = element . bases [ 1 : ] read_to_allele [ alignment_key ( element . alignment ) ] = allele split = defaultdict ( lambda : PileupCollection ( pileups = { } , parent = self ) ) for locus in loci : pileup = self . pileups [ locus ] for e in pileup . elements : key = read_to_allele . get ( alignment_key ( e . alignment ) ) if key is not None : if locus in split [ key ] . pileups : split [ key ] . pileups [ locus ] . append ( e ) else : split [ key ] . pileups [ locus ] = Pileup ( locus , [ e ] ) # Sort by number of reads (descending). Break ties with the # lexicographic ordering of the allele string. def sorter ( pair ) : ( allele , pileup_collection ) = pair return ( - 1 * pileup_collection . num_reads ( ) , allele ) return OrderedDict ( sorted ( split . items ( ) , key = sorter ) )
Split the PileupCollection by the alleles suggested by the reads at the specified locus .
504
20
10,353
def allele_summary ( self , locus , score = lambda x : x . num_reads ( ) ) : locus = to_locus ( locus ) return [ ( allele , score ( x ) ) for ( allele , x ) in self . group_by_allele ( locus ) . items ( ) ]
Convenience method to summarize the evidence for each of the alleles present at a locus . Applies a score function to the PileupCollection associated with each allele .
69
36
10,354
def group_by_match ( self , variant ) : locus = to_locus ( variant ) if len ( variant . ref ) != len ( locus . positions ) : logging . warning ( "Ref is length %d but locus has %d bases in variant: %s" % ( len ( variant . ref ) , len ( locus . positions ) , str ( variant ) ) ) alleles_dict = self . group_by_allele ( locus ) single_base_loci = [ Locus . from_interbase_coordinates ( locus . contig , position ) for position in locus . positions ] empty_pileups = dict ( ( locus , Pileup ( locus = locus , elements = [ ] ) ) for locus in single_base_loci ) empty_collection = PileupCollection ( pileups = empty_pileups , parent = self ) ref = { variant . ref : alleles_dict . pop ( variant . ref , empty_collection ) } alt = { variant . alt : alleles_dict . pop ( variant . alt , empty_collection ) } other = alleles_dict # TODO: consider end of read issues for insertions return MatchingEvidence ( ref , alt , other )
Given a variant split the PileupCollection based on whether it the data supports the reference allele the alternate allele or neither .
272
25
10,355
def match_summary ( self , variant , score = lambda x : x . num_reads ( ) ) : split = self . group_by_match ( variant ) def name ( allele_to_pileup_collection ) : return "," . join ( allele_to_pileup_collection ) def aggregate_and_score ( pileup_collections ) : merged = PileupCollection . merge ( * pileup_collections ) return score ( merged ) result = [ ( name ( split . ref ) , aggregate_and_score ( split . ref . values ( ) ) ) , ( name ( split . alt ) , aggregate_and_score ( split . alt . values ( ) ) ) , ] result . extend ( ( allele , score ( collection ) ) for ( allele , collection ) in split . other . items ( ) ) return result
Convenience method to summarize the evidence for and against a variant using a user - specified score function .
181
21
10,356
def filter ( self , drop_duplicates = False , drop_improper_mate_pairs = False , min_mapping_quality = None , min_base_quality = None , filters = None ) : if filters is None : filters = [ ] if drop_duplicates : filters . append ( lambda e : not e . alignment . is_duplicate ) if drop_improper_mate_pairs : filters . append ( lambda e : e . alignment . is_proper_pair ) if min_mapping_quality is not None : filters . append ( lambda e : e . alignment . mapping_quality >= min_mapping_quality ) if min_base_quality is not None : filters . append ( lambda e : e . min_base_quality >= min_base_quality ) pileups = OrderedDict ( ( locus , pileup . filter ( filters ) ) for ( locus , pileup ) in self . pileups . items ( ) ) return PileupCollection ( pileups = pileups , parent = self )
Return a new PileupCollection that includes only pileup elements satisfying the specified criteria .
228
18
10,357
def merge ( self , * others ) : new_pileups = { } for collection in ( self , ) + others : for ( locus , pileup ) in collection . pileups . items ( ) : if locus in new_pileups : new_pileups [ locus ] . update ( pileup ) else : new_pileups [ locus ] = Pileup ( locus , pileup . elements ) return PileupCollection ( new_pileups , parent = self )
Return a new PileupCollection that is the union of self and the other specified collections .
110
19
10,358
def from_bam ( pysam_samfile , loci , normalized_contig_names = True ) : loci = [ to_locus ( obj ) for obj in loci ] close_on_completion = False if typechecks . is_string ( pysam_samfile ) : pysam_samfile = Samfile ( pysam_samfile ) close_on_completion = True try : # Map from pyensembl normalized chromosome names used in Variant to # the names used in the BAM file. if normalized_contig_names : chromosome_name_map = { } for name in pysam_samfile . references : normalized = pyensembl . locus . normalize_chromosome ( name ) chromosome_name_map [ normalized ] = name chromosome_name_map [ name ] = name else : chromosome_name_map = None result = PileupCollection ( { } ) # Optimization: we sort variants so our BAM reads are localized. locus_iterator = itertools . chain . from_iterable ( ( Locus . from_interbase_coordinates ( locus_interval . contig , pos ) for pos in locus_interval . positions ) for locus_interval in sorted ( loci ) ) for locus in locus_iterator : result . pileups [ locus ] = Pileup ( locus , [ ] ) if normalized_contig_names : try : chromosome = chromosome_name_map [ locus . contig ] except KeyError : logging . warn ( "No such contig in bam: %s" % locus . contig ) continue else : chromosome = locus . contig columns = pysam_samfile . pileup ( chromosome , locus . position , locus . position + 1 , # exclusive, 0-indexed truncate = True , stepper = "nofilter" ) try : column = next ( columns ) except StopIteration : # No reads align to this locus. continue # Note that storing the pileups here is necessary, since the # subsequent assertion will invalidate our column. pileups = column . pileups assert list ( columns ) == [ ] # column is invalid after this. for pileup_read in pileups : if not pileup_read . is_refskip : element = PileupElement . from_pysam_alignment ( locus , pileup_read ) result . pileups [ locus ] . append ( element ) return result finally : if close_on_completion : pysam_samfile . close ( )
Create a PileupCollection for a set of loci from a BAM file .
566
18
10,359
def invenio_query_factory ( parser = None , walkers = None ) : parser = parser or Main walkers = walkers or [ PypegConverter ( ) ] walkers . append ( ElasticSearchDSL ( ) ) def invenio_query ( pattern ) : query = pypeg2 . parse ( pattern , parser , whitespace = "" ) for walker in walkers : query = query . accept ( walker ) return query return invenio_query
Create a parser returning Elastic Search DSL query instance .
105
10
10,360
def check_dimensions ( self , dataset ) : results = [ ] required_ctx = TestCtx ( BaseCheck . HIGH , 'All geophysical variables are timeseries-profile-orthogonal feature types' ) message = '{} must be a valid profile-orthogonal feature type. It must have dimensions of (station, time, z).' message += ' If it\'s a single station, it must have dimensions (time, z). x and y dimensions must be scalar or have' message += ' dimensions (station). time must be a coordinate variable with dimension (time) and z must be a' message += ' coordinate variabel with dimension (z).' for variable in util . get_geophysical_variables ( dataset ) : is_valid = util . is_timeseries_profile_single_station ( dataset , variable ) is_valid = is_valid or util . is_timeseries_profile_multi_station ( dataset , variable ) required_ctx . assert_true ( is_valid , message . format ( variable ) ) results . append ( required_ctx . to_result ( ) ) return results
Checks that the feature types of this dataset are consistent with a timeseries - profile - orthogonal dataset .
240
23
10,361
def theme ( name = 'readthedocs' ) : os . environ [ 'SPHINX_THEME' ] = name if os . environ [ 'SPHINX_THEME' ] == 'bootstrap' : local ( 'cp docs/source/_templates/layout_bootstrap.html docs/source/_templates/layout.html' ) elif name is 'readthedocs' : return else : local ( 'cp docs/source/_templates/layout_simple.html docs/source/_templates/layout.html' )
set name to bootstrap in case you want to use bootstrap . This also requires the template sto be in the main dir
127
25
10,362
def html ( theme_name = 'readthedocs' ) : # disable Flask RSTPAGES due to sphinx incompatibility os . environ [ 'RSTPAGES' ] = 'FALSE' theme ( theme_name ) api ( ) man ( ) clean ( ) local ( "cd docs; make html" ) local ( "fab security.check" ) local ( "touch docs/build/html/.nojekyll" )
build the doc locally and view
97
6
10,363
def sign_message ( body : ByteString , secret : Text ) -> Text : return 'sha1={}' . format ( hmac . new ( secret . encode ( ) , body , sha1 ) . hexdigest ( ) )
Compute a message s signature .
51
7
10,364
async def _get_user ( self ) : if self . _cache is None : try : self . _cache = await self . facebook . get_user ( self . fbid , self . page_id ) except PlatformOperationError : self . _cache = { } return self . _cache
Get the user dict from cache or query it from the platform if missing .
63
15
10,365
async def get_friendly_name ( self ) -> Text : u = await self . _get_user ( ) f = u . get ( 'first_name' , '' ) . strip ( ) l = u . get ( 'last_name' , '' ) . strip ( ) return f or l
The friendly name is mapped to Facebook s first name . If the first name is missing use the last name .
65
22
10,366
async def get_gender ( self ) -> User . Gender : u = await self . _get_user ( ) try : return User . Gender ( u . get ( 'gender' ) ) except ValueError : return User . Gender . unknown
Get the gender from Facebook .
51
6
10,367
def get_user ( self ) -> FacebookUser : return FacebookUser ( self . _event [ 'sender' ] [ 'id' ] , self . get_page_id ( ) , self . _facebook , self , )
Generate a Facebook user instance
49
6
10,368
def get_layers ( self ) -> List [ BaseLayer ] : out = [ ] msg = self . _event . get ( 'message' , { } ) if 'text' in msg : out . append ( lyr . RawText ( msg [ 'text' ] ) ) for attachment in msg . get ( 'attachments' ) or [ ] : if attachment [ 'type' ] == 'image' : out . append ( lyr . Image ( UrlMedia ( attachment [ 'payload' ] [ 'url' ] ) ) ) elif attachment [ 'type' ] == 'audio' : out . append ( lyr . Audio ( UrlMedia ( attachment [ 'payload' ] [ 'url' ] ) ) ) elif attachment [ 'type' ] == 'file' : out . append ( lyr . File ( UrlMedia ( attachment [ 'payload' ] [ 'url' ] ) ) ) elif attachment [ 'type' ] == 'video' : out . append ( lyr . Video ( UrlMedia ( attachment [ 'payload' ] [ 'url' ] ) ) ) elif attachment [ 'type' ] == 'location' : # noinspection PyArgumentList out . append ( lyr . Location ( lyr . Location . Point ( lat = attachment [ 'payload' ] [ 'coordinates' ] [ 'lat' ] , lon = attachment [ 'payload' ] [ 'coordinates' ] [ 'long' ] , ) ) ) if 'quick_reply' in msg : out . append ( QuickReply ( msg [ 'quick_reply' ] [ 'payload' ] ) ) if 'postback' in self . _event : payload = ujson . loads ( self . _event [ 'postback' ] [ 'payload' ] ) out . append ( lyr . Postback ( payload ) ) if 'optin' in self . _event : out . append ( OptIn ( self . _event [ 'optin' ] [ 'ref' ] ) ) return out
Return all layers that can be found in the message .
431
11
10,369
def verify_token ( self ) : h = sha256 ( ) h . update ( self . app_access_token . encode ( ) ) return h . hexdigest ( )
Automatically generated secure verify token
39
6
10,370
def hook_up ( self , router : UrlDispatcher ) : router . add_get ( self . webhook_path , self . check_hook ) router . add_post ( self . webhook_path , self . receive_events )
Dynamically hooks the right webhook paths
54
9
10,371
async def check_hook ( self , request : HttpRequest ) : verify_token = request . query . get ( 'hub.verify_token' ) if not verify_token : return json_response ( { 'error' : 'No verification token was provided' , } , status = 400 ) if verify_token == self . verify_token : return Response ( text = request . query . get ( 'hub.challenge' , '' ) ) return json_response ( { 'error' : 'could not find the page token in the configuration' , } )
Called when Facebook checks the hook
121
7
10,372
async def receive_events ( self , request : HttpRequest ) : body = await request . read ( ) s = self . settings ( ) try : content = ujson . loads ( body ) except ValueError : return json_response ( { 'error' : True , 'message' : 'Cannot decode body' } , status = 400 ) secret = s [ 'app_secret' ] actual_sig = request . headers [ 'X-Hub-Signature' ] expected_sig = sign_message ( body , secret ) if not hmac . compare_digest ( actual_sig , expected_sig ) : return json_response ( { 'error' : True , 'message' : 'Invalid signature' , } , status = 401 ) for entry in content [ 'entry' ] : for raw_message in entry . get ( 'messaging' , [ ] ) : message = FacebookMessage ( raw_message , self ) await self . handle_event ( message ) return json_response ( { 'ok' : True , } )
Events received from Facebook
225
4
10,373
async def _deferred_init ( self ) : await self . _check_subscriptions ( ) await self . _set_whitelist ( ) await self . _set_get_started ( ) await self . _set_greeting_text ( ) await self . _set_persistent_menu ( )
Run those things in a sepearate tasks as they are not required for the bot to work and they take a lot of time to run .
70
29
10,374
async def _send_to_messenger_profile ( self , page , content ) : log_name = ', ' . join ( repr ( x ) for x in content . keys ( ) ) page_id = page [ 'page_id' ] current = await self . _get_messenger_profile ( page , content . keys ( ) ) if dict_is_subset ( content , current ) : logger . info ( 'Page %s: %s is already up to date' , page_id , log_name ) return params = { 'access_token' : page [ 'page_token' ] , } headers = { 'content-type' : 'application/json' , } post = self . session . post ( PROFILE_ENDPOINT , params = params , headers = headers , data = ujson . dumps ( content ) ) # noinspection PyBroadException try : async with post as r : await self . _handle_fb_response ( r ) except Exception : logger . exception ( 'Page %s: %s could not be set' , page_id , log_name ) reporter . report ( ) else : logger . info ( 'Page %s: %s was updated' , page_id , log_name )
The messenger profile API handles all meta - information about the bot like the menu . This allows to submit data to this API endpoint .
269
26
10,375
async def _set_get_started ( self ) : page = self . settings ( ) if 'get_started' in page : payload = page [ 'get_started' ] else : payload = { 'action' : 'get_started' } await self . _send_to_messenger_profile ( page , { 'get_started' : { 'payload' : ujson . dumps ( payload ) , } , } ) logger . info ( 'Get started set for page %s' , page [ 'page_id' ] )
Set the get started action for all configured pages .
117
10
10,376
async def _set_greeting_text ( self ) : page = self . settings ( ) if 'greeting' in page : await self . _send_to_messenger_profile ( page , { 'greeting' : page [ 'greeting' ] , } ) logger . info ( 'Greeting text set for page %s' , page [ 'page_id' ] )
Set the greeting text of the page
89
7
10,377
async def _set_persistent_menu ( self ) : page = self . settings ( ) if 'menu' in page : await self . _send_to_messenger_profile ( page , { 'persistent_menu' : page [ 'menu' ] , } ) logger . info ( 'Set menu for page %s' , page [ 'page_id' ] )
Define the persistent menu for all pages
82
8
10,378
async def _set_whitelist ( self ) : page = self . settings ( ) if 'whitelist' in page : await self . _send_to_messenger_profile ( page , { 'whitelisted_domains' : page [ 'whitelist' ] , } ) logger . info ( 'Whitelisted %s for page %s' , page [ 'whitelist' ] , page [ 'page_id' ] )
Whitelist domains for the messenger extensions
100
7
10,379
def _get_subscriptions_endpoint ( self ) : s = self . settings ( ) params = { 'access_token' : self . app_access_token , } return ( GRAPH_ENDPOINT . format ( f'{s["app_id"]}/subscriptions' ) , params , )
Generates the URL and tokens for the subscriptions endpoint
71
10
10,380
async def _get_subscriptions ( self ) -> Tuple [ Set [ Text ] , Text ] : url , params = self . _get_subscriptions_endpoint ( ) get = self . session . get ( url , params = params ) async with get as r : await self . _handle_fb_response ( r ) data = await r . json ( ) for scope in data [ 'data' ] : if scope [ 'object' ] == 'page' : return ( set ( x [ 'name' ] for x in scope [ 'fields' ] ) , scope [ 'callback_url' ] , ) return set ( ) , ''
List the subscriptions currently active
140
5
10,381
async def _set_subscriptions ( self , subscriptions ) : url , params = self . _get_subscriptions_endpoint ( ) data = { 'object' : 'page' , 'callback_url' : self . webhook_url , 'fields' : ', ' . join ( subscriptions ) , 'verify_token' : self . verify_token , } headers = { 'Content-Type' : 'application/json' , } post = self . session . post ( url , params = params , data = ujson . dumps ( data ) , headers = headers , ) async with post as r : await self . _handle_fb_response ( r ) data = await r . json ( )
Set the subscriptions to a specific list of values
153
9
10,382
async def _check_subscriptions ( self ) : subscribed , url = await self . _get_subscriptions ( ) expect = set ( settings . FACEBOOK_SUBSCRIPTIONS ) if ( expect - subscribed ) or url != self . webhook_url : await self . _set_subscriptions ( expect | subscribed ) logger . info ( 'Updated webhook subscriptions' ) else : logger . info ( 'No need to update webhook subscriptions' )
Checks that all subscriptions are subscribed
100
7
10,383
async def handle_event ( self , event : FacebookMessage ) : responder = FacebookResponder ( self ) await self . _notify ( event , responder )
Handle an incoming message from Facebook .
36
7
10,384
def _access_token ( self , request : Request = None , page_id : Text = '' ) : if not page_id : msg = request . message # type: FacebookMessage page_id = msg . get_page_id ( ) page = self . settings ( ) if page [ 'page_id' ] == page_id : return page [ 'page_token' ] raise PlatformOperationError ( 'Trying to get access token of the ' 'page "{}", which is not configured.' . format ( page_id ) )
Guess the access token for that specific request .
115
10
10,385
async def _make_qr ( self , qr : QuickRepliesList . BaseOption , request : Request ) : if isinstance ( qr , QuickRepliesList . TextOption ) : return { 'content_type' : 'text' , 'title' : await render ( qr . text , request ) , 'payload' : qr . slug , } elif isinstance ( qr , QuickRepliesList . LocationOption ) : return { 'content_type' : 'location' , }
Generate a single quick reply s content .
111
9
10,386
async def _send_text ( self , request : Request , stack : Stack ) : parts = [ ] for layer in stack . layers : if isinstance ( layer , lyr . MultiText ) : lines = await render ( layer . text , request , multi_line = True ) for line in lines : for part in wrap ( line , 320 ) : parts . append ( part ) elif isinstance ( layer , ( lyr . Text , lyr . RawText ) ) : text = await render ( layer . text , request ) for part in wrap ( text , 320 ) : parts . append ( part ) for part in parts [ : - 1 ] : await self . _send ( request , { 'text' : part , } , stack ) part = parts [ - 1 ] msg = { 'text' : part , } await self . _add_qr ( stack , msg , request ) await self . _send ( request , msg , stack )
Send text layers to the user . Each layer will go in its own bubble .
199
16
10,387
async def _send_generic_template ( self , request : Request , stack : Stack ) : gt = stack . get_layer ( GenericTemplate ) payload = await gt . serialize ( request ) msg = { 'attachment' : { 'type' : 'template' , 'payload' : payload } } await self . _add_qr ( stack , msg , request ) await self . _send ( request , msg , stack )
Generates and send a generic template .
96
8
10,388
async def _send_button_template ( self , request : Request , stack : Stack ) : gt = stack . get_layer ( ButtonTemplate ) payload = { 'template_type' : 'button' , 'text' : await render ( gt . text , request ) , 'buttons' : [ await b . serialize ( request ) for b in gt . buttons ] , } msg = { 'attachment' : { 'type' : 'template' , 'payload' : payload } } await self . _add_qr ( stack , msg , request ) await self . _send ( request , msg , stack )
Generates and send a button template .
137
8
10,389
async def _send_typing ( self , request : Request , stack : Stack ) : active = stack . get_layer ( lyr . Typing ) . active msg = ujson . dumps ( { 'recipient' : { 'id' : request . conversation . fbid , } , 'sender_action' : 'typing_on' if active else 'typing_off' , } ) headers = { 'content-type' : 'application/json' , } params = { 'access_token' : self . _access_token ( request ) , } post = self . session . post ( MESSAGES_ENDPOINT , params = params , data = msg , headers = headers , ) logger . debug ( 'Sending: %s' , msg ) async with post as r : await self . _handle_fb_response ( r )
Send to Facebook typing indications
186
5
10,390
async def _handle_fb_response ( self , response : aiohttp . ClientResponse ) : ok = response . status == 200 if not ok : # noinspection PyBroadException try : error = ( await response . json ( ) ) [ 'error' ] [ 'message' ] except Exception : error = '(nothing)' raise PlatformOperationError ( 'Facebook says: "{}"' . format ( error ) )
Check that Facebook was OK with the API call we just made and raise an exception if it failed .
88
20
10,391
async def _send ( self , request : Request , content : Dict [ Text , Any ] , stack : Stack ) : msg = { 'recipient' : { 'id' : request . conversation . fbid , } , 'message' : content , } if stack and stack . has_layer ( MessagingType ) : mt = stack . get_layer ( MessagingType ) else : mt = MessagingType ( response = True ) msg . update ( mt . serialize ( ) ) msg_json = ujson . dumps ( msg ) headers = { 'content-type' : 'application/json' , } params = { 'access_token' : self . _access_token ( request ) , } post = self . session . post ( MESSAGES_ENDPOINT , params = params , data = msg_json , headers = headers , ) logger . debug ( 'Sending: %s' , msg_json ) async with post as r : await self . _handle_fb_response ( r )
Actually proceed to sending the message to the Facebook API .
219
11
10,392
async def get_user ( self , user_id , page_id ) : access_token = self . _access_token ( page_id = page_id ) params = { 'fields' : 'first_name,last_name,profile_pic,locale,timezone' ',gender' , 'access_token' : access_token , } url = GRAPH_ENDPOINT . format ( user_id ) get = self . session . get ( url , params = params ) async with get as r : await self . _handle_fb_response ( r ) return await r . json ( )
Query a user from the API and return its JSON
133
10
10,393
async def ensure_usable_media ( self , media : BaseMedia ) -> UrlMedia : if not isinstance ( media , UrlMedia ) : raise ValueError ( 'Facebook platform only accepts URL media' ) return media
So far let s just accept URL media . We ll see in the future how it goes .
48
19
10,394
def _make_fake_message ( self , user_id , page_id , payload ) : event = { 'sender' : { 'id' : user_id , } , 'recipient' : { 'id' : page_id , } , 'postback' : { 'payload' : ujson . dumps ( payload ) , } , } return FacebookMessage ( event , self , False )
Creates a fake message for the given user_id . It contains a postback with the given payload .
88
22
10,395
def _message_from_sr ( self , token : Text , payload : Any ) -> Optional [ BaseMessage ] : page = self . settings ( ) secret = page [ 'app_secret' ] try : sr_data = SignedRequest . parse ( token , secret ) except ( TypeError , ValueError , SignedRequestError ) as e : return return self . _make_fake_message ( sr_data [ 'psid' ] , page [ 'page_id' ] , payload , )
Tries to verify the signed request
105
7
10,396
def _message_from_token ( self , token : Text , payload : Any ) -> Optional [ BaseMessage ] : try : tk = jwt . decode ( token , settings . WEBVIEW_SECRET_KEY ) except jwt . InvalidTokenError : return try : user_id = tk [ 'fb_psid' ] assert isinstance ( user_id , Text ) page_id = tk [ 'fb_pid' ] assert isinstance ( page_id , Text ) except ( KeyError , AssertionError ) : return if self . settings ( ) [ 'page_id' ] == page_id : return self . _make_fake_message ( user_id , page_id , payload )
Analyzes a signed token and generates the matching message
156
10
10,397
def get_trans_reg ( self , name : Text , default : Any = None ) -> Any : tr = self . register . get ( Register . TRANSITION , { } ) return tr . get ( name , default )
Convenience function to access the transition register of a specific kind .
47
14
10,398
async def get_locale ( self ) -> Text : if self . _locale_override : return self . _locale_override else : return await self . user . get_locale ( )
Get the locale to use for this request . It s either the overridden locale or the locale provided by the platform .
46
24
10,399
async def get_trans_flags ( self ) -> 'Flags' : from bernard . middleware import MiddlewareManager async def make_flags ( request : Request ) -> 'Flags' : return { } mf = MiddlewareManager . instance ( ) . get ( 'make_trans_flags' , make_flags ) return await mf ( self )
Gives a chance to middlewares to make the translation flags
77
13