idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
39,800
def get_entity(self, entity_id, at=None):
    """Return the entity with the given ID, optionally as at position `at`.

    Uses a snapshot (when a snapshot strategy is configured) as the initial
    state, then replays only the events recorded after that snapshot.
    """
    snapshot = None
    if self._snapshot_strategy is not None:
        snapshot = self._snapshot_strategy.get_snapshot(entity_id, lte=at)
    if snapshot is None:
        initial_state, gt = None, None
    else:
        initial_state = entity_from_snapshot(snapshot)
        gt = snapshot.originator_version
    return self.get_and_project_events(
        entity_id, gt=gt, lte=at, initial_state=initial_state
    )
Returns the entity with the given ID, optionally as it existed at the given position.
39,801
def get_and_project_events(self, entity_id, gt=None, gte=None, lt=None,
                           lte=None, limit=None, initial_state=None,
                           query_descending=False):
    """Reconstitute the requested entity by projecting events from the store.

    When no position filters are given and paging is disabled, the query is
    made descending (so a limit selects the most recent events), and results
    are put back in ascending order before projection.
    """
    no_filters = gt is None and gte is None and lt is None and lte is None
    if no_filters and self.__page_size__ is None:
        is_ascending = False
    else:
        is_ascending = not query_descending
    domain_events = self.event_store.get_domain_events(
        originator_id=entity_id,
        gt=gt, gte=gte, lt=lt, lte=lte,
        limit=limit,
        is_ascending=is_ascending,
        page_size=self.__page_size__,
    )
    if not is_ascending:
        domain_events = list(reversed(list(domain_events)))
    return self.project_events(initial_state, domain_events)
Reconstitutes requested domain entity from domain events found in event store .
39,802
def take_snapshot(self, entity_id, lt=None, lte=None):
    """Take a snapshot of the entity as it existed after the most recent
    event, optionally below (lt) or at/below (lte) a given position.

    Reuses the latest snapshot when it already covers the latest event;
    otherwise projects events (starting from the latest snapshot, if any)
    and snapshots the resulting entity at the latest event's version.
    Returns None when there is no snapshot strategy or no events.
    """
    snapshot = None
    if self._snapshot_strategy:
        latest_event = self.event_store.get_most_recent_event(entity_id, lt=lt, lte=lte)
        if latest_event is not None:
            latest_snapshot = self._snapshot_strategy.get_snapshot(entity_id, lt=lt, lte=lte)
            latest_version = latest_event.originator_version
            if latest_snapshot and latest_snapshot.originator_version == latest_version:
                # Snapshot is already up to date; reuse it.
                snapshot = latest_snapshot
            else:
                if latest_snapshot:
                    initial_state = entity_from_snapshot(latest_snapshot)
                    gt = latest_snapshot.originator_version
                else:
                    initial_state, gt = None, None
                entity = self.get_and_project_events(
                    entity_id=entity_id, gt=gt, lte=latest_version,
                    initial_state=initial_state,
                )
                snapshot = self._snapshot_strategy.take_snapshot(entity_id, entity, latest_version)
    return snapshot
Takes a snapshot of the entity as it existed after the most recent event optionally less than or less than or equal to a particular position .
39,803
def create_new_example(self, foo='', a='', b=''):
    """Entity object factory: delegate to the module-level factory function."""
    return create_new_example(foo=foo, a=a, b=b)
Entity object factory .
39,804
def timestamp_long_from_uuid(uuid_arg):
    """Return a Unix timestamp in tenths of microseconds for a v1 UUID.

    Accepts a UUID instance or its string form.  The constant
    0x01B21DD213814000 is the offset, in 100ns ticks, between the UUID
    epoch (1582-10-15) and the Unix epoch (1970-01-01).
    """
    if isinstance(uuid_arg, str):
        uuid_arg = UUID(uuid_arg)
    assert isinstance(uuid_arg, UUID), uuid_arg
    return uuid_arg.time - 0x01B21DD213814000
Returns an integer value representing a unix timestamp in tenths of microseconds .
39,805
def subscribe_to(*event_classes):
    """Decorator: subscribe a handler function to the given event classes.

    Supports bare use (``@subscribe_to``) as well as parameterized use
    (``@subscribe_to(EventA, EventB)``).  The subscribed handler
    recursively unpacks lists/tuples of events and calls the decorated
    function for each event matching one of the given classes (or for
    every event when no classes are given).
    """
    event_classes = list(event_classes)

    def wrap(func):
        def handler(event):
            if isinstance(event, (list, tuple)):
                for each in event:
                    handler(each)
            elif not event_classes or isinstance(event, tuple(event_classes)):
                func(event)
        subscribe(handler=handler, predicate=lambda _: True)
        return func

    if len(event_classes) == 1 and isfunction(event_classes[0]):
        # Bare decorator usage: the single "event class" is the function.
        return wrap(event_classes.pop())
    return wrap
Decorator for making a custom event handler function subscribe to a certain class of event .
39,806
def mutator(arg=None):
    """Build an extensible mutator function using singledispatch.

    Usable bare (``@mutator``) or with a domain class (``@mutator(Entity)``)
    used as the initial state when none is given.  Handlers for specific
    event types are added via the returned function's ``register``
    attribute; dispatch is on the type of the event.
    """
    domain_class = None

    def _mutator(func):
        dispatcher = singledispatch(func)

        @wraps(dispatcher)
        def wrapper(initial, event):
            initial = initial or domain_class
            return dispatcher.dispatch(type(event))(initial, event)

        wrapper.register = dispatcher.register
        return wrapper

    if isfunction(arg):
        return _mutator(arg)
    domain_class = arg
    return _mutator
Structures mutator functions by allowing handlers to be registered for different types of event . When the decorated function is called with an initial value and an event it will call the handler that has been registered for that type of event .
39,807
def encrypt(self, plaintext):
    """Return base64 ciphertext for the given plaintext string.

    The plaintext is UTF-8 encoded, zlib-compressed, then encrypted with
    AES-GCM using a fresh 12-byte nonce; the output is
    base64(nonce + tag + ciphertext) decoded to str.
    """
    compressed = zlib.compress(plaintext.encode('utf8'))
    cipher = AES.new(self.cipher_key, AES.MODE_GCM, nonce=random_bytes(12))
    encrypted, tag = cipher.encrypt_and_digest(compressed)
    combined = cipher.nonce + tag + encrypted
    return base64.b64encode(combined).decode('utf8')
Return ciphertext for given plaintext .
39,808
def decrypt(self, ciphertext):
    """Return plaintext for the given base64 AES-GCM ciphertext.

    Expected layout is base64(nonce[12] + tag[16] + encrypted).  Raises
    DataIntegrityError when decoding, layout checks, or verification fail.
    """
    try:
        combined = base64.b64decode(ciphertext.encode('utf8'))
    except (base64.binascii.Error, TypeError) as e:
        raise DataIntegrityError("Cipher text is damaged: {}".format(e))
    nonce, tag, encrypted = combined[:12], combined[12:28], combined[28:]
    if len(nonce) != 12:
        raise DataIntegrityError("Cipher text is damaged: invalid nonce length")
    if len(tag) != 16:
        raise DataIntegrityError("Cipher text is damaged: invalid tag length")
    cipher = AES.new(self.cipher_key, AES.MODE_GCM, nonce)
    try:
        compressed = cipher.decrypt_and_verify(encrypted, tag)
    except ValueError as e:
        raise DataIntegrityError("Cipher text is damaged: {}".format(e))
    return zlib.decompress(compressed).decode('utf8')
Return plaintext for given ciphertext .
39,809
def store(self, domain_event_or_events):
    """Append the given domain event (or list of events) to its sequence.

    Raises ConcurrencyError when the record manager reports a conflicting
    (already occupied) position.
    """
    sequenced = self.item_from_event(domain_event_or_events)
    try:
        self.record_manager.record_sequenced_items(sequenced)
    except RecordConflictError as e:
        raise ConcurrencyError(e)
Appends given domain event or list of domain events to their sequence .
39,810
def item_from_event(self, domain_event_or_events):
    """Map a domain event (or list/tuple of them) to sequenced item(s)."""
    if isinstance(domain_event_or_events, (list, tuple)):
        return [self.item_from_event(each) for each in domain_event_or_events]
    return self.mapper.item_from_event(domain_event_or_events)
Maps domain event to sequenced item namedtuple .
39,811
def get_domain_events(self, originator_id, gt=None, gte=None, lt=None,
                      lte=None, limit=None, is_ascending=True,
                      page_size=None):
    """Get domain events from the sequence identified by originator_id.

    With a page_size, sequenced items are fetched through the paging
    iterator class; otherwise directly from the record manager.  Items are
    mapped to domain events and returned as a list.
    """
    if page_size:
        sequenced_items = self.iterator_class(
            record_manager=self.record_manager,
            sequence_id=originator_id,
            page_size=page_size,
            gt=gt, gte=gte, lt=lt, lte=lte,
            limit=limit,
            is_ascending=is_ascending,
        )
    else:
        sequenced_items = self.record_manager.get_items(
            sequence_id=originator_id,
            gt=gt, gte=gte, lt=lt, lte=lte,
            limit=limit,
            query_ascending=is_ascending,
            results_ascending=is_ascending,
        )
    return list(map(self.mapper.event_from_item, sequenced_items))
Gets domain events from the sequence identified by originator_id .
39,812
def get_domain_event(self, originator_id, position):
    """Get the domain event at the given position of the given sequence."""
    record = self.record_manager.get_item(
        sequence_id=originator_id,
        position=position,
    )
    return self.mapper.event_from_item(record)
Gets a domain event from the sequence identified by originator_id, at the given position.
39,813
def get_most_recent_event(self, originator_id, lt=None, lte=None):
    """Return the domain event at the highest position, or None when empty."""
    events = list(self.get_domain_events(
        originator_id=originator_id, lt=lt, lte=lte,
        limit=1, is_ascending=False,
    ))
    return events[0] if events else None
Gets a domain event from the sequence identified by originator_id at the highest position .
39,814
def all_domain_events(self):
    """Yield every domain event in the event store, sequence by sequence."""
    for sequence_id in self.record_manager.all_sequence_ids():
        # Page through each sequence to bound memory use.
        for event in self.get_domain_events(originator_id=sequence_id, page_size=100):
            yield event
Yields all domain events in the event store .
39,815
def publish_prompt(self, event=None):
    """Publish a prompt for this process and pipeline.

    The event argument is accepted (so this can be used as an event
    handler) but not used.  A PromptFailed raised by a subscriber is
    passed through; any other publishing error is wrapped in PromptFailed.
    """
    prompt = Prompt(self.name, self.pipeline_id)
    try:
        publish(prompt)
    except PromptFailed:
        raise
    except Exception as e:
        raise PromptFailed("{}: {}".format(type(e), str(e)))
Publishes prompt for a given event .
39,816
def _prepare_insert ( self , tmpl , record_class , field_names , placeholder_for_id = False ) : field_names = list ( field_names ) if hasattr ( record_class , 'application_name' ) and 'application_name' not in field_names : field_names . append ( 'application_name' ) if hasattr ( record_class , 'pipeline_id' ) and 'pipeline_id' not in field_names : field_names . append ( 'pipeline_id' ) if hasattr ( record_class , 'causal_dependencies' ) and 'causal_dependencies' not in field_names : field_names . append ( 'causal_dependencies' ) if placeholder_for_id : if self . notification_id_name : if self . notification_id_name not in field_names : field_names . append ( 'id' ) statement = tmpl . format ( tablename = self . get_record_table_name ( record_class ) , columns = ", " . join ( field_names ) , placeholders = ", " . join ( [ '%s' for _ in field_names ] ) , notification_id = self . notification_id_name ) return statement
With a transaction isolation level of read committed, this should generate records with a contiguous sequence of integer IDs, using an indexed ID column, the database-side SQL max function, the insert-select-from form, and optimistic concurrency control.
39,817
def get_notifications(self, start=None, stop=None, *args, **kwargs):
    """Return notification records, optionally bounded by start/stop.

    start/stop follow the caller's zero-based convention while the
    notification ID column is one-based, hence the +1 shifts.  Results are
    filtered to this application/pipeline when the record class has those
    columns, and ordered by notification ID.
    """
    filter_kwargs = {}
    if start is not None:
        filter_kwargs['%s__gte' % self.notification_id_name] = start + 1
    if stop is not None:
        filter_kwargs['%s__lt' % self.notification_id_name] = stop + 1
    objects = self.record_class.objects.filter(**filter_kwargs)
    if hasattr(self.record_class, 'application_name'):
        objects = objects.filter(application_name=self.application_name)
    if hasattr(self.record_class, 'pipeline_id'):
        objects = objects.filter(pipeline_id=self.pipeline_id)
    return objects.order_by('%s' % self.notification_id_name).all()
Returns all records in the table .
39,818
def start(self):
    """Start all the actors that run a system of process applications.

    Subscribes the prompt forwarder, then asks the system actor to
    initialise, checking the initialised pipeline IDs match configuration
    before keeping the pipeline actors.
    """
    subscribe(handler=self.forward_prompt, predicate=self.is_prompt)
    msg = SystemInitRequest(
        self.system.process_classes,
        self.infrastructure_class,
        self.system.followings,
        self.pipeline_ids,
    )
    response = self.actor_system.ask(self.system_actor, msg)
    assert isinstance(response, SystemInitResponse), type(response)
    # Fix: the failure message previously read self.pipeline_actors, which
    # is only assigned below — it would raise AttributeError instead of
    # AssertionError.  Report the response's pipeline IDs instead.
    assert list(response.pipeline_actors.keys()) == self.pipeline_ids, (
        "Configured pipeline IDs mismatch initialised system {} {}".format(
            list(response.pipeline_actors.keys()), self.pipeline_ids
        )
    )
    self.pipeline_actors = response.pipeline_actors
Starts all the actors to run a system of process applications .
39,819
def close(self):
    """Stop all the actors running a system of process applications."""
    super(ActorModelRunner, self).close()
    unsubscribe(handler=self.forward_prompt, predicate=self.is_prompt)
    if self.shutdown_on_close:
        self.shutdown()
Stops all the actors running a system of process applications .
39,820
def register_new_suffix_tree(self, case_insensitive=False):
    """Return a new suffix tree entity wired to this application's repos."""
    tree = register_new_suffix_tree(case_insensitive=case_insensitive)
    tree._node_repo = self.node_repo
    tree._node_child_collection_repo = self.node_child_collection_repo
    tree._edge_repo = self.edge_repo
    tree._stringid_collection_repo = self.stringid_collection_repo
    return tree
Returns a new suffix tree entity .
39,821
def find_string_ids(self, substring, suffix_tree_id, limit=None):
    """Return the set of IDs of strings containing the given substring."""
    edge, matched_length = self.find_substring_edge(
        substring=substring, suffix_tree_id=suffix_tree_id)
    if edge is None:
        return set()
    string_ids = get_string_ids(
        node_id=edge.dest_node_id,
        node_repo=self.node_repo,
        node_child_collection_repo=self.node_child_collection_repo,
        stringid_collection_repo=self.stringid_collection_repo,
        length_until_end=edge.length + 1 - matched_length,
        limit=limit,
    )
    return set(string_ids)
Returns a set of IDs for strings that contain the given substring .
39,822
def find_substring_edge(self, substring, suffix_tree_id):
    """Return (edge, matched_length) for the edge matching the substring.

    NOTE(review): prints search timing to stdout — looks like debug
    output, preserved as-is.
    """
    suffix_tree = self.suffix_tree_repo[suffix_tree_id]
    started = datetime.datetime.now()
    edge, ln = find_substring_edge(
        substring=substring, suffix_tree=suffix_tree, edge_repo=self.edge_repo)
    print(" - searched for edge in {} for substring: '{}'".format(
        datetime.datetime.now() - started, substring))
    return edge, ln
Returns an edge that matches the given substring .
39,823
def run_followers(self, prompt):
    """Queue the prompt; the first caller drains the queue through followers.

    Any caller enqueues its prompt; only the caller that wins the
    non-blocking iteration lock drains the queue, running every follower
    of each prompt's process.  This serialises follower runs while letting
    concurrent callers simply enqueue and return.
    """
    assert isinstance(prompt, Prompt)
    self.pending_prompts.put(prompt)
    if self.iteration_lock.acquire(False):
        try:
            while True:
                try:
                    prompt = self.pending_prompts.get(False)
                except Empty:
                    break
                followers = self.system.followers[prompt.process_name]
                for follower_name in followers:
                    self.system.processes[follower_name].run(prompt)
                self.pending_prompts.task_done()
        finally:
            # Fix: removed an unused run-frequency calculation that could
            # also raise ZeroDivisionError when the drain took ~0 time.
            self.iteration_lock.release()
First caller adds a prompt to queue and runs followers until there are no more pending prompts .
39,824
def create_new_example(foo='', a='', b=''):
    """Factory: create and return a new Example entity."""
    return Example.__create__(foo=foo, a=a, b=b)
Factory method for example entities .
39,825
def applicationpolicy(arg=None):
    """Decorator for an application policy method.

    Wraps the method with singledispatch on the type of the event, taken
    from the 'event' keyword argument or else the last positional
    argument.  Handlers for specific event types are added via
    ``.register``.
    """
    assert isfunction(arg), arg
    dispatcher = singledispatch(arg)

    @wraps(dispatcher)
    def wrapper(*args, **kwargs):
        event = kwargs.get('event') or args[-1]
        return dispatcher.dispatch(type(event))(*args, **kwargs)

    wrapper.register = dispatcher.register
    return wrapper
Decorator for application policy method .
39,826
def _prepare_insert(self, tmpl, record_class, field_names, placeholder_for_id=False):
    """Prepare a compiled SQLAlchemy INSERT statement for the record class.

    Appends optional columns (application_name, pipeline_id,
    causal_dependencies) when the record class declares them, plus the
    notification ID column when placeholder_for_id is set.  Binds a typed
    bindparam per column and compiles for the session's dialect.
    """
    field_names = list(field_names)
    for optional in ('application_name', 'pipeline_id', 'causal_dependencies'):
        if hasattr(record_class, optional) and optional not in field_names:
            field_names.append(optional)
    if self.notification_id_name and placeholder_for_id:
        if self.notification_id_name not in field_names:
            field_names.append(self.notification_id_name)
    statement = text(tmpl.format(
        tablename=self.get_record_table_name(record_class),
        columns=", ".join(field_names),
        placeholders=", ".join([":{}".format(f) for f in field_names]),
        notification_id=self.notification_id_name,
    ))
    bindparams = []
    for col_name in field_names:
        column_type = getattr(record_class, col_name).type
        bindparams.append(bindparam(col_name, type_=column_type))
    statement = statement.bindparams(*bindparams)
    return statement.compile(dialect=self.session.bind.dialect)
With transaction isolation level of read committed this should generate records with a contiguous sequence of integer IDs assumes an indexed ID column the database - side SQL max function the insert - select - from form and optimistic concurrency control .
39,827
def delete_record(self, record):
    """Permanently remove the record from its table.

    Rolls back and wraps any error in ProgrammingError; always closes the
    session.
    """
    try:
        self.session.delete(record)
        self.session.commit()
    except Exception as e:
        self.session.rollback()
        raise ProgrammingError(e)
    finally:
        self.session.close()
Permanently removes record from table .
39,828
def get_or_create(self, log_name, bucket_size):
    """Get the named log, starting a new timebucketed log when missing."""
    try:
        return self[log_name]
    except RepositoryKeyError:
        return start_new_timebucketedlog(log_name, bucket_size=bucket_size)
Gets or creates a log .
39,829
def project_events(self, initial_state, domain_events):
    """Fold the domain events over initial_state with the mutator function."""
    mutate = self._mutator_func or self.mutate
    return reduce(mutate, domain_events, initial_state)
Evolves initial state using the sequence of domain events and a mutator function .
39,830
def get_last_array(self):
    """Return (array, index) of the last array in the compound.

    Returns (None, None) when the compound is empty.  Descends from the
    apex, accumulating the index offset contributed by full subtrees to
    the left of the last child at each level.
    """
    root = self.repo[self.id]
    apex_id, apex_height = root.get_last_item_and_next_position()
    if apex_id is None:
        return None, None
    apex = self.repo[apex_id]
    assert isinstance(apex, Array)
    array = apex
    array_i = 0
    height = apex_height
    while height > 1:
        height -= 1
        array_id, width = array.get_last_item_and_next_position()
        assert width > 0
        # Skip over the full subtrees to the left of the last child.
        array_i += (width - 1) * self.repo.array_size ** height
        array = self.repo[array_id]
    return array, array_i
Returns last array in compound .
39,831
def calc_parent(self, i, j, h):
    """Return (p_i, p_j, p_h, p_p) for the parent of the given child span.

    [p_i, p_j) is the parent's span at height p_h containing the child
    span [i, j) at height h, and p_p is the child's position within the
    parent.
    """
    size = self.repo.array_size
    child_n = i // (size ** h)
    parent_n, parent_pos = divmod(child_n, size)
    parent_h = h + 1
    span = size ** parent_h
    p_i = parent_n * span
    p_j = p_i + span
    assert p_i <= i, 'i greater on parent than child: {}'.format(p_i, p_j)
    assert p_j >= j, 'j less on parent than child: {}'.format(p_i, p_j)
    return p_i, p_j, parent_h, parent_pos
Returns the start and end of the span of the parent sequence that contains the given child.
39,832
def item_from_event(self, domain_event):
    """Construct a sequenced item (named tuple) from a domain event."""
    return self.construct_sequenced_item(self.construct_item_args(domain_event))
Constructs a sequenced item from a domain event .
39,833
def construct_item_args(self, domain_event):
    """Construct the sequenced item's attribute tuple from a domain event.

    Yields (sequence_id, position, topic, state) followed by any extra
    attributes named in self.other_attr_names.
    """
    event_attrs = domain_event.__dict__
    sequence_id = event_attrs[self.sequence_id_attr_name]
    position = getattr(domain_event, self.position_attr_name, None)
    topic, state = self.get_item_topic_and_state(domain_event.__class__, event_attrs)
    extras = tuple(getattr(domain_event, name) for name in self.other_attr_names)
    return (sequence_id, position, topic, state) + extras
Constructs attributes of a sequenced item from the given domain event .
39,834
def event_from_item(self, sequenced_item):
    """Reconstruct a domain event from a stored sequenced item.

    Used in the event store when getting domain events.
    """
    assert isinstance(sequenced_item, self.sequenced_item_class), (
        self.sequenced_item_class, type(sequenced_item))
    topic = getattr(sequenced_item, self.field_names.topic)
    state = getattr(sequenced_item, self.field_names.state)
    return self.event_from_topic_and_state(topic, state)
Reconstructs domain event from stored event topic and event attrs . Used in the event store when getting domain events .
39,835
def get_item(self, sequence_id, position):
    """Get the sequenced item at the given position from the datastore."""
    return self.from_record(self.get_record(sequence_id, position))
Gets sequenced item from the datastore .
39,836
def get_items(self, sequence_id, gt=None, gte=None, lt=None, lte=None,
              limit=None, query_ascending=True, results_ascending=True):
    """Generate sequenced items from records matching the given filters."""
    records = self.get_records(
        sequence_id=sequence_id,
        gt=gt, gte=gte, lt=lt, lte=lte,
        limit=limit,
        query_ascending=query_ascending,
        results_ascending=results_ascending,
    )
    for record in records:
        yield self.from_record(record)
Returns sequenced item generator .
39,837
def to_record(self, sequenced_item):
    """Construct an ORM record object from the given sequenced item."""
    kwargs = self.get_field_kwargs(sequenced_item)
    # Stamp partitioning columns when the record class declares them.
    if hasattr(self.record_class, 'application_name'):
        kwargs['application_name'] = self.application_name
    if hasattr(self.record_class, 'pipeline_id'):
        kwargs['pipeline_id'] = self.pipeline_id
    return self.record_class(**kwargs)
Constructs a record object from given sequenced item object .
39,838
def from_record(self, record):
    """Construct a sequenced item object from the given ORM record."""
    return self.sequenced_item_class(**self.get_field_kwargs(record))
Constructs and returns a sequenced item object from given ORM object .
39,839
def get_pipeline_and_notification_id(self, sequence_id, position):
    """Return (pipeline_id, notification_id) for the event at the given
    position in the given sequence."""
    record = self.get_record(sequence_id, position)
    notification_id = getattr(record, self.notification_id_name)
    return record.pipeline_id, notification_id
Returns pipeline ID and notification ID for event at given position in given sequence .
39,840
def insert_select_max(self):
    """Lazily prepare the SQL statement that inserts records with
    contiguous IDs by selecting the max ID from the indexed table."""
    if self._insert_select_max is None:
        if hasattr(self.record_class, 'application_name'):
            # Application-partitioned tables also need pipeline_id.
            assert hasattr(self.record_class, 'pipeline_id'), self.record_class
            tmpl = self._insert_select_max_tmpl + self._where_application_name_tmpl
        else:
            tmpl = self._insert_select_max_tmpl
        self._insert_select_max = self._prepare_insert(
            tmpl=tmpl,
            record_class=self.record_class,
            field_names=list(self.field_names),
        )
    return self._insert_select_max
SQL statement that inserts records with contiguous IDs by selecting max ID from indexed table records .
39,841
def insert_values(self):
    """Lazily prepare the SQL statement that inserts records without an ID."""
    if self._insert_values is None:
        self._insert_values = self._prepare_insert(
            tmpl=self._insert_values_tmpl,
            placeholder_for_id=True,
            record_class=self.record_class,
            field_names=self.field_names,
        )
    return self._insert_values
SQL statement that inserts records without ID .
39,842
def insert_tracking_record(self):
    """Lazily prepare the SQL statement that inserts tracking records."""
    if self._insert_tracking_record is None:
        self._insert_tracking_record = self._prepare_insert(
            tmpl=self._insert_values_tmpl,
            placeholder_for_id=True,
            record_class=self.tracking_record_class,
            field_names=self.tracking_record_field_names,
        )
    return self._insert_tracking_record
SQL statement that inserts tracking records .
39,843
def start(cls, originator_id, quorum_size, network_uid):
    """Factory: return a new Paxos aggregate via a Started event."""
    assert isinstance(quorum_size, int), "Not an integer: {}".format(quorum_size)
    return cls.__create__(
        event_class=cls.Started,
        originator_id=originator_id,
        quorum_size=quorum_size,
        network_uid=network_uid,
    )
Factory method that returns a new Paxos aggregate .
39,844
def propose_value(self, value, assume_leader=False):
    """Propose a value to the network; return the announced message.

    None is disallowed (it marks 'no value yet').  When the Paxos
    instance has nothing to send (not leader), falls back to a Prepare.
    """
    if value is None:
        raise ValueError("Not allowed to propose value None")
    paxos = self.paxos_instance
    paxos.leader = assume_leader
    msg = paxos.propose_value(value)
    if msg is None:
        msg = paxos.prepare()
    self.setattrs_from_paxos(paxos)
    self.announce(msg)
    return msg
Proposes a value to the network .
39,845
def receive_message(self, msg):
    """Respond to a message from another participant.

    Feeds messages through the local Paxos instance until it produces no
    further message or a Resolution is reached, announcing each produced
    message, then records the instance's attribute changes.
    """
    if isinstance(msg, Resolution):
        return
    paxos = self.paxos_instance
    while msg:
        if isinstance(msg, Resolution):
            self.print_if_verbose("{} resolved value {}".format(self.network_uid, msg.value))
            break
        self.print_if_verbose("{} <- {} <- {}".format(
            self.network_uid, msg.__class__.__name__, msg.from_uid))
        msg = paxos.receive(msg)
        # NOTE(review): with this flag fixed True, the condition below
        # reduces to "if msg" — preserved as written.
        do_announce_resolution = True
        if msg and (do_announce_resolution or not isinstance(msg, Resolution)):
            self.announce(msg)
    self.setattrs_from_paxos(paxos)
Responds to messages from other participants .
39,846
def announce(self, msg):
    """Announce a Paxos message by triggering a MessageAnnounced event."""
    self.print_if_verbose("{} -> {}".format(self.network_uid, msg.__class__.__name__))
    self.__trigger_event__(
        event_class=self.MessageAnnounced,
        msg=msg,
    )
Announces a Paxos message .
39,847
def setattrs_from_paxos(self, paxos):
    """Copy changed Paxos-instance attributes onto self, recording the
    changes with an AttributesChanged event (no event when unchanged)."""
    changes = {}
    for name in self.paxos_variables:
        value = getattr(paxos, name)
        if value != getattr(self, name, None):
            self.print_if_verbose("{} {}: {}".format(self.network_uid, name, value))
            changes[name] = value
            setattr(self, name, value)
    if changes:
        self.__trigger_event__(event_class=self.AttributesChanged, changes=changes)
Registers changes of attribute value on Paxos instance .
39,848
def propose_value(self, key, value, assume_leader=False):
    """Start a new Paxos aggregate for key and propose a value for it.

    Runs the proposal locally until no message remains, records the new
    events atomically, takes a snapshot, and prompts followers.
    """
    assert isinstance(key, UUID)
    paxos_aggregate = PaxosAggregate.start(
        originator_id=key,
        quorum_size=self.quorum_size,
        network_uid=self.name,
    )
    msg = paxos_aggregate.propose_value(value, assume_leader=assume_leader)
    while msg:
        msg = paxos_aggregate.receive_message(msg)
    new_events = paxos_aggregate.__batch_pending_events__()
    self.record_process_event(ProcessEvent(new_events))
    self.repository.take_snapshot(paxos_aggregate.id)
    self.publish_prompt()
    return paxos_aggregate
Starts new Paxos aggregate and proposes a value for a key .
39,849
def receive(self, msg):
    """Dispatch a Paxos message to the matching receive_<type> handler.

    Accepts any PaxosMessage subclass; raises InvalidMessageError when no
    handler exists for the message's type.
    """
    handler_name = 'receive_' + msg.__class__.__name__.lower()
    handler = getattr(self, handler_name, None)
    if handler is None:
        raise InvalidMessageError(
            'Receiving class does not support messages of type: '
            + msg.__class__.__name__)
    return handler(msg)
Message dispatching function . This function accepts any PaxosMessage subclass and calls the appropriate handler function
39,850
def propose_value(self, value):
    """Set the proposal value iff no previous proposal value is known.

    If this node additionally believes itself to be the current leader,
    an Accept message is built, stored and returned; otherwise None.
    """
    if self.proposed_value is None:
        self.proposed_value = value
    if self.leader:
        self.current_accept_msg = Accept(self.network_uid, self.proposal_id, value)
        return self.current_accept_msg
Sets the proposal value for this node iff this node is not already aware of a previous proposal value . If the node additionally believes itself to be the current leader an Accept message will be returned
39,851
def prepare(self):
    """Return a new Prepare message with a proposal ID higher than any
    observed proposal; clears the leader flag and resets promise/nack
    tracking as a side effect."""
    self.leader = False
    self.promises_received = set()
    self.nacks_received = set()
    self.proposal_id = ProposalID(self.highest_proposal_id.number + 1, self.network_uid)
    self.highest_proposal_id = self.proposal_id
    self.current_prepare_msg = Prepare(self.network_uid, self.proposal_id)
    return self.current_prepare_msg
Returns a new Prepare message with a proposal id higher than that of any observed proposals . A side effect of this method is to clear the leader flag if it is currently set .
39,852
def receive_nack(self, msg):
    """Track a Nack; return a new Prepare once a quorum of Nacks arrives."""
    self.observe_proposal(msg.promised_proposal_id)
    if msg.proposal_id == self.proposal_id and self.nacks_received is not None:
        self.nacks_received.add(msg.from_uid)
        if len(self.nacks_received) == self.quorum_size:
            return self.prepare()
Returns a new Prepare message if the number of Nacks received reaches a quorum .
39,853
def receive_promise(self, msg):
    """Track a Promise; on quorum, become leader and possibly return an
    Accept message.

    Any previously accepted value reported by a promiser with the highest
    accepted ID is adopted as the proposed value.
    """
    self.observe_proposal(msg.proposal_id)
    is_new = (not self.leader
              and msg.proposal_id == self.proposal_id
              and msg.from_uid not in self.promises_received)
    if is_new:
        self.promises_received.add(msg.from_uid)
        if self.highest_accepted_id is None or msg.last_accepted_id > self.highest_accepted_id:
            self.highest_accepted_id = msg.last_accepted_id
            if msg.last_accepted_value is not None:
                self.proposed_value = msg.last_accepted_value
        if len(self.promises_received) == self.quorum_size:
            self.leader = True
            if self.proposed_value is not None:
                self.current_accept_msg = Accept(self.network_uid, self.proposal_id, self.proposed_value)
                return self.current_accept_msg
Returns an Accept messages if a quorum of Promise messages is achieved
39,854
def receive_prepare(self, msg):
    """Return a Promise (updating promised_id) when the proposal is at
    least as high as any promised; otherwise return a Nack.

    The Acceptor's state must be persisted to disk prior to transmitting
    the Promise message.
    """
    if self.promised_id is None or msg.proposal_id >= self.promised_id:
        self.promised_id = msg.proposal_id
        return Promise(self.network_uid, msg.from_uid, self.promised_id,
                       self.accepted_id, self.accepted_value)
    return Nack(self.network_uid, msg.from_uid, msg.proposal_id, self.promised_id)
Returns either a Promise or a Nack in response. The Acceptor's state must be persisted to disk prior to transmitting the Promise message.
39,855
def receive_accept(self, msg):
    """Accept the proposal when at least as high as any promised,
    returning Accepted; otherwise return a Nack.

    The Acceptor's state must be persisted to disk prior to transmitting
    the Accepted message.
    """
    if self.promised_id is None or msg.proposal_id >= self.promised_id:
        self.promised_id = msg.proposal_id
        self.accepted_id = msg.proposal_id
        self.accepted_value = msg.proposal_value
        return Accepted(self.network_uid, msg.proposal_id, msg.proposal_value)
    return Nack(self.network_uid, msg.from_uid, msg.proposal_id, self.promised_id)
Returns either an Accepted or a Nack message in response. The Acceptor's state must be persisted to disk prior to transmitting the Accepted message.
39,856
def receive_accepted(self, msg):
    """Handle an Accepted message from an acceptor.

    Once a quorum of acceptors has accepted a single proposal, returns a
    Resolution carrying the consensual value.  Subsequent calls keep
    adding compatible acceptors to final_acceptors and keep returning
    Resolutions.  Returns None while unresolved (or for stale duplicates).
    """
    if self.final_value is not None:
        # Already resolved: just accumulate compatible acceptors.
        if msg.proposal_id >= self.final_proposal_id and msg.proposal_value == self.final_value:
            self.final_acceptors.add(msg.from_uid)
        return Resolution(self.network_uid, self.final_value)
    last_pn = self.acceptors.get(msg.from_uid)
    if last_pn is not None and msg.proposal_id <= last_pn:
        # Stale or duplicate message from this acceptor.
        return
    self.acceptors[msg.from_uid] = msg.proposal_id
    if last_pn is not None:
        # Retract this acceptor's support for its previous proposal.
        proposal_key = str(last_pn)
        ps = self.proposals[proposal_key]
        ps.retain_count -= 1
        ps.acceptors.remove(msg.from_uid)
        if ps.retain_count == 0:
            del self.proposals[proposal_key]
    proposal_key = str(msg.proposal_id)
    if proposal_key not in self.proposals:
        self.proposals[proposal_key] = ProposalStatus(msg.proposal_value)
    ps = self.proposals[proposal_key]
    assert msg.proposal_value == ps.value, 'Value mismatch for single proposal!'
    ps.accept_count += 1
    ps.retain_count += 1
    ps.acceptors.add(msg.from_uid)
    if ps.accept_count == self.quorum_size:
        self.final_proposal_id = msg.proposal_id
        self.final_value = msg.proposal_value
        self.final_acceptors = ps.acceptors
        # Tracking state is no longer needed after resolution.
        self.proposals = None
        self.acceptors = None
        return Resolution(self.network_uid, self.final_value)
Called when an Accepted message is received from an acceptor . Once the final value is determined the return value of this method will be a Resolution message containing the consentual value . Subsequent calls after the resolution is chosen will continue to add new Acceptors to the final_acceptors set and return Resolution messages .
39,857
def resolve_topic(topic):
    """Return the class described by the given topic string.

    A topic has the form 'module.path#Attr.Path'.  Raises
    TopicResolutionError when either the module import or the attribute
    lookup fails.
    """
    module_name, _, class_name = topic.partition('#')
    try:
        module = importlib.import_module(module_name)
    except ImportError as e:
        raise TopicResolutionError("{}: {}".format(topic, e))
    try:
        return resolve_attr(module, class_name)
    except AttributeError as e:
        raise TopicResolutionError("{}: {}".format(topic, e))
Return class described by given topic .
39,858
def resolve_attr(obj, path):
    """A recursive getattr that navigates dotted attribute paths."""
    if not path:
        return obj
    head, _, tail = path.partition('.')
    return resolve_attr(getattr(obj, head), tail)
A recursive version of getattr for navigating dotted paths .
39,859
def make_skip_list(cts):
    """Return a hand-defined set of place names to skip geolocation for.

    Includes broad region/direction terms plus a few names spaCy
    mishandles.  The cts argument is unused; excluding country names would
    be done here if desired.
    """
    special_terms = ["Europe", "West", "the West", "South Pacific", "Gulf of Mexico",
                     "Atlantic", "the Black Sea", "Black Sea", "North America", "Mideast",
                     "Middle East", "the Middle East", "Asia", "the Caucasus", "Africa",
                     "Central Asia", "Balkans", "Eastern Europe", "Arctic", "Ottoman Empire",
                     "Asia-Pacific", "East Asia", "Horn of Africa", "Americas",
                     "North Africa", "the Strait of Hormuz", "Mediterranean", "East", "North",
                     "South", "Latin America", "Southeast Asia", "Western Pacific",
                     "South Asia", "Persian Gulf", "Central Europe", "Western Hemisphere",
                     "Western Europe", "European Union (E.U.)", "EU", "European Union",
                     "E.U.", "Asia-Pacific", "Europe", "Caribbean", "US", "U.S.",
                     "Persian Gulf", "West Africa", "North", "East", "South", "West",
                     "Western Countries"]
    spacy_problems = ["Kurd", "Qur'an"]
    return set(special_terms + spacy_problems)
Return hand - defined list of place names to skip and not attempt to geolocate . If users would like to exclude country names this would be the function to do it with .
39,860
def country_list_nlp(cts):
    """Run each country name through the nlp pipeline, for vector comparisons."""
    return [nlp(name) for name in cts.keys()]
NLP countries so we can use for vector comparisons
39,861
def make_country_nationality_list(cts, ct_file):
    """Return one dict mapping both country names and nationalities to codes.

    Nationalities are read from the given CSV file (columns 'nationality'
    and 'alpha_3_code'); entries in cts take precedence on key clashes.
    """
    countries = pd.read_csv(ct_file)
    nationality = dict(zip(countries.nationality, countries.alpha_3_code))
    return {**nationality, **cts}
Combine list of countries and list of nationalities
39,862
def structure_results(res):
    """Reshape raw Geonames rows into an Elasticsearch-style results dict.

    Only the whitelisted keys are copied through; everything else is dropped.
    """
    keys = [u'admin1_code', u'admin2_code', u'admin3_code', u'admin4_code',
            u'alternativenames', u'asciiname', u'cc2', u'coordinates',
            u'country_code2', u'country_code3', u'dem', u'elevation',
            u'feature_class', u'feature_code', u'geonameid',
            u'modification_date', u'name', u'population', u'timezone']
    hits = [{k: row[k] for k in keys} for row in res]
    return {'hits': {'hits': hits}}
Format Elasticsearch result as Python dictionary
39,863
def setup_es(hosts, port, use_ssl=False, auth=None):
    """Open an Elasticsearch connection and return a Search over the geonames index.

    Defaults to localhost:9200 when hosts/port are falsy; *auth* becomes
    http_auth when given.
    """
    kwargs = {
        'hosts': hosts or ['localhost'],
        'port': port or 9200,
        'use_ssl': use_ssl,
    }
    if auth:
        kwargs['http_auth'] = auth
    client = Elasticsearch(**kwargs)
    return Search(using=client, index="geonames")
Setup an Elasticsearch connection
39,864
def _feature_country_mentions ( self , doc ) : c_list = [ ] for i in doc . ents : try : country = self . _both_codes [ i . text ] c_list . append ( country ) except KeyError : pass count = Counter ( c_list ) . most_common ( ) try : top , top_count = count [ 0 ] except : top = "" top_count = 0 try : two , two_count = count [ 1 ] except : two = "" two_count = 0 countries = ( top , top_count , two , two_count ) return countries
Given a document, count how many times different country names and nationality adjectives are mentioned. These counts are features used in the country-picking phase.
39,865
def clean_entity(self, ent):
    """Trim generic geography filler words that spaCy's NER often attaches.

    Returns the narrowed span; if every token is a filler word the original
    entity is returned unchanged.
    """
    dump_list = ['province', 'the', 'area', 'airport', 'district', 'square',
                 'town', 'village', 'prison', "river", "valley", "provincial",
                 "prison", "region", "municipality", "state", "territory",
                 "of", "in", "county", "central"]
    keep = np.asarray([tok.i for tok in ent if tok.text.lower() not in dump_list])
    try:
        return ent.doc[keep.min():keep.max() + 1]
    except ValueError:
        # nothing survived the filter; fall back to the original span
        return ent
Strip out extra words that often get picked up by spaCy s NER .
39,866
def _feature_most_alternative ( self , results , full_results = False ) : try : alt_names = [ len ( i [ 'alternativenames' ] ) for i in results [ 'hits' ] [ 'hits' ] ] most_alt = results [ 'hits' ] [ 'hits' ] [ np . array ( alt_names ) . argmax ( ) ] if full_results : return most_alt else : return most_alt [ 'country_code3' ] except ( IndexError , ValueError , TypeError ) : return ""
Find the placename with the most alternative names and return its country . More alternative names are a rough measure of importance .
39,867
def _feature_most_population ( self , results ) : try : populations = [ i [ 'population' ] for i in results [ 'hits' ] [ 'hits' ] ] most_pop = results [ 'hits' ] [ 'hits' ] [ np . array ( populations ) . astype ( "int" ) . argmax ( ) ] return most_pop [ 'country_code3' ] except Exception as e : return ""
Find the placename with the largest population and return its country . More population is a rough measure of importance .
39,868
def _feature_word_embedding ( self , text ) : try : simils = np . dot ( self . _prebuilt_vec , text . vector ) except Exception as e : return { "country_1" : "" , "confid_a" : 0 , "confid_b" : 0 , "country_2" : "" } ranks = simils . argsort ( ) [ : : - 1 ] confid = simils . max ( ) confid2 = simils [ ranks [ 0 ] ] - simils [ ranks [ 1 ] ] if confid == 0 or confid2 == 0 : return "" country_code = self . _cts [ str ( self . _ct_nlp [ ranks [ 0 ] ] ) ] country_picking = { "country_1" : country_code , "confid_a" : confid , "confid_b" : confid2 , "country_2" : self . _cts [ str ( self . _ct_nlp [ ranks [ 1 ] ] ) ] } return country_picking
Given a word guess the appropriate country by word vector .
39,869
def _feature_first_back ( self , results ) : try : first_back = results [ 'hits' ] [ 'hits' ] [ 0 ] [ 'country_code3' ] except ( TypeError , IndexError ) : first_back = "" try : second_back = results [ 'hits' ] [ 'hits' ] [ 1 ] [ 'country_code3' ] except ( TypeError , IndexError ) : second_back = "" top = ( first_back , second_back ) return top
Get the country of the first two results back from geonames .
39,870
def is_country(self, text):
    """Return True when *text* is one of the known country names."""
    return text in self._just_cts.keys()
Check if a piece of text is in the list of countries
39,871
def query_geonames(self, placename):
    """Query the Geonames Elasticsearch index for *placename*.

    Country names are restricted to PCLI (country) features. Other names
    get a boosted phrase search; when that returns nothing, a fuzzy AND
    query is tried as a fallback.
    """
    if self.is_country(placename):
        query = {"multi_match": {"query": placename,
                                 "fields": ['name', 'asciiname', 'alternativenames'],
                                 "type": "phrase"}}
        res = self.conn.filter("term", feature_code='PCLI').query(query)[0:5].execute()
    else:
        query = {"multi_match": {"query": placename,
                                 "fields": ['name^5', 'asciiname^5', 'alternativenames'],
                                 "type": "phrase"}}
        res = self.conn.query(query)[0:50].execute()
        if res.hits.total == 0:
            # fuzziness is not allowed in "phrase" searches, so retry
            # with a fuzzy AND query instead
            query = {"multi_match": {"query": placename,
                                     "fields": ['name', 'asciiname', 'alternativenames'],
                                     "fuzziness": 1,
                                     "operator": "and"}}
            res = self.conn.query(query)[0:50].execute()
    return utilities.structure_results(res)
Wrap search parameters into an elasticsearch query to the geonames index and return results .
39,872
def query_geonames_country(self, placename, country):
    """Query Geonames for *placename*, restricted to the given *country*.

    Falls back to a fuzzy AND query when the phrase search returns nothing.
    """
    query = {"multi_match": {"query": placename,
                             "fields": ['name^5', 'asciiname^5', 'alternativenames'],
                             "type": "phrase"}}
    res = self.conn.filter("term", country_code3=country).query(query)[0:50].execute()
    if res.hits.total == 0:
        query = {"multi_match": {"query": placename,
                                 "fields": ['name', 'asciiname', 'alternativenames'],
                                 "fuzziness": 1,
                                 "operator": "and"}}
        res = self.conn.filter("term", country_code3=country).query(query)[0:50].execute()
    return utilities.structure_results(res)
Like query_geonames but this time limited to a specified country .
39,873
def make_country_matrix(self, loc):
    """Build the per-candidate-country feature matrix for the Keras country picker.

    Candidate labels are all distinct, non-empty country guesses across the
    features; each row encodes agreement (+1/-1), feature presence (+1/-1),
    mention counts, and mention agreement (0 when the count is zero).
    """
    feats = loc['features']
    top, top_count = feats['ct_mention'], feats['ctm_count1']
    two, two_count = feats['ct_mention2'], feats['ctm_count2']
    word_vec = feats['word_vec']
    first_back = feats['first_back']
    most_alt = feats['most_alt']
    most_pop = feats['most_pop']
    possible_labels = [lab for lab in set([top, two, word_vec, first_back, most_alt, most_pop]) if lab]
    inputs = np.array([word_vec, first_back, most_alt, most_pop])
    counts = np.asarray([top_count, two_count])
    # label-independent: +1 where the feature produced any guess at all
    exists = np.asarray(((inputs != "") * 2) - 1)
    X_mat = []
    for label in possible_labels:
        agreement = np.asarray(((inputs == label) * 2) - 1)  # +1 where this feature voted for the label
        mention_match = (np.asarray([top, two]) == label) * 2 - 1
        mention_match[counts == 0] = 0  # no mentions -> neutral signal
        X_mat.append(np.concatenate([agreement, exists, counts, mention_match]))
    return {"labels": possible_labels, "matrix": np.asmatrix(X_mat)}
Create features for all possible country labels return as matrix for keras .
39,874
def infer_country(self, doc):
    """NLP a doc, extract entity features, and attach the model's country guess to each.

    Returns the processed entity list (possibly empty/None when no place
    entities are found).
    """
    if not hasattr(doc, "ents"):
        doc = nlp(doc)
    proced = self.make_country_features(doc, require_maj=False)
    if not proced:
        return proced  # nothing to label
    feat_list = [self.make_country_matrix(loc) for loc in proced]
    # BUG FIX: the original's second loop kept writing predictions onto the
    # stale `loc` left over from the first loop (i.e. the *last* entity),
    # so every feature set's prediction landed on one location. Pair each
    # location with its own feature set instead.
    for loc, feat in zip(proced, feat_list):
        labels = feat['labels']
        try:
            prediction = self.country_model.predict(feat['matrix']).transpose()[0]
            ranks = prediction.argsort()[::-1]
            labels = np.asarray(labels)[ranks]
            prediction = prediction[ranks]
        except ValueError:
            # empty candidate set; fall back to "no country"
            prediction = np.array([0])
            labels = np.array([""])
        loc['country_predicted'] = labels[0]
        loc['country_conf'] = prediction[0]
        loc['all_countries'] = labels
        loc['all_confidence'] = prediction
    return proced
Run NLP over a doc, find its entities, compute their features, and return the model's country guess for each entity.
39,875
def get_admin1(self, country_code2, admin1_code):
    """Translate a Geonames admin1 code (e.g. 'US' + 'CA') to its place name.

    Returns "NA" when the combination is unknown.
    """
    lookup_key = "{}.{}".format(country_code2, admin1_code)
    return self._admin1_dict.get(lookup_key, "NA")
Convert a geonames admin1 code to the associated place name .
39,876
def ranker(self, X, meta):
    """Order candidate places by descending total feature score.

    Returns (sorted_X, sorted_meta), both permuted by row-sum rank.
    """
    totals = np.squeeze(np.asarray(X.sum(axis=1).transpose()))
    order = totals.argsort()[::-1]
    reordered_meta = [meta[r] for r in order]
    return (X[order], reordered_meta)
Sort the place features list by the score of its relevance .
39,877
def format_for_prodigy(self, X, meta, placename, return_feature_subset=False):
    """Build Prodigy annotation tasks for the top four ranked candidate places.

    Returns the task list alone, or (tasks, sorted_meta, sorted_X) when
    *return_feature_subset* is True.
    """
    sorted_X, sorted_meta = self.ranker(X, meta)
    sorted_meta = sorted_meta[:4]
    sorted_X = sorted_X[:4]
    all_tasks = []
    for rank, entry in enumerate(sorted_meta):
        fc = self._code_to_text.get(entry['feature_code'], '')
        text = ''.join(['"', entry['place_name'], '"', ", a ", fc,
                        " in ", entry['country_code3'], ", id: ", entry['geonameid']])
        all_tasks.append({"id": rank + 1, "text": text})
    if return_feature_subset:
        return (all_tasks, sorted_meta, sorted_X)
    return all_tasks
Given a feature matrix geonames data and the original query construct a prodigy task .
39,878
def format_geonames(self, entry, searchterm=None):
    """Extract just the fields we want from a Geonames entry.

    Returns an all-empty dict when the entry is missing or malformed.
    """
    blank = {"admin1": "", "lat": "", "lon": "", "country_code3": "",
             "geonameid": "", "place_name": "", "feature_class": "",
             "feature_code": ""}
    try:
        lat, lon = entry['coordinates'].split(",")
        return {
            "admin1": self.get_admin1(entry['country_code2'], entry['admin1_code']),
            "lat": lat,
            "lon": lon,
            "country_code3": entry["country_code3"],
            "geonameid": entry["geonameid"],
            "place_name": entry["name"],
            "feature_class": entry["feature_class"],
            "feature_code": entry["feature_code"],
        }
    except (IndexError, TypeError):
        return blank
Pull out just the fields we want from a geonames entry
39,879
def clean_proced(self, proced):
    """Drop debugging-only keys from each processed location dict.

    These keys are useful for debugging but irrelevant to most users.
    Missing keys are ignored. Replaces seven repetitive try/del blocks
    with a single pop loop.
    """
    debug_keys = ('all_countries', 'matrix', 'all_confidence',
                  'place_confidence', 'text', 'label', 'features')
    for loc in proced:
        for key in debug_keys:
            loc.pop(key, None)
    return proced
Small helper function to delete the features from the final dictionary . These features are mostly interesting for debugging but won t be relevant for most users .
39,880
def geoparse(self, doc, verbose=False):
    """Main geoparsing function: text in, extracted and resolved entities out.

    For each inferred place entity, queries Geonames (restricted to the
    predicted country), ranks the candidate hits with the rank model, and
    attaches the best hit plus a confidence. With verbose=False the
    debugging feature keys are stripped before returning.
    """
    if not hasattr(doc, "ents"):
        # accept raw strings as well as pre-processed spaCy docs
        doc = nlp(doc)
    proced = self.infer_country(doc)
    if not proced:
        return []
    if self.threads:
        # one thread per location; lookups are I/O-bound ES queries
        pool = ThreadPool(len(proced))
        results = pool.map(self.proc_lookup_country, proced)
        pool.close()
        pool.join()
    else:
        results = []
        for loc in proced:
            if loc['country_conf'] > self.country_threshold:
                res = self.query_geonames_country(loc['word'], loc['country_predicted'])
                results.append(res)
            else:
                # low-confidence country guess: skip the Geonames lookup
                results.append("")
    for n, loc in enumerate(proced):
        res = results[n]
        try:
            _ = res['hits']['hits']
        except (TypeError, KeyError):
            # no lookup was performed (or it returned nothing usable)
            continue
        X, meta = self.features_for_rank(loc, res)
        if X.shape[1] == 0:
            continue
        all_tasks, sorted_meta, sorted_X = self.format_for_prodigy(X, meta, loc['word'], return_feature_subset=True)
        # pad to exactly 4 candidate rows, then flatten for the rank model
        fl_pad = np.pad(sorted_X, ((0, 4 - sorted_X.shape[0]), (0, 0)), 'constant')
        fl_unwrap = fl_pad.flatten()
        prediction = self.rank_model.predict(np.asmatrix(fl_unwrap))
        place_confidence = prediction.max()
        loc['geo'] = sorted_meta[prediction.argmax()]
        loc['place_confidence'] = place_confidence
    if not verbose:
        proced = self.clean_proced(proced)
    return proced
Main geoparsing function . Text to extracted resolved entities .
39,881
def batch_geoparse(self, text_list):
    """Geoparse a list of documents; returns a list of per-document result lists.

    The speed improvement over repeated geoparse() calls comes entirely
    from spaCy's nlp.pipe.
    """
    if not self.threads:
        # fixed typo: message previously referred to "batch_geoparsed"
        print("batch_geoparse should be used with threaded searches. Please set `threads=True` when initializing the geoparser.")
    nlped_docs = list(nlp.pipe(text_list, as_tuples=False, n_threads=multiprocessing.cpu_count()))
    processed = [self.geoparse(d) for d in tqdm(nlped_docs, disable=not self.progress)]
    return processed
Batch geoparsing function. Takes in a list of text documents and returns a list of lists of the geoparsed documents. The speed improvement comes exclusively from using spaCy's nlp.pipe.
39,882
def entry_to_matrix(prodigy_entry):
    """Convert one labeled Prodigy JSON line into (features, labels) for training.

    Returns (answer_x, answer_binary) when the feature matrix and label
    vector agree in length, otherwise None (implicitly).
    """
    doc = nlp(prodigy_entry['text'])
    geo_proced = geo.process_text(doc, require_maj=False)
    ent_text = np.asarray([gp['word'] for gp in geo_proced])
    # BUG FIX: the original referenced the undefined name `entry` here
    match = ent_text == prodigy_entry['meta']['word']
    # NOTE(review): the original also computed `anti_match`/`other_x` via
    # np.abs(match - 1) (invalid on bool arrays in modern numpy); the
    # result was never used, so it has been dropped.
    match_position = match.argmax()
    geo_proc = geo_proced[match_position]
    iso = geo.cts[prodigy_entry['label']]
    feat = geo.features_to_matrix(geo_proc)
    answer_x = feat['matrix']
    label = np.asarray(feat['labels'])
    if prodigy_entry['answer'] == "accept":
        answer_binary = (label == iso).astype('int')
    elif prodigy_entry['answer'] == "reject":
        # keep only the rejected label's row; mark it as a negative example
        answer_binary = label == iso
        answer_x = answer_x[answer_binary, :]
        answer_binary = np.asarray([0])
    try:
        if answer_x.shape[0] == answer_binary.shape[0]:
            return (answer_x, answer_binary)
    except (NameError, AttributeError):
        # answer other than accept/reject leaves answer_binary unset;
        # narrowed from a bare except
        pass
Take in a line from the labeled json and return a vector of labels and a matrix of features for training .
39,883
def refresh_client(self):
    """Refresh the FindMyiPhone endpoint and sync the device registry.

    Raises PyiCloudNoDevicesException when no devices are returned.
    """
    req = self.session.post(
        self._fmip_refresh_url,
        params=self.params,
        data=json.dumps({'clientContext': {'fmly': True,
                                           'shouldLocate': True,
                                           'selectedDevice': 'all'}}),
    )
    self.response = req.json()
    for device_info in self.response['content']:
        device_id = device_info['id']
        if device_id in self._devices:
            self._devices[device_id].update(device_info)
        else:
            self._devices[device_id] = AppleDevice(
                device_info,
                self.session,
                self.params,
                manager=self,
                sound_url=self._fmip_sound_url,
                lost_url=self._fmip_lost_url,
                message_url=self._fmip_message_url,
            )
    if not self._devices:
        raise PyiCloudNoDevicesException()
Refreshes the FindMyiPhoneService endpoint
39,884
def status(self, additional=None):
    """Return a dict of basic status fields for this device.

    additional: optional list of extra content keys to include.
    The original defaulted `additional` to a mutable list ([]), a classic
    Python pitfall; None is used instead with identical behavior.
    """
    self.manager.refresh_client()
    fields = ['batteryLevel', 'deviceDisplayName', 'deviceStatus', 'name']
    if additional:
        fields += additional
    return {field: self.content.get(field) for field in fields}
Returns status information for device .
39,885
def lost_device(self, number, text='This iPhone has been lost. Please call me.', newpasscode=""):
    """Ask the device to enter lost mode, displaying *number* and *text*."""
    payload = json.dumps({
        'text': text,
        'userText': True,
        'ownerNbr': number,
        'lostModeEnabled': True,
        'trackingEnabled': True,
        'device': self.content['id'],
        'passcode': newpasscode,
    })
    self.session.post(self.lost_url, params=self.params, data=payload)
Send a request to the device to trigger lost mode .
39,886
def events(self, from_dt=None, to_dt=None):
    """Fetch events in the given date range (defaults to the current month)."""
    self.refresh_client(from_dt, to_dt)
    payload = self.response
    return payload['Event']
Retrieves events for a given date range by default this month .
39,887
def calendars(self):
    """Fetch the calendar collections for the current month."""
    today = datetime.today()
    first_day, last_day = monthrange(today.year, today.month)
    month_start = datetime(today.year, today.month, first_day)
    month_end = datetime(today.year, today.month, last_day)
    params = dict(self.params)
    params.update({
        'lang': 'en-us',
        'usertz': get_localzone().zone,
        'startDate': month_start.strftime('%Y-%m-%d'),
        'endDate': month_end.strftime('%Y-%m-%d'),
    })
    req = self.session.get(self._calendars, params=params)
    self.response = req.json()
    return self.response['Collection']
Retrieves calendars for this month
39,888
def create_pickled_data(idevice, filename):
    """Pickle the device's content dict to *filename*.

    Uses a context manager so the file handle is closed even if
    pickle.dump raises (the original leaked the handle in that case).
    """
    data = {key: idevice.content[key] for key in idevice.content}
    with open(filename, 'wb') as pickle_file:
        pickle.dump(data, pickle_file, protocol=pickle.HIGHEST_PROTOCOL)
This helper will output the idevice to a pickled file named after the passed filename .
39,889
def refresh_client(self, from_dt=None, to_dt=None):
    """Refresh the ContactsService endpoint so contact data is up to date.

    Performs an initial fetch, posts the changeset tokens back, then
    re-fetches the final contact list. The date parameters are accepted
    for interface compatibility but unused by this endpoint.
    """
    params_contacts = dict(self.params)
    params_contacts.update({
        'clientVersion': '2.1',
        'locale': 'en_US',
        'order': 'last,first',
    })
    req = self.session.get(self._contacts_refresh_url, params=params_contacts)
    self.response = req.json()
    params_refresh = dict(self.params)
    # reuse the already-parsed body instead of calling req.json() twice more
    params_refresh.update({
        'prefToken': self.response["prefToken"],
        'syncToken': self.response["syncToken"],
    })
    self.session.post(self._contacts_changeset_url, params=params_refresh)
    req = self.session.get(self._contacts_refresh_url, params=params_contacts)
    self.response = req.json()
Refreshes the ContactsService endpoint ensuring that the contacts data is up - to - date .
39,890
def authenticate(self):
    """Authenticate against iCloud and persist session cookies.

    Persisting the X-APPLE-WEB-KB cookie prevents Apple from e-mailing
    the user about every subsequent login.

    Raises:
        PyiCloudFailedLoginException: when the login request is rejected.
    """
    logger.info("Authenticating as %s", self.user['apple_id'])
    data = dict(self.user)
    # presumably disables long-lived sessions -- TODO confirm against Apple's API
    data.update({'extended_login': False})
    try:
        req = self.session.post(self._base_login_url, params=self.params, data=json.dumps(data))
    except PyiCloudAPIResponseError as error:
        msg = 'Invalid email/password combination.'
        raise PyiCloudFailedLoginException(msg, error)
    resp = req.json()
    # dsid identifies the account on all subsequent webservice calls
    self.params.update({'dsid': resp['dsInfo']['dsid']})
    if not os.path.exists(self._cookie_directory):
        os.mkdir(self._cookie_directory)
    self.session.cookies.save()
    logger.debug("Cookies saved to %s", self._get_cookiejar_path())
    self.data = resp
    self.webservices = self.data['webservices']
    logger.info("Authentication completed successfully")
    logger.debug(self.params)
Handles authentication and persists the X - APPLE - WEB - KB cookie so that subsequent logins will not cause additional e - mails from Apple .
39,891
def trusted_devices(self):
    """Return the devices trusted for two-step authentication."""
    url = '%s/listDevices' % self._setup_endpoint
    response = self.session.get(url, params=self.params)
    return response.json().get('devices')
Returns devices trusted for two - step authentication .
39,892
def send_verification_code(self, device):
    """Request a 2SA verification code for *device*; True on success."""
    response = self.session.post(
        '%s/sendVerificationCode' % self._setup_endpoint,
        params=self.params,
        data=json.dumps(device),
    )
    return response.json().get('success', False)
Requests that a verification code is sent to the given device
39,893
def validate_verification_code(self, device, code):
    """Verify a 2SA code received on a trusted device.

    Returns False on a wrong code (-21669), re-raises other API errors,
    and otherwise re-authenticates and reports whether 2SA is satisfied.
    """
    device.update({'verificationCode': code, 'trustBrowser': True})
    payload = json.dumps(device)
    try:
        self.session.post(
            '%s/validateVerificationCode' % self._setup_endpoint,
            params=self.params,
            data=payload,
        )
    except PyiCloudAPIResponseError as error:
        if error.code == -21669:
            # wrong verification code
            return False
        raise
    self.authenticate()
    return not self.requires_2sa
Verifies a verification code received on a trusted device
39,894
def devices(self):
    """Return the FindMyiPhone device manager for this account."""
    find_me_root = self.webservices['findme']['url']
    return FindMyiPhoneServiceManager(find_me_root, self.session, self.params)
Return all devices .
39,895
def send(r, pool=None, stream=False):
    """Spawn r.send on *pool*, or on a bare gevent greenlet when no pool is given.

    Pools are useful because their size bounds concurrency.
    """
    spawner = pool.spawn if pool is not None else gevent.spawn
    return spawner(r.send, stream=stream)
Sends the request object using the specified pool . If a pool isn t specified this method blocks . Pools are useful because you can specify size and can hence limit concurrency .
39,896
def imap(requests, stream=False, size=2, exception_handler=None):
    """Lazily convert an iterable of Requests into Responses, *size* at a time.

    Responses are yielded as they complete (unordered). Failed requests are
    passed to *exception_handler*; its non-None results are also yielded.
    """
    pool = Pool(size)

    def do_send(r):
        return r.send(stream=stream)

    for completed in pool.imap_unordered(do_send, requests):
        if completed.response is not None:
            yield completed.response
        elif exception_handler:
            handled = exception_handler(completed, completed.exception)
            if handled is not None:
                yield handled
    pool.join()
Concurrently converts a generator object of Requests to a generator of Responses .
39,897
def set_user_profile(self, displayname=None, avatar_url=None, reason="Changing room profile information"):
    """Set this user's display name and/or avatar within the room.

    Unspecified fields keep their current values. Raises when the user
    has not joined the room.
    """
    member = self.client.api.get_membership(self.room_id, self.client.user_id)
    if member["membership"] != "join":
        raise Exception("Can't set profile if you have not joined the room.")
    profile = {
        "displayname": displayname if displayname is not None else member["displayname"],
        "avatar_url": avatar_url if avatar_url is not None else member["avatar_url"],
    }
    self.client.api.set_membership(self.room_id, self.client.user_id, 'join', reason, profile)
Set user profile within a room .
39,898
def display_name(self):
    """Compute a human-readable name for the room.

    Preference order: explicit name, canonical alias, then a summary built
    from the other members' display names ("Empty room" when alone).
    """
    if self.name:
        return self.name
    if self.canonical_alias:
        return self.canonical_alias
    others = sorted(
        member.get_display_name(self)
        for member in self.get_joined_members()
        if member.user_id != self.client.user_id
    )
    if not others:
        return "Empty room"
    if len(others) == 1:
        return others[0]
    if len(others) == 2:
        return "{0} and {1}".format(others[0], others[1])
    return "{0} and {1} others".format(others[0], len(others) - 1)
Calculates the display name for a room .
39,899
def send_text(self, text):
    """Send a plain-text message to this room; returns the API response."""
    api = self.client.api
    return api.send_message(self.room_id, text)
Send a plain text message to the room .