INSTRUCTION
stringlengths
1
46.3k
RESPONSE
stringlengths
75
80.2k
Initialize the logger
def init_logger(log_requests=False):
    """
    Initialize the package-level logger with a colored stream handler.

    :param log_requests: when True, also attach urllib3's stderr logger
        so HTTP request traffic is visible.
    """
    logger = logging.getLogger(__name__.split(".")[0])
    # Fix: iterate over a copy. Removing handlers while iterating the
    # live ``logger.handlers`` list skips every other handler.
    for handler in list(logger.handlers):  # pragma: nocover
        logger.removeHandler(handler)
    formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s")
    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    # Avoid duplicate output via the root logger.
    logger.propagate = False
    if log_requests:
        requests.packages.urllib3.add_stderr_logger()
Close the connection.
def close(self):
    """Close the connection, if one is open, and log the closure."""
    connection = self.connection
    if not connection:
        return
    connection.close()
    self.connection = None
    logging.debug("Connection closed.")
Send a control command.
def control(self, key):
    """Send a remote-control key press over the open connection.

    Raises the project's ConnectionClosed error when no connection is open.
    """
    if not self.connection:
        raise exceptions.ConnectionClosed()
    params = {
        "Cmd": "Click",
        "DataOfCmd": key,
        "Option": "false",
        "TypeOfRemote": "SendRemoteKey",
    }
    payload = json.dumps({"method": "ms.remote.control", "params": params})
    logging.info("Sending control command: %s", key)
    self.connection.send(payload)
    # Pace successive key presses.
    time.sleep(self._key_interval)
Send a control command.
def control(self, key):
    """Send a remote-control key press using the legacy binary protocol.

    Raises the project's ConnectionClosed error when no connection is open.
    """
    if not self.connection:
        raise exceptions.ConnectionClosed()
    header = b"\x00\x00\x00"
    # Key name is serialized, then wrapped once more as the packet body.
    payload = header + self._serialize_string(key)
    packet = header + self._serialize_string(payload, True)
    logging.info("Sending control command: %s", key)
    self.connection.send(packet)
    self._read_response()
    # Pace successive key presses.
    time.sleep(self._key_interval)
Factory method, registers new node.
def register_new_node(suffix_node_id=None):
    """Factory: create a Node entity, publish its Created event, return it."""
    created = Node.Created(originator_id=uuid4(), suffix_node_id=suffix_node_id)
    node = Node.mutate(event=created)
    publish(created)
    return node
Factory method, registers new edge.
def register_new_edge(edge_id, first_char_index, last_char_index, source_node_id, dest_node_id):
    """Factory: create an Edge entity, publish its Created event, return it."""
    created = Edge.Created(
        originator_id=edge_id,
        first_char_index=first_char_index,
        last_char_index=last_char_index,
        source_node_id=source_node_id,
        dest_node_id=dest_node_id,
    )
    edge = Edge.mutate(event=created)
    publish(created)
    return edge
Factory method, returns new suffix tree object.
def register_new_suffix_tree(case_insensitive=False):
    """Factory: create a SuffixTree entity with a fresh root node and return it."""
    assert isinstance(case_insensitive, bool)
    root_node = register_new_node()
    created = SuffixTree.Created(
        originator_id=uuid4(),
        root_node_id=root_node.id,
        case_insensitive=case_insensitive,
    )
    tree = SuffixTree.mutate(event=created)
    assert isinstance(tree, SuffixTree)
    # Seed the in-memory node map with the root before publishing.
    tree.nodes[root_node.id] = root_node
    publish(created)
    return tree
Returns the index if substring in tree, otherwise -1.
def find_substring(substring, suffix_tree, edge_repo):
    """Return the start index of ``substring`` in the tree's string, or -1.

    Walks edges from the root, comparing edge labels against the substring.
    """
    assert isinstance(substring, str)
    assert isinstance(suffix_tree, SuffixTree)
    assert isinstance(edge_repo, EventSourcedRepository)
    if not substring:
        return -1
    if suffix_tree.case_insensitive:
        substring = substring.lower()
    node_id = suffix_tree.root_node_id
    offset = 0
    while offset < len(substring):
        try:
            edge = edge_repo[make_edge_id(node_id, substring[offset])]
        except RepositoryKeyError:
            # No outgoing edge for this character.
            return -1
        # Compare at most the remainder of the substring against the edge label.
        span = min(edge.length + 1, len(substring) - offset)
        tree_slice = suffix_tree.string[edge.first_char_index:edge.first_char_index + span]
        if substring[offset:offset + span] != tree_slice:
            return -1
        offset += edge.length + 1
        node_id = edge.dest_node_id
    return edge.first_char_index - len(substring) + span
The core construction method.
def _add_prefix(self, last_char_index):
    """The core Ukkonen construction step: extend the tree by one character.

    Repeatedly inserts new edges for the active suffix until the prefix
    being added is found to already exist in the tree.
    """
    last_parent_node_id = None
    while True:
        parent_node_id = self.active.source_node_id
        if self.active.explicit():
            edge_id = make_edge_id(self.active.source_node_id, self.string[last_char_index])
            if edge_id in self.edges:
                # Prefix is already in tree.
                break
        else:
            edge_id = make_edge_id(self.active.source_node_id, self.string[self.active.first_char_index])
            e = self.edges[edge_id]
            if self.string[e.first_char_index + self.active.length + 1] == self.string[last_char_index]:
                # Prefix is already in tree.
                break
            # Implicit suffix: split the edge to make an explicit node.
            parent_node_id = self._split_edge(e, self.active)
        # Create a leaf node and an edge covering the rest of the string.
        node = register_new_node()
        self.nodes[node.id] = node
        edge_id = make_edge_id(parent_node_id, self.string[last_char_index])
        e = register_new_edge(
            edge_id=edge_id,
            first_char_index=last_char_index,
            last_char_index=self.N,
            source_node_id=parent_node_id,
            dest_node_id=node.id,
        )
        self._insert_edge(e)
        # Link the previous insertion point's suffix pointer to this one.
        if last_parent_node_id is not None:
            self.nodes[last_parent_node_id].suffix_node_id = parent_node_id
        last_parent_node_id = parent_node_id
        # Advance the active suffix to the next shorter suffix.
        if self.active.source_node_id == self.root_node_id:
            self.active.first_char_index += 1
        else:
            self.active.source_node_id = self.nodes[self.active.source_node_id].suffix_node_id
        self._canonize_suffix(self.active)
    if last_parent_node_id is not None:
        self.nodes[last_parent_node_id].suffix_node_id = parent_node_id
    self.active.last_char_index += 1
    self._canonize_suffix(self.active)
This canonizes the suffix, walking along its suffix string until it is explicit or there are no more matched nodes.
def _canonize_suffix(self, suffix):
    """Canonize the suffix in place, walking along its suffix string until
    it is explicit or there are no more matched nodes.

    Iterative form of the original tail recursion; same state updates.
    """
    while not suffix.explicit():
        edge_id = make_edge_id(suffix.source_node_id, self.string[suffix.first_char_index])
        e = self.edges[edge_id]
        if e.length > suffix.length:
            break
        # Consume the whole edge and descend to its destination node.
        suffix.first_char_index += e.length + 1
        suffix.source_node_id = e.dest_node_id
Reconstructs domain entity from given snapshot.
def entity_from_snapshot(snapshot):
    """Reconstruct a domain entity from the given snapshot.

    Returns None when the snapshot records a discarded entity (state is None).
    """
    # NOTE: 'AbstractSnapshop' is the (misspelled) class name used by the
    # project; keep it as-is to match the rest of the library.
    assert isinstance(snapshot, AbstractSnapshop), type(snapshot)
    if snapshot.state is None:
        return None
    entity_class = resolve_topic(snapshot.topic)
    return reconstruct_object(entity_class, snapshot.state)
Gets the last snapshot for entity, optionally until a particular version number. :rtype: Snapshot
def get_snapshot(self, entity_id, lt=None, lte=None):
    """
    Gets the last snapshot for entity, optionally until a particular
    version number.

    :rtype: Snapshot
    """
    # Query the snapshot store newest-first, limited to one record.
    snapshots = self.snapshot_store.get_domain_events(entity_id, lt=lt, lte=lte, limit=1, is_ascending=False)
    return snapshots[0] if len(snapshots) == 1 else None
Creates a Snapshot from the given state, and appends it to the snapshot store. :rtype: Snapshot
def take_snapshot(self, entity_id, entity, last_event_version):
    """
    Creates a Snapshot from the given state, and appends it
    to the snapshot store.

    :rtype: Snapshot
    """
    # A None entity is recorded with state=None (discarded entity).
    state = None if entity is None else deepcopy(entity.__dict__)
    snapshot = Snapshot(
        originator_id=entity_id,
        originator_version=last_event_version,
        topic=get_topic(entity.__class__),
        state=state,
    )
    self.snapshot_store.store(snapshot)
    return snapshot
Returns entity with given ID, optionally until position.
def get_entity(self, entity_id, at=None):
    """
    Returns entity with given ID, optionally as it was at position ``at``.

    Starts from the latest usable snapshot (when a snapshot strategy is
    configured), then replays subsequent events.
    """
    snapshot = None
    if self._snapshot_strategy is not None:
        snapshot = self._snapshot_strategy.get_snapshot(entity_id, lte=at)

    # Initial state and replay lower bound come from the snapshot, if any.
    if snapshot is None:
        initial_state, gt = None, None
    else:
        initial_state = entity_from_snapshot(snapshot)
        gt = snapshot.originator_version

    return self.get_and_project_events(entity_id, gt=gt, lte=at, initial_state=initial_state)
Reconstitutes requested domain entity from domain events found in event store.
def get_and_project_events(self, entity_id, gt=None, gte=None, lt=None, lte=None, limit=None,
                           initial_state=None, query_descending=False):
    """
    Reconstitutes requested domain entity from domain events found in event store.

    The query runs descending (a speed-up for stores like Cassandra that
    keep events in descending order) only when no bounds, no limit, and no
    paging are requested; bounds and limits are direction-sensitive, and
    paging only makes sense ascending.
    """
    unbounded = gt is None and gte is None and lt is None and lte is None
    if unbounded and self.__page_size__ is None:
        is_ascending = False
    else:
        is_ascending = not query_descending

    domain_events = self.event_store.get_domain_events(
        originator_id=entity_id,
        gt=gt,
        gte=gte,
        lt=lt,
        lte=lte,
        limit=limit,
        is_ascending=is_ascending,
        page_size=self.__page_size__
    )

    # Events are always replayed in ascending order.
    if not is_ascending:
        domain_events = list(reversed(list(domain_events)))

    return self.project_events(initial_state, domain_events)
Takes a snapshot of the entity as it existed after the most recent event, optionally less than, or less than or equal to, a particular position.
def take_snapshot(self, entity_id, lt=None, lte=None):
    """
    Takes a snapshot of the entity as it existed after the most recent
    event, optionally less than, or less than or equal to, a particular position.

    Returns the snapshot, or None when there is no snapshot strategy or
    nothing to snapshot.
    """
    if not self._snapshot_strategy:
        return None

    # Find the latest event (optionally bounded by position).
    latest_event = self.event_store.get_most_recent_event(entity_id, lt=lt, lte=lte)
    if latest_event is None:
        return None

    # Look for an existing snapshot at or before the same bound. It may
    # lag behind the latest event if events occurred since it was taken.
    latest_snapshot = self._snapshot_strategy.get_snapshot(entity_id, lt=lt, lte=lte)
    latest_version = latest_event.originator_version
    if latest_snapshot and latest_snapshot.originator_version == latest_version:
        # Already up to date; nothing to do.
        return latest_snapshot

    # Recover entity state from the snapshot (if any), then fast-forward
    # by replaying events up to the latest version.
    if latest_snapshot:
        initial_state = entity_from_snapshot(latest_snapshot)
        gt = latest_snapshot.originator_version
    else:
        initial_state = None
        gt = None
    entity = self.get_and_project_events(
        entity_id=entity_id,
        gt=gt,
        lte=latest_version,
        initial_state=initial_state,
    )
    return self._snapshot_strategy.take_snapshot(entity_id, entity, latest_version)
Entity object factory.
def create_new_example(self, foo='', a='', b=''):
    """Entity object factory.

    As a method, the bare name below resolves to the module-level
    factory function of the same name, not to this method.
    """
    return create_new_example(a=a, b=b, foo=foo)
Returns an integer value representing a unix timestamp in tenths of microseconds. :param uuid_arg: :return: Unix timestamp integer in tenths of microseconds. :rtype: int
def timestamp_long_from_uuid(uuid_arg):
    """
    Returns an integer value representing a unix timestamp in tenths
    of microseconds.

    :param uuid_arg: a UUID, or a string parseable as one.
    :return: Unix timestamp integer in tenths of microseconds.
    :rtype: int
    """
    if isinstance(uuid_arg, str):
        uuid_arg = UUID(uuid_arg)
    assert isinstance(uuid_arg, UUID), uuid_arg
    # Shift from the RFC 4122 epoch (1582-10-15) to the Unix epoch.
    return uuid_arg.time - 0x01B21DD213814000
A UNIX timestamp as a Decimal object (exact number type). Returns current time when called without args, otherwise converts given floating point number ``t`` to a Decimal with 6 decimal places. :param t: Floating point UNIX timestamp ("seconds since epoch"). :return: A Decimal with 6 decimal places, representing the given floating point or the value returned by time.time().
def decimaltimestamp(t=None):
    """
    A UNIX timestamp as a Decimal object (exact number type).

    Returns current time when called without args, otherwise
    converts given floating point number ``t`` to a Decimal
    with 6 decimal places.

    (Fixed: the docstring previously claimed 9 decimal places,
    but the format string below produces 6.)

    :param t: Floating point UNIX timestamp ("seconds since epoch").
    :return: A Decimal with 6 decimal places, representing the
        given floating point or the value returned by time.time().
    """
    # 't is None' (not truthiness) so that t=0 is honoured.
    t = time.time() if t is None else t
    return Decimal('{:.6f}'.format(t))
Decorator for making a custom event handler function subscribe to a certain class of event. The decorated function will be called once for each matching event that is published, and will be given one argument, the event, when it is called. If events are published in lists, for example the AggregateRoot publishes a list of pending events when its __save__() method is called, then the decorated function will be called once for each event that is an instance of the given event_class. Please note, this decorator isn't suitable for use with object class methods. The decorator receives in Python 3 an unbound function, and defines a handler which it subscribes that calls the decorated function for each matching event. However the method isn't called on the object, so the object instance is never available in the decorator, so the decorator can't call a normal object method because it doesn't have a value for 'self'. event_class: type used to match published events, an event matches if it is an instance of this type The following example shows a custom handler that reacts to Todo.Created event and saves a projection of a Todo model object. .. code:: @subscribe_to(Todo.Created) def new_todo_projection(event): todo = TodoProjection(id=event.originator_id, title=event.title) todo.save()
def subscribe_to(*event_classes):
    """
    Decorator that subscribes a custom handler function to events.

    The decorated function is called once for each published event that
    is an instance of one of the given event classes (or for every event,
    when used bare with no classes). Lists/tuples of events are unpacked
    and each member is handled individually.

    Not suitable for object methods: the decorator receives an unbound
    function and has no instance to bind 'self' to.

    Example:

    .. code::

        @subscribe_to(Todo.Created)
        def new_todo_projection(event):
            todo = TodoProjection(id=event.originator_id, title=event.title)
            todo.save()
    """
    matched_classes = list(event_classes)

    def decorator(func):
        def handler(event):
            if isinstance(event, (list, tuple)):
                for each in event:
                    handler(each)
            elif not matched_classes or isinstance(event, tuple(matched_classes)):
                func(event)

        subscribe(handler=handler, predicate=lambda _: True)
        return func

    # Bare usage: @subscribe_to with a function, no event classes. Popping
    # empties the list, so the handler then matches every event.
    if len(matched_classes) == 1 and isfunction(matched_classes[0]):
        return decorator(matched_classes.pop())
    return decorator
Structures mutator functions by allowing handlers to be registered for different types of event. When the decorated function is called with an initial value and an event, it will call the handler that has been registered for that type of event. It works like singledispatch, which it uses. The difference is that when the decorated function is called, this decorator dispatches according to the type of last call arg, which fits better with reduce(). The builtin Python function reduce() is used by the library to replay a sequence of events against an initial state. If a mutator function is given to reduce(), along with a list of events and an initializer, reduce() will call the mutator function once for each event in the list, but the initializer will be the first value, and the event will be the last argument, and we want to dispatch according to the type of the event. It happens that singledispatch is coded to switch on the type of the first argument, which makes it unsuitable for structuring a mutator function without the modifications introduced here. The other aspect introduced by this decorator function is the option to set the type of the handled entity in the decorator. When an entity is replayed from scratch, in other words when all its events are replayed, the initial state is None. The handler which handles the first event in the sequence will probably construct an object instance. It is possible to write the type into the handler, but that makes the entity more difficult to subclass because you will also need to write a handler for it. If the decorator is invoked with the type, when the initial value passed as a call arg to the mutator function is None, the handler will instead receive the type of the entity, which it can use to construct the entity object. .. 
code:: class Entity(object): class Created(object): pass @mutator(Entity) def mutate(initial, event): raise NotImplementedError(type(event)) @mutate.register(Entity.Created) def _(initial, event): return initial(**event.__dict__) entity = mutate(None, Entity.Created())
def mutator(arg=None):
    """Structures mutator functions by dispatching on the type of the event.

    Like ``functools.singledispatch``, but dispatches on the LAST call
    argument (the event), which fits ``reduce(mutator, events, initial)``
    where the initial state is the first argument.

    Optionally invoked with an entity type, ``@mutator(Entity)``: when the
    initial value passed to the mutator is None, the registered handler
    receives the entity type instead, so it can construct the entity.

    .. code::

        class Entity(object):
            class Created(object):
                pass

        @mutator(Entity)
        def mutate(initial, event):
            raise NotImplementedError(type(event))

        @mutate.register(Entity.Created)
        def _(initial, event):
            return initial(**event.__dict__)

        entity = mutate(None, Entity.Created())
    """
    domain_class = None

    def _mutator(func):
        dispatcher = singledispatch(func)

        @wraps(dispatcher)
        def dispatch_on_event(initial, event):
            # Fall back to the registered entity type when initial is falsy.
            initial = initial or domain_class
            return dispatcher.dispatch(type(event))(initial, event)

        dispatch_on_event.register = dispatcher.register
        return dispatch_on_event

    # Bare usage: @mutator directly on a function.
    if isfunction(arg):
        return _mutator(arg)
    # Parameterized usage: @mutator(EntityType).
    domain_class = arg
    return _mutator
When used as a method decorator, returns a property object with the method as the getter and a setter defined to call instance method change_attribute(), which publishes an AttributeChanged event.
def attribute(getter):
    """
    Method decorator returning a property whose setter calls the instance
    method __change_attribute__(), which publishes an AttributeChanged event.

    The value is stored on the instance under the getter's name with a
    leading underscore; reading an unset attribute yields None.
    """
    if not isfunction(getter):
        raise ProgrammingError("Expected a function, got: {}".format(repr(getter)))

    attr_name = '_' + getter.__name__

    def fget(self):
        return getattr(self, attr_name, None)

    def fset(self, value):
        self.__change_attribute__(name=attr_name, value=value)

    return property(fget=fget, fset=fset, doc=getter.__doc__)
Return ciphertext for given plaintext.
def encrypt(self, plaintext):
    """Return Base64 ciphertext for given plaintext.

    Pipeline: utf8-encode, zlib-compress, AES-GCM encrypt with a random
    96-bit nonce, then concatenate nonce + tag + ciphertext and Base64 it.
    """
    compressed = zlib.compress(plaintext.encode('utf8'))
    # AES-GCM with a fresh 96-bit (12 byte) nonce per message.
    cipher = AES.new(self.cipher_key, AES.MODE_GCM, nonce=random_bytes(12))
    encrypted, tag = cipher.encrypt_and_digest(compressed)
    combined = cipher.nonce + tag + encrypted
    return base64.b64encode(combined).decode('utf8')
Return plaintext for given ciphertext.
def decrypt(self, ciphertext):
    """Return plaintext for given ciphertext.

    Reverses encrypt(): Base64-decode, split nonce/tag/data, AES-GCM
    decrypt-and-verify, zlib-decompress, utf8-decode. Raises
    DataIntegrityError for any malformed or tampered input.
    """
    try:
        combined = base64.b64decode(ciphertext.encode('utf8'))
    except (base64.binascii.Error, TypeError) as e:
        # base64.binascii.Error for Python 3; TypeError for Python 2.
        raise DataIntegrityError("Cipher text is damaged: {}".format(e))

    # Layout: 12-byte nonce | 16-byte tag | encrypted payload.
    nonce = combined[:12]
    if len(nonce) != 12:
        raise DataIntegrityError("Cipher text is damaged: invalid nonce length")
    tag = combined[12:28]
    if len(tag) != 16:
        raise DataIntegrityError("Cipher text is damaged: invalid tag length")
    encrypted = combined[28:]

    cipher = AES.new(self.cipher_key, AES.MODE_GCM, nonce)
    try:
        compressed = cipher.decrypt_and_verify(encrypted, tag)
    except ValueError as e:
        raise DataIntegrityError("Cipher text is damaged: {}".format(e))

    return zlib.decompress(compressed).decode('utf8')
Returns domain events for given entity ID.
def get_domain_events(self, originator_id, gt=None, gte=None, lt=None, lte=None, limit=None,
                      is_ascending=True, page_size=None):
    """
    Returns domain events for given entity ID.

    No implementation here — presumably an abstract method implemented
    elsewhere (TODO confirm against the enclosing class).
    """
Appends given domain event, or list of domain events, to their sequence. :param domain_event_or_events: domain event, or list of domain events
def store(self, domain_event_or_events):
    """
    Appends given domain event, or list of domain events, to their sequence.

    :param domain_event_or_events: domain event, or list of domain events
    :raises ConcurrencyError: when the record manager reports a conflict.
    """
    sequenced = self.item_from_event(domain_event_or_events)
    try:
        self.record_manager.record_sequenced_items(sequenced)
    except RecordConflictError as e:
        # Surface optimistic-concurrency conflicts as ConcurrencyError.
        raise ConcurrencyError(e)
Maps domain event to sequenced item namedtuple. :param domain_event_or_events: application-level object (or list) :return: namedtuple: sequence item namedtuple (or list)
def item_from_event(self, domain_event_or_events):
    """
    Maps domain event to sequenced item namedtuple.

    :param domain_event_or_events: application-level object (or list)
    :return: namedtuple: sequence item namedtuple (or list)
    """
    # Lists/tuples map element-wise; single events go to the mapper.
    if isinstance(domain_event_or_events, (list, tuple)):
        return list(map(self.item_from_event, domain_event_or_events))
    return self.mapper.item_from_event(domain_event_or_events)
Gets domain events from the sequence identified by `originator_id`. :param originator_id: ID of a sequence of events :param gt: get items after this position :param gte: get items at or after this position :param lt: get items before this position :param lte: get items before or at this position :param limit: get limited number of items :param is_ascending: get items from lowest position :param page_size: restrict and repeat database query :return: list of domain events
def get_domain_events(self, originator_id, gt=None, gte=None, lt=None, lte=None, limit=None,
                      is_ascending=True, page_size=None):
    """
    Gets domain events from the sequence identified by `originator_id`.

    :param originator_id: ID of a sequence of events
    :param gt: get items after this position
    :param gte: get items at or after this position
    :param lt: get items before this position
    :param lte: get items before or at this position
    :param limit: get limited number of items
    :param is_ascending: get items from lowest position
    :param page_size: restrict and repeat database query
    :return: list of domain events
    """
    if page_size:
        # Paged access via the iterator class.
        sequenced_items = self.iterator_class(
            record_manager=self.record_manager,
            sequence_id=originator_id,
            page_size=page_size,
            gt=gt,
            gte=gte,
            lt=lt,
            lte=lte,
            limit=limit,
            is_ascending=is_ascending,
        )
    else:
        # Single query against the record manager.
        sequenced_items = self.record_manager.get_items(
            sequence_id=originator_id,
            gt=gt,
            gte=gte,
            lt=lt,
            lte=lte,
            limit=limit,
            query_ascending=is_ascending,
            results_ascending=is_ascending,
        )
    # Deserialize each sequenced item to a domain event.
    return [self.mapper.event_from_item(item) for item in sequenced_items]
Gets a domain event from the sequence identified by `originator_id` at position `eq`. :param originator_id: ID of a sequence of events :param position: get item at this position :return: domain event
def get_domain_event(self, originator_id, position):
    """
    Gets a domain event from the sequence identified by `originator_id`
    at position `position`.

    :param originator_id: ID of a sequence of events
    :param position: get item at this position
    :return: domain event
    """
    item = self.record_manager.get_item(
        sequence_id=originator_id,
        position=position,
    )
    return self.mapper.event_from_item(item)
Gets a domain event from the sequence identified by `originator_id` at the highest position. :param originator_id: ID of a sequence of events :param lt: get highest before this position :param lte: get highest at or before this position :return: domain event
def get_most_recent_event(self, originator_id, lt=None, lte=None):
    """
    Gets a domain event from the sequence identified by `originator_id`
    at the highest position.

    :param originator_id: ID of a sequence of events
    :param lt: get highest before this position
    :param lte: get highest at or before this position
    :return: domain event, or None if the sequence is empty
    """
    events = list(self.get_domain_events(
        originator_id=originator_id, lt=lt, lte=lte, limit=1, is_ascending=False))
    return events[0] if events else None
Yields all domain events in the event store.
def all_domain_events(self):
    """
    Yields all domain events in the event store, sequence by sequence,
    reading each sequence in pages of 100.
    """
    for sequence_id in self.record_manager.all_sequence_ids():
        for event in self.get_domain_events(originator_id=sequence_id, page_size=100):
            yield event
Publishes prompt for a given event. Used to prompt downstream process application when an event is published by this application's model, which can happen when application command methods, rather than the process policy, are called. Wraps exceptions with PromptFailed, to avoid application policy exceptions being seen directly in other applications when running synchronously in single thread.
def publish_prompt(self, event=None):
    """
    Publishes a prompt so downstream process applications are notified of
    new events from this application's model (e.g. when application
    command methods, rather than the process policy, publish events).

    Wraps unexpected exceptions in PromptFailed so application policy
    errors aren't seen directly by other applications when running
    synchronously in a single thread.
    """
    prompt = Prompt(self.name, self.pipeline_id)
    try:
        publish(prompt)
    except PromptFailed:
        # Already the right error type; let it propagate as-is.
        raise
    except Exception as e:
        raise PromptFailed("{}: {}".format(type(e), str(e)))
With transaction isolation level of "read committed" this should generate records with a contiguous sequence of integer IDs, using an indexed ID column, the database-side SQL max function, the insert-select-from form, and optimistic concurrency control.
def _prepare_insert(self, tmpl, record_class, field_names, placeholder_for_id=False): """ With transaction isolation level of "read committed" this should generate records with a contiguous sequence of integer IDs, using an indexed ID column, the database-side SQL max function, the insert-select-from form, and optimistic concurrency control. """ field_names = list(field_names) if hasattr(record_class, 'application_name') and 'application_name' not in field_names: field_names.append('application_name') if hasattr(record_class, 'pipeline_id') and 'pipeline_id' not in field_names: field_names.append('pipeline_id') if hasattr(record_class, 'causal_dependencies') and 'causal_dependencies' not in field_names: field_names.append('causal_dependencies') if placeholder_for_id: if self.notification_id_name: if self.notification_id_name not in field_names: field_names.append('id') statement = tmpl.format( tablename=self.get_record_table_name(record_class), columns=", ".join(field_names), placeholders=", ".join(['%s' for _ in field_names]), notification_id=self.notification_id_name ) return statement
Returns all records in the table.
def get_notifications(self, start=None, stop=None, *args, **kwargs):
    """
    Returns notification records in the table, ordered by notification ID,
    optionally restricted to the half-open range [start, stop).

    Note: start/stop are zero-based positions; the stored IDs are
    one-based, hence the +1 offsets below.
    """
    # Todo: Also support sequencing by 'position' if items are sequenced by timestamp?
    filter_kwargs = {}
    if start is not None:
        filter_kwargs['%s__gte' % self.notification_id_name] = start + 1
    if stop is not None:
        filter_kwargs['%s__lt' % self.notification_id_name] = stop + 1
    objects = self.record_class.objects.filter(**filter_kwargs)
    # Scope to this application/pipeline when the record class supports it.
    if hasattr(self.record_class, 'application_name'):
        objects = objects.filter(application_name=self.application_name)
    if hasattr(self.record_class, 'pipeline_id'):
        objects = objects.filter(pipeline_id=self.pipeline_id)
    return objects.order_by('%s' % self.notification_id_name).all()
Starts all the actors to run a system of process applications.
def start(self):
    """
    Starts all the actors to run a system of process applications.
    """
    # Subscribe to broadcast prompts published by a process
    # application in the parent operating system process.
    subscribe(handler=self.forward_prompt, predicate=self.is_prompt)

    # Initialise the system actor.
    msg = SystemInitRequest(
        self.system.process_classes,
        self.infrastructure_class,
        self.system.followings,
        self.pipeline_ids
    )
    response = self.actor_system.ask(self.system_actor, msg)

    # Keep the pipeline actor addresses, to send prompts directly.
    assert isinstance(response, SystemInitResponse), type(response)
    # Fix: the failure message previously read self.pipeline_actors, which
    # is only assigned below, so a mismatch would have raised AttributeError
    # instead of reporting the differing ID lists.
    assert list(response.pipeline_actors.keys()) == self.pipeline_ids, (
        "Configured pipeline IDs mismatch initialised system {} {}").format(
        list(response.pipeline_actors.keys()), self.pipeline_ids
    )
    self.pipeline_actors = response.pipeline_actors
Stops all the actors running a system of process applications.
def close(self):
    """Stops all the actors running a system of process applications.

    Runs the base-class close, detaches the prompt forwarder, and
    optionally shuts the actor system down.
    """
    super(ActorModelRunner, self).close()
    unsubscribe(handler=self.forward_prompt, predicate=self.is_prompt)
    if self.shutdown_on_close:
        self.shutdown()
Returns a new suffix tree entity.
def register_new_suffix_tree(self, case_insensitive=False):
    """Creates a new suffix tree entity, wired up with the repositories
    it (at least at the moment) needs, and returns it.
    """
    tree = register_new_suffix_tree(case_insensitive=case_insensitive)
    # Attach the repositories the entity uses internally.
    for attr, repo in (
        ('_node_repo', self.node_repo),
        ('_node_child_collection_repo', self.node_child_collection_repo),
        ('_edge_repo', self.edge_repo),
        ('_stringid_collection_repo', self.stringid_collection_repo),
    ):
        setattr(tree, attr, repo)
    return tree
Returns a suffix tree entity, equipped with node and edge repos it (at least at the moment) needs.
def get_suffix_tree(self, suffix_tree_id):
    """Returns a suffix tree entity, equipped with the node and edge
    repositories it (at least at the moment) needs.
    """
    tree = self.suffix_tree_repo[suffix_tree_id]
    assert isinstance(tree, GeneralizedSuffixTree)
    # Attach the repositories the entity uses internally.
    for attr, repo in (
        ('_node_repo', self.node_repo),
        ('_node_child_collection_repo', self.node_child_collection_repo),
        ('_edge_repo', self.edge_repo),
        ('_stringid_collection_repo', self.stringid_collection_repo),
    ):
        setattr(tree, attr, repo)
    return tree
Returns a set of IDs for strings that contain the given substring.
def find_string_ids(self, substring, suffix_tree_id, limit=None):
    """Returns a set of IDs for strings that contain the given substring.
    """
    # Find an edge for the substring.
    edge, ln = self.find_substring_edge(substring=substring, suffix_tree_id=suffix_tree_id)

    # No edge means no string contains the substring.
    if edge is None:
        return set()

    # Collect the string IDs beneath the edge's destination node.
    return set(get_string_ids(
        node_id=edge.dest_node_id,
        node_repo=self.node_repo,
        node_child_collection_repo=self.node_child_collection_repo,
        stringid_collection_repo=self.stringid_collection_repo,
        length_until_end=edge.length + 1 - ln,
        limit=limit,
    ))
Returns an edge that matches the given substring.
def find_substring_edge(self, substring, suffix_tree_id):
    """Returns an edge that matches the given substring (and the match length).
    """
    suffix_tree = self.suffix_tree_repo[suffix_tree_id]
    start_time = datetime.datetime.now()
    edge, ln = find_substring_edge(substring=substring, suffix_tree=suffix_tree, edge_repo=self.edge_repo)
    elapsed = datetime.datetime.now() - start_time
    # NOTE(review): timing output kept for parity with existing behaviour.
    print(" - searched for edge in {} for substring: '{}'".format(elapsed, substring))
    return edge, ln
First caller adds a prompt to queue and runs followers until there are no more pending prompts. Subsequent callers just add a prompt to the queue, avoiding recursion.
def run_followers(self, prompt):
    """
    First caller adds a prompt to queue and runs followers
    until there are no more pending prompts.

    Subsequent callers just add a prompt to the queue, avoiding recursion.
    """
    assert isinstance(prompt, Prompt)
    # Put the prompt on the queue.
    self.pending_prompts.put(prompt)

    # Only one caller at a time drains the queue; concurrent callers return
    # immediately after enqueueing, which avoids unbounded recursion.
    if self.iteration_lock.acquire(False):
        try:
            while True:
                try:
                    prompt = self.pending_prompts.get(False)
                except Empty:
                    break
                else:
                    # Run every follower of the prompting process.
                    followers = self.system.followers[prompt.process_name]
                    for follower_name in followers:
                        follower = self.system.processes[follower_name]
                        follower.run(prompt)
                    self.pending_prompts.task_done()
        finally:
            # Fix: removed the unused run-frequency calculation, which could
            # raise ZeroDivisionError from this finally block when no time
            # had measurably elapsed (masking any in-flight exception).
            self.iteration_lock.release()
Puts prompt in each downstream inbox (an actual queue).
def put(self, prompt):
    """
    Delivers the given prompt to every downstream inbox (an actual queue).
    """
    for inbox in self.downstream_inboxes.values():
        inbox.put(prompt)
Factory method for example entities. :rtype: Example
def create_new_example(foo='', a='', b=''):
    """
    Factory method for example entities.

    :rtype: Example
    """
    attrs = {'foo': foo, 'a': a, 'b': b}
    return Example.__create__(**attrs)
:rtype: InfrastructureFactory
def construct_infrastructure_factory(self, *args, **kwargs):
    """
    Constructs the application's infrastructure factory.

    :rtype: InfrastructureFactory
    """
    cls = self.infrastructure_factory_class
    assert issubclass(cls, InfrastructureFactory)
    # Extra positional and keyword args are passed straight through,
    # so duplicate keywords still raise TypeError at the call.
    return cls(
        *args,
        record_manager_class=self.record_manager_class,
        integer_sequenced_record_class=self.stored_event_record_class,
        sequenced_item_class=self.sequenced_item_class,
        contiguous_record_ids=self.contiguous_record_ids,
        application_name=self.name,
        pipeline_id=self.pipeline_id,
        snapshot_record_class=self.snapshot_record_class,
        **kwargs
    )
Decorator for application policy method. Allows policy to be built up from methods registered for different event classes.
def applicationpolicy(arg=None):
    """
    Decorator for an application policy method.

    Allows a policy to be built up from methods registered
    for different event classes.
    """
    assert isfunction(arg), arg

    dispatcher = singledispatch(arg)

    @wraps(dispatcher)
    def policy(*args, **kwargs):
        # The event is either passed by keyword or as the last positional arg.
        event = kwargs.get('event') or args[-1]
        handler = dispatcher.dispatch(type(event))
        return handler(*args, **kwargs)

    policy.register = dispatcher.register
    return policy
With transaction isolation level of "read committed" this should generate records with a contiguous sequence of integer IDs, assumes an indexed ID column, the database-side SQL max function, the insert-select-from form, and optimistic concurrency control.
def _prepare_insert(self, tmpl, record_class, field_names, placeholder_for_id=False):
    """
    Compiles an INSERT statement from the given template.

    With transaction isolation level of "read committed" this should
    generate records with a contiguous sequence of integer IDs, assumes
    an indexed ID column, the database-side SQL max function, the
    insert-select-from form, and optimistic concurrency control.

    :param tmpl: SQL template with {tablename}, {columns}, {placeholders}
        and {notification_id} slots.
    :param record_class: mapped record class whose columns type the params.
    :param field_names: base list of column names to insert.
    :param placeholder_for_id: if true, bind the notification ID explicitly.
    """
    field_names = list(field_names)
    # Include optional partitioning/causality columns when the class has them.
    if hasattr(record_class, 'application_name') and 'application_name' not in field_names:
        field_names.append('application_name')
    if hasattr(record_class, 'pipeline_id') and 'pipeline_id' not in field_names:
        field_names.append('pipeline_id')
    if hasattr(record_class, 'causal_dependencies') and 'causal_dependencies' not in field_names:
        field_names.append('causal_dependencies')
    if self.notification_id_name:
        if placeholder_for_id:
            if self.notification_id_name not in field_names:
                field_names.append(self.notification_id_name)
    statement = text(tmpl.format(
        tablename=self.get_record_table_name(record_class),
        columns=", ".join(field_names),
        placeholders=", ".join([":{}".format(f) for f in field_names]),
        notification_id=self.notification_id_name
    ))

    # Define bind parameters with explicit types taken from record column types.
    bindparams = []
    for col_name in field_names:
        column_type = getattr(record_class, col_name).type
        bindparams.append(bindparam(col_name, type_=column_type))

    # Redefine statement with explicitly typed bind parameters.
    statement = statement.bindparams(*bindparams)

    # Compile the statement with the session dialect.
    compiled = statement.compile(dialect=self.session.bind.dialect)

    return compiled
Permanently removes record from table.
def delete_record(self, record):
    """
    Permanently removes record from table.

    :raises ProgrammingError: if the delete fails for any reason.
    """
    try:
        self.session.delete(record)
        self.session.commit()
    except Exception as e:
        # Undo the failed transaction and re-raise as a ProgrammingError.
        self.session.rollback()
        raise ProgrammingError(e)
    finally:
        # Always return the session to a clean state.
        self.session.close()
Gets or creates a log. :rtype: Timebucketedlog
def get_or_create(self, log_name, bucket_size):
    """
    Gets or creates a log.

    :param log_name: name identifying the log.
    :param bucket_size: bucket size used only when a new log is started.
    :rtype: Timebucketedlog
    """
    try:
        return self[log_name]
    except RepositoryKeyError:
        # Not found, so start a new log with the given bucket size.
        return start_new_timebucketedlog(log_name, bucket_size=bucket_size)
Evolves initial state using the sequence of domain events and a mutator function.
def project_events(self, initial_state, domain_events):
    """
    Evolves initial state using the sequence of domain events
    and a mutator function.
    """
    # Prefer the configured mutator function, falling back to self.mutate.
    mutator = self._mutator_func or self.mutate
    return reduce(mutator, domain_events, initial_state)
Returns last array in compound. :rtype: CompoundSequenceReader
def get_last_array(self): """ Returns last array in compound. :rtype: CompoundSequenceReader """ # Get the root array (might not have been registered). root = self.repo[self.id] # Get length and last item in the root array. apex_id, apex_height = root.get_last_item_and_next_position() # Bail if there isn't anything yet. if apex_id is None: return None, None # Get the current apex array. apex = self.repo[apex_id] assert isinstance(apex, Array) # Descend until hitting the bottom. array = apex array_i = 0 height = apex_height while height > 1: height -= 1 array_id, width = array.get_last_item_and_next_position() assert width > 0 offset = width - 1 array_i += offset * self.repo.array_size ** height array = self.repo[array_id] return array, array_i
Returns i, j, h of the parent span that contains the given child span, plus the child's position within the parent array.
def calc_parent(self, i, j, h):
    """
    Returns i, j, h of the parent span that contains the given
    child span, plus the child's position within the parent array.
    """
    N = self.repo.array_size
    c_i = i
    c_j = j
    c_h = h
    # Calculate the number of the sequence in its row (sequences
    # with same height), from left to right, starting from 0.
    c_n = c_i // (N ** c_h)
    p_n = c_n // N
    # Position of the child ID in the parent array.
    p_p = c_n % N
    # Parent height is child height plus one.
    p_h = c_h + 1
    # Span of sequences in parent row is max size N, to the power of the height.
    span = N ** p_h
    # Calculate parent i and j.
    p_i = p_n * span
    p_j = p_i + span
    # Check the parent i,j bounds the child i,j, ie child span is contained by parent span.
    # Fix: the original messages used a single '{}' placeholder with two
    # format arguments, so the second value was silently dropped.
    assert p_i <= c_i, 'i greater on parent than child: {} > {}'.format(p_i, c_i)
    assert p_j >= c_j, 'j less on parent than child: {} < {}'.format(p_j, c_j)
    # Return parent i, j, h, p.
    return p_i, p_j, p_h, p_p
Constructs a sequenced item from a domain event.
def item_from_event(self, domain_event):
    """
    Constructs a sequenced item from a domain event.
    """
    return self.construct_sequenced_item(
        self.construct_item_args(domain_event)
    )
Constructs attributes of a sequenced item from the given domain event.
def construct_item_args(self, domain_event):
    """
    Constructs attributes of a sequenced item from the given domain event.
    """
    event_attrs = domain_event.__dict__
    # The sequence ID is read directly from the event's state.
    sequence_id = event_attrs[self.sequence_id_attr_name]
    # The position may be absent, in which case None is used.
    position = getattr(domain_event, self.position_attr_name, None)
    # Serialise the event class and its state.
    topic, state = self.get_item_topic_and_state(type(domain_event), event_attrs)
    # The 'other' args are derivative of the other attributes, used to
    # populate database fields, and shouldn't affect the hash.
    extras = tuple(getattr(domain_event, name) for name in self.other_attr_names)
    return (sequence_id, position, topic, state) + extras
Reconstructs domain event from stored event topic and event attrs. Used in the event store when getting domain events.
def event_from_item(self, sequenced_item):
    """
    Reconstructs domain event from stored event topic and event attrs.

    Used in the event store when getting domain events.
    """
    expected = self.sequenced_item_class
    assert isinstance(sequenced_item, expected), (expected, type(sequenced_item))
    # Read the topic and serialised state fields off the item.
    topic = getattr(sequenced_item, self.field_names.topic)
    state = getattr(sequenced_item, self.field_names.state)
    return self.event_from_topic_and_state(topic, state)
Gets sequenced item from the datastore.
def get_item(self, sequence_id, position):
    """
    Gets sequenced item from the datastore.
    """
    record = self.get_record(sequence_id, position)
    return self.from_record(record)
Returns sequenced item generator.
def get_items(self, sequence_id, gt=None, gte=None, lt=None, lte=None, limit=None,
              query_ascending=True, results_ascending=True):
    """
    Generates sequenced items, one per record matching the query.
    """
    records = self.get_records(
        sequence_id=sequence_id,
        gt=gt,
        gte=gte,
        lt=lt,
        lte=lte,
        limit=limit,
        query_ascending=query_ascending,
        results_ascending=results_ascending,
    )
    for record in records:
        yield self.from_record(record)
Returns records for a sequence.
def get_records(self, sequence_id, gt=None, gte=None, lt=None, lte=None, limit=None,
                query_ascending=True, results_ascending=True):
    """
    Returns records for a sequence.

    Stub with no body: concrete record managers implement the query.

    :param sequence_id: ID of the sequence to read.
    :param gt: only records with position greater than this.
    :param gte: only records with position greater than or equal to this.
    :param lt: only records with position less than this.
    :param lte: only records with position less than or equal to this.
    :param limit: maximum number of records to return.
    :param query_ascending: query the datastore in ascending order.
    :param results_ascending: return results in ascending order.
    """
Constructs a record object from given sequenced item object.
def to_record(self, sequenced_item):
    """
    Constructs a record object from given sequenced item object.
    """
    kwargs = self.get_field_kwargs(sequenced_item)
    # Add partitioning fields when the record class defines them.
    if hasattr(self.record_class, 'application_name'):
        kwargs['application_name'] = self.application_name
    if hasattr(self.record_class, 'pipeline_id'):
        kwargs['pipeline_id'] = self.pipeline_id
    return self.record_class(**kwargs)
Constructs and returns a sequenced item object, from given ORM object.
def from_record(self, record):
    """
    Constructs and returns a sequenced item object, from given ORM object.
    """
    return self.sequenced_item_class(**self.get_field_kwargs(record))
Returns pipeline ID and notification ID for event at given position in given sequence.
def get_pipeline_and_notification_id(self, sequence_id, position):
    """
    Returns pipeline ID and notification ID for
    event at given position in given sequence.
    """
    # Todo: Optimise query by selecting only two columns, pipeline_id and id (notification ID)?
    record = self.get_record(sequence_id, position)
    return record.pipeline_id, getattr(record, self.notification_id_name)
SQL statement that inserts records with contiguous IDs, by selecting max ID from indexed table records.
def insert_select_max(self):
    """
    SQL statement that inserts records with contiguous IDs,
    by selecting max ID from indexed table records.

    The compiled statement is built once and cached.
    """
    if self._insert_select_max is None:
        tmpl = self._insert_select_max_tmpl
        if hasattr(self.record_class, 'application_name'):
            # Todo: Maybe make it support application_name without pipeline_id?
            assert hasattr(self.record_class, 'pipeline_id'), self.record_class
            tmpl += self._where_application_name_tmpl
        self._insert_select_max = self._prepare_insert(
            tmpl=tmpl,
            record_class=self.record_class,
            field_names=list(self.field_names),
        )
    return self._insert_select_max
SQL statement that inserts records without ID.
def insert_values(self):
    """
    SQL statement that inserts records without ID.

    The compiled statement is built once and cached.
    """
    if self._insert_values is not None:
        return self._insert_values
    self._insert_values = self._prepare_insert(
        tmpl=self._insert_values_tmpl,
        placeholder_for_id=True,
        record_class=self.record_class,
        field_names=self.field_names,
    )
    return self._insert_values
SQL statement that inserts tracking records.
def insert_tracking_record(self):
    """
    SQL statement that inserts tracking records.

    The compiled statement is built once and cached.
    """
    if self._insert_tracking_record is not None:
        return self._insert_tracking_record
    self._insert_tracking_record = self._prepare_insert(
        tmpl=self._insert_values_tmpl,
        placeholder_for_id=True,
        record_class=self.tracking_record_class,
        field_names=self.tracking_record_field_names,
    )
    return self._insert_tracking_record
Returns instance of PaxosInstance (protocol implementation).
def paxos_instance(self):
    """
    Returns instance of PaxosInstance (protocol implementation),
    reconstituted from the aggregate's recorded state.
    """
    # Construct instance with the constant attributes.
    instance = PaxosInstance(self.network_uid, self.quorum_size)

    # Set the variable attributes from the aggregate.
    for name in self.paxos_variables:
        value = getattr(self, name, None)
        if value is None:
            continue
        # Copy containers so mutations on the instance don't leak back.
        if isinstance(value, (set, list, dict, tuple)):
            value = deepcopy(value)
        setattr(instance, name, value)

    return instance
Factory method that returns a new Paxos aggregate.
def start(cls, originator_id, quorum_size, network_uid):
    """
    Factory method that returns a new Paxos aggregate.

    :param originator_id: ID for the new aggregate (the key under agreement).
    :param quorum_size: number of nodes required to reach consensus (int).
    :param network_uid: ID of this node within the network.
    """
    assert isinstance(quorum_size, int), "Not an integer: {}".format(quorum_size)
    return cls.__create__(
        event_class=cls.Started,
        originator_id=originator_id,
        quorum_size=quorum_size,
        network_uid=network_uid
    )
Proposes a value to the network.
def propose_value(self, value, assume_leader=False):
    """
    Proposes a value to the network.

    :raises ValueError: if value is None.
    """
    if value is None:
        raise ValueError("Not allowed to propose value None")
    paxos = self.paxos_instance
    paxos.leader = assume_leader
    msg = paxos.propose_value(value)
    if msg is None:
        # Not leader yet: start a new round with a Prepare message.
        msg = paxos.prepare()
    self.setattrs_from_paxos(paxos)
    self.announce(msg)
    return msg
Responds to messages from other participants.
def receive_message(self, msg):
    """
    Responds to messages from other participants.
    """
    if isinstance(msg, Resolution):
        # Already-resolved announcements need no reply.
        return
    paxos = self.paxos_instance
    # Feed each message to the protocol instance; replies are announced
    # and then fed back in, until there is nothing further to process.
    while msg:
        if isinstance(msg, Resolution):
            self.print_if_verbose("{} resolved value {}".format(self.network_uid, msg.value))
            break
        else:
            self.print_if_verbose("{} <- {} <- {}".format(self.network_uid, msg.__class__.__name__, msg.from_uid))
            msg = paxos.receive(msg)
            # Todo: Make it optional not to announce resolution (without which it's hard to see final value).
            do_announce_resolution = True
            if msg and (do_announce_resolution or not isinstance(msg, Resolution)):
                self.announce(msg)
    # Record any protocol state changes on the aggregate.
    self.setattrs_from_paxos(paxos)
Announces a Paxos message.
def announce(self, msg):
    """
    Announces a Paxos message by triggering a MessageAnnounced event.
    """
    self.print_if_verbose("{} -> {}".format(self.network_uid, msg.__class__.__name__))
    self.__trigger_event__(event_class=self.MessageAnnounced, msg=msg)
Registers changes of attribute value on Paxos instance.
def setattrs_from_paxos(self, paxos):
    """
    Registers changes of attribute value on Paxos instance,
    triggering an AttributesChanged event when anything differs.
    """
    changes = {}
    for name in self.paxos_variables:
        new_value = getattr(paxos, name)
        if new_value == getattr(self, name, None):
            continue
        self.print_if_verbose("{} {}: {}".format(self.network_uid, name, new_value))
        changes[name] = new_value
        setattr(self, name, new_value)
    if changes:
        self.__trigger_event__(event_class=self.AttributesChanged, changes=changes)
Starts new Paxos aggregate and proposes a value for a key. Decorated with retry in case of notification log conflict or operational error.
def propose_value(self, key, value, assume_leader=False):
    """
    Starts new Paxos aggregate and proposes a value for a key.

    Decorated with retry in case of notification log
    conflict or operational error.
    """
    assert isinstance(key, UUID)
    aggregate = PaxosAggregate.start(
        originator_id=key,
        quorum_size=self.quorum_size,
        network_uid=self.name,
    )
    # Drive the local protocol instance until it has nothing more to say.
    msg = aggregate.propose_value(value, assume_leader=assume_leader)
    while msg:
        msg = aggregate.receive_message(msg)
    pending = aggregate.__batch_pending_events__()
    self.record_process_event(ProcessEvent(pending))
    self.repository.take_snapshot(aggregate.id)
    self.publish_prompt()
    return aggregate
Message dispatching function. This function accepts any PaxosMessage subclass and calls the appropriate handler function
def receive(self, msg):
    '''
    Message dispatching function. This function accepts any PaxosMessage
    subclass and calls the appropriate handler function.
    '''
    handler_name = 'receive_' + msg.__class__.__name__.lower()
    handler = getattr(self, handler_name, None)
    if handler is None:
        raise InvalidMessageError('Receiving class does not support messages of type: ' + msg.__class__.__name__)
    return handler(msg)
Sets the proposal value for this node iff this node is not already aware of a previous proposal value. If the node additionally believes itself to be the current leader, an Accept message will be returned
def propose_value(self, value):
    '''
    Sets the proposal value for this node iff this node is not already aware of
    a previous proposal value. If the node additionally believes itself to be
    the current leader, an Accept message will be returned.
    '''
    if self.proposed_value is None:
        self.proposed_value = value
        if self.leader:
            self.current_accept_msg = Accept(self.network_uid, self.proposal_id, value)
            return self.current_accept_msg
Returns a new Prepare message with a proposal id higher than that of any observed proposals. A side effect of this method is to clear the leader flag if it is currently set.
def prepare(self):
    '''
    Returns a new Prepare message with a proposal id higher than that of any
    observed proposals. A side effect of this method is to clear the leader
    flag if it is currently set.
    '''
    self.leader = False
    self.promises_received = set()
    self.nacks_received = set()
    # Outbid every proposal observed so far.
    new_id = ProposalID(self.highest_proposal_id.number + 1, self.network_uid)
    self.proposal_id = new_id
    self.highest_proposal_id = new_id
    self.current_prepare_msg = Prepare(self.network_uid, new_id)
    return self.current_prepare_msg
Returns a new Prepare message if the number of Nacks received reaches a quorum.
def receive_nack(self, msg):
    '''
    Returns a new Prepare message if the number of Nacks received
    reaches a quorum.
    '''
    self.observe_proposal(msg.promised_proposal_id)
    # Ignore nacks for other proposals, or when not collecting nacks.
    if msg.proposal_id != self.proposal_id or self.nacks_received is None:
        return
    self.nacks_received.add(msg.from_uid)
    if len(self.nacks_received) == self.quorum_size:
        return self.prepare()
Returns an Accept messages if a quorum of Promise messages is achieved
def receive_promise(self, msg):
    '''
    Returns an Accept message if a quorum of Promise messages is achieved.
    '''
    self.observe_proposal(msg.proposal_id)
    # Ignore if already leading, promising another proposal, or a duplicate.
    if self.leader or msg.proposal_id != self.proposal_id or msg.from_uid in self.promises_received:
        return
    self.promises_received.add(msg.from_uid)
    # Adopt the most recently accepted value among the promises, if any.
    if self.highest_accepted_id is None or msg.last_accepted_id > self.highest_accepted_id:
        self.highest_accepted_id = msg.last_accepted_id
        if msg.last_accepted_value is not None:
            self.proposed_value = msg.last_accepted_value
    if len(self.promises_received) == self.quorum_size:
        self.leader = True
        if self.proposed_value is not None:
            self.current_accept_msg = Accept(self.network_uid, self.proposal_id, self.proposed_value)
            return self.current_accept_msg
Returns either a Promise or a Nack in response. The Acceptor's state must be persisted to disk prior to transmitting the Promise message.
def receive_prepare(self, msg):
    '''
    Returns either a Promise or a Nack in response. The Acceptor's state must
    be persisted to disk prior to transmitting the Promise message.
    '''
    if self.promised_id is not None and msg.proposal_id < self.promised_id:
        # Already promised a higher proposal: refuse.
        return Nack(self.network_uid, msg.from_uid, msg.proposal_id, self.promised_id)
    self.promised_id = msg.proposal_id
    return Promise(self.network_uid, msg.from_uid, self.promised_id, self.accepted_id, self.accepted_value)
Returns either an Accepted or Nack message in response. The Acceptor's state must be persisted to disk prior to transmitting the Accepted message.
def receive_accept(self, msg):
    '''
    Returns either an Accepted or Nack message in response. The Acceptor's
    state must be persisted to disk prior to transmitting the Accepted message.
    '''
    if self.promised_id is not None and msg.proposal_id < self.promised_id:
        # Already promised a higher proposal: refuse.
        return Nack(self.network_uid, msg.from_uid, msg.proposal_id, self.promised_id)
    self.promised_id = msg.proposal_id
    self.accepted_id = msg.proposal_id
    self.accepted_value = msg.proposal_value
    return Accepted(self.network_uid, msg.proposal_id, msg.proposal_value)
Called when an Accepted message is received from an acceptor. Once the final value is determined, the return value of this method will be a Resolution message containing the consentual value. Subsequent calls after the resolution is chosen will continue to add new Acceptors to the final_acceptors set and return Resolution messages.
def receive_accepted(self, msg):
    '''
    Called when an Accepted message is received from an acceptor. Once the
    final value is determined, the return value of this method will be a
    Resolution message containing the consentual value. Subsequent calls after
    the resolution is chosen will continue to add new Acceptors to the
    final_acceptors set and return Resolution messages.
    '''
    if self.final_value is not None:
        # Already resolved: just keep tracking confirming acceptors.
        if msg.proposal_id >= self.final_proposal_id and msg.proposal_value == self.final_value:
            self.final_acceptors.add(msg.from_uid)
        return Resolution(self.network_uid, self.final_value)

    last_pn = self.acceptors.get(msg.from_uid)
    if last_pn is not None and msg.proposal_id <= last_pn:
        return  # Old message

    self.acceptors[msg.from_uid] = msg.proposal_id
    if last_pn is not None:
        # This acceptor has moved to a newer proposal: release its
        # claim on the proposal it previously accepted.
        # String proposal_key, need string keys for JSON.
        proposal_key = str(last_pn)
        ps = self.proposals[proposal_key]
        ps.retain_count -= 1
        ps.acceptors.remove(msg.from_uid)
        if ps.retain_count == 0:
            del self.proposals[proposal_key]

    # String proposal_key, need string keys for JSON.
    proposal_key = str(msg.proposal_id)
    if not proposal_key in self.proposals:
        self.proposals[proposal_key] = ProposalStatus(msg.proposal_value)

    ps = self.proposals[proposal_key]
    assert msg.proposal_value == ps.value, 'Value mismatch for single proposal!'

    ps.accept_count += 1
    ps.retain_count += 1
    ps.acceptors.add(msg.from_uid)

    if ps.accept_count == self.quorum_size:
        # Quorum reached: fix the final value and drop the bookkeeping.
        self.final_proposal_id = msg.proposal_id
        self.final_value = msg.proposal_value
        self.final_acceptors = ps.acceptors
        self.proposals = None
        self.acceptors = None
        return Resolution(self.network_uid, self.final_value)
Return class described by given topic. Args: topic: A string describing a class. Returns: A class. Raises: TopicResolutionError: If there is no such class.
def resolve_topic(topic):
    """Return class described by given topic.

    Args:
        topic: A string describing a class, of the form
            'module.path#Attr.Path'.

    Returns:
        A class.

    Raises:
        TopicResolutionError: If there is no such class.
    """
    # str.partition cannot raise ImportError, so it sits outside the try.
    module_name, _, class_name = topic.partition('#')
    try:
        module = importlib.import_module(module_name)
    except ImportError as e:
        raise TopicResolutionError("{}: {}".format(topic, e))
    try:
        return resolve_attr(module, class_name)
    except AttributeError as e:
        raise TopicResolutionError("{}: {}".format(topic, e))
A recursive version of getattr for navigating dotted paths. Args: obj: An object for which we want to retrieve a nested attribute. path: A dot separated string containing zero or more attribute names. Returns: The attribute referred to by obj.a1.a2.a3... Raises: AttributeError: If there is no such attribute.
def resolve_attr(obj, path):
    """A recursive version of getattr for navigating dotted paths.

    Args:
        obj: An object for which we want to retrieve a nested attribute.
        path: A dot separated string containing zero or more attribute names.

    Returns:
        The attribute referred to by obj.a1.a2.a3...

    Raises:
        AttributeError: If there is no such attribute.
    """
    if not path:
        return obj
    first, _, remainder = path.partition('.')
    return resolve_attr(getattr(obj, first), remainder)
Helper function to return dictionary of countries in {"country" : "iso"} form.
def country_list_maker():
    """
    Helper function to return dictionary of countries in {"country" : "iso"} form.

    Keys are display names plus common aliases (e.g. "UK", "America",
    "England"); values are ISO 3166-1 alpha-3 codes.
    """
    # NOTE(review): "Ascension Island" maps to "NA", which is not a valid
    # alpha-3 code, and "Britain":"GBR" appears twice (harmless duplicate
    # key with the same value) -- confirm both are intended.
    cts = {"Afghanistan":"AFG", "Åland Islands":"ALA", "Albania":"ALB", "Algeria":"DZA", "American Samoa":"ASM", "Andorra":"AND", "Angola":"AGO", "Anguilla":"AIA", "Antarctica":"ATA", "Antigua Barbuda":"ATG", "Argentina":"ARG", "Armenia":"ARM", "Aruba":"ABW", "Ascension Island":"NA", "Australia":"AUS", "Austria":"AUT", "Azerbaijan":"AZE", "Bahamas":"BHS", "Bahrain":"BHR", "Bangladesh":"BGD", "Barbados":"BRB", "Belarus":"BLR", "Belgium":"BEL", "Belize":"BLZ", "Benin":"BEN", "Bermuda":"BMU", "Bhutan":"BTN", "Bolivia":"BOL", "Bosnia Herzegovina":"BIH", "Botswana":"BWA", "Bouvet Island":"BVT", "Brazil":"BRA", "Britain":"GBR", "Great Britain":"GBR", "British Virgin Islands":"VGB", "Brunei":"BRN", "Bulgaria":"BGR", "Burkina Faso":"BFA", "Burundi":"BDI", "Cambodia":"KHM", "Cameroon":"CMR", "Canada":"CAN","Cape Verde":"CPV", "Cayman Islands":"CYM", "Central African Republic":"CAF", "Chad":"TCD", "Chile":"CHL", "China":"CHN", "Cocos Islands":"CCK", "Colombia":"COL", "Comoros":"COM", "Republic of Congo":"COG", "Cook Islands":"COK", "Costa Rica":"CRI", "Cote Ivoire":"CIV", "Ivory Coast":"CIV","Croatia":"HRV", "Cuba":"CUB", "Curaçao":"CUW", "Cyprus":"CYP", "Czech Republic":"CZE", "Denmark":"DNK", "Djibouti":"DJI", "Dominica":"DMA", "Dominican Republic":"DOM", "Democratic Republic of Congo" : "COD", "Ecuador":"ECU", "Egypt":"EGY", "El Salvador":"SLV", "England" : "GBR", "Equatorial Guinea":"GNQ", "Eritrea":"ERI", "Estonia":"EST", "Ethiopia":"ETH", "Falkland Islands":"FLK", "Faroe Islands":"FRO", "Fiji":"FJI", "Finland":"FIN", "France":"FRA", "French Guiana":"GUF", "French Polynesia":"PYF","Gabon":"GAB", "Gambia":"GMB", "Georgia":"GEO", "Germany":"DEU", "Ghana":"GHA", "Gibraltar":"GIB", "Greece":"GRC", "Greenland":"GRL", "Grenada":"GRD", "Guadeloupe":"GLP", "Guam":"GUM", "Guatemala":"GTM", "Guernsey":"GGY", "Guinea":"GIN", "Guinea Bissau":"GNB", "Guyana":"GUY",
           "Haiti":"HTI","Honduras":"HND", "Hong Kong":"HKG", "Hungary":"HUN", "Iceland":"ISL", "India":"IND", "Indonesia":"IDN", "Iran":"IRN", "Iraq":"IRQ", "Ireland":"IRL", "Israel":"ISR", "Italy":"ITA", "Jamaica":"JAM", "Japan":"JPN", "Jordan":"JOR", "Kazakhstan":"KAZ", "Kenya":"KEN", "Kiribati":"KIR", "Kosovo": "XKX", "Kuwait":"KWT", "Kyrgyzstan":"KGZ", "Laos":"LAO", "Latvia":"LVA", "Lebanon":"LBN", "Lesotho":"LSO", "Liberia":"LBR", "Libya":"LBY", "Liechtenstein":"LIE", "Lithuania":"LTU", "Luxembourg":"LUX", "Macau":"MAC", "Macedonia":"MKD", "Madagascar":"MDG", "Malawi":"MWI", "Malaysia":"MYS", "Maldives":"MDV", "Mali":"MLI", "Malta":"MLT", "Marshall Islands":"MHL", "Martinique":"MTQ", "Mauritania":"MRT", "Mauritius":"MUS", "Mayotte":"MYT", "Mexico":"MEX", "Micronesia":"FSM", "Moldova":"MDA", "Monaco":"MCO", "Mongolia":"MNG", "Montenegro":"MNE", "Montserrat":"MSR", "Morocco":"MAR", "Mozambique":"MOZ", "Myanmar":"MMR", "Burma":"MMR", "Namibia":"NAM", "Nauru":"NRU", "Nepal":"NPL", "Netherlands":"NLD", "Netherlands Antilles":"ANT", "New Caledonia":"NCL", "New Zealand":"NZL", "Nicaragua":"NIC", "Niger":"NER", "Nigeria":"NGA", "Niue":"NIU", "North Korea":"PRK", "Northern Ireland":"IRL", "Northern Mariana Islands":"MNP", "Norway":"NOR", "Oman":"OMN", "Pakistan":"PAK", "Palau":"PLW", "Palestine":"PSE","Panama":"PAN", "Papua New Guinea":"PNG", "Paraguay":"PRY", "Peru":"PER", "Philippines":"PHL", "Pitcairn Islands":"PCN", "Poland":"POL", "Portugal":"PRT", "Puerto Rico":"PRI", "Qatar":"QAT", "Réunion":"REU", "Romania":"ROU", "Russia":"RUS", "Rwanda":"RWA", "Saint Barthélemy":"BLM", "Saint Helena":"SHN", "Saint Kitts Nevis":"KNA", "Saint Lucia":"LCA", "Saint Pierre Miquelon":"SPM", "Saint Vincent Grenadines":"VCT", "Samoa":"WSM", "San Marino":"SMR", "São Tomé Príncipe":"STP", "Saudi Arabia":"SAU", "Senegal":"SEN", "Serbia":"SRB", "Seychelles":"SYC", "Sierra Leone":"SLE", "Singapore":"SGP", "Sint Maarten":"SXM", "Slovakia":"SVK", "Slovenia":"SVN", "Solomon Islands":"SLB",
           "Somalia":"SOM", "South Africa":"ZAF", "South Korea":"KOR", "South Sudan":"SSD", "Spain":"ESP", "Sri Lanka":"LKA", "Sudan":"SDN", "Suriname":"SUR", "Svalbard Jan Mayen":"SJM", "Swaziland":"SWZ", "Sweden":"SWE", "Switzerland":"CHE", "Syria":"SYR", "Taiwan":"TWN", "Tajikistan":"TJK", "Tanzania":"TZA", "Thailand":"THA", "Timor Leste":"TLS", "East Timor":"TLS","Togo":"TGO", "Tokelau":"TKL", "Tonga":"TON", "Trinidad Tobago":"TTO", "Tunisia":"TUN", "Turkey":"TUR", "Turkmenistan":"TKM", "Turks Caicos Islands":"TCA", "Tuvalu":"TUV", "U.S. Minor Outlying Islands":"UMI", "Virgin Islands":"VIR", "Uganda":"UGA", "Ukraine":"UKR", "United Arab Emirates":"ARE", "United Kingdom":"GBR", "United States":"USA", "Uruguay":"URY", "Uzbekistan":"UZB", "Vanuatu":"VUT", "Vatican":"VAT", "Venezuela":"VEN", "Vietnam":"VNM", "Wallis Futuna":"WLF", "Western Sahara":"ESH", "Yemen":"YEM", "Zambia":"ZMB", "Zimbabwe":"ZWE", "UK":"GBR", "USA":"USA", "America":"USA", "Palestinian Territories":"PSE", "Congo Brazzaville":"COG", "Congo Kinshasa":"COD", "Wales" : "GBR", "Scotland" : "GBR", "Britain" : "GBR",}
    return cts
Return hand-defined list of place names to skip and not attempt to geolocate. If users would like to exclude country names, this would be the function to do it with.
def make_skip_list(cts):
    """
    Return a hand-defined set of place names to skip and not attempt to geolocate.

    If users would like to exclude country names, this is the function to
    modify (e.g. by adding ``list(cts.keys())`` to the terms below).

    Parameters
    ----------
    cts: dict
        country name -> ISO3 code mapping; currently unused, kept for
        callers that want to extend the skip list with country names.

    Returns
    -------
    skip_list: set
        place-name strings that should never be sent to geolocation
    """
    # Maybe make these non-country searches but don't discard, at least for
    # some (esp. bodies of water). Duplicate entries from the original list
    # were removed; membership is unchanged because the result is a set.
    special_terms = ["Europe", "West", "the West", "South Pacific", "Gulf of Mexico",
                     "Atlantic", "the Black Sea", "Black Sea", "North America",
                     "Mideast", "Middle East", "the Middle East", "Asia",
                     "the Caucasus", "Africa", "Central Asia", "Balkans",
                     "Eastern Europe", "Arctic", "Ottoman Empire", "Asia-Pacific",
                     "East Asia", "Horn of Africa", "Americas", "North Africa",
                     "the Strait of Hormuz", "Mediterranean", "East", "North",
                     "South", "Latin America", "Southeast Asia", "Western Pacific",
                     "South Asia", "Persian Gulf", "Central Europe",
                     "Western Hemisphere", "Western Europe",
                     "European Union (E.U.)", "EU", "European Union", "E.U.",
                     "Caribbean", "US", "U.S.", "West Africa", "Western Countries"]
    # Some words are recurring spaCy NER problems...
    spacy_problems = ["Kurd", "Qur'an"]
    return set(special_terms + spacy_problems)
NLP countries so we can use for vector comparisons
def country_list_nlp(cts):
    """Run each country name through spaCy so we can use vector comparisons."""
    return [nlp(name) for name in cts]
Combine list of countries and list of nationalities
def make_country_nationality_list(cts, ct_file):
    """
    Combine the country-name mapping with a nationality mapping.

    Reads a CSV with ``nationality`` and ``alpha_3_code`` columns and merges
    it with *cts*; country-name entries win on key collisions.
    """
    frame = pd.read_csv(ct_file)
    nat_map = {nat: code for nat, code in zip(frame.nationality, frame.alpha_3_code)}
    return {**nat_map, **cts}
cts is e.g. {"Germany" : "DEU"}. inv_cts is the inverse: {"DEU" : "Germany"}
def make_inv_cts(cts):
    """
    Invert the country mapping.

    cts is e.g. {"Germany" : "DEU"}; the result maps {"DEU" : "Germany"}.
    When several names share a code (e.g. "UK" and "Britain" -> "GBR"),
    the first name encountered wins.
    """
    inverted = {}
    for name, code in cts.items():
        inverted.setdefault(code, name)
    return inverted
Small helper function to read in a admin1 code <--> admin1 name document. Parameters ---------- filepath: string path to the admin1 mapping JSON. This file is usually mordecai/resources/data/admin1CodesASCII.json Returns ------- admin1_dict: dictionary keys are country + admin1codes, values are names Example: "US.OK" : "Oklahoma" Example: "SE.21": "Uppsala"
def read_in_admin1(filepath):
    """
    Read the admin1 code <--> admin1 name mapping from a JSON file.

    Parameters
    ----------
    filepath: string
        path to the admin1 mapping JSON, usually
        mordecai/resources/data/admin1CodesASCII.json

    Returns
    -------
    dictionary
        keys are country + admin1 codes, values are names,
        e.g. "US.OK" : "Oklahoma", "SE.21" : "Uppsala"
    """
    with open(filepath) as fh:
        return json.load(fh)
Format Elasticsearch result as Python dictionary
def structure_results(res):
    """Reshape raw Elasticsearch hits into the nested dict layout used downstream."""
    wanted = (u'admin1_code', u'admin2_code', u'admin3_code', u'admin4_code',
              u'alternativenames', u'asciiname', u'cc2', u'coordinates',
              u'country_code2', u'country_code3', u'dem', u'elevation',
              u'feature_class', u'feature_code', u'geonameid',
              u'modification_date', u'name', u'population', u'timezone')
    # Keep only the whitelisted fields of each hit, preserving order.
    hits = [{key: hit[key] for key in wanted} for hit in res]
    return {'hits': {'hits': hits}}
Setup an Elasticsearch connection Parameters ---------- hosts: list Hostnames / IP addresses for elasticsearch cluster port: string Port for elasticsearch cluster use_ssl: boolean Whether to use SSL for the elasticsearch connection auth: tuple (username, password) to use with HTTP auth Returns ------- es_conn: an elasticsearch_dsl Search connection object.
def setup_es(hosts, port, use_ssl=False, auth=None):
    """
    Set up an Elasticsearch connection.

    Parameters
    ----------
    hosts: list
        Hostnames / IP addresses for the elasticsearch cluster
        (defaults to ['localhost'] when falsy)
    port: string
        Port for the elasticsearch cluster (defaults to 9200 when falsy)
    use_ssl: boolean
        Whether to use SSL for the elasticsearch connection
    auth: tuple
        (username, password) to use with HTTP auth

    Returns
    -------
    an elasticsearch_dsl Search object bound to the "geonames" index
    """
    conn_args = {
        "hosts": hosts or ['localhost'],
        "port": port or 9200,
        "use_ssl": use_ssl,
    }
    if auth:
        conn_args["http_auth"] = auth
    client = Elasticsearch(**conn_args)
    return Search(using=client, index="geonames")
Given a document, count how many times different country names and adjectives are mentioned. These are features used in the country picking phase. Parameters --------- doc: a spaCy nlp'ed piece of text Returns ------- countries: dict the top two countries (ISO code) and their frequency of mentions.
def _feature_country_mentions(self, doc): """ Given a document, count how many times different country names and adjectives are mentioned. These are features used in the country picking phase. Parameters --------- doc: a spaCy nlp'ed piece of text Returns ------- countries: dict the top two countries (ISO code) and their frequency of mentions. """ c_list = [] for i in doc.ents: try: country = self._both_codes[i.text] c_list.append(country) except KeyError: pass count = Counter(c_list).most_common() try: top, top_count = count[0] except: top = "" top_count = 0 try: two, two_count = count[1] except: two = "" two_count = 0 countries = (top, top_count, two, two_count) return countries
Strip out extra words that often get picked up by spaCy's NER. To do: preserve info about what got stripped out to help with ES/Geonames resolution later. Parameters --------- ent: a spaCy named entity Span Returns ------- new_ent: a spaCy Span, with extra words stripped out.
def clean_entity(self, ent):
    """
    Strip out extra words that often get picked up by spaCy's NER.

    To do: preserve info about what got stripped out to help with
    ES/Geonames resolution later.

    Parameters
    ---------
    ent: a spaCy named entity Span

    Returns
    -------
    a spaCy Span with the extra words stripped out; the original span is
    returned unchanged when every token is a filler word
    """
    # maybe have 'city'? Works differently in different countries
    # also, "District of Columbia". Might need to use cap/no cap
    discard = ['province', 'the', 'area', 'airport', 'district', 'square',
               'town', 'village', 'prison', "river", "valley", "provincial",
               "prison", "region", "municipality", "state", "territory",
               "of", "in", "county", "central"]
    kept = [token.i for token in ent if token.text.lower() not in discard]
    if not kept:
        # nothing survives trimming -> keep the entity as-is
        return ent
    return ent.doc[min(kept):max(kept) + 1]
Find the most common country name in ES/Geonames results Parameters ---------- results: dict output of `query_geonames` Returns ------- most_common: str ISO code of most common country, or empty string if none
def _feature_most_common(self, results): """ Find the most common country name in ES/Geonames results Paramaters ---------- results: dict output of `query_geonames` Returns ------- most_common: str ISO code of most common country, or empty string if none """ try: country_count = Counter([i['country_code3'] for i in results['hits']['hits']]) most_common = country_count.most_common()[0][0] return most_common except IndexError: return "" except TypeError: return ""
Find the placename with the most alternative names and return its country. More alternative names are a rough measure of importance. Parameters ---------- results: dict output of `query_geonames` Returns ------- most_alt: str ISO code of country of place with most alternative names, or empty string if none
def _feature_most_alternative(self, results, full_results=False): """ Find the placename with the most alternative names and return its country. More alternative names are a rough measure of importance. Paramaters ---------- results: dict output of `query_geonames` Returns ------- most_alt: str ISO code of country of place with most alternative names, or empty string if none """ try: alt_names = [len(i['alternativenames']) for i in results['hits']['hits']] most_alt = results['hits']['hits'][np.array(alt_names).argmax()] if full_results: return most_alt else: return most_alt['country_code3'] except (IndexError, ValueError, TypeError): return ""
Find the placename with the largest population and return its country. More population is a rough measure of importance. Parameters ---------- results: dict output of `query_geonames` Returns ------- most_pop: str ISO code of country of place with largest population, or empty string if none
def _feature_most_population(self, results): """ Find the placename with the largest population and return its country. More population is a rough measure of importance. Paramaters ---------- results: dict output of `query_geonames` Returns ------- most_pop: str ISO code of country of place with largest population, or empty string if none """ try: populations = [i['population'] for i in results['hits']['hits']] most_pop = results['hits']['hits'][np.array(populations).astype("int").argmax()] return most_pop['country_code3'] except Exception as e: return ""
Given a word, guess the appropriate country by word vector. Parameters --------- text: str the text to extract locations from. Returns ------- country_picking: dict The top two countries (ISO codes) and two measures confidence for the first choice.
def _feature_word_embedding(self, text): """ Given a word, guess the appropriate country by word vector. Parameters --------- text: str the text to extract locations from. Returns ------- country_picking: dict The top two countries (ISO codes) and two measures confidence for the first choice. """ try: simils = np.dot(self._prebuilt_vec, text.vector) except Exception as e: #print("Vector problem, ", Exception, e) return {"country_1" : "", "confid_a" : 0, "confid_b" : 0, "country_2" : ""} ranks = simils.argsort()[::-1] confid = simils.max() confid2 = simils[ranks[0]] - simils[ranks[1]] if confid == 0 or confid2 == 0: return "" country_code = self._cts[str(self._ct_nlp[ranks[0]])] country_picking = {"country_1" : country_code, "confid_a" : confid, "confid_b" : confid2, "country_2" : self._cts[str(self._ct_nlp[ranks[1]])]} return country_picking
Get the country of the first two results back from geonames. Parameters ----------- results: dict elasticsearch results Returns ------- top: tuple first and second results' country name (ISO)
def _feature_first_back(self, results): """ Get the country of the first two results back from geonames. Parameters ----------- results: dict elasticsearch results Returns ------- top: tuple first and second results' country name (ISO) """ try: first_back = results['hits']['hits'][0]['country_code3'] except (TypeError, IndexError): # usually occurs if no Geonames result first_back = "" try: second_back = results['hits']['hits'][1]['country_code3'] except (TypeError, IndexError): second_back = "" top = (first_back, second_back) return top
Check if a piece of text is in the list of countries
def is_country(self, text):
    """Return True if *text* exactly matches a known country name."""
    return text in self._just_cts
Wrap search parameters into an elasticsearch query to the geonames index and return results. Parameters --------- conn: an elasticsearch Search conn, like the one returned by `setup_es()` placename: str the placename text extracted by NER system Returns ------- out: The raw results of the elasticsearch query
def query_geonames(self, placename):
    """
    Wrap search parameters into an elasticsearch query to the geonames
    index and return the structured results.

    Parameters
    ---------
    placename: str
        the placename text extracted by the NER system

    Returns
    -------
    dict
        structured results of the elasticsearch query
    """
    if self.is_country(placename):
        # Country names: exact phrase, restricted to independent political
        # entities (Geonames feature code PCLI); only 5 results needed.
        match = {"multi_match": {"query": placename,
                                 "fields": ['name', 'asciiname', 'alternativenames'],
                                 "type": "phrase"}}
        res = self.conn.filter("term", feature_code='PCLI').query(match)[0:5].execute()  # always 5
        return utilities.structure_results(res)
    # Otherwise, try an exact phrase match first, boosting name/asciiname.
    match = {"multi_match": {"query": placename,
                             "fields": ['name^5', 'asciiname^5', 'alternativenames'],
                             "type": "phrase"}}
    res = self.conn.query(match)[0:50].execute()
    if res.hits.total == 0:
        # No exact hits: add fuzziness (not allowed in "phrase" searches)
        # but still require all terms to be present.
        # (Wrapping in {"constant_score": {"query": ...}} made it worse.)
        match = {"multi_match": {"query": placename,
                                 "fields": ['name', 'asciiname', 'alternativenames'],
                                 "fuzziness": 1,
                                 "operator": "and"}}
        res = self.conn.query(match)[0:50].execute()
    return utilities.structure_results(res)
Like query_geonames, but this time limited to a specified country.
def query_geonames_country(self, placename, country):
    """
    Like query_geonames, but limited to a specified country (ISO3 code).
    """
    scoped = self.conn.filter("term", country_code3=country)
    # First, try for an exact phrase match, boosting name/asciiname.
    match = {"multi_match": {"query": placename,
                             "fields": ['name^5', 'asciiname^5', 'alternativenames'],
                             "type": "phrase"}}
    res = scoped.query(match)[0:50].execute()
    if res.hits.total == 0:
        # No exact hits: add fuzziness (not allowed in "phrase" searches)
        # but still require all terms to be present.
        match = {"multi_match": {"query": placename,
                                 "fields": ['name', 'asciiname', 'alternativenames'],
                                 "fuzziness": 1,
                                 "operator": "and"}}
        res = scoped.query(match)[0:50].execute()
    return utilities.structure_results(res)
Count forward 1 word from each entity, looking for defined terms that indicate geographic feature types (e.g. "village" = "P"). Parameters ----------- ent : spacy entity span It has to be an entity to handle indexing in the document Returns -------- tuple (length 2) (feature_code, feature_class) derived from explicit word usage
def _feature_location_type_mention(self, ent): """ Count forward 1 word from each entity, looking for defined terms that indicate geographic feature types (e.g. "village" = "P"). Parameters ----------- ent : spacy entity span It has to be an entity to handle indexing in the document Returns -------- tuple (length 2) (feature_code, feature_class) derived from explicit word usage """ P_list = ["city", "cities", "town", "towns", "villages", "village", "settlement", "capital", "town", "towns", "neighborhood", "neighborhoods", "municipality"] ADM1_list = ["province", "governorate", "state", "department", "oblast", "changwat"] ADM2_list = ["district", "rayon", "amphoe", "county"] A_other = ["region"] AIRPORT_list = ["airport"] TERRAIN_list = ["mountain", "mountains", "stream", "river"] FOREST_list = ["forest"] # TODO: incorporate positions, especially now that we don't split by # sentence feature_positions = [] feature_class = feature_code = "" interest_words = ent.doc[ent.end - 1 : ent.end + 1] # last word or next word following for word in interest_words: if ent.text in self._just_cts.keys(): feature_class = "A" feature_code = "PCLI" elif word.text.lower() in P_list: feature_class = "P" feature_code = "" elif word.text.lower() in ADM1_list: feature_class = "A" feature_code = "ADM1" elif word.text.lower() in ADM2_list: feature_class = "A" feature_code = "ADM2" elif word.text.lower() in TERRAIN_list: feature_class = "T" feature_code = "" elif word.text.lower() in AIRPORT_list: feature_class = "S" feature_code = "AIRP" elif word.text.lower() in A_other: feature_class = "A" feature_code = "" return (feature_class, feature_code)
Create features for the country picking model. Function where all the individual feature maker functions are called and aggregated. (Formerly "process_text") Parameters ----------- doc : str or spaCy doc Returns ------- task_list : list of dicts Each entry has the word, surrounding text, span, and the country picking features. This output can be put into Prodigy for labeling almost as-is (the "features" key needs to be renamed "meta" or be deleted.)
def make_country_features(self, doc, require_maj=False):
    """
    Create features for the country picking model. Function where all the
    individual feature maker functions are called and aggregated.
    (Formerly "process_text")

    Parameters
    -----------
    doc : str or spaCy doc
        raw text is run through spaCy first; an already-parsed doc is used
        as-is
    require_maj : bool
        NOTE(review): not referenced anywhere in this function body —
        confirm whether it should gate the majority vote or be removed

    Returns
    -------
    task_list : list of dicts
        Each entry has the word, surrounding text, span, and the country
        picking features. This output can be put into Prodigy for labeling
        almost as-is (the "features" key needs to be renamed "meta" or be
        deleted.)
    """
    # Accept raw strings: anything without .ents gets parsed by spaCy here.
    if not hasattr(doc, "ents"):
        doc = nlp(doc)
    # initialize the place to store finalized tasks
    task_list = []
    # get document vector
    #doc_vec = self._feature_word_embedding(text)['country_1']
    # get explicit counts of country names
    ct_mention, ctm_count1, ct_mention2, ctm_count2 = self._feature_country_mentions(doc)
    # pull out the place names, skipping empty ones, countries, and known
    # junk from the skip list (like "Atlantic Ocean")
    ents = []
    for ent in doc.ents:
        if not ent.text.strip():
            continue
        # only location-ish entity labels are geoparsed
        if ent.label_ not in ["GPE", "LOC", "FAC"]:
            continue
        # don't include country names (make a parameter)
        if ent.text.strip() in self._skip_list:
            continue
        ents.append(ent)
    if not ents:
        return []
    # Look them up in geonames, either sequentially if no threading, or
    # in parallel if threads.
    if self.threads:
        # one worker per entity; simple_lookup wraps query_geonames
        pool = ThreadPool(len(ents))
        ent_text = [i.text for i in ents]
        ent_results = pool.map(self.simple_lookup, ent_text)
        pool.close()
        pool.join()
    else:
        ent_results = []
        for ent in ents:
            try:
                result = self.query_geonames(ent.text)
            except ConnectionTimeout:
                # ES timed out: record "no result" and keep going
                result = ""
            ent_results.append(result)
    # Build the per-entity feature dicts from the lookup results.
    for n, ent in enumerate(ents):
        result = ent_results[n]
        #skip_list.add(ent.text.strip())
        ent_label = ent.label_  # destroyed by trimming
        ent = self.clean_entity(ent)
        # vector for just the solo word
        vp = self._feature_word_embedding(ent)
        try:
            word_vec = vp['country_1']
            wv_confid = float(vp['confid_a'])
        except TypeError:
            # no idea why this comes up
            # NOTE(review): _feature_word_embedding returns "" on zero
            # confidence, which makes vp['country_1'] raise TypeError;
            # wv_confid becomes the *string* "0" here vs a float elsewhere —
            # downstream consumers must coerce
            word_vec = ""
            wv_confid = "0"
        # look for explicit mentions of feature names
        class_mention, code_mention = self._feature_location_type_mention(ent)
        # build results-based features
        most_alt = self._feature_most_alternative(result)
        # TODO check if most_common feature really isn't that useful
        most_common = self._feature_most_common(result)
        most_pop = self._feature_most_population(result)
        first_back, second_back = self._feature_first_back(result)
        try:
            # unweighted majority vote over the individual feature guesses
            maj_vote = Counter([word_vec, most_alt, first_back, most_pop,
                                ct_mention
                                #doc_vec_sent, doc_vec
                                ]).most_common()[0][0]
        except Exception as e:
            print("Problem taking majority vote: ", ent, e)
            maj_vote = ""
        if not maj_vote:
            maj_vote = ""
        # We only want all this junk for the labeling task. We just want to straight to features
        # and the model when in production.
        try:
            start = ent.start_char
            end = ent.end_char
            iso_label = maj_vote
            try:
                text_label = self._inv_cts[iso_label]
            except KeyError:
                # maj_vote was "" or an unknown code
                text_label = ""
            task = {"text" : ent.text,
                    "label" : text_label, # human-readable country name
                    "word" : ent.text,
                    "spans" : [{
                        "start" : start,
                        "end" : end,
                    } # make sure to rename for Prodigy
                    ],
                    "features" : {
                            "maj_vote" : iso_label,
                            "word_vec" : word_vec,
                            "first_back" : first_back,
                            #"doc_vec" : doc_vec,
                            "most_alt" : most_alt,
                            "most_pop" : most_pop,
                            "ct_mention" : ct_mention,
                            "ctm_count1" : ctm_count1,
                            "ct_mention2" : ct_mention2,
                            "ctm_count2" : ctm_count2,
                            "wv_confid" : wv_confid,
                            "class_mention" : class_mention, # inferred geonames class from mentions
                            "code_mention" : code_mention,
                            #"places_vec" : places_vec,
                            #"doc_vec_sent" : doc_vec_sent
                            }
                    }
            task_list.append(task)
        except Exception as e:
            # best-effort: log the failing entity and move on
            print(ent.text,)
            print(e)
    return task_list