idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
40,700
def _check_outcome_validity ( self , check_outcome ) : for outcome_id , outcome in self . outcomes . items ( ) : if check_outcome is not outcome : if check_outcome . outcome_id == outcome_id : return False , "outcome id '{0}' existing in state" . format ( check_outcome . outcome_id ) if check_outcome . name == outcome . name : return False , "outcome name '{0}' existing in state" . format ( check_outcome . name ) return True , "valid"
Checks the validity of an outcome
40,701
def _check_data_port_validity(self, check_data_port):
    """Check id, name and connections of the given data port for validity.

    :param check_data_port: the data port to validate
    :return: (validity, message) tuple
    """
    for single_check in (self._check_data_port_id, self._check_data_port_name):
        valid, message = single_check(check_data_port)
        if not valid:
            return False, message
    if not self.is_root_state:
        # the parent knows about data flows connected to this port
        return self.parent.check_data_port_connection(check_data_port)
    from rafcon.core.states.container_state import ContainerState
    if isinstance(self, ContainerState):
        return self.check_data_port_connection(check_data_port)
    return True, "valid"
Checks the validity of a data port
40,702
def check_input_data_type(self):
    """Log an error for every input value whose type does not match its port's data type."""
    for port in self.input_data_ports.values():
        if port.name not in self.input_data:
            continue
        value = self.input_data[port.name]
        if value is None:
            continue  # unset values are not type-checked
        if not isinstance(value, port.data_type):
            logger.error("{0} had an data port error: Input of execute function must be of type '{1}' not '{2}'"
                         " as current value '{3}'".format(self, port.data_type.__name__,
                                                          type(value).__name__, value))
Check the input data types of the state
40,703
def check_output_data_type(self):
    """Log an error for every output value whose type does not match its port's data type."""
    for port in self.output_data_ports.values():
        if port.name not in self.output_data:
            continue
        value = self.output_data[port.name]
        if value is None:
            continue  # unset values are not type-checked
        if not isinstance(value, port.data_type):
            logger.error("{0} had an data port error: Output of execute function must be of type '{1}' not "
                         "'{2}' as current value {3}".format(self, port.data_type.__name__,
                                                             type(value).__name__, value))
Check the output data types of the state
40,704
def change_state_id(self, state_id=None):
    """Exchange the id of the state for a new one.

    :param state_id: the new id; if None a fresh id is generated
    """
    if state_id is None:
        state_id = state_id_generator(used_state_ids=[self.state_id])
    if not self.is_root_state and not self.is_root_state_of_library:
        # ids of siblings, the parent and the current state must not be reused
        forbidden_ids = list(self.parent.states.keys()) + [self.parent.state_id, self.state_id]
        if state_id in forbidden_ids:
            state_id = state_id_generator(used_state_ids=forbidden_ids)
    self._state_id = state_id
Changes the id of the state to a new id
40,705
def get_semantic_data(self, path_as_list):
    """Return the semantic-data entry stored under the given path.

    :param list path_as_list: sequence of keys addressing the entry
    :raises KeyError: if some element of the path does not exist
    """
    entry = self.semantic_data
    for path_element in path_as_list:
        if path_element not in entry:
            raise KeyError("The state with name {1} and id {2} holds no semantic data with path {0}."
                           "".format(path_as_list[:path_as_list.index(path_element) + 1],
                                     self.name, self.state_id))
        entry = entry[path_element]
    return entry
Retrieves an entry of the semantic data .
40,706
def add_semantic_data(self, path_as_list, value, key):
    """Store `value` under `key` inside the semantic-data dict addressed by `path_as_list`.

    :param list path_as_list: path to the dict the entry is added to
    :param value: the value to store
    :param str key: key of the new entry
    :return: the path to the newly added entry
    """
    assert isinstance(key, string_types)
    container = self.get_semantic_data(path_as_list)
    container[key] = value
    return path_as_list + [key]
Adds a semantic data entry .
40,707
def remove_semantic_data(self, path_as_list):
    """Remove and return the semantic-data entry addressed by the given path.

    :param list path_as_list: non-empty sequence of keys addressing the entry
    :return: the removed element
    :raises AttributeError: if path_as_list is empty
    :raises KeyError: if the path does not address an existing entry
    """
    if len(path_as_list) == 0:
        # fix: the original message contained a duplicated word ("but but")
        raise AttributeError("The argument path_as_list is empty but the method remove_semantic_data needs a "
                             "valid path to remove a vividict item.")
    parent_dict = self.get_semantic_data(path_as_list[0:-1])
    removed_element = parent_dict[path_as_list[-1]]
    del parent_dict[path_as_list[-1]]
    return removed_element
Removes an entry from the semantic data vividict.
40,708
def input_data_ports(self, input_data_ports):
    """Setter for the state's input data ports.

    :param dict input_data_ports: dict mapping data_port_id to InputDataPort
        (plain DataPorts are converted to InputDataPorts in place)
    :raises TypeError: if the argument is no dict or contains wrong element types
    :raises AttributeError: if a dict key differs from the port's own id
    :raises ValueError: re-raised if a port rejects this state as parent
    """
    if not isinstance(input_data_ports, dict):
        raise TypeError("input_data_ports must be of type dict")
    # every key must equal the data_port_id of its port
    if [port_id for port_id, port in input_data_ports.items() if not port_id == port.data_port_id]:
        raise AttributeError("The key of the input dictionary and the id of the data port do not match")
    for port_id, port in input_data_ports.items():
        if not isinstance(port, InputDataPort):
            if isinstance(port, DataPort):
                # convert a generic DataPort into an InputDataPort, keeping its attributes
                port = InputDataPort(port.name, port.data_type, port.default_value, port.data_port_id)
                input_data_ports[port_id] = port
            else:
                raise TypeError("Elements of input_data_ports must be of type InputDataPort, given: {0}".format(
                    type(port).__name__))
    old_input_data_ports = self._input_data_ports
    self._input_data_ports = input_data_ports
    for port_id, port in input_data_ports.items():
        try:
            port.parent = self
        except ValueError:
            # the parent setter validates the port; roll back the assignment on failure
            self._input_data_ports = old_input_data_ports
            raise
    # detach replaced ports that still reference this state as their parent
    for old_input_data_port in old_input_data_ports.values():
        if old_input_data_port not in self._input_data_ports.values() and old_input_data_port.parent is self:
            old_input_data_port.parent = None
Property for the _input_data_ports field
40,709
def output_data_ports(self, output_data_ports):
    """Setter for the state's output data ports.

    :param dict output_data_ports: dict mapping data_port_id to OutputDataPort
        (plain DataPorts are converted to OutputDataPorts in place)
    :raises TypeError: if the argument is no dict or contains wrong element types
    :raises AttributeError: if a dict key differs from the port's own id
    :raises ValueError: re-raised if a port rejects this state as parent
    """
    if not isinstance(output_data_ports, dict):
        raise TypeError("output_data_ports must be of type dict")
    # every key must equal the data_port_id of its port
    if [port_id for port_id, port in output_data_ports.items() if not port_id == port.data_port_id]:
        raise AttributeError("The key of the output dictionary and the id of the data port do not match")
    for port_id, port in output_data_ports.items():
        if not isinstance(port, OutputDataPort):
            if isinstance(port, DataPort):
                # convert a generic DataPort into an OutputDataPort, keeping its attributes
                port = OutputDataPort(port.name, port.data_type, port.default_value, port.data_port_id)
                output_data_ports[port_id] = port
            else:
                raise TypeError("Elements of output_data_ports must be of type OutputDataPort, given: {0}".format(
                    type(port).__name__))
    old_output_data_ports = self._output_data_ports
    self._output_data_ports = output_data_ports
    for port_id, port in output_data_ports.items():
        try:
            port.parent = self
        except ValueError:
            # the parent setter validates the port; roll back the assignment on failure
            self._output_data_ports = old_output_data_ports
            raise
    # detach replaced ports that still reference this state as their parent
    for old_output_data_port in old_output_data_ports.values():
        if old_output_data_port not in self._output_data_ports.values() and old_output_data_port.parent is self:
            old_output_data_port.parent = None
Setter for _output_data_ports field
40,710
def income(self, income):
    """Setter for the state's income.

    :param income: the new Income object
    :raises ValueError: if income is no Income instance or rejects this state as parent
    """
    if not isinstance(income, Income):
        raise ValueError("income must be of type Income")
    previous_income = self.income
    self._income = income
    try:
        income.parent = self
    except ValueError:
        # parent assignment failed validation; restore the former income
        self._income = previous_income
        raise
Setter for the state's income
40,711
def outcomes(self, outcomes):
    """Setter for the state's outcomes.

    Ensures the mandatory outcomes "aborted" (-1) and "preempted" (-2) always exist.

    :param dict outcomes: dict mapping outcome_id to Outcome
    :raises TypeError: if the argument is no dict or contains non-Outcome values
    :raises AttributeError: if a dict key differs from the outcome's own id
    :raises ValueError: re-raised if an outcome rejects this state as parent
    """
    if not isinstance(outcomes, dict):
        raise TypeError("outcomes must be of type dict")
    if [outcome_id for outcome_id, outcome in outcomes.items() if not isinstance(outcome, Outcome)]:
        raise TypeError("element of outcomes must be of type Outcome")
    # every key must equal the outcome_id of its outcome
    if [outcome_id for outcome_id, outcome in outcomes.items() if not outcome_id == outcome.outcome_id]:
        raise AttributeError("The key of the outcomes dictionary and the id of the outcome do not match")
    old_outcomes = self.outcomes
    self._outcomes = outcomes
    for outcome_id, outcome in outcomes.items():
        try:
            outcome.parent = self
        except ValueError:
            # roll back to the former outcomes if one outcome rejects this parent
            self._outcomes = old_outcomes
            raise
    # add the default outcomes if they were not handed in
    if -1 not in outcomes:
        self._outcomes[-1] = Outcome(outcome_id=-1, name="aborted", parent=self)
    if -2 not in outcomes:
        self._outcomes[-2] = Outcome(outcome_id=-2, name="preempted", parent=self)
    # detach replaced outcomes that still reference this state as their parent
    for old_outcome in old_outcomes.values():
        if old_outcome not in iter(list(self._outcomes.values())) and old_outcome.parent is self:
            old_outcome.parent = None
Setter for _outcomes field
40,712
def get_next_upper_library_root_state(self):
    """Climb the parent hierarchy and return the closest library root state.

    :return: the next library root state above (or being) self, None if there is none
    """
    from rafcon.core.state_machine import StateMachine
    if self.is_root_state_of_library:
        return self
    current = self
    # stop at the state machine itself or when no parent is left
    while current.parent is not None and not isinstance(current.parent, StateMachine):
        if current.parent.is_root_state_of_library:
            return current.parent
        current = current.parent
    return None
Get next upper library root state
40,713
def get_uppermost_library_root_state(self):
    """Walk upwards over nested library root states and return the outermost one.

    :return: the uppermost library root state, or None if self is not inside a library
    """
    current = self.get_next_upper_library_root_state()
    candidate = current
    # climb as long as a further library root state exists above the current one
    while candidate and current is candidate:
        if current:
            candidate = current.parent.get_next_upper_library_root_state()
        if candidate:
            current = candidate
    return current
Find state_copy of uppermost LibraryState
40,714
def cluster_on_extra_high_voltage(network, busmap, with_time=True):
    """Create a new clustered pypsa.Network according to the EHV-clustering busmap.

    :param network: the original pypsa network
    :param busmap: mapping of all bus ids to bus ids of the same network (cluster targets)
    :param with_time: whether snapshots and time-series data are carried over
    :return: the new clustered network
    """
    network_c = Network()
    # aggregate buses; x/y coordinates are taken from the leading bus of each cluster
    buses = aggregatebuses(network, busmap, {'x': _leading(busmap, network.buses),
                                             'y': _leading(busmap, network.buses)})
    # keep only branch components whose bus0 survived the clustering
    lines = network.lines.copy()
    mask = lines.bus0.isin(buses.index)
    lines = lines.loc[mask, :]
    links = network.links.copy()
    mask = links.bus0.isin(buses.index)
    links = links.loc[mask, :]
    transformers = network.transformers.copy()
    mask = transformers.bus0.isin(buses.index)
    transformers = transformers.loc[mask, :]
    io.import_components_from_dataframe(network_c, buses, "Bus")
    io.import_components_from_dataframe(network_c, lines, "Line")
    io.import_components_from_dataframe(network_c, links, "Link")
    io.import_components_from_dataframe(network_c, transformers, "Transformer")
    if with_time:
        # NOTE(review): the direct assignment followed by set_snapshots looks redundant — confirm
        network_c.snapshots = network.snapshots
        network_c.set_snapshots(network.snapshots)
        network_c.snapshot_weightings = network.snapshot_weightings.copy()
    # generators are aggregated per cluster; control and weight are set on the source network
    network.generators.control = "PV"
    network.generators['weight'] = 1
    new_df, new_pnl = aggregategenerators(network, busmap, with_time)
    io.import_components_from_dataframe(network_c, new_df, 'Generator')
    for attr, df in iteritems(new_pnl):
        io.import_series_from_dataframe(network_c, df, 'Generator', attr)
    # aggregate all remaining one-port components (generators were handled above)
    aggregate_one_ports = components.one_port_components.copy()
    aggregate_one_ports.discard('Generator')
    for one_port in aggregate_one_ports:
        new_df, new_pnl = aggregateoneport(network, busmap, component=one_port, with_time=with_time)
        io.import_components_from_dataframe(network_c, new_df, one_port)
        for attr, df in iteritems(new_pnl):
            io.import_series_from_dataframe(network_c, df, one_port, attr)
    network_c.determine_network_topology()
    return network_c
Main function of the EHV - Clustering approach . Creates a new clustered pypsa . Network given a busmap mapping all bus_ids to other bus_ids of the same network .
40,715
def graph_from_edges(edges):
    """Build an undirected multigraph from a list of weighted-edge tuples.

    :param edges: iterable of (node0, node1, weight, key) tuples
    :return: networkx.MultiGraph
    """
    multigraph = nx.MultiGraph()
    for node_a, node_b, weight, key in edges:
        multigraph.add_edge(node_a, node_b, weight=weight, key=key)
    return multigraph
Constructs an undirected multigraph from a list containing data on weighted edges .
40,716
def gen(nodes, n, graph):
    """Yield (node_chunk, graph_copy) pairs for multiprocessing workers.

    The graph is copied once; all yielded chunks of size n share that copy.
    """
    shared_copy = graph.copy()
    for start in range(0, len(nodes), n):
        yield nodes[start:start + n], shared_copy
Generator for applying multiprocessing .
40,717
def on_drag_data_received(self, widget, context, x, y, data, info, time):
    """Drop handler: moves the dragged state to the mouse position.

    Receives the state_id from the LibraryTree drag source; the state is expected
    to already exist inside the currently selected container state.
    """
    state_id_insert = data.get_text()
    parent_m = self.model.selection.get_selected_state()
    if not isinstance(parent_m, ContainerStateModel):
        return
    state_v = self.canvas.get_view_for_model(parent_m.states[state_id_insert])
    pos_start = state_v.model.get_meta_data_editor()['rel_pos']
    motion = InMotion(state_v, self.view.editor)
    # start from the state's current canvas position, then move it to the drop point
    motion.start_move(self.view.editor.get_matrix_i2v(state_v).transform_point(pos_start[0], pos_start[1]))
    motion.move((x, y))
    motion.stop_move()
    state_v.model.set_meta_data_editor('rel_pos', motion.item.position)
    self.canvas.wait_for_update(trigger_update=True)
    # merge this meta-data change with the preceding action in the modification history
    self._meta_data_changed(None, state_v.model, 'append_to_last_change', True)
Receives state_id from LibraryTree and moves the state to the position of the mouse
40,718
def on_drag_motion(self, widget, context, x, y, time):
    """Update selection during a drag: focus the container state under the cursor."""
    editor = self.view.editor
    hovered_item = ItemFinder(editor).get_item_at_point((x, y))
    if isinstance(hovered_item, NameView):
        # a NameView belongs to a state; operate on the state instead
        hovered_item = hovered_item.parent
    if hovered_item is None:
        editor.unselect_all()
        return
    if not isinstance(hovered_item.model, ContainerStateModel):
        return
    if len(editor.selected_items) == 1 and hovered_item in editor.selected_items:
        return  # nothing to do, the hovered container is already the sole selection
    if len(editor.selected_items) > 0:
        editor.unselect_all()
    if not rafcon.gui.singleton.global_gui_config.get_config_value('DRAG_N_DROP_WITH_FOCUS'):
        # suppress focus-changed handling while setting the focus
        editor.handler_block(self.focus_changed_handler_id)
    editor.focused_item = hovered_item
    if not rafcon.gui.singleton.global_gui_config.get_config_value('DRAG_N_DROP_WITH_FOCUS'):
        editor.handler_unblock(self.focus_changed_handler_id)
Changes the selection on mouse over during drag motion
40,719
def _copy_selection(self, *event):
    """Copy the current selection to the clipboard if the editor should react."""
    if not react_to_event(self.view, self.view.editor, event):
        return
    logger.debug("copy selection")
    global_clipboard.copy(self.model.selection)
    return True
Copies the current selection to the clipboard .
40,720
def _cut_selection(self, *event):
    """Cut the current selection and place it on the clipboard if the editor should react."""
    if not react_to_event(self.view, self.view.editor, event):
        return
    logger.debug("cut selection")
    global_clipboard.cut(self.model.selection)
    return True
Cuts the current selection and copies it to the clipboard.
40,721
def _paste_clipboard(self, *event):
    """Paste the clipboard content into the currently selected state if the editor should react."""
    if not react_to_event(self.view, self.view.editor, event):
        return
    logger.debug("Paste")
    gui_helper_state_machine.paste_into_selected_state(self.model)
    return True
Paste the current clipboard into the current selection if the current selection is a container state .
40,722
def _move_focused_item_into_viewport ( self , view , focused_item ) : self . view . editor . handler_block ( self . drag_motion_handler_id ) self . move_item_into_viewport ( focused_item ) self . view . editor . handler_unblock ( self . drag_motion_handler_id )
Called when an item is focused; moves the item into the viewport
40,723
def move_item_into_viewport(self, item):
    """Pan (and possibly zoom) the editor so the given item fills the viewport.

    :param item: a StateView, a child item of a state, or a port-like object
        whose parent is a StateView
    """
    if not item:
        return
    HORIZONTAL = 0
    VERTICAL = 1
    # normalize the argument to the StateView that shall be shown
    if not isinstance(item, Item):
        state_v = item.parent
    elif not isinstance(item, StateView):
        state_v = self.canvas.get_parent(item)
    else:
        state_v = item
    viewport_size = self.view.editor.get_allocation().width, self.view.editor.get_allocation().height
    state_size = self.view.editor.get_matrix_i2v(state_v).transform_distance(state_v.width, state_v.height)
    # ratio between viewport and state extent; 1 means the state fits exactly
    min_relative_size = min(viewport_size[i] / state_size[i] for i in [HORIZONTAL, VERTICAL])
    if min_relative_size != 1:
        # leave a border margin around the state; dampen zoom-in logarithmically
        margin_relative = 1. / gui_constants.BORDER_WIDTH_STATE_SIZE_FACTOR
        zoom_factor = min_relative_size * (1 - margin_relative)
        if zoom_factor > 1:
            zoom_base = 4
            zoom_factor = max(1, math.log(zoom_factor * zoom_base, zoom_base))
        self.view.editor.zoom(zoom_factor)
        self.canvas.wait_for_update()
    # re-query the geometry after zooming, then center the state in the viewport
    state_pos = self.view.editor.get_matrix_i2v(state_v).transform_point(0, 0)
    state_size = self.view.editor.get_matrix_i2v(state_v).transform_distance(state_v.width, state_v.height)
    viewport_size = self.view.editor.get_allocation().width, self.view.editor.get_allocation().height
    padding_offset_horizontal = (viewport_size[HORIZONTAL] - state_size[HORIZONTAL]) / 2.
    padding_offset_vertical = (viewport_size[VERTICAL] - state_size[VERTICAL]) / 2.
    self.view.editor.hadjustment.set_value(state_pos[HORIZONTAL] - padding_offset_horizontal)
    self.view.editor.vadjustment.set_value(state_pos[VERTICAL] - padding_offset_vertical)
Causes the item to be moved into the viewport
40,724
def state_machine_destruction(self, model, prop_name, info):
    """Remove the root state view from the canvas when this state machine is destructed."""
    if self.model is not model:
        return  # notification concerns a different state machine
    self.canvas.get_view_for_model(self.root_state_m).remove()
Clean up when state machine is being destructed
40,725
def meta_changed_notify_after(self, state_machine_m, _, info):
    """Handle a notification about changed meta data of some model of the state machine.

    Changes originating from this editor or from meta-data loading are ignored.
    'show_content' changes of library states add/remove the content view; all
    other changes are applied to the corresponding view.
    """
    meta_signal_message = info['arg']
    if meta_signal_message.origin == "graphical_editor_gaphas":
        # prevent loops: this editor caused the change itself
        return
    if meta_signal_message.origin == "load_meta_data":
        return
    notification = meta_signal_message.notification
    if not notification:
        return
    if self.model.ongoing_complex_actions:
        return
    model = notification.model
    view = self.canvas.get_view_for_model(model)
    if meta_signal_message.change == 'show_content':
        library_state_m = model
        library_state_v = view
        if library_state_m.meta['gui']['show_content'] is not library_state_m.show_content():
            logger.warning("The content of the LibraryState won't be shown, because "
                           "MAX_VISIBLE_LIBRARY_HIERARCHY is 1.")
        if library_state_m.show_content():
            if not library_state_m.state_copy_initialized:
                logger.warning("Show library content without initialized state copy does not work {0}"
                               "".format(library_state_m))
            logger.debug("Show content of {}".format(library_state_m.state))
            # scale the content so it fits into the library state's view
            gui_helper_meta_data.scale_library_content(library_state_m)
            self.add_state_view_for_model(library_state_m.state_copy, view,
                                          hierarchy_level=library_state_v.hierarchy_level + 1)
        else:
            logger.debug("Hide content of {}".format(library_state_m.state))
            state_copy_v = self.canvas.get_view_for_model(library_state_m.state_copy)
            if state_copy_v:
                state_copy_v.remove()
    else:
        if isinstance(view, StateView):
            view.apply_meta_data(recursive=meta_signal_message.affects_children)
        else:
            view.apply_meta_data()
        self.canvas.request_update(view, matrix=True)
        self.canvas.wait_for_update()
Handle notification about the change of a state's meta data
40,726
def add_transition_view_for_model(self, transition_m, parent_state_m):
    """Create a TransitionView for the given model, add it to the canvas and connect its ports.

    :return: the created TransitionView
    """
    parent_state_v = self.canvas.get_view_for_model(parent_state_m)
    transition_v = TransitionView(transition_m, parent_state_v.hierarchy_level)
    self.canvas.add(transition_v, parent_state_v, index=None)
    self._connect_transition_to_ports(transition_m, transition_v, parent_state_m, parent_state_v)
    return transition_v
Creates a TransitionView and adds it to the canvas
40,727
def add_data_flow_view_for_model(self, data_flow_m, parent_state_m):
    """Create a DataFlowView for the given model, add it to the canvas and connect its ports."""
    parent_state_v = self.canvas.get_view_for_model(parent_state_m)
    data_flow_v = DataFlowView(data_flow_m, parent_state_v.hierarchy_level)
    self.canvas.add(data_flow_v, parent_state_v, index=1)
    self._connect_data_flow_to_ports(data_flow_m, data_flow_v, parent_state_m)
Creates a DataFlowView and adds it to the canvas
40,728
def react_to_event(self, event):
    """Return True if the event targets this editor and its state machine is selected."""
    if not react_to_event(self.view, self.view.editor, event):
        return False
    selected_id = rafcon.gui.singleton.state_machine_manager_model.selected_state_machine_id
    if selected_id != self.model.state_machine.state_machine_id:
        return False
    return True
Check whether the given event should be handled
40,729
async def initialize(bot: Bot, host, password, rest_port, ws_port, timeout=30):
    """Initialize the websocket connection to the lavalink player.

    :param bot: the discord bot instance
    :param host: lavalink server host
    :param password: lavalink server password
    :param rest_port: REST API port
    :param ws_port: websocket port
    :param timeout: seconds to wait for the node connection
    :return: the connected lavalink node
    """
    global _loop
    _loop = bot.loop
    player_manager.user_id = bot.user.id
    player_manager.channel_finder_func = bot.get_channel
    register_event_listener(_handle_event)
    register_update_listener(_handle_update)
    shard_count = bot.shard_count if bot.shard_count is not None else 1
    lavalink_node = node.Node(
        _loop,
        dispatch,
        bot._connection._get_websocket,
        host,
        password,
        port=ws_port,
        rest=rest_port,
        user_id=player_manager.user_id,
        num_shards=shard_count,
    )
    await lavalink_node.connect(timeout=timeout)
    bot.add_listener(node.on_socket_response)
    bot.add_listener(_on_guild_remove, name="on_guild_remove")
    return lavalink_node
Initializes the websocket connection to the lavalink player .
40,730
def register_event_listener(coro):
    """Register a coroutine function that receives lavalink event information.

    :raises TypeError: if coro is not a coroutine function
    """
    if not asyncio.iscoroutinefunction(coro):
        raise TypeError("Function is not a coroutine.")
    # avoid duplicate registrations
    if coro not in _event_listeners:
        _event_listeners.append(coro)
Registers a coroutine to receive lavalink event information .
40,731
def register_update_listener(coro):
    """Register a coroutine function that receives lavalink player update information.

    :raises TypeError: if coro is not a coroutine function
    """
    if not asyncio.iscoroutinefunction(coro):
        raise TypeError("Function is not a coroutine.")
    # avoid duplicate registrations
    if coro not in _update_listeners:
        _update_listeners.append(coro)
Registers a coroutine to receive lavalink player update information .
40,732
def register_stats_listener(coro):
    """Register a coroutine function that receives lavalink server stats information.

    :raises TypeError: if coro is not a coroutine function
    """
    if not asyncio.iscoroutinefunction(coro):
        raise TypeError("Function is not a coroutine.")
    # avoid duplicate registrations
    if coro not in _stats_listeners:
        _stats_listeners.append(coro)
Registers a coroutine to receive lavalink server stats information .
40,733
def prepare_destruction(self):
    """Break circular references so the garbage collector can reclaim this object."""
    self._tool = None
    self._painter = None
    self.relieve_model(self._selection)
    self._selection = None
    # drop all observer bookkeeping dictionaries
    for registry in (self._Observer__PROP_TO_METHS, self._Observer__METH_TO_PROPS,
                     self._Observer__PAT_TO_METHS, self._Observer__METH_TO_PAT,
                     self._Observer__PAT_METH_TO_KWARGS):
        registry.clear()
Get rid of circular references
40,734
def get_port_at_point(self, vpos, distance=10, exclude=None, exclude_port_fun=None):
    """Find the connectable port closest to the given view position.

    :param vpos: (x, y) position in view coordinates
    :param distance: maximum search radius in view coordinates
    :param exclude: items to skip
    :param exclude_port_fun: predicate for ports to skip
    :return: (item, port, glue_pos); all None if nothing is within reach
    """
    vx, vy = vpos
    best_dist = distance
    best_item = None
    best_port = None
    best_glue = None
    search_rect = (vx - distance, vy - distance, distance * 2, distance * 2)
    for candidate in self.get_items_in_rectangle(search_rect, reverse=True):
        if exclude and candidate in exclude:
            continue
        for port in candidate.ports():
            if not port.connectable:
                continue
            if exclude_port_fun and exclude_port_fun(port):
                continue
            ix, iy = self.get_matrix_v2i(candidate).transform_point(vx, vy)
            glue_point, dist = port.glue((ix, iy))
            if dist > best_dist:
                continue
            # equal distances are overridden by later candidates
            best_dist = dist
            best_item = candidate
            best_port = port
            best_glue = self.get_matrix_i2v(candidate).transform_point(*glue_point)
    return best_item, best_port, best_glue
Find item with port closest to specified position .
40,735
def queue_draw_item(self, *items):
    """Queue a redraw for the given items; ports are redrawn via their parent item."""
    drawable_items = []
    for item in items:
        if isinstance(item, Element):
            drawable_items.append(item)
        else:
            try:
                drawable_items.append(item.parent)
            except AttributeError:
                pass  # objects without a parent are silently ignored
    super(ExtendedGtkView, self).queue_draw_item(*drawable_items)
Extends the base class method to allow Ports to be passed as item
40,736
def select_item(self, items):
    """Add the given item(s) to the set of selected items.

    Emits 'selection-changed' only if the selection actually grew.
    """
    if not items:
        return
    if not hasattr(items, "__iter__"):
        items = (items,)
    changed = False
    with self._suppress_selection_events():
        for item in items:
            self.queue_draw_item(item)
            if item is None or item.model in self._selection:
                continue
            self._selection.add(item.model)
            changed = True
    if changed:
        self.emit('selection-changed', self._get_selected_items())
Select items. This adds the items to the set of selected items.
40,737
def unselect_item(self, item):
    """Remove the given item from the selection and emit 'selection-changed'."""
    self.queue_draw_item(item)
    if item.model not in self._selection:
        return
    with self._suppress_selection_events():
        self._selection.remove(item.model)
    self.emit('selection-changed', self._get_selected_items())
Unselect an item .
40,738
def unselect_all(self):
    """Clear the whole selection (this implicitly clears the focused item, too)."""
    previously_selected = self._get_selected_items()
    with self._suppress_selection_events():
        self._selection.clear()
    self.queue_draw_item(*previously_selected)
    self.emit('selection-changed', self._get_selected_items())
Clearing the selected_item also clears the focused_item .
40,739
def handle_new_selection(self, items):
    """Replace the current selection with the models of the given item(s)."""
    if items is None:
        items = ()
    elif not hasattr(items, "__iter__"):
        items = (items,)  # a single item is wrapped into a tuple
    self._selection.handle_new_selection({item.model for item in items})
Determines the selection
40,740
def _get_focused_item ( self ) : focused_model = self . _selection . focus if not focused_model : return None return self . canvas . get_view_for_model ( focused_model )
Returns the currently focused item
40,741
def _set_focused_item(self, item):
    """Set the focus to the given item; a falsy item removes the focus instead."""
    if not item:
        return self._del_focused_item()
    if item.model is self._selection.focus:
        return  # already focused, nothing to do
    self.queue_draw_item(self._focused_item, item)
    self._selection.focus = item.model
    self.emit('focus-changed', item)
Sets the focus to the passed item
40,742
def log_to_ganttplot(execution_history_items):
    """Plot the execution history as a Gantt chart (example use of the DataFrame representation)."""
    import matplotlib.pyplot as plt
    import matplotlib.dates as dates
    import numpy as np
    frame = log_to_DataFrame(execution_history_items)
    # keep the order of first appearance instead of np.unique's sorted order
    unique_states, first_idx = np.unique(frame.path_by_name, return_index=True)
    ordered_unique_states = np.array(frame.path_by_name)[np.sort(first_idx)]
    name2idx = {name: row for row, name in enumerate(ordered_unique_states)}
    calldate = dates.date2num(frame.timestamp_call.dt.to_pydatetime())
    returndate = dates.date2num(frame.timestamp_return.dt.to_pydatetime())
    state2color = {'HierarchyState': 'k', 'ExecutionState': 'g',
                   'BarrierConcurrencyState': 'y', 'PreemptiveConcurrencyState': 'y'}
    fig, ax = plt.subplots(1, 1)
    # NOTE(review): barh's keyword 'bottom' was renamed to 'y' in newer matplotlib — confirm target version
    ax.barh(bottom=[name2idx[name] for name in frame.path_by_name], width=returndate - calldate,
            left=calldate, align='center', color=[state2color[t] for t in frame.state_type], lw=0.0)
    plt.yticks(list(range(len(ordered_unique_states))), ordered_unique_states)
Example how to use the DataFrame representation
40,743
def call_gui_callback(callback, *args, **kwargs):
    """Run callback(*args) inside the GLib main loop and block until it finished.

    Wrapper around GLib.idle_add that forwards the callback's return value and
    re-raises any exception of the callback in the calling thread.

    :param callback: the function to execute in the GUI thread
    :param kwargs: may contain 'priority' for GLib.idle_add (defaults to PRIORITY_LOW)
    :return: the callback's return value
    """
    from future.utils import raise_
    from threading import Condition
    import sys
    from rafcon.utils import log
    global exception_info, result
    from gi.repository import GLib
    condition = Condition()
    exception_info = None

    @log.log_exceptions()
    def fun():
        """Call the callback in the GLib thread and notify the waiting thread afterwards."""
        global exception_info, result
        result = None
        try:
            result = callback(*args)
        except:
            # remember the exception so it can be re-raised in the calling thread
            exception_info = sys.exc_info()
        finally:
            condition.acquire()
            condition.notify()
            condition.release()

    if "priority" in kwargs:
        priority = kwargs["priority"]
    else:
        priority = GLib.PRIORITY_LOW
    condition.acquire()
    GLib.idle_add(fun, priority=priority)
    # block until fun() has been executed by the GLib main loop
    condition.wait()
    condition.release()
    if exception_info:
        e_type, e_value, e_traceback = exception_info
        raise_(e_type, e_value, e_traceback)
    return result
Wrapper method for GLib . idle_add
40,744
def create_tab_header_label(tab_name, icons):
    """Create a rotated notebook tab header wrapped in a tooltip event box.

    Depending on the USE_ICONS_AS_TAB_LABELS config value either an icon or the
    rotated tab title is shown.

    :param str tab_name: name of the tab (also used as tooltip text)
    :param dict icons: mapping of tab names to icon code points
    :return: the Gtk.EventBox holding the label
    """
    tooltip_event_box = Gtk.EventBox()
    tooltip_event_box.set_tooltip_text(tab_name)
    tab_label = Gtk.Label()
    if global_gui_config.get_config_value('USE_ICONS_AS_TAB_LABELS', True):
        tab_label.set_markup('<span font_desc="%s %s">&#x%s;</span>'
                             % (constants.ICON_FONT, constants.FONT_SIZE_BIG, icons[tab_name]))
    else:
        tab_label.set_text(get_widget_title(tab_name))
    tab_label.set_angle(90)
    tab_label.show()
    tooltip_event_box.add(tab_label)
    tooltip_event_box.set_visible_window(False)
    tooltip_event_box.show()
    return tooltip_event_box
Create the tab header labels for notebook tabs . If USE_ICONS_AS_TAB_LABELS is set to True in the gui_config icons are used as headers . Otherwise the titles of the tabs are rotated by 90 degrees .
40,745
def create_button_label(icon, font_size=constants.FONT_SIZE_NORMAL):
    """Create a label showing the given icon, for usage on buttons.

    :param icon: hexadecimal code point of the icon glyph
    :param font_size: the font size to render the icon with
    :return: the ready-to-use Gtk.Label
    """
    button_label = Gtk.Label()
    set_label_markup(button_label, '&#x{};'.format(icon), constants.ICON_FONT, font_size)
    button_label.show()
    return button_label
Create a button label with a chosen icon .
40,746
def get_widget_title(tab_label_text):
    """Transform a notebook tab label into a title.

    Underscores become white spaces and every word is upper-cased.

    Bug fix: the original called title.strip() without using its return value
    (str is immutable, so the call was a no-op) and therefore returned the
    title with a trailing space.

    :param str tab_label_text: the tab label text, e.g. "graphical_editor"
    :return: the cleaned-up title, e.g. "GRAPHICAL EDITOR"
    """
    return ' '.join(word.upper() for word in tab_label_text.split('_'))
Transform Notebook tab label to title by replacing underscores with white spaces and capitalizing the first letter of each word .
40,747
def get_notebook_tab_title(notebook, page_num):
    """Return the title of the notebook tab at the given page number.

    The title is derived from the tooltip text of the tab's event box.
    """
    page = notebook.get_nth_page(page_num)
    tab_label_eventbox = notebook.get_tab_label(page)
    return get_widget_title(tab_label_eventbox.get_tooltip_text())
Helper function that gets a notebook's tab title given its page number
40,748
def set_notebook_title(notebook, page_num, title_label):
    """Write the tab title of the given notebook page into title_label.

    :return: the title text that was set
    """
    text = get_notebook_tab_title(notebook, page_num)
    set_label_markup(title_label, text, constants.INTERFACE_FONT, constants.FONT_SIZE_BIG,
                     constants.LETTER_SPACING_1PT)
    return text
Set the title of a GTK notebook to one of its tabs' titles
40,749
def create_menu_box_with_icon_and_label(label_text):
    """Create a horizontal menu-item box holding an icon label and a text label.

    Replacement for the former Gtk ImageMenuItem.

    :param str label_text: text for the accel label
    :return: (box, icon_label, text_label)
    """
    box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, 10)
    box.set_border_width(0)
    icon_label = Gtk.Label()
    text_label = Gtk.AccelLabel.new(label_text)
    text_label.set_xalign(0)
    box.pack_start(icon_label, False, False, 0)
    box.pack_start(text_label, True, True, 0)
    return box, icon_label, text_label
Creates a MenuItem box which is a replacement for the former ImageMenuItem . The box contains a label for the icon and one for the text .
40,750
def set_window_size_and_position(window, window_key):
    """Restore size, position and maximized state of a window from the runtime config.

    Falls back to the default size if nothing is stored, and to the mouse
    position if the stored position lies outside the screen. Maximizing is
    triggered last so the stored size/position are restored as well.

    :param window: the Gtk window to adjust
    :param str window_key: prefix of the window's keys in the runtime config
    """
    size = global_runtime_config.get_config_value(window_key + '_WINDOW_SIZE')
    position = global_runtime_config.get_config_value(window_key + '_WINDOW_POS')
    maximized = global_runtime_config.get_config_value(window_key + '_WINDOW_MAXIMIZED')
    if not maximized:
        window.unmaximize()
    if not size:
        size = constants.WINDOW_SIZE[window_key + '_WINDOW']
    window.resize(*size)
    if position:
        # clamp to non-negative coordinates and only restore on-screen positions
        position = (max(0, position[0]), max(0, position[1]))
        screen_width = Gdk.Screen.width()
        screen_height = Gdk.Screen.height()
        if position[0] < screen_width and position[1] < screen_height:
            window.move(*position)
        else:
            window.set_position(Gtk.WindowPosition.MOUSE)
    if maximized:
        window.maximize()
    window.show()
Adjust GTK Window s size position and maximized state according to the corresponding values in the runtime_config file . The maximize method is triggered last to restore also the last stored size and position of the window . If the runtime_config does not exist or the corresponding values are missing in the file default values for the window size are used and the mouse position is used to adjust the window s position .
40,751
def react_to_event(view, widget, event):
    """Decide whether the given widget should react to the passed event.

    The widget reacts if it (or one of its children) holds the focus and the
    event is either not a key event or carries no modifier information.

    :param view: the view the widget belongs to; falsy views never react
    :param widget: the Gtk widget in question
    :param event: event tuple; index 1 may hold a Gdk.ModifierType for key events
    :return: True if the event should be handled
    """
    if not view:
        return False
    if not isinstance(widget, Gtk.Widget):
        return False
    # the widget must hold the focus itself or via a direct focus child
    child_is_focus = bool(widget.get_focus_child()) if isinstance(widget, Gtk.Container) else False
    if not child_is_focus and not widget.is_focus():
        return False

    def has_focus(current):
        # recursive focus check over the whole widget subtree
        if current.has_focus():
            return True
        if not isinstance(current, Gtk.Container):
            return False
        return any(has_focus(child) for child in current.get_children())

    if has_focus(widget):
        return True
    # non-key events (no modifier information at index 1) are always handled
    if len(event) < 2 or (len(event) >= 2 and not isinstance(event[1], Gdk.ModifierType)):
        return True
    return False
Checks whether the widget is supposed to react to passed event
40,752
def is_event_of_key_string(event, key_string):
    """Check whether the event's key value matches the accelerator described by key_string.

    Also verifies the event is a plain key event (no Gdk.ModifierType at index 1).
    """
    if len(event) < 2:
        return False
    if isinstance(event[1], Gdk.ModifierType):
        return False
    return event[0] == Gtk.accelerator_parse(key_string)[0]
Condition check if key string represent the key value of handed event and whether the event is of right type
40,753
def avoid_parallel_execution(func):
    """Decorator that prevents re-entrant/parallel execution of *func*.

    While a call is in progress, further calls are skipped (and logged)
    instead of being executed.
    """
    def func_wrapper(*args, **kwargs):
        if getattr(func, "currently_executing", False):
            logger.verbose("Avoid parallel execution of function {}".format(func))
            return
        func.currently_executing = True
        try:
            return func(*args, **kwargs)
        finally:
            # always clear the guard, even if func raised
            func.currently_executing = False
    return func_wrapper
A decorator to avoid the parallel execution of a function .
40,754
def run(self):
    """Execute the library state by delegating to its wrapped state copy."""
    self.state_execution_status = StateExecutionStatus.ACTIVE
    logger.debug("Entering library state '{0}' with name '{1}'".format(self.library_name, self.name))
    # forward the execution context into the wrapped state copy
    self.state_copy._run_id = self._run_id
    self.state_copy.input_data = self.input_data
    self.state_copy.output_data = self.output_data
    self.state_copy.execution_history = self.execution_history
    self.state_copy.backward_execution = self.backward_execution
    self.state_copy.run()
    logger.debug("Exiting library state '{0}' with name '{1}'".format(self.library_name, self.name))
    self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE
    # propagate the copy's outcome as this state's outcome
    self.finalize(self.state_copy.final_outcome)
This defines the sequence of actions that are taken when the library state is executed
40,755
def remove_outcome(self, outcome_id, force=False, destroy=True):
    """Guarded override: outcomes of a library state cannot be removed unless forced.

    :raises NotImplementedError: when called without ``force=True``
    """
    if not force:
        raise NotImplementedError("Remove outcome is not implemented for library state {}".format(self))
    return State.remove_outcome(self, outcome_id, force, destroy)
Overrides the remove_outcome method of the State class. Prevents the user from removing an outcome from the library state.
40,756
def remove_output_data_port(self, data_port_id, force=False, destroy=True):
    """Guarded override: output data ports of a library state cannot be removed unless forced.

    :raises NotImplementedError: when called without ``force=True``
    """
    if not force:
        raise NotImplementedError("Remove output data port is not implemented for library state {}".format(self))
    return State.remove_output_data_port(self, data_port_id, force, destroy)
Overrides the remove_output_data_port method of the State class. Prevents the user from removing an output data port from the library state.
40,757
def library_hierarchy_depth(self):
    """Return how many library levels enclose this state (including itself)."""
    current_library_hierarchy_depth = 1
    library_root_state = self.get_next_upper_library_root_state()
    # walk up through enclosing library root states, counting each level
    while library_root_state is not None:
        current_library_hierarchy_depth += 1
        library_root_state = library_root_state.parent.get_next_upper_library_root_state()
    return current_library_hierarchy_depth
Calculates the library hierarchy depth
40,758
def load(self, config_file=None, path=None):
    """Load the configuration from a specific file.

    Falls back to the config file shipped with the package when neither
    *config_file* nor *path* is given.
    """
    if config_file is None:
        if path is None:
            # neither file nor path given: use the packaged default config
            path, config_file = split(resource_filename(__name__, CONFIG_FILE))
        else:
            config_file = CONFIG_FILE
    super(Config, self).load(config_file, path)
Loads the configuration from a specific file
40,759
def state_type_changed(self, model, prop_name, info):
    """Reopen the state editor when the state's type was changed."""
    msg = info['arg']
    # only react after the (root) state type change has actually been applied
    if msg.action in ['change_state_type', 'change_root_state_type'] and msg.after:
        import rafcon.gui.singleton as gui_singletons
        msg = info['arg']
        # the last affected model is the newly created state model
        new_state_m = msg.affected_models[-1]
        states_editor_ctrl = gui_singletons.main_window_controller.get_controller('states_editor_ctrl')
        states_editor_ctrl.recreate_state_editor(self.model, new_state_m)
Reopen state editor when state type is changed
40,760
def state_destruction(self, model, prop_name, info):
    """Close the state editor page of a state that is being destructed."""
    import rafcon.gui.singleton as gui_singletons
    states_editor_ctrl = gui_singletons.main_window_controller.get_controller('states_editor_ctrl')
    state_identifier = states_editor_ctrl.get_state_identifier(self.model)
    states_editor_ctrl.close_page(state_identifier, delete=True)
Close state editor when state is being destructed
40,761
def meta_changed(self, model, prop_name, info):
    """Mark the state machine dirty on meta data changes and forward the signal."""
    # meta data changes are unsaved changes
    self.state_machine.marked_dirty = True
    msg = info.arg
    if model is not self and msg.change.startswith('sm_notification_'):
        # strip the state-machine notification prefix before re-emitting
        msg = msg._replace(change=msg.change.replace('sm_notification_', '', 1))
        self.state_meta_signal.emit(msg)
When the meta was changed we have to set the dirty flag as the changes are unsaved
40,762
def action_signal_triggered(self, model, prop_name, info):
    """Mark the state machine dirty when an action was performed and forward the signal.

    The performed action represents an unsaved change, hence the dirty flag.
    """
    self.state_machine.marked_dirty = True
    msg = info.arg
    if model is not self and msg.action.startswith('sm_notification_'):
        # strip the state-machine notification prefix before re-emitting
        msg = msg._replace(action=msg.action.replace('sm_notification_', '', 1))
        self.state_action_signal.emit(msg)
    # note: the original carried a dead `else: pass` branch, removed here
When the action was performed we have to set the dirty flag as the changes are unsaved
40,763
def load_meta_data(self, path=None, recursively=True):
    """Load the state machine model's meta data from the file system.

    :param path: Optional alternative directory containing the meta data file
    :param recursively: Whether to also load the root state's meta data
    """
    meta_data_path = path if path is not None else self.state_machine.file_system_path
    if meta_data_path:
        path_meta_data = os.path.join(meta_data_path, storage.FILE_NAME_META_DATA)
        try:
            tmp_meta = storage.load_data_file(path_meta_data)
        except ValueError:
            # missing/broken meta data file: start with empty meta data
            tmp_meta = {}
    else:
        tmp_meta = {}
    tmp_meta = Vividict(tmp_meta)
    if recursively:
        root_state_path = None if not path else os.path.join(path, self.root_state.state.state_id)
        self.root_state.load_meta_data(root_state_path)
    if tmp_meta:
        self.meta = tmp_meta
        # notify observers that all meta data has been (re-)loaded
        self.meta_signal.emit(MetaSignalMsg("load_meta_data", "all", True))
Load meta data of state machine model from the file system
40,764
def store_meta_data(self, copy_path=None):
    """Save the state machine model's meta data (and the root state's) to disk.

    :param copy_path: Optional target directory; defaults to the state machine's own path
    """
    if copy_path:
        meta_file_json = os.path.join(copy_path, storage.FILE_NAME_META_DATA)
    else:
        meta_file_json = os.path.join(self.state_machine.file_system_path, storage.FILE_NAME_META_DATA)
    storage_utils.write_dict_to_json(self.meta, meta_file_json)
    # recurse into the state hierarchy
    self.root_state.store_meta_data(copy_path)
Save meta data of the state machine model to the file system
40,765
def _raise_on_bad_jar_filename(jar_filename):
    """Validate that *jar_filename* is None or an existing path.

    :raises TypeError: if jar_filename is not a string
    :raises ValueError: if the path does not exist
    """
    if jar_filename is None:
        # None means "use the default jar" and is always acceptable
        return
    if not isinstance(jar_filename, string_type):
        raise TypeError("jar_filename is not a string: %r" % jar_filename)
    if not os.path.exists(jar_filename):
        raise ValueError("jar_filename does not exist: %r" % jar_filename)
Ensure that jar_filename is a valid path to a jar file .
40,766
def get_jar_url(version=None):
    """Return the Maven download URL for the Stanford CoreNLP jar of *version*.

    The Maven jars are used since they are smaller than the full CoreNLP
    distributions. Falls back to DEFAULT_CORENLP_VERSION when *version* is None.

    :raises TypeError: if *version* is neither a string nor None
    """
    if version is None:
        version = DEFAULT_CORENLP_VERSION
    try:
        string_type = basestring  # Python 2
    except NameError:
        string_type = str  # Python 3
    if not isinstance(version, string_type):
        raise TypeError("Version must be a string or None (got %r)." % version)
    maven_path = 'edu/stanford/nlp/stanford-corenlp/{0}/stanford-corenlp-{0}.jar'.format(version)
    return 'http://search.maven.org/remotecontent?filepath=' + maven_path
Get the URL to a Stanford CoreNLP jar file with a specific version . These jars come from Maven since the Maven version is smaller than the full CoreNLP distributions . Defaults to DEFAULT_CORENLP_VERSION .
40,767
def convert_tree(self, ptb_tree, representation='basic', include_punct=True,
                 include_erased=False, add_lemmas=False, universal=True):
    """Convert a single Penn Treebank tree to a Stanford Dependencies Sentence.

    Arguments are as in StanfordDependencies.convert_trees, with the addition
    of *add_lemmas*: when True, the lemma field is filled in via the Stanford
    CoreNLP lemmatizer.

    :raises ValueError: if *ptb_tree* cannot be parsed
    """
    self._raise_on_bad_input(ptb_tree)
    self._raise_on_bad_representation(representation)
    tree = self.treeReader(ptb_tree)
    if tree is None:
        raise ValueError("Invalid Penn Treebank tree: %r" % ptb_tree)
    deps = self._get_deps(tree, include_punct, representation, universal=universal)
    tagged_yield = self._listify(tree.taggedYield())
    # 1-based token index -> tagged word from the original tree
    indices_to_words = dict(enumerate(tagged_yield, 1))
    sentence = Sentence()
    covered_indices = set()

    def add_token(index, form, head, deprel, extra):
        # build a Token from dependency info plus the tree's POS tag
        tag = indices_to_words[index].tag()
        if add_lemmas:
            lemma = self.stem(form, tag)
        else:
            lemma = None
        token = Token(index=index, form=form, lemma=lemma, cpos=tag, pos=tag,
                      feats=None, head=head, deprel=deprel, phead=None,
                      pdeprel=None, extra=extra)
        sentence.append(token)

    for dep in deps:
        index = dep.dep().index()
        head = dep.gov().index()
        deprel = dep.reln().toString()
        form = indices_to_words[index].value()
        # record copy counts (non-zero for collapsed representations) as extras
        dep_is_copy = dep.dep().copyCount()
        gov_is_copy = dep.gov().copyCount()
        if dep_is_copy or gov_is_copy:
            extra = {}
            if dep_is_copy:
                extra['dep_is_copy'] = dep_is_copy
            if gov_is_copy:
                extra['gov_is_copy'] = gov_is_copy
        else:
            extra = None
        add_token(index, form, head, deprel, extra)
        covered_indices.add(index)
    if include_erased:
        # re-add tokens that the dependency conversion erased
        all_indices = set(indices_to_words.keys())
        for index in all_indices - covered_indices:
            form = indices_to_words[index].value()
            if not include_punct and not self.puncFilter(form):
                continue
            add_token(index, form, head=0, deprel='erased', extra=None)
    sentence.sort()
    if representation == 'basic':
        # basic representation guarantees contiguous token numbering
        sentence.renumber()
    return sentence
Arguments are as in StanfordDependencies . convert_trees but with the addition of add_lemmas . If add_lemmas = True we will run the Stanford CoreNLP lemmatizer and fill in the lemma field .
40,768
def stem(self, form, tag):
    """Return the lemma of (*form*, *tag*) per the Stanford lemmatizer.

    Results are cached in ``self.lemma_cache``.
    """
    key = (form, tag)
    try:
        return self.lemma_cache[key]
    except KeyError:
        lemma = self.stemmer(form, tag).word()
        self.lemma_cache[key] = lemma
        return lemma
Returns the stem of word with specific form and part - of - speech tag according to the Stanford lemmatizer . Lemmas are cached .
40,769
def _get_deps(self, tree, include_punct, representation, universal):
    """Extract a list of typed dependencies from a Stanford Tree.

    :param representation: one of 'basic', 'collapsed', 'CCprocessed',
        'collapsedTree' (already validated by the caller)
    """
    if universal:
        converter = self.universal_converter
        # if the universal converter is the same object as the plain one,
        # this jar has no universal dependency support
        if self.universal_converter == self.converter:
            import warnings
            warnings.warn("This jar doesn't support universal "
                          "dependencies, falling back to Stanford "
                          "Dependencies. To suppress this message, "
                          "call with universal=False")
    else:
        converter = self.converter
    if include_punct:
        egs = converter(tree, self.acceptFilter)
    else:
        egs = converter(tree)
    if representation == 'basic':
        deps = egs.typedDependencies()
    elif representation == 'collapsed':
        deps = egs.typedDependenciesCollapsed(True)
    elif representation == 'CCprocessed':
        deps = egs.typedDependenciesCCprocessed(True)
    else:
        # representation was validated earlier; only 'collapsedTree' remains
        assert representation == 'collapsedTree'
        deps = egs.typedDependenciesCollapsedTree()
    return self._listify(deps)
Get a list of dependencies from a Stanford Tree for a specific Stanford Dependencies representation .
40,770
def _listify ( collection ) : new_list = [ ] for index in range ( len ( collection ) ) : new_list . append ( collection [ index ] ) return new_list
This is a workaround where Collections are no longer iterable when using JPype .
40,771
def as_conll(self):
    """Render this Token as a single CoNLL-X formatted line (tab-separated)."""
    def get(field):
        # None -> '_' placeholder; feats tuples are joined with '|'
        value = getattr(self, field)
        if value is None:
            value = '_'
        elif field == 'feats':
            value = '|'.join(value)
        return str(value)
    return '\t'.join([get(field) for field in FIELD_NAMES])
Represent this Token as a line as a string in CoNLL - X format .
40,772
def from_conll(this_class, text):
    """Construct a Token from one line of CoNLL-X formatted *text*."""
    fields = text.split('\t')
    fields[0] = int(fields[0])  # index column
    fields[6] = int(fields[6])  # head column
    if fields[5] != '_':
        # feats column: '|'-separated values
        fields[5] = tuple(fields[5].split('|'))
    # remaining '_' placeholders become None
    fields = [value if value != '_' else None for value in fields]
    # the extra field is never present in CoNLL-X input
    fields.append(None)
    return this_class(**dict(zip(FIELD_NAMES_PLUS, fields)))
Construct a Token from a line in CoNLL - X format .
40,773
def as_asciitree(self, str_func=None):
    """Render this Sentence as an ASCII dependency tree string.

    Requires the asciitree package.

    :param str_func: optional callable mapping a Token to its display string;
        a default formatter is used when omitted
    """
    import asciitree
    from collections import defaultdict
    children = defaultdict(list)
    token_to_index = {}
    roots = []
    for token in self:
        children[token.head].append(token)
        token_to_index[token] = token.index
        if token.head == 0:
            roots.append(token)
    assert roots, "Couldn't find root Token(s)"
    if len(roots) > 1:
        # multiple roots: synthesize an artificial ROOT token above them
        root = Token(0, 'ROOT', 'ROOT-LEMMA', 'ROOT-CPOS', 'ROOT-POS', None,
                     None, 'ROOT-DEPREL', None, None, None)
        token_to_index[root] = 0
        children[0] = roots
    else:
        root = roots[0]

    def child_func(token):
        # dependents of a token are looked up by its index
        index = token_to_index[token]
        return children[index]
    if not str_func:
        def str_func(token):
            return ' %s [%s]' % (token.form, token.deprel)
    return asciitree.draw_tree(root, child_func, str_func)
Represent this Sentence as an ASCII tree string . Requires the asciitree package . A default token stringifier is provided but for custom formatting specify a str_func which should take a single Token and return a string .
40,774
def from_conll(this_class, stream):
    """Read the first sentence from *stream* of CoNLL-X formatted lines.

    A blank line terminates the sentence; only the first sentence is returned.
    """
    stream = iter(stream)
    sentence = this_class()
    for line in stream:
        line = line.strip()
        if line:
            sentence.append(Token.from_conll(line))
        elif sentence:
            # blank line after at least one token: sentence complete
            return sentence
    return sentence
Construct a Sentence . stream is an iterable over strings where each string is a line in CoNLL - X format . If there are multiple sentences in this stream we only return the first one .
40,775
def from_conll(this_class, stream):
    """Read all sentences from *stream* of CoNLL-X formatted lines into a Corpus."""
    stream = iter(stream)
    corpus = this_class()
    while True:
        sentence = Sentence.from_conll(stream)
        if not sentence:
            # an empty sentence marks the end of the stream
            break
        corpus.append(sentence)
    return corpus
Construct a Corpus . stream is an iterable over strings where each string is a line in CoNLL - X format .
40,776
def endpoint_from_name(endpoint_name):
    """Return the object used for interacting with the named relation, or None."""
    if endpoint_name is None:
        return None
    factory = relation_factory(endpoint_name)
    return factory.from_name(endpoint_name) if factory else None
The object used for interacting with the named relations or None .
40,777
def endpoint_from_flag(flag):
    """Return the object for interacting with the relation tied to *flag*, or None."""
    relation_name = None
    value = _get_flag_value(flag)
    if isinstance(value, dict) and 'relation' in value:
        # old-style RelationBase flags store the relation name in their value
        relation_name = value['relation']
    elif flag.startswith('endpoint.'):
        # new-style Endpoint flags: 'endpoint.<relation_name>.<rest>'
        relation_name = flag.split('.')[1]
    elif '.' in flag:
        # fall back to the '<relation_name>.<rest>' convention
        relation_name = flag.split('.')[0]
    if relation_name not in hookenv.relation_types():
        return None
    if relation_name:
        factory = relation_factory(relation_name)
        if factory:
            return factory.from_flag(flag)
    return None
The object used for interacting with relations tied to a flag or None .
40,778
def relation_factory(relation_name):
    """Return the RelationFactory for *relation_name*, or None if unresolvable."""
    role, interface = hookenv.relation_to_role_and_interface(relation_name)
    if not (role and interface):
        hookenv.log('Unable to determine role and interface for relation '
                    '{}'.format(relation_name), hookenv.ERROR)
        return None
    return _find_relation_factory(_relation_module(role, interface))
Get the RelationFactory for the given relation name .
40,779
def _relation_module(role, interface):
    """Import and return the module implementing a relation, or None.

    Looks for ``relations.<interface>.<role>``, preferring the
    ``reactive.``-prefixed location; already-loaded modules are reused.
    """
    _append_path(hookenv.charm_dir())
    _append_path(os.path.join(hookenv.charm_dir(), 'hooks'))
    base_module = 'relations.{}.{}'.format(interface, role)
    for module in ('reactive.{}'.format(base_module), base_module):
        if module in sys.modules:
            break
        try:
            importlib.import_module(module)
            break
        except ImportError:
            continue
    else:
        # for-else: neither candidate could be found or imported
        hookenv.log('Unable to find implementation for relation: '
                    '{} of {}'.format(role, interface), hookenv.ERROR)
        return None
    return sys.modules[module]
Return module for relation based on its role and interface or None .
40,780
def _find_relation_factory(module):
    """Return the single leaf RelationFactory subclass defined in *module*.

    :raises RuntimeError: if more than one candidate class remains
    """
    if not module:
        return None
    # all RelationFactory subclasses in the module (excluding the base classes)
    candidates = [o for o in (getattr(module, attr) for attr in dir(module))
                  if (o is not RelationFactory and
                      o is not RelationBase and
                      isclass(o) and
                      issubclass(o, RelationFactory))]
    # filter out base classes of other candidates (keep only the leaves)
    candidates = [c1 for c1 in candidates
                  if not any(issubclass(c2, c1)
                             for c2 in candidates
                             if c1 is not c2)]
    if not candidates:
        hookenv.log('No RelationFactory found in {}'.format(module.__name__),
                    hookenv.WARNING)
        return None
    if len(candidates) > 1:
        raise RuntimeError('Too many RelationFactory found in {}'
                           ''.format(module.__name__))
    return candidates[0]
Attempt to find a RelationFactory subclass in the module .
40,781
def relation_call(method, relation_name=None, flag=None, state=None, *args):
    """Invoke *method* on the relation implementation identified by name or flag (CLI helper).

    :param state: deprecated alias for *flag*
    :raises ValueError: if no relation can be resolved
    """
    if relation_name:
        # NOTE(review): presumably an alias of endpoint_from_name defined
        # elsewhere in this module — confirm
        relation = relation_from_name(relation_name)
        if relation is None:
            raise ValueError('Relation not found: %s' % relation_name)
    elif flag or state:
        relation = relation_from_flag(flag or state)
        if relation is None:
            raise ValueError('Relation not found: %s' % (flag or state))
    else:
        raise ValueError('Must specify either relation_name or flag')
    result = getattr(relation, method)(*args)
    if isinstance(relation, RelationBase) and method == 'conversations':
        # for CLI output, represent conversations by their scopes
        result = [c.scope for c in result]
    return result
Invoke a method on the class implementing a relation via the CLI
40,782
def from_flag(cls, flag):
    """Find the relation implementation based on the name of an active flag."""
    value = _get_flag_value(flag)
    if value is None:
        return None
    relation_name = value['relation']
    # restore the conversations that were registered with this flag
    conversations = Conversation.load(value['conversations'])
    return cls.from_name(relation_name, conversations)
Find relation implementation in the current charm based on the name of an active flag .
40,783
def from_name(cls, relation_name, conversations=None):
    """Find and instantiate the relation implementation for *relation_name*.

    Lookups are cached per relation name in ``cls._cache``.
    """
    if relation_name is None:
        return None
    relation_class = cls._cache.get(relation_name)
    if relation_class:
        return relation_class(relation_name, conversations)
    role, interface = hookenv.relation_to_role_and_interface(relation_name)
    if role and interface:
        relation_class = cls._find_impl(role, interface)
        if relation_class:
            cls._cache[relation_name] = relation_class
            return relation_class(relation_name, conversations)
    return None
Find relation implementation in the current charm based on the name of the relation .
40,784
def _find_impl(cls, role, interface):
    """Locate the relation implementation class for *role* and *interface*, or None."""
    module = _relation_module(role, interface)
    return cls._find_subclass(module) if module else None
Find relation implementation based on its role and interface .
40,785
def conversation(self, scope=None):
    """Return the single conversation matching *scope*.

    When *scope* is None, it is derived from the relation's scope type and
    the current hook context.

    :raises ValueError: if no scope can be determined or no conversation matches
    """
    if scope is None:
        if self.scope is scopes.UNIT:
            scope = hookenv.remote_unit()
        elif self.scope is scopes.SERVICE:
            scope = hookenv.remote_service_name()
        else:
            scope = self.scope
        if scope is None:
            raise ValueError('Unable to determine default scope: no current hook or global scope')
    for conversation in self._conversations:
        if conversation.scope == scope:
            return conversation
    else:
        # for-else: no conversation matched the requested scope
        raise ValueError("Conversation with scope '%s' not found" % scope)
Get a single conversation by scope that this relation is currently handling .
40,786
def relation_ids(self):
    """The IDs of the relation instances this conversation communicates with."""
    if self.scope != scopes.GLOBAL:
        # non-global conversations are tied to exactly one relation instance
        return [self.namespace]
    return hookenv.relation_ids(self.namespace)
The set of IDs of the specific relation instances that this conversation is communicating with .
40,787
def join(cls, scope):
    """Get or create the conversation for *scope* in the active hook context.

    Adds the current remote unit to the conversation and persists it.
    """
    relation_name = hookenv.relation_type()
    relation_id = hookenv.relation_id()
    unit = hookenv.remote_unit()
    service = hookenv.remote_service_name()
    # unit/service scoped conversations are keyed per relation instance,
    # global ones per relation name
    if scope is scopes.UNIT:
        scope = unit
        namespace = relation_id
    elif scope is scopes.SERVICE:
        scope = service
        namespace = relation_id
    else:
        namespace = relation_name
    key = cls._key(namespace, scope)
    data = unitdata.kv().get(key, {'namespace': namespace, 'scope': scope, 'units': []})
    conversation = cls.deserialize(data)
    conversation.units.add(unit)
    unitdata.kv().set(key, cls.serialize(conversation))
    return conversation
Get or create a conversation for the given scope and active hook context .
40,788
def depart(self):
    """Remove the departing remote unit; drop the stored conversation when empty.

    Intended to be called from a ``-departed`` hook.
    """
    unit = hookenv.remote_unit()
    self.units.remove(unit)
    if self.units:
        unitdata.kv().set(self.key, self.serialize(self))
    else:
        # last unit left: remove the conversation entirely
        unitdata.kv().unset(self.key)
Remove the current remote unit for the active hook context from this conversation . This should be called from a - departed hook .
40,789
def serialize(cls, conversation):
    """Serialize *conversation* to a plain dict suitable for storage."""
    return dict(
        namespace=conversation.namespace,
        units=sorted(conversation.units),  # deterministic ordering
        scope=conversation.scope,
    )
Serialize a conversation instance for storage .
40,790
def load(cls, keys):
    """Load and deserialize the conversations stored under *keys*.

    Keys with no stored data are skipped.
    """
    stored = (unitdata.kv().get(key) for key in keys)
    return [cls.deserialize(data) for data in stored if data]
Load a set of conversations by their keys .
40,791
def set_state(self, state):
    """Activate *state* and register this conversation with it.

    A ``{relation_name}`` placeholder in *state* is interpolated before use.
    """
    state = state.format(relation_name=self.relation_name)
    value = _get_flag_value(state, {
        'relation': self.relation_name,
        'conversations': [],
    })
    if self.key not in value['conversations']:
        value['conversations'].append(self.key)
    set_flag(state, value)
Activate and put this conversation into the given state .
40,792
def remove_state(self, state):
    """Remove this conversation from *state*; clear the flag when it empties.

    A ``{relation_name}`` placeholder in *state* is interpolated before use.
    """
    state = state.format(relation_name=self.relation_name)
    value = _get_flag_value(state)
    if not value:
        # state was never set: nothing to do
        return
    if self.key in value['conversations']:
        value['conversations'].remove(self.key)
    if value['conversations']:
        set_flag(state, value)
    else:
        # no conversations left in this state: deactivate it
        clear_flag(state)
Remove this conversation from the given state and potentially deactivate the state if no more conversations are in it .
40,793
def is_state(self, state):
    """Return True if this conversation participates in the given *state*."""
    state = state.format(relation_name=self.relation_name)
    value = _get_flag_value(state)
    return bool(value) and self.key in value['conversations']
Test if this conversation is in the given state .
40,794
def toggle_state(self, state, active=TOGGLE):
    """Set or remove *state*; with no explicit *active*, flip its current value."""
    if active is TOGGLE:
        active = not self.is_state(state)
    if active:
        self.set_state(state)
        return
    self.remove_state(state)
Toggle the given state for this conversation .
40,795
def set_local(self, key=None, value=None, data=None, **kwdata):
    """Locally store data associated with this conversation.

    Data may be given as a *key*/*value* pair, as a dict via *data*,
    and/or as keyword arguments; they are merged in that order.
    """
    if data is None:
        data = {}
    if key is not None:
        data[key] = value
    data.update(kwdata)
    if not data:
        # nothing to store
        return
    unitdata.kv().update(data, prefix='%s.%s.' % (self.key, 'local-data'))
Locally store some data associated with this conversation .
40,796
def register_trigger(when=None, when_not=None, set_flag=None, clear_flag=None):
    """Register a trigger to set or clear flags when another flag changes.

    Exactly one of *when* / *when_not* and at least one of *set_flag* /
    *clear_flag* must be given.

    :raises ValueError: on an invalid argument combination
    """
    if not any((when, when_not)):
        raise ValueError('Must provide one of when or when_not')
    if all((when, when_not)):
        raise ValueError('Only one of when or when_not can be provided')
    if not any((set_flag, clear_flag)):
        raise ValueError('Must provide at least one of set_flag or clear_flag')
    trigger = _get_trigger(when, when_not)
    # avoid duplicate registrations of the same target flag
    if set_flag and set_flag not in trigger['set_flag']:
        trigger['set_flag'].append(set_flag)
    if clear_flag and clear_flag not in trigger['clear_flag']:
        trigger['clear_flag'].append(clear_flag)
    _save_trigger(when, when_not, trigger)
Register a trigger to set or clear a flag when a given flag is set .
40,797
def get_flags():
    """Return a sorted list of all flags which are currently set."""
    flags = unitdata.kv().getrange('reactive.states.', strip=True) or {}
    return sorted(flags)
Return a list of all flags which are set .
40,798
def dispatch(restricted=False):
    """Dispatch registered reactive handlers.

    Runs in phases: hook handlers first, then "other" handlers repeatedly
    (up to 100 iterations) until no more handlers become ready. With
    *restricted* set, only the restricted phase runs.
    """
    FlagWatch.reset()

    def _test(to_test):
        # keep only handlers whose preconditions currently hold
        return list(filter(lambda h: h.test(), to_test))

    def _invoke(to_invoke):
        # invoke handlers; re-test the remainder whenever a flag was removed,
        # since removed flags can invalidate pending handlers
        while to_invoke:
            unitdata.kv().set('reactive.dispatch.removed_state', False)
            for handler in list(to_invoke):
                to_invoke.remove(handler)
                hookenv.log('Invoking reactive handler: %s' % handler.id(), level=hookenv.INFO)
                handler.invoke()
                if unitdata.kv().get('reactive.dispatch.removed_state'):
                    to_invoke = _test(to_invoke)
                    break
            FlagWatch.commit()

    tracer().start_dispatch()
    if restricted:
        unitdata.kv().set('reactive.dispatch.phase', 'restricted')
        hook_handlers = _test(Handler.get_handlers())
        tracer().start_dispatch_phase('restricted', hook_handlers)
        _invoke(hook_handlers)
        return
    unitdata.kv().set('reactive.dispatch.phase', 'hooks')
    hook_handlers = _test(Handler.get_handlers())
    tracer().start_dispatch_phase('hooks', hook_handlers)
    _invoke(hook_handlers)
    unitdata.kv().set('reactive.dispatch.phase', 'other')
    # iterate until quiescent (bounded to avoid infinite trigger loops)
    for i in range(100):
        FlagWatch.iteration(i)
        other_handlers = _test(Handler.get_handlers())
        if i == 0:
            tracer().start_dispatch_phase('other', other_handlers)
        tracer().start_dispatch_iteration(i, other_handlers)
        if not other_handlers:
            break
        _invoke(other_handlers)
    FlagWatch.reset()
Dispatch registered handlers .
40,799
def discover():
    """Discover and register reactive handlers based on convention.

    Walks 'reactive', 'hooks/reactive' and 'hooks/relations' inside the
    charm directory and registers handlers from every file found.
    """
    _append_path(hookenv.charm_dir())
    _append_path(os.path.join(hookenv.charm_dir(), 'hooks'))
    for search_dir in ('reactive', 'hooks/reactive', 'hooks/relations'):
        search_path = os.path.join(hookenv.charm_dir(), search_dir)
        for dirpath, dirnames, filenames in os.walk(search_path):
            for filename in filenames:
                filepath = os.path.join(dirpath, filename)
                _register_handlers_from_file(search_path, filepath)
Discover handlers based on convention .