idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
def remove_controller(self, controller):
    """Remove a child controller and destroy it.

    :param controller: either the child controller instance or its registration key
    :return: True when a controller was removed and destroyed, False otherwise
    """
    if isinstance(controller, ExtendedController):
        # reverse lookup: find the key under which this controller is registered
        key = next((k for k, child in self.__child_controllers.items() if child is controller), None)
        if key is None:
            return False
    else:
        key = controller
    if key not in self.__child_controllers:
        return False
    child = self.__child_controllers[key]
    if self.__shortcut_manager is not None:
        self.__action_registered_controllers.remove(child)
        child.unregister_actions(self.__shortcut_manager)
    child.destroy()
    del self.__child_controllers[key]
    return True
def register_actions(self, shortcut_manager):
    """Register callback methods for triggered actions in all child controllers.

    :param shortcut_manager: the ShortcutManager to register the actions with
    """
    assert isinstance(shortcut_manager, ShortcutManager)
    self.__shortcut_manager = shortcut_manager
    for controller in list(self.__child_controllers.values()):
        if controller in self.__action_registered_controllers:
            continue
        try:
            controller.register_actions(shortcut_manager)
        except Exception as e:
            logger.error("Error while registering action for {0}: {1}".format(controller.__class__.__name__, e))
        # the controller is tracked even when registration failed, as in the original flow
        self.__action_registered_controllers.append(controller)
def destroy(self):
    """Recursively destroy this controller and all of its child controllers."""
    self.disconnect_all_signals()
    # copy the key list because remove_controller mutates the dict while we iterate
    for controller_name in list(self.__child_controllers):
        self.remove_controller(controller_name)
    self.relieve_all_models()
    if self.parent:
        self.__parent = None
    if self._view_initialized:
        self.view.get_top_widget().destroy()
        self.view = None
        # drop all observer bookkeeping to break reference cycles
        for observer_mapping in (self._Observer__PROP_TO_METHS,
                                 self._Observer__METH_TO_PROPS,
                                 self._Observer__PAT_TO_METHS,
                                 self._Observer__METH_TO_PAT,
                                 self._Observer__PAT_METH_TO_KWARGS):
            observer_mapping.clear()
        self.observe = None
    else:
        logger.warning("The controller {0} seems to be destroyed before the view was fully initialized. {1} "
                       "Check if you maybe do not call {2} or there exist most likely threading problems."
                       "".format(self.__class__.__name__, self.model, ExtendedController.register_view))
def observe_model(self, model):
    """Make the model observable within this controller and remember it for later relief."""
    self.__registered_models.add(model)
    return super(ExtendedController, self).observe_model(model)
def relieve_model(self, model):
    """Stop observing the model and forget it."""
    self.__registered_models.remove(model)
    return super(ExtendedController, self).relieve_model(model)
def relieve_all_models(self):
    """Relieve all registered models.

    Fix: the original used ``map(self.relieve_model, list(...))``; ``map`` is lazy
    in Python 3, so the relieve calls were never executed. Iterate explicitly.
    """
    # iterate over a snapshot because relieve_model mutates the set
    for model in list(self.__registered_models):
        self.relieve_model(model)
    self.__registered_models.clear()
def change_data_type(self, data_type, default_value=None):
    """Change the data type and default value consistently.

    If no default value is handed, the current one is reused. When the default
    value does not fit the new type, an int<->float conversion is attempted;
    otherwise the default value is dropped (set to None).
    """
    old_data_type = self.data_type
    self.data_type = data_type
    if default_value is None:
        default_value = self.default_value
    if type_helpers.type_inherits_of_type(type(default_value), self._data_type):
        self._default_value = default_value
    elif old_data_type.__name__ == "float" and data_type == "int":
        self._default_value = int(default_value) if self.default_value else 0
    elif old_data_type.__name__ == "int" and data_type == "float":
        self._default_value = float(default_value) if self.default_value else 0.0
    else:
        self._default_value = None
def check_default_value(self, default_value, data_type=None):
    """Check whether the passed default value suits the passed data type.

    If no data type is passed, the data port's data type is used. String values
    are converted to the data type; strings starting with '$' pass through
    unchanged, and the literal string "None" maps to None.

    :param default_value: value to check (possibly a string to be converted)
    :param data_type: optional data type to check against
    :return: the (possibly converted) default value
    :raises AttributeError: if a string default value cannot be converted
    :raises TypeError: if a non-string default value does not match the type

    Fix: the original overwrote ``default_value`` with the failed conversion
    result (None) before formatting the AttributeError message, so the message
    always reported 'None' instead of the offending string.
    """
    if data_type is None:
        data_type = self.data_type
    if default_value is not None:
        if isinstance(default_value, string_types):
            # '$'-prefixed strings are passed through untouched
            if len(default_value) > 1 and default_value[0] == '$':
                return default_value
            if default_value == "None":
                return None
            converted_value = type_helpers.convert_string_value_to_type_value(default_value, data_type)
            if converted_value is None:
                raise AttributeError("Could not convert default value '{0}' to data type '{1}'."
                                     "".format(default_value, data_type))
            default_value = converted_value
        else:
            # NOTE(review): this checks against self.data_type, not the local
            # data_type parameter — confirm intent when data_type is passed explicitly
            if not isinstance(default_value, self.data_type):
                if self._no_type_error_exceptions:
                    logger.warning("Handed default value '{0}' is of type '{1}' but data port data type is {2} {3}."
                                   "".format(default_value, type(default_value), data_type, self))
                else:
                    raise TypeError("Handed default value '{0}' is of type '{1}' but data port data type is {2}"
                                    "{3} of {4}.".format(default_value, type(default_value), data_type, self,
                                                         self.parent.get_path() if self.parent is not None else ""))
    return default_value
def create_folder_cmd_line(query, default_name=None, default_path=None):
    """Query the user on the command line for a folder path and create it.

    :param query: prompt text shown to the user
    :param default_name: optional folder name used for the default suggestion
    :param default_path: optional base path used for the default suggestion
    :return: the chosen path when it exists or could be created, else None
    """
    default = os.path.join(default_path, default_name) if default_name and default_path else None
    answer = input(query + ' [default {}]: '.format(default))
    # an empty answer selects the default suggestion
    path = answer if answer else default
    if not path:
        return None
    if not os.path.isdir(path):
        try:
            os.makedirs(path)
        except OSError:
            return None
    return path
def save_folder_cmd_line(query, default_name=None, default_path=None):
    """Query the user on the command line for a path or file to save into.

    :return: the chosen path when its parent directory exists, else None
    """
    default = os.path.join(default_path, default_name) if default_name and default_path else None
    answer = input(query + ' [default {}]: '.format(default))
    # an empty answer selects the default suggestion
    path = answer if answer else default
    if not path or not os.path.isdir(os.path.dirname(path)):
        return None
    return path
def remove_core_element(self, model):
    """Remove the core element of the handed transition model."""
    transition_parent = model.transition.parent
    # the transition must belong either to this state or to its parent state
    assert transition_parent is self.model.state or transition_parent is self.model.parent.state
    gui_helper_state_machine.delete_core_element_of_model(model)
def _update_internal_data_base(self):
    """Update the combo-box knowledge for all current transitions.

    Calls get_possible_combos_for_transition for every internal and external
    transition and caches the resulting combos.

    Fix: the reset block initialized 'free_ext_from_outcomes_dict' twice while
    'free_ext_from_states' (assigned further down) was never reset; the
    duplicate now resets 'free_ext_from_states' instead.
    """
    model = self.model
    # reset all cached combo data
    self.combo['internal'] = {}
    self.combo['external'] = {}
    self.combo['free_from_states'] = {}
    self.combo['free_from_outcomes_dict'] = {}
    self.combo['free_ext_from_states'] = {}
    self.combo['free_ext_from_outcomes_dict'] = {}
    if isinstance(model, ContainerStateModel):
        # internal transitions of this container state
        for transition_id, transition in model.state.transitions.items():
            self.combo['internal'][transition_id] = {}
            [from_state_combo, from_outcome_combo, to_state_combo, to_outcome_combo,
             free_from_states, free_from_outcomes_dict] = \
                self.get_possible_combos_for_transition(transition, self.model, self.model)
            self.combo['internal'][transition_id]['from_state'] = from_state_combo
            self.combo['internal'][transition_id]['from_outcome'] = from_outcome_combo
            self.combo['internal'][transition_id]['to_state'] = to_state_combo
            self.combo['internal'][transition_id]['to_outcome'] = to_outcome_combo
            self.combo['free_from_states'] = free_from_states
            self.combo['free_from_outcomes_dict'] = free_from_outcomes_dict
        if not model.state.transitions:
            # no transitions present: still compute the free combos
            [x, y, z, v, free_from_states, free_from_outcomes_dict] = \
                self.get_possible_combos_for_transition(None, self.model, self.model)
            self.combo['free_from_states'] = free_from_states
            self.combo['free_from_outcomes_dict'] = free_from_outcomes_dict
    if not (self.model.state.is_root_state or self.model.state.is_root_state_of_library):
        # external transitions touching this state in the parent state
        for transition_id, transition in model.parent.state.transitions.items():
            if transition.from_state == model.state.state_id or transition.to_state == model.state.state_id:
                self.combo['external'][transition_id] = {}
                [from_state_combo, from_outcome_combo, to_state_combo, to_outcome_combo,
                 free_from_states, free_from_outcomes_dict] = \
                    self.get_possible_combos_for_transition(transition, self.model.parent, self.model, True)
                self.combo['external'][transition_id]['from_state'] = from_state_combo
                self.combo['external'][transition_id]['from_outcome'] = from_outcome_combo
                self.combo['external'][transition_id]['to_state'] = to_state_combo
                self.combo['external'][transition_id]['to_outcome'] = to_outcome_combo
                self.combo['free_ext_from_states'] = free_from_states
                self.combo['free_ext_from_outcomes_dict'] = free_from_outcomes_dict
        if not model.parent.state.transitions:
            [x, y, z, v, free_from_states, free_from_outcomes_dict] = \
                self.get_possible_combos_for_transition(None, self.model.parent, self.model, True)
            self.combo['free_ext_from_states'] = free_from_states
            self.combo['free_ext_from_outcomes_dict'] = free_from_outcomes_dict
def move_dirty_lock_file(dirty_lock_file, sm_path):
    """Move the dirty-lock file into the state machine folder.

    Afterwards the file is no longer found by the auto-recovery backup scan.
    """
    if dirty_lock_file is None:
        return
    # target keeps the lock file's basename inside the state machine folder
    target = os.path.join(sm_path, dirty_lock_file.split(os.sep)[-1])
    if dirty_lock_file == target:
        return
    logger.debug("Move dirty lock from root tmp folder {0} to state machine folder {1}"
                 "".format(dirty_lock_file, target))
    os.rename(dirty_lock_file, target)
def write_backup_meta_data(self):
    """Write the auto-backup meta data into the current tmp-storage path."""
    meta_file_path = os.path.join(self._tmp_storage_path, FILE_NAME_AUTO_BACKUP)
    storage.storage_utils.write_dict_to_json(self.meta, meta_file_path)
def update_last_backup_meta_data(self):
    """Update the auto-backup meta data with internal recovery information."""
    backup_info = self.meta['last_backup']
    backup_info['time'] = get_time_string_for_float(self.last_backup_time)
    backup_info['file_system_path'] = self._tmp_storage_path
    backup_info['marked_dirty'] = self.state_machine_model.state_machine.marked_dirty
def update_last_sm_origin_meta_data(self):
    """Update the auto-backup meta data with the state machine's origin."""
    state_machine = self.state_machine_model.state_machine
    self.meta['last_saved']['time'] = state_machine.last_update
    self.meta['last_saved']['file_system_path'] = state_machine.file_system_path
40,516 | def _check_for_dyn_timed_auto_backup ( self ) : current_time = time . time ( ) self . timer_request_lock . acquire ( ) if self . _timer_request_time is None : return self . timer_request_lock . release ( ) if self . timed_temp_storage_interval < current_time - self . _timer_request_time : self . check_for_auto_backup ( force = True ) else : duration_to_wait = self . timed_temp_storage_interval - ( current_time - self . _timer_request_time ) hard_limit_duration_to_wait = self . force_temp_storage_interval - ( current_time - self . last_backup_time ) hard_limit_active = hard_limit_duration_to_wait < duration_to_wait if hard_limit_active : self . set_timed_thread ( hard_limit_duration_to_wait , self . check_for_auto_backup , True ) else : self . set_timed_thread ( duration_to_wait , self . _check_for_dyn_timed_auto_backup ) self . timer_request_lock . release ( ) | The method implements the timed storage feature . |
def check_for_auto_backup(self, force=False):
    """Check whether an auto backup of the state machine is due.

    Works together with _check_for_dyn_timed_auto_backup: either performs the
    backup in a worker thread or (re)schedules a timed check.

    :param force: perform the backup regardless of elapsed time
    """
    if not self.timed_temp_storage_enabled:
        return
    sm = self.state_machine_model.state_machine
    now = time.time()
    if not self.only_fix_interval and not self.marked_dirty:
        self.last_backup_time = now
    force_interval_elapsed = \
        now - self.last_backup_time > self.force_temp_storage_interval or self.only_fix_interval
    if force or (sm.marked_dirty and force_interval_elapsed):
        if not self.only_fix_interval or self.marked_dirty:
            # run the actual storage in the background
            threading.Thread(target=self.perform_temp_storage).start()
        if self.only_fix_interval:
            self.set_timed_thread(self.force_temp_storage_interval, self.check_for_auto_backup)
    elif not self.only_fix_interval:
        self.timer_request_lock.acquire()
        if self._timer_request_time is None:
            # first request: remember the time and start the dynamic timer
            self._timer_request_time = now
            self.set_timed_thread(self.timed_temp_storage_interval, self._check_for_dyn_timed_auto_backup)
        else:
            self._timer_request_time = now
        self.timer_request_lock.release()
    else:
        self.set_timed_thread(self.force_temp_storage_interval, self.check_for_auto_backup)
def on_mouse_click(self, widget, event):
    """Insert the clicked state template into the currently selected state."""
    import rafcon.gui.helpers.state_machine as gui_helper_state_machine
    clicked_path = self.view.get_path_at_pos(int(event.x), int(event.y))
    if clicked_path is None or not self.view.get_selected_items():
        return None
    return gui_helper_state_machine.insert_state_into_selected_state(self._get_state(), False)
def on_mouse_motion(self, widget, event):
    """Select the item under the cursor; clear the selection when hovering none."""
    hovered = self.view.get_path_at_pos(int(event.x), int(event.y))
    if hovered is None:
        self.view.unselect_all()
    else:
        self.view.select_path(hovered)
40,520 | def _get_state ( self ) : selected = self . view . get_selected_items ( ) if not selected : return shorthand , state_class = self . view . states [ selected [ 0 ] [ 0 ] ] return state_class ( ) | get state instance which was clicked on |
def apply_new_outcome_name(self, path, new_name):
    """Apply the newly entered outcome name if it was changed."""
    if new_name == self.list_store[path][self.NAME_STORAGE_ID]:
        return
    outcome = self.list_store[path][self.CORE_STORAGE_ID]
    try:
        outcome.name = new_name
        logger.debug("Outcome name changed to '{0}'".format(outcome.name))
    except (ValueError, TypeError) as e:
        logger.warning("The name of the outcome could not be changed: {0}".format(e))
        # restore the displayed name to the unchanged core value
        self.list_store[path][self.NAME_STORAGE_ID] = outcome.name
def on_to_state_edited(self, renderer, path, new_state_identifier):
    """Connect the outcome with a transition to the newly set state."""

    def do_self_transition_check(t_id, identifier):
        # a 'self' segment in the identifier marks a self transition
        if 'self' in identifier.split('.'):
            insert_self_transition_meta_data(self.model, t_id, 'outcomes_widget', combined_action=True)

    outcome_id = self.list_store[path][self.ID_STORAGE_ID]
    if outcome_id in self.dict_to_other_state or outcome_id in self.dict_to_other_outcome:
        # an outgoing transition already exists for this outcome
        transition_parent_state = self.model.parent.state
        if outcome_id in self.dict_to_other_state:
            t_id = self.dict_to_other_state[outcome_id][2]
        else:
            t_id = self.dict_to_other_outcome[outcome_id][2]
        if new_state_identifier is not None:
            to_state_id = new_state_identifier.split('.')[1]
            if not transition_parent_state.transitions[t_id].to_state == to_state_id:
                try:
                    transition_parent_state.transitions[t_id].modify_target(to_state=to_state_id)
                    do_self_transition_check(t_id, new_state_identifier)
                except ValueError as e:
                    logger.warning("The target of transition couldn't be modified: {0}".format(e))
        else:
            try:
                transition_parent_state.remove_transition(t_id)
            except AttributeError as e:
                logger.warning("The transition couldn't be removed: {0}".format(e))
    else:
        # no transition yet: create one unless this is the root state
        if new_state_identifier is not None and not self.model.state.is_root_state:
            transition_parent_state = self.model.parent.state
            to_state_id = new_state_identifier.split('.')[1]
            try:
                t_id = transition_parent_state.add_transition(from_state_id=self.model.state.state_id,
                                                              from_outcome=outcome_id,
                                                              to_state_id=to_state_id,
                                                              to_outcome=None, transition_id=None)
                do_self_transition_check(t_id, new_state_identifier)
            except (ValueError, TypeError) as e:
                logger.warning("The transition couldn't be added: {0}".format(e))
                return
        else:
            logger.debug("outcome-editor got None in to_state-combo-change no transition is added")
def on_to_outcome_edited(self, renderer, path, new_outcome_identifier):
    """Connect the outcome with a transition to the newly set parent outcome."""
    if self.model.parent is None:
        return
    outcome_id = self.list_store[path][self.ID_STORAGE_ID]
    transition_parent_state = self.model.parent.state
    if outcome_id in self.dict_to_other_state or outcome_id in self.dict_to_other_outcome:
        # an outgoing transition already exists for this outcome
        if outcome_id in self.dict_to_other_state:
            t_id = self.dict_to_other_state[outcome_id][2]
        else:
            t_id = self.dict_to_other_outcome[outcome_id][2]
        if new_outcome_identifier is not None:
            new_to_outcome_id = int(new_outcome_identifier.split('.')[2])
            if not transition_parent_state.transitions[t_id].to_outcome == new_to_outcome_id:
                to_state_id = self.model.parent.state.state_id
                try:
                    transition_parent_state.transitions[t_id].modify_target(to_state=to_state_id,
                                                                            to_outcome=new_to_outcome_id)
                except ValueError as e:
                    logger.warning("The target of transition couldn't be modified: {0}".format(e))
        else:
            transition_parent_state.remove_transition(t_id)
    else:
        # no transition yet: create one targeting the parent's outcome
        if new_outcome_identifier is not None:
            to_outcome = int(new_outcome_identifier.split('.')[2])
            try:
                self.model.parent.state.add_transition(from_state_id=self.model.state.state_id,
                                                       from_outcome=outcome_id,
                                                       to_state_id=self.model.parent.state.state_id,
                                                       to_outcome=to_outcome, transition_id=None)
            except (ValueError, TypeError) as e:
                logger.warning("The transition couldn't be added: {0}".format(e))
        else:
            logger.debug("outcome-editor got None in to_outcome-combo-change no transition is added")
def remove_core_element(self, model):
    """Remove the core element of the handed outcome model."""
    # the outcome must belong to this state
    assert model.outcome.parent is self.model.state
    gui_helper_state_machine.delete_core_element_of_model(model)
def bring_tab_to_the_top(self, tab_label):
    """Find the tab with the given label among the notebooks and make it current."""
    page = self.page_dict[tab_label]
    for notebook in self.notebook_names:
        page_num = self[notebook].page_num(page)
        if page_num != -1:
            self[notebook].set_current_page(page_num)
            break
def compare_variables(tree_model, iter1, iter2, user_data=None):
    """Sort comparator for the global variable list (by variable name).

    :return: 0 for equal names, 1 when the first sorts after the second, -1 otherwise

    Fix: the original compared space-joined, *unpadded* binary renderings of the
    characters; because those bit strings have different lengths, the ordering
    was wrong for mixed-width characters (e.g. 'A' sorted before '0'). Compare
    the name strings directly instead.
    """
    name1 = tree_model[tree_model.get_path(iter1)[0]][0]
    name2 = tree_model[tree_model.get_path(iter2)[0]][0]
    if name1 == name2:
        return 0
    return 1 if name1 > name2 else -1
def reset_dirty_flags(self):
    """Set all marked_dirty flags of the managed state machines to False."""
    for state_machine in self.state_machines.values():
        state_machine.marked_dirty = False
def add_state_machine(self, state_machine):
    """Add a state machine to the list of managed state machines.

    :return: the id of the added state machine
    :raises AttributeError: wrong type, or its file system path is already open
    """
    if not isinstance(state_machine, StateMachine):
        raise AttributeError("State machine must be of type StateMachine")
    sm_path = state_machine.file_system_path
    if sm_path is not None and self.is_state_machine_open(sm_path):
        raise AttributeError("The state machine is already open {0}".format(sm_path))
    logger.debug("Add new state machine with id {0}".format(state_machine.state_machine_id))
    self._state_machines[state_machine.state_machine_id] = state_machine
    return state_machine.state_machine_id
def remove_state_machine(self, state_machine_id):
    """Remove the state machine with the given id from the managed set.

    :return: the removed state machine, or None when the id is unknown
    """
    # NOTE(review): import appears unused but may be kept for its side effects — confirm
    import rafcon.core.singleton as core_singletons
    if state_machine_id not in self._state_machines:
        logger.error("There is no state_machine with state_machine_id: %s" % state_machine_id)
        return None
    logger.debug("Remove state machine with id {0}".format(state_machine_id))
    removed_state_machine = self._state_machines.pop(state_machine_id)
    removed_state_machine.destroy_execution_histories()
    return removed_state_machine
def get_active_state_machine(self):
    """Return the active state machine, or None when none is active."""
    return self._state_machines.get(self._active_state_machine_id, None)
def get_open_state_machine_of_file_system_path(self, file_system_path):
    """Return the open state machine stored at the given path, or None."""
    return next((sm for sm in self.state_machines.values()
                 if sm.file_system_path == file_system_path), None)
def reset_title(self, title, notebook_identifier):
    """Replace the upper or lower half of the 'upper / lower' window title."""
    parts = self.get_top_widget().get_title().split('/')
    upper_title = parts[0].strip()
    lower_title = parts[1].strip()
    if notebook_identifier == 'upper':
        new_title = title + ' / ' + lower_title
    else:
        new_title = upper_title + ' / ' + title
    self['headerbar'].props.title = new_title
def add_state(container_state_m, state_type):
    """Add a new state of the given type to a container state.

    :param container_state_m: model of the container the new state goes into
    :param state_type: key into state_type_to_state_class_dict
    :return: True on success, False otherwise
    """
    if container_state_m is None:
        logger.error("Cannot add a state without a parent.")
        return False
    if not isinstance(container_state_m, StateModel) or not isinstance(container_state_m, ContainerStateModel):
        logger.error("Parent state must be a container, for example a Hierarchy State." + str(container_state_m))
        return False
    state_class = state_type_to_state_class_dict.get(state_type, None)
    if state_class is None:
        logger.error("Cannot create state of type {0}".format(state_type))
        return False
    new_state = state_class()
    from rafcon.gui.models.abstract_state import get_state_model_class_for_state
    new_state_m = get_state_model_class_for_state(new_state)(new_state)
    gui_helper_meta_data.put_default_meta_on_state_m(new_state_m, container_state_m)
    # announce the model so it is re-used when the core notifies the addition
    container_state_m.expected_future_models.add(new_state_m)
    container_state_m.state.add_state(new_state)
    return True
def extract_child_models_of_state(state_m, new_state_class):
    """Split the child models of state_m into required and obsolete groups.

    :param state_m: source state model
    :param new_state_class: target state class of an upcoming type change
    :return: (required_child_models, obsolete_child_models) dicts keyed by property name
    """
    assert isinstance(state_m, StateModel)
    assert issubclass(new_state_class, State)
    orig_state = state_m.state
    current_state_is_container = isinstance(orig_state, ContainerState)
    new_state_is_container = issubclass(new_state_class, ContainerState)
    required_model_properties = ['input_data_ports', 'output_data_ports', 'outcomes', 'income']
    obsolete_model_properties = []
    if current_state_is_container and new_state_is_container:
        # container -> container keeps children but drops transitions
        required_model_properties.extend(['states', 'data_flows', 'scoped_variables'])
        obsolete_model_properties.append('transitions')
    elif current_state_is_container:
        # container -> simple state drops all container-only children
        obsolete_model_properties.extend(['states', 'transitions', 'data_flows', 'scoped_variables'])

    def get_element_list(model, prop_name):
        # income is a single model, not a wrapped collection
        if prop_name == 'income':
            return [model.income]
        wrapper = getattr(model, prop_name)
        list_or_dict = wrapper._obj
        if isinstance(list_or_dict, list):
            return list_or_dict[:]
        return list(list_or_dict.values())

    required_child_models = {prop: get_element_list(state_m, prop) for prop in required_model_properties}
    obsolete_child_models = {prop: get_element_list(state_m, prop) for prop in obsolete_model_properties}
    if isinstance(state_m, ContainerStateModel):
        decider_state_m = state_m.states.get(UNIQUE_DECIDER_STATE_ID, None)
        if decider_state_m:
            if new_state_is_container:
                # the decider state never survives a type change
                required_child_models['states'].remove(decider_state_m)
                obsolete_child_models['states'] = [decider_state_m]
    return required_child_models, obsolete_child_models
def create_state_model_for_state(new_state, meta, state_element_models):
    """Create a new state model for new_state, re-using the handed element models."""
    from rafcon.gui.models.abstract_state import get_state_model_class_for_state
    model_class = get_state_model_class_for_state(new_state)
    new_state_m = model_class(new_state, meta=meta, load_meta_data=False,
                              expected_future_models=state_element_models)
    # every handed model must have been consumed during construction
    check_expected_future_model_list_is_empty(
        new_state_m, msg="New state has not re-used all handed expected future models.")
    return new_state_m
def prepare_state_m_for_insert_as(state_m_to_insert, previous_state_size):
    """Prepare and scale the meta data of a state model to fit its new parent.

    :param state_m_to_insert: state model about to be inserted
    :param previous_state_size: size the model had before default meta data was applied
    """
    if isinstance(state_m_to_insert, AbstractStateModel) and \
            not gui_helper_meta_data.model_has_empty_meta(state_m_to_insert):
        if isinstance(state_m_to_insert, ContainerStateModel):
            # collect all state elements keyed by their core element id
            models_dict = {'state': state_m_to_insert}
            for state_element_key in state_m_to_insert.state.state_element_attrs:
                state_element_list = getattr(state_m_to_insert, state_element_key)
                if hasattr(state_element_list, 'keys'):
                    state_element_list = state_element_list.values()
                models_dict[state_element_key] = {elem.core_element.core_element_id: elem
                                                  for elem in state_element_list}
            resize_factor = gui_helper_meta_data.scale_meta_data_according_state(models_dict, as_template=True)
            gui_helper_meta_data.resize_income_of_state_m(state_m_to_insert, resize_factor)
        elif isinstance(state_m_to_insert, StateModel):
            if previous_state_size:
                # scale the meta data back to the previous size, uniformly
                current_size = state_m_to_insert.get_meta_data_editor()['size']
                factor = gui_helper_meta_data.divide_two_vectors(current_size, previous_state_size)
                state_m_to_insert.set_meta_data_editor('size', previous_state_size)
                factor = (min(*factor), min(*factor))
                gui_helper_meta_data.resize_state_meta(state_m_to_insert, factor)
            else:
                logger.debug("For insert as template of {0} no resize of state meta data is performed because "
                             "the meta data has empty fields.".format(state_m_to_insert))
        elif not isinstance(state_m_to_insert, LibraryStateModel):
            raise TypeError("For insert as template of {0} no resize of state meta data is performed because "
                            "state model type is not ContainerStateModel or StateModel".format(state_m_to_insert))
    else:
        logger.info("For insert as template of {0} no resize of state meta data is performed because the meta data has "
                    "empty fields.".format(state_m_to_insert))
def insert_state_as(target_state_m, state, as_template):
    """Add a state into a target container state, optionally as a template.

    :return: False when the target is not a container state
    """
    if not isinstance(target_state_m, ContainerStateModel) or not isinstance(target_state_m.state, ContainerState):
        logger.error("States can only be inserted in container states")
        return False
    state_m = get_state_model_class_for_state(state)(state)
    if not as_template:
        gui_helper_meta_data.put_default_meta_on_state_m(state_m, target_state_m)
    else:
        # templates insert the library's state copy with rescaled meta data
        assert isinstance(state, LibraryState)
        old_lib_state_m = state_m
        state_m = state_m.state_copy
        previous_state_size = state_m.get_meta_data_editor()['size']
        gui_helper_meta_data.put_default_meta_on_state_m(state_m, target_state_m)
        prepare_state_m_for_insert_as(state_m, previous_state_size)
        old_lib_state_m.prepare_destruction(recursive=False)
    # avoid id clashes with already present child states
    while state_m.state.state_id in target_state_m.state.states:
        state_m.state.change_state_id()
    target_state_m.expected_future_models.add(state_m)
    target_state_m.state.add_state(state_m.state)
    update_models_recursively(state_m, expected=False)
def substitute_state_as(target_state_m, state, as_template, keep_name=False):
    """Substitute the target state with the handed state.

    :param keep_name: keep the name of the substituted target state
    """
    state_m = get_state_model_class_for_state(state)(state)
    if as_template:
        # use the library's inner state copy, detached from its library parent
        assert isinstance(state_m, LibraryStateModel)
        state_m = state_m.state_copy
        state_m.state.parent = None
    if keep_name:
        state_m.state.name = target_state_m.state.name
    assert target_state_m.parent.states[target_state_m.state.state_id] is target_state_m
    substitute_state(target_state_m, state_m, as_template)
def orify(e, changed_callback):
    """Add another event to the multi-event.

    Patches the event's set/clear methods (once) so every state change notifies
    all registered callbacks via or_set/or_clear.
    """
    if not hasattr(e, "callbacks"):
        # first registration: keep the originals and route through or_set/or_clear
        e._set = e.set
        e._clear = e.clear
        e.set = lambda: or_set(e)
        e.clear = lambda: or_clear(e)
        e.callbacks = []
    e.callbacks.append(changed_callback)
def create(*events):
    """Create a new multi-event that is set while any of the given events is set."""
    or_event = threading.Event()

    def changed():
        # mirror the OR of all source events into or_event
        if any(event.is_set() for event in events):
            or_event.set()
        else:
            or_event.clear()

    for event in events:
        orify(event, changed)
    changed()
    return or_event
def model_changed(self, model, prop_name, info):
    """React to configuration changes.

    When only the follow-logging flag changed, just scroll; when any log-level
    enable changed, rebuild the filtered buffer.
    """
    current_enables = self._get_config_enables()
    if self._enables == current_enables:
        return
    filtered_buffer_update_needed = True
    level_keys = ['VERBOSE', 'DEBUG', 'INFO', 'WARNING', 'ERROR']
    if all(self._enables[key] == current_enables[key] for key in level_keys):
        # no level toggled: only the follow mode may have changed
        follow_mode_key = 'CONSOLE_FOLLOW_LOGGING'
        only_follow_mode_changed = self._enables[follow_mode_key] != current_enables[follow_mode_key]
        filtered_buffer_update_needed = not only_follow_mode_changed
    self._enables = current_enables
    self.view.set_enables(self._enables)
    if filtered_buffer_update_needed:
        self.update_filtered_buffer()
    else:
        self.view.scroll_to_cursor_onscreen()
def create_path(path):
    """Create the directory `path` (including parents) if it does not yet exist.

    Fix: the original ``exists``-then-``makedirs`` sequence is a check-then-act
    race (TOCTOU); ``os.makedirs(..., exist_ok=True)`` is atomic with respect
    to a concurrent creator. Note it raises when `path` exists but is not a
    directory, which surfaces a misuse the original silently ignored.
    """
    import os
    os.makedirs(path, exist_ok=True)
def get_md5_file_hash(filename):
    """Calculate and return the MD5 hash (hex digest) of a file.

    Reads the file in 64 KiB chunks to keep memory usage constant.
    """
    import hashlib
    chunk_size = 65536
    digest = hashlib.md5()
    with open(filename, 'rb') as source:
        for chunk in iter(lambda: source.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()
def file_needs_update(target_file, source_file):
    """Return True when target_file is missing or its MD5 differs from source_file."""
    if not os.path.isfile(target_file):
        return True
    return get_md5_file_hash(target_file) != get_md5_file_hash(source_file)
def copy_file_if_update_required(source_file, target_file):
    """Copy *source_file* over *target_file* when the target is missing or outdated."""
    if not file_needs_update(target_file, source_file):
        return
    shutil.copy(source_file, target_file)
def read_file(file_path, filename=None):
    """Return the content of a file, or None if it does not exist.

    :param file_path: Path of the file, or its directory if *filename* is given
    :param filename: Optional file name joined onto *file_path*
    """
    resolved_path = os.path.realpath(file_path)
    if filename:
        resolved_path = os.path.join(resolved_path, filename)
    if not os.path.isfile(resolved_path):
        return None
    with open(resolved_path, 'r') as file_handle:
        return file_handle.read()
def clean_file_system_paths_from_not_existing_paths(file_system_paths):
    """Remove, in place, all entries of *file_system_paths* that do not exist."""
    # Slice assignment keeps the original list object (callers hold references)
    file_system_paths[:] = [p for p in file_system_paths if os.path.exists(p)]
def update_models(self, model, name, info):
    """Keep port/income/outcome model lists in sync after a core state change."""
    element_dispatch = [
        (["add_input_data_port", "remove_input_data_port", "input_data_ports"], "input_data_port"),
        (["add_output_data_port", "remove_output_data_port", "output_data_ports"], "output_data_port"),
        (["add_income", "remove_income", "income"], "income"),
        (["add_outcome", "remove_outcome", "outcomes"], "outcome"),
    ]
    for method_names, element_name in element_dispatch:
        if info.method_name in method_names:
            model_list, data_list, model_name, model_class, model_key = \
                self.get_model_info(element_name)
            break
    else:
        # Notification does not concern any of the handled element kinds
        return

    if "add" in info.method_name:
        self.add_missing_model(model_list, data_list, model_name, model_class, model_key)
    elif "remove" in info.method_name:
        destroy = info.kwargs.get('destroy', True)
        self.remove_specific_model(model_list, info.result, model_key, destroy)
    elif info.method_name in ["input_data_ports", "output_data_ports", "income", "outcomes"]:
        # Whole collection was replaced -> rebuild the model list
        self.re_initiate_model_list(model_list, data_list, model_name, model_class, model_key)
def _load_income_model(self):
    """Create the income model from the core income of the state."""
    core_income = self.state.income
    self._add_model(self.income, core_income, IncomeModel)
def _load_outcome_models(self):
    """(Re-)create all outcome models from the core outcomes of the state."""
    self.outcomes = []
    for core_outcome in self.state.outcomes.values():
        # _add_model appends the created/reused model to self.outcomes
        self._add_model(self.outcomes, core_outcome, OutcomeModel)
def re_initiate_model_list(self, model_list_or_dict, core_objects_dict, model_name, model_class, model_key):
    """Rebuild a model collection from scratch so it mirrors the core objects."""
    if model_name == "income":
        # There is only a single income; replace the model if the core object changed
        if self.income.income != self.state.income:
            self._add_model(self.income, self.state.income, IncomeModel)
        return
    # Drop every existing model ...
    for _ in range(len(model_list_or_dict)):
        self.remove_additional_model(model_list_or_dict, core_objects_dict, model_name, model_key)
    # ... then add one model per core object
    if core_objects_dict:
        for _ in core_objects_dict:
            self.add_missing_model(model_list_or_dict, core_objects_dict, model_name, model_class, model_key)
def _add_model(self, model_list_or_dict, core_element, model_class, model_key=None, load_meta_data=True):
    """Add one model for *core_element*, reusing an expected future model if available."""
    found_model = self._get_future_expected_model(core_element)
    if found_model:
        found_model.parent = self
    if model_class is IncomeModel:
        # The income is a single attribute, not an entry of a list/dict
        self.income = found_model or IncomeModel(core_element, self)
        return
    if model_key is None:
        model_list_or_dict.append(found_model or model_class(core_element, self))
    else:
        model_list_or_dict[model_key] = found_model or model_class(
            core_element, self, load_meta_data=load_meta_data)
def add_missing_model(self, model_list_or_dict, core_elements_dict, model_name, model_class, model_key):
    """Add a model for the first core element that has none yet.

    :return: True if a model was added, False if every element already has one
        (income is handled separately and returns None)
    """
    def has_model_for(core_object):
        # Does any existing model already wrap this core object?
        for model_or_key in model_list_or_dict:
            model = model_or_key if model_key is None else model_list_or_dict[model_or_key]
            if core_object is getattr(model, model_name):
                return True
        return False

    if model_name == "income":
        self._add_model(self.income, self.state.income, IncomeModel)
        return

    for core_element in core_elements_dict.values():
        if has_model_for(core_element):
            continue
        new_model = self._get_future_expected_model(core_element)
        if new_model:
            new_model.parent = self
        elif type_helpers.type_inherits_of_type(model_class, StateModel):
            # State models may hand expected future models down the hierarchy
            new_model = model_class(core_element, self,
                                    expected_future_models=self.expected_future_models)
            self.expected_future_models = new_model.expected_future_models
            new_model.expected_future_models = set()
        else:
            new_model = model_class(core_element, self)
        if model_key is None:
            model_list_or_dict.append(new_model)
        else:
            model_list_or_dict[getattr(core_element, model_key)] = new_model
        return True
    return False
def remove_additional_model(self, model_list_or_dict, core_objects_dict, model_name, model_key, destroy=True):
    """Remove the first model whose core object no longer exists."""
    if model_name == "income":
        self.income.prepare_destruction()
        self.income = None
        return
    for model_or_key in model_list_or_dict:
        model = model_or_key if model_key is None else model_list_or_dict[model_or_key]
        still_exists = any(core_object is getattr(model, model_name)
                           for core_object in core_objects_dict.values())
        if still_exists:
            continue
        # Mutation is safe here because we return right after removing
        if model_key is None:
            if destroy:
                model.prepare_destruction()
            model_list_or_dict.remove(model)
        else:
            if destroy:
                model_list_or_dict[model_or_key].prepare_destruction()
            del model_list_or_dict[model_or_key]
        return
def _get_future_expected_model(self, core_element):
    """Pop and return the expected model wrapping *core_element*, or None."""
    for candidate in self.expected_future_models:
        if candidate.core_element is core_element:
            # Safe: we return immediately after mutating the set
            self.expected_future_models.remove(candidate)
            return candidate
    return None
def as_dict(self, use_preliminary=False):
    """Return the configuration as a plain dict.

    :param use_preliminary: If True, staged (not yet applied) values win
    """
    preliminary = self.preliminary_config
    result = {}
    for key in self.config.keys:
        if use_preliminary and key in preliminary:
            result[key] = preliminary[key]
        else:
            result[key] = self.config.get_config_value(key)
    return result
def update_config(self, config_dict, config_file):
    """Point the config at *config_file* and stage all differing values.

    Differing values are recorded as preliminary; nothing is applied yet.
    """
    self.config.config_file_path = config_file
    self.config.path = path.dirname(config_file)
    for config_key, new_value in config_dict.items():
        if new_value != self.config.get_config_value(config_key):
            self.set_preliminary_config_value(config_key, new_value)
def get_current_config_value(self, config_key, use_preliminary=True, default=None):
    """Return a copy of the current value for *config_key*.

    Staged preliminary values win unless *use_preliminary* is False.
    """
    if use_preliminary and config_key in self.preliminary_config:
        value = self.preliminary_config[config_key]
    else:
        value = self.config.get_config_value(config_key, default)
    # Copy so callers cannot mutate the stored configuration
    return copy(value)
def set_preliminary_config_value(self, config_key, config_value):
    """Stage *config_value* for *config_key*; unstage if it equals the stored value."""
    if config_value == self.config.get_config_value(config_key):
        # Matches the applied config -> a preliminary entry is superfluous
        self.preliminary_config.pop(config_key, None)
    else:
        self.preliminary_config[config_key] = config_value
def apply_preliminary_config(self, save=True):
    """Apply all staged values to the configuration.

    :param save: Whether to persist the configuration afterwards
    :return: True if any applied key requires a state machine refresh
    """
    state_machine_refresh_required = False
    for config_key, config_value in self.preliminary_config.items():
        self.config.set_config_value(config_key, config_value)
        if config_key in self.config.keys_requiring_state_machine_refresh:
            state_machine_refresh_required = True
        elif config_key in self.config.keys_requiring_restart:
            self.changed_keys_requiring_restart.add(config_key)
        if config_key == 'AUTO_RECOVERY_LOCK_ENABLED':
            # Toggle the instance lock file immediately; it guards auto-recovery
            import rafcon.gui.models.auto_backup
            if config_value:
                rafcon.gui.models.auto_backup.generate_rafcon_instance_lock_file()
            else:
                rafcon.gui.models.auto_backup.remove_rafcon_instance_lock_file()
    self.preliminary_config.clear()
    if save:
        self.config.save_configuration()
    return state_machine_refresh_required
def parent(self, parent):
    """Set the parent state after validating this element within it.

    On validation failure the previous parent is restored and a ValueError
    (or a RecoveryModeException in library recovery mode) is raised.
    """
    if parent is None:
        self._parent = None
        return
    from rafcon.core.states.state import State
    assert isinstance(parent, State)
    old_parent = self.parent
    self._parent = ref(parent)
    valid, message = self._check_validity()
    if valid:
        return
    # Roll back to the previous parent before reporting the problem
    self._parent = ref(old_parent) if old_parent else None
    class_name = self.__class__.__name__
    if global_config.get_config_value("LIBRARY_RECOVERY_MODE") is True:
        do_delete_item = True
        if "not have matching data types" in message:
            # Data type mismatches are tolerated in recovery mode
            do_delete_item = False
            self._parent = ref(parent)
        raise RecoveryModeException("{0} invalid within state \"{1}\" (id {2}): {3}"
                                    "".format(class_name, parent.name, parent.state_id, message),
                                    do_delete_item=do_delete_item)
    raise ValueError("{0} invalid within state \"{1}\" (id {2}): {3} {4}"
                     "".format(class_name, parent.name, parent.state_id, message, self))
def _change_property_with_validity_check(self, property_name, value):
    """Set *property_name* to *value*, reverting and raising if validation fails."""
    assert isinstance(property_name, string_types)
    old_value = getattr(self, property_name)
    setattr(self, property_name, value)
    valid, message = self._check_validity()
    if not valid:
        # Restore the previous value before raising
        setattr(self, property_name, old_value)
        raise ValueError("The {2}'s '{0}' could not be changed: {1}"
                         "".format(property_name[1:], message, self.__class__.__name__))
def _check_validity(self):
    """Delegate the validity check of this element to its parent state."""
    from rafcon.core.states.state import State
    if not self.parent:
        return True, "no parent"
    if not isinstance(self.parent, State):
        return True, "no parental check"
    return self.parent.check_child_validity(self)
def register_new_state_machines(self, model, prop_name, info):
    """Start observing state machine models added to the observed manager."""
    method_name = info['method_name']
    if method_name == '__setitem__':
        self.observe_model(info['args'][1])
        self.logger.info(NotificationOverview(info))
    elif method_name == '__delitem__':
        # Removal is handled elsewhere; nothing to register here
        pass
    else:
        self.logger.warning(NotificationOverview(info))
def observe_root_state_assignments(self, model, prop_name, info):
    """Swap observation from the old root state model to the newly assigned one."""
    old_model = info['old']
    new_model = info['new']
    if old_model:
        self.relieve_model(old_model)
    if new_model:
        self.observe_model(new_model)
        self.logger.info("Exchange observed old root_state model with newly assigned one. sm_id: {}"
                         "".format(new_model.state.parent.state_machine_id))
def observe_meta_signal_changes(self, changed_model, prop_name, info):
    """Log the structure of every meta-signal notification."""
    overview = NotificationOverview(info)
    self.logger.info(overview)
def set_dict(self, new_dict):
    """Fill this Vividict with the entries of *new_dict*.

    Nested dicts are converted to Vividicts; all keys are stringified.
    """
    for key, value in new_dict.items():
        converted = Vividict(value) if isinstance(value, dict) else value
        self[str(key)] = converted
def vividict_to_dict(vividict):
    """Recursively convert a Vividict (incl. numpy values) to a plain dict."""
    try:
        from numpy import ndarray
    except ImportError:
        # Without numpy nothing can be an ndarray; dict is a harmless stand-in
        ndarray = dict

    def to_native(value):
        # Convert numpy scalars/arrays (and containers thereof) to native types
        if isinstance(value, dict):
            for inner_key, inner in value.items():
                value[inner_key] = to_native(inner)
        elif isinstance(value, ndarray):
            value = value.tolist()
        if isinstance(value, (list, tuple)):
            converted = [to_native(item) for item in value]
            return tuple(converted) if isinstance(value, tuple) else converted
        if not hasattr(value, 'dtype'):
            return value
        return value.item()

    result = {}
    for key, value in vividict.items():
        value = to_native(value)
        if isinstance(value, Vividict):
            value = Vividict.vividict_to_dict(value)
        result[key] = value
    return result
def to_yaml(cls, dumper, vividict):
    """Represent a Vividict as a plain YAML mapping (YAMLObject hook)."""
    plain_dict = cls.vividict_to_dict(vividict)
    return dumper.represent_mapping(cls.yaml_tag, plain_dict)
def get_selected_object(self):
    """Return (tree iter, path) of the single selected row, else (None, paths)."""
    model, paths = self.tree_view.get_selection().get_selected_rows()
    if len(paths) != 1:
        # No selection or multi-selection -> caller gets the raw path list
        return None, paths
    return self.tree_store.get_iter(paths[0]), paths[0]
def on_add(self, widget, new_dict=False):
    """Add a new semantic data entry (value or nested dict) at the selection."""
    self.semantic_data_counter += 1
    treeiter, path = self.get_selected_object()
    value = dict() if new_dict else "New Value"
    if treeiter:
        target_dict_path_as_list = self.tree_store[path][self.ID_STORAGE_ID]
        if not self.tree_store[path][self.IS_DICT_STORAGE_ID]:
            # Selected row is a leaf -> insert beside it, into its parent dict
            target_dict_path_as_list.pop()
    else:
        target_dict_path_as_list = []
    target_dict = self.model.state.get_semantic_data(target_dict_path_as_list)
    new_key_string = generate_semantic_data_key(list(target_dict.keys()))
    self.model.state.add_semantic_data(target_dict_path_as_list, value, new_key_string)
    self.reload_tree_store_data()
    self.select_entry(target_dict_path_as_list + [new_key_string])
    logger.debug("Added new semantic data entry!")
    return True
def on_remove(self, widget, data=None):
    """Remove the selected semantic data entry and restore a sensible selection."""
    treeiter, path = self.get_selected_object()
    if not treeiter:
        return
    dict_path_as_list = self.tree_store[path][self.ID_STORAGE_ID]
    logger.debug("Deleting semantic data entry with name {}!".format(dict_path_as_list[-1]))
    self.model.state.remove_semantic_data(dict_path_as_list)
    self.reload_tree_store_data()
    try:
        self.select_entry(self.tree_store[path][self.ID_STORAGE_ID])
    except IndexError:
        # Removed row was the last one at this position -> pick a neighbour/parent
        if len(self.tree_store):
            if len(path) > 1:
                previous_sibling_path = tuple(list(path[:-1]) + [path[-1] - 1])
                if previous_sibling_path[-1] > -1:
                    self.select_entry(self.tree_store[previous_sibling_path][self.ID_STORAGE_ID])
                else:
                    # No previous sibling -> fall back to the parent row
                    self.select_entry(self.tree_store[path[:-1]][self.ID_STORAGE_ID])
            else:
                self.select_entry(self.tree_store[path[0] - 1][self.ID_STORAGE_ID])
    return True
def add_items_to_tree_iter(self, input_dict, treeiter, parent_dict_path=None):
    """Recursively append all entries of *input_dict* under *treeiter*."""
    if parent_dict_path is None:
        parent_dict_path = []
    # NOTE(review): return value unused — a selection side effect is assumed, confirm
    self.get_view_selection()
    for key, value in sorted(input_dict.items()):
        element_dict_path = copy.copy(parent_dict_path) + [key]
        if isinstance(value, dict):
            # Nested dict -> add a "folder" row and recurse into it
            child_iter = self.tree_store.append(treeiter, [key, "", True, element_dict_path])
            self.add_items_to_tree_iter(value, child_iter, element_dict_path)
        else:
            self.tree_store.append(treeiter, [key, value, False, element_dict_path])
def reload_tree_store_data(self):
    """Rebuild the tree store from the state's semantic data, keeping the selection."""
    model, paths = self.tree_view.get_selection().get_selected_rows()
    self.tree_store.clear()
    self.add_items_to_tree_iter(self.model.state.semantic_data, None)
    self.tree_view.expand_all()
    try:
        for selected_path in paths:
            self.tree_view.get_selection().select_path(selected_path)
    except ValueError:
        # The previous selection no longer exists after the rebuild
        pass
def copy_action_callback(self, *event):
    """Copy all selected (key, value) pairs into the global clipboard.

    Fix: selections with an empty dict path are skipped; previously the
    inner loop never bound ``path_element`` for them, so the following
    append raised a NameError (the cut callback already guards this case).
    """
    if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:
        _, dict_paths = self.get_view_selection()
        selected_data_list = []
        for dict_path_as_list in dict_paths:
            if not dict_path_as_list:
                continue
            value = self.model.state.semantic_data
            for path_element in dict_path_as_list:
                value = value[path_element]
            # path_element is the entry's own key after the walk
            selected_data_list.append((path_element, value))
        rafcon.gui.clipboard.global_clipboard.set_semantic_dictionary_list(selected_data_list)
def paste_action_callback(self, *event):
    """Paste clipboard (key, value) pairs into every selected sub-dictionary."""
    if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:
        _, dict_paths = self.get_view_selection()
        selected_data_list = rafcon.gui.clipboard.global_clipboard.get_semantic_dictionary_list()
        if not dict_paths and not self.model.state.semantic_data:
            # Empty tree and no selection -> paste into the root dictionary
            dict_paths = [[]]
        for target_dict_path_as_list in dict_paths:
            prev_value = self.model.state.semantic_data
            value = self.model.state.semantic_data
            for path_element in target_dict_path_as_list:
                prev_value = value
                value = value[path_element]
            if not isinstance(value, dict) and len(dict_paths) <= 1:
                # A single selected leaf -> paste into its parent dict instead
                target_dict_path_as_list.pop(-1)
                value = prev_value
            if isinstance(value, dict):
                for key_to_paste, value_to_add in selected_data_list:
                    self.model.state.add_semantic_data(target_dict_path_as_list, value_to_add, key_to_paste)
        self.reload_tree_store_data()
def cut_action_callback(self, *event):
    """Move all selected (key, value) pairs into the clipboard (copy + remove)."""
    if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:
        _, dict_paths = self.get_view_selection()
        stored_data_list = []
        for dict_path_as_list in dict_paths:
            if not dict_path_as_list:
                # Root selection has no key/value pair to cut
                continue
            value = self.model.state.semantic_data
            for path_element in dict_path_as_list:
                value = value[path_element]
            stored_data_list.append((path_element, value))
            self.model.state.remove_semantic_data(dict_path_as_list)
        rafcon.gui.clipboard.global_clipboard.set_semantic_dictionary_list(stored_data_list)
        self.reload_tree_store_data()
def key_edited(self, path, new_key_str):
    """Rename the key of a semantic data entry; empty names get a generated key."""
    if isinstance(path, string_types):
        tree_store_path = self.create_tree_store_path_from_key_string(path)
    else:
        tree_store_path = path
    if self.tree_store[tree_store_path][self.KEY_STORAGE_ID] == new_key_str:
        return
    dict_path = self.tree_store[tree_store_path][self.ID_STORAGE_ID]
    old_value = self.model.state.get_semantic_data(dict_path)
    self.model.state.remove_semantic_data(dict_path)
    if new_key_str == "":
        # Generate a unique key within the parent dictionary
        target_dict = self.model.state.semantic_data
        for element in dict_path[0:-1]:
            target_dict = target_dict[element]
        new_key_str = generate_semantic_data_key(list(target_dict.keys()))
    new_dict_path = self.model.state.add_semantic_data(dict_path[0:-1], old_value, key=new_key_str)
    # Remember the rename so selection can be restored after the reload
    self._changed_id_to = {':'.join(dict_path): new_dict_path}
    self.reload_tree_store_data()
def value_edited(self, path, new_value_str):
    """Set a new value for the semantic data entry at *path* (no-op if unchanged)."""
    if isinstance(path, string_types):
        tree_store_path = self.create_tree_store_path_from_key_string(path)
    else:
        tree_store_path = path
    if self.tree_store[tree_store_path][self.VALUE_STORAGE_ID] == new_value_str:
        return
    dict_path = self.tree_store[tree_store_path][self.ID_STORAGE_ID]
    # Re-adding under the same key overwrites the existing value
    self.model.state.add_semantic_data(dict_path[0:-1], new_value_str, key=dict_path[-1])
    self.reload_tree_store_data()
def store_widget_properties(self, widget, widget_name):
    """Persist position (plus size/maximized flag for windows) of *widget*."""
    if not isinstance(widget, Gtk.Window):
        self.set_config_value('{0}_POS'.format(widget_name), widget.get_position())
        return
    maximized = bool(widget.is_maximized())
    self.set_config_value('{0}_MAXIMIZED'.format(widget_name), maximized)
    if maximized:
        # Size/position of a maximized window are not worth restoring
        return
    self.set_config_value('{0}_SIZE'.format(widget_name), tuple(widget.get_size()))
    self.set_config_value('{0}_POS'.format(widget_name), tuple(widget.get_position()))
def update_recently_opened_state_machines_with(self, state_machine):
    """Move the state machine's path to the front of the recently-opened list.

    State machines without a file system path (never saved) are ignored.
    Fix: uses the idiomatic ``list.remove`` instead of ``del lst[lst.index(x)]``
    and a guard clause instead of wrapping the whole body in an if.
    """
    if not state_machine.file_system_path:
        return
    recently_opened = self.get_config_value('recently_opened_state_machines', [])
    if state_machine.file_system_path in recently_opened:
        recently_opened.remove(state_machine.file_system_path)
    recently_opened.insert(0, state_machine.file_system_path)
    self.set_config_value('recently_opened_state_machines', recently_opened)
def extend_recently_opened_by_current_open_state_machines(self):
    """Add every currently open state machine to the recently-opened list."""
    from rafcon.gui.singleton import state_machine_manager_model as state_machine_manager_m
    for state_machine_m in state_machine_manager_m.state_machines.values():
        self.update_recently_opened_state_machines_with(state_machine_m.state_machine)
def prepare_recently_opened_state_machines_list_for_storage(self):
    """Truncate the recently-opened list to the configured maximum length."""
    from rafcon.gui.singleton import global_gui_config
    limit = global_gui_config.get_config_value('NUMBER_OF_RECENT_OPENED_STATE_MACHINES_STORED')
    paths = self.get_config_value('recently_opened_state_machines', [])
    self.set_config_value('recently_opened_state_machines', paths[:limit])
def clean_recently_opened_state_machines(self):
    """Drop entries of the recently-opened list whose paths no longer exist."""
    paths = self.get_config_value('recently_opened_state_machines', [])
    # Filters the list in place
    filesystem.clean_file_system_paths_from_not_existing_paths(paths)
    self.set_config_value('recently_opened_state_machines', paths)
def pause(self):
    """Pause the currently active state machine execution."""
    if self.state_machine_manager.active_state_machine_id is None:
        logger.info("'Pause' is not a valid action to initiate state machine execution.")
        return
    active_sm = self.state_machine_manager.get_active_state_machine()
    if active_sm is not None:
        active_sm.root_state.recursively_pause_states()
    logger.debug("Pause execution ...")
    self.set_execution_mode(StateMachineExecutionStatus.PAUSED)
def finished_or_stopped(self):
    """Return True if the execution mode is STOPPED or FINISHED."""
    mode = self._status.execution_mode
    return mode is StateMachineExecutionStatus.STOPPED or \
        mode is StateMachineExecutionStatus.FINISHED
def start(self, state_machine_id=None, start_state_path=None):
    """Start (or resume) the state machine execution.

    :param state_machine_id: Optional id of the state machine to activate first
    :param start_state_path: Optional path of the state to start from; every
        prefix of the path is recorded as a start path
    """
    if not self.finished_or_stopped():
        # Engine is paused/stepping -> resume the currently active machine
        logger.debug("Resume execution engine ...")
        self.run_to_states = []
        active_sm = self.state_machine_manager.get_active_state_machine()
        if active_sm is not None:
            active_sm.root_state.recursively_resume_states()
        if isinstance(state_machine_id, int) and \
                state_machine_id != self.state_machine_manager.get_active_state_machine().state_machine_id:
            logger.info("Resumed state machine with id {0} but start of state machine id {1} was requested."
                        "".format(self.state_machine_manager.get_active_state_machine().state_machine_id,
                                  state_machine_id))
        self.set_execution_mode(StateMachineExecutionStatus.STARTED)
        return

    if self.state_machine_running:
        logger.warning("An old state machine is still running! Make sure that it terminates,"
                       " before you can start another state machine! {0}".format(self))
        return
    logger.debug("Start execution engine ...")
    if state_machine_id is not None:
        self.state_machine_manager.active_state_machine_id = state_machine_id
    if not self.state_machine_manager.active_state_machine_id:
        logger.error("There exists no active state machine!")
        return
    self.set_execution_mode(StateMachineExecutionStatus.STARTED)
    self.start_state_paths = []
    if start_state_path:
        # Record every prefix of the start state path
        cumulative_path = ""
        for segment in start_state_path.split("/"):
            cumulative_path = segment if not cumulative_path else cumulative_path + "/" + segment
            self.start_state_paths.append(cumulative_path)
    self._run_active_state_machine()
def stop(self):
    """Stop the execution and wake up all threads waiting on the engine."""
    logger.debug("Stop the state machine execution ...")
    active_sm = self.state_machine_manager.get_active_state_machine()
    if active_sm is not None:
        active_sm.root_state.recursively_preempt_states()
    self.__set_execution_mode_to_stopped()
    # Notify every waiter so paused/stepping threads observe the stop
    with self._status.execution_condition_variable:
        self._status.execution_condition_variable.notify_all()
    self.__running_state_machine = None
def join(self, timeout=None):
    """Block until the state machine execution has finished.

    :param timeout: Optional timeout in seconds
    :return: True if the execution finished, False on timeout or if never started

    Fix: ``Thread.isAlive()`` was removed in Python 3.9 — use ``is_alive()``
    (the method's own return statement already used the new spelling).
    """
    if not self.__wait_for_finishing_thread:
        logger.warning("Cannot join as state machine was not started yet.")
        return False
    if not timeout:
        # Join in short slices so the loop remains responsive
        while self.__wait_for_finishing_thread.is_alive():
            self.__wait_for_finishing_thread.join(0.5)
    else:
        self.__wait_for_finishing_thread.join(timeout)
    return not self.__wait_for_finishing_thread.is_alive()
def _run_active_state_machine(self):
    """Launch the active state machine and spawn the finish-observer thread.

    Fix: a missing active state machine was previously only logged and then
    dereferenced anyway (AttributeError on ``root_state``); now the engine is
    stopped and the method returns early.
    """
    self.__running_state_machine = self.state_machine_manager.get_active_state_machine()
    if not self.__running_state_machine:
        logger.error("The running state machine must not be None")
        logger.warning("Currently no active state machine! Please create a new state machine.")
        self.set_execution_mode(StateMachineExecutionStatus.STOPPED)
        return
    # Fresh queue for the concurrency synchronization of the root state
    self.__running_state_machine.root_state.concurrency_queue = queue.Queue(maxsize=0)
    self.__running_state_machine.start()
    self.__wait_for_finishing_thread = threading.Thread(target=self._wait_for_finishing)
    self.__wait_for_finishing_thread.start()
def _wait_for_finishing(self):
    """Wait for the running state machine and clean up once it terminates."""
    self.state_machine_running = True
    self.__running_state_machine.join()
    self.__set_execution_mode_to_finished()
    self.state_machine_manager.active_state_machine_id = None
    # Give plugins a chance to react to the finished execution
    plugins.run_on_state_machine_execution_finished()
    self.state_machine_running = False
def backward_step(self):
    """Trigger one backward step for all active states."""
    logger.debug("Executing backward step ...")
    self.run_to_states = []
    self.set_execution_mode(StateMachineExecutionStatus.BACKWARD)
def step_mode(self, state_machine_id=None):
    """Enter step mode; starts the state machine first if it is not running."""
    logger.debug("Activate step mode")
    if state_machine_id is not None:
        self.state_machine_manager.active_state_machine_id = state_machine_id
    self.run_to_states = []
    # Evaluate the engine state BEFORE switching the mode
    was_stopped_or_finished = self.finished_or_stopped()
    self.set_execution_mode(StateMachineExecutionStatus.STEP_MODE)
    if was_stopped_or_finished:
        self._run_active_state_machine()
def run_to_selected_state(self, path, state_machine_id=None):
    """Run the state machine up to (excluding) the state at *path*; asynchronous."""
    active_sm = self.state_machine_manager.get_active_state_machine()
    if active_sm is not None:
        active_sm.root_state.recursively_resume_states()
    if not self.finished_or_stopped():
        logger.debug("Resume execution engine and run to selected state!")
        self.run_to_states = [path]
        self.set_execution_mode(StateMachineExecutionStatus.RUN_TO_SELECTED_STATE)
    else:
        logger.debug("Start execution engine and run to selected state!")
        if state_machine_id is not None:
            self.state_machine_manager.active_state_machine_id = state_machine_id
        self.set_execution_mode(StateMachineExecutionStatus.RUN_TO_SELECTED_STATE)
        self.run_to_states = [path]
        self._run_active_state_machine()
def _wait_while_in_pause_or_in_step_mode(self):
    """Block the calling thread while the engine is paused or in step mode."""
    while self._status.execution_mode is StateMachineExecutionStatus.PAUSED or \
            self._status.execution_mode is StateMachineExecutionStatus.STEP_MODE:
        with self._status.execution_condition_variable:
            # Counted so tests/controllers can synchronize on waiting threads
            self.synchronization_counter += 1
            logger.verbose("Increase synchronization_counter: " + str(self.synchronization_counter))
            self._status.execution_condition_variable.wait()
def _wait_if_required(self, container_state, next_child_state_to_execute, woke_up_from_pause_or_step_mode):
    """Block in stepping mode unless a run-to target allows continuing.

    The last inspected entry of ``run_to_states`` decides: a match with the
    container state or the next child keeps waiting (and consumes the entry);
    any other last entry lets execution continue without waiting.
    """
    wait = True
    for state_path in copy.deepcopy(self.run_to_states):
        next_child_state_path = None
        if next_child_state_to_execute:
            next_child_state_path = next_child_state_to_execute.get_path()
        if state_path == container_state.get_path():
            wait = True
            self.run_to_states.remove(state_path)
            break
        elif state_path == next_child_state_path:
            wait = True
            self.run_to_states.remove(state_path)
            break
        else:
            wait = False
    if wait and not woke_up_from_pause_or_step_mode:
        logger.debug("Stepping mode: waiting for next step!")
        with self._status.execution_condition_variable:
            self.synchronization_counter += 1
            logger.verbose("Increase synchronization_counter: " + str(self.synchronization_counter))
            self._status.execution_condition_variable.wait()
        self._wait_while_in_pause_or_in_step_mode()
        container_state.execution_history.new_execution_command_handled = False
def handle_execution_mode(self, container_state, next_child_state_to_execute=None):
    """Inspect the current execution mode, wait/step as required, and return it.

    Called from within the states' execution. Depending on the mode this
    blocks (pause/step mode), records run-to targets (step over/out) or
    simply passes through (started/stopped/backward/...).
    """
    with self.state_counter_lock:
        self.state_counter += 1

    woke_up_from_pause_or_step_mode = False
    if self._status.execution_mode is StateMachineExecutionStatus.PAUSED or \
            self._status.execution_mode is StateMachineExecutionStatus.STEP_MODE:
        self._wait_while_in_pause_or_in_step_mode()
        container_state.execution_history.new_execution_command_handled = False
        woke_up_from_pause_or_step_mode = True

    if self._status.execution_mode is StateMachineExecutionStatus.STARTED:
        pass
    elif self._status.execution_mode is StateMachineExecutionStatus.STOPPED:
        logger.debug("Execution engine stopped. State '{0}' is going to quit in the case of "
                     "no preemption handling has to be done!".format(container_state.name))
    elif self._status.execution_mode is StateMachineExecutionStatus.FINISHED:
        raise Exception
    else:
        # One of the stepping modes: possibly wait for the next step command
        logger.verbose("before wait")
        self._wait_if_required(container_state, next_child_state_to_execute, woke_up_from_pause_or_step_mode)
        logger.verbose("after wait")
        if self._status.execution_mode is StateMachineExecutionStatus.BACKWARD:
            pass
        elif self._status.execution_mode is StateMachineExecutionStatus.FORWARD_INTO:
            pass
        elif self._status.execution_mode is StateMachineExecutionStatus.FORWARD_OVER:
            if not container_state.execution_history.new_execution_command_handled:
                # Step over: stop again when this container is reached next
                self.run_to_states.append(container_state.get_path())
        elif self._status.execution_mode is StateMachineExecutionStatus.FORWARD_OUT:
            from rafcon.core.states.state import State
            if isinstance(container_state.parent, State):
                if not container_state.execution_history.new_execution_command_handled:
                    from rafcon.core.states.library_state import LibraryState
                    # For library states step out to the library's own parent
                    if isinstance(container_state.parent, LibraryState):
                        parent_path = container_state.parent.parent.get_path()
                    else:
                        parent_path = container_state.parent.get_path()
                    self.run_to_states.append(parent_path)
            else:
                # Stepping out of the root state: simply keep running
                self.run_to_states = []
                self.set_execution_mode(StateMachineExecutionStatus.STARTED)
        elif self._status.execution_mode is StateMachineExecutionStatus.RUN_TO_SELECTED_STATE:
            pass

    container_state.execution_history.new_execution_command_handled = True
    return self._status.execution_mode
def execute_state_machine_from_path(self, state_machine=None, path=None, start_state_path=None,
                                    wait_for_execution_finished=True):
    """Start an arbitrary state machine, loading it from ``path`` if not given directly.

    Initializes the library manager, loads the state machine from ``path`` when no
    ``state_machine`` object is passed, registers it with the state machine manager,
    and starts execution (optionally at ``start_state_path``).

    :param state_machine: an already-loaded state machine object, or None to load from ``path``
    :param path: file-system path of the state machine to load (used only if ``state_machine`` is None)
    :param start_state_path: optional path of the state to start execution at
    :param wait_for_execution_finished: if True, block until execution finishes, then stop the engine
    :return: the executed state machine object
    :raises ValueError: if neither ``state_machine`` nor ``path`` is provided
    """
    # local imports — presumably to avoid circular imports at module load time
    import rafcon.core.singleton
    from rafcon.core.storage import storage
    # Fail fast with a clear message instead of an obscure error inside the storage layer.
    if state_machine is None and path is None:
        raise ValueError("Either a state_machine object or a path to load one from must be given")
    rafcon.core.singleton.library_manager.initialize()
    if not state_machine:
        state_machine = storage.load_state_machine_from_path(path)
        rafcon.core.singleton.state_machine_manager.add_state_machine(state_machine)
    rafcon.core.singleton.state_machine_execution_engine.start(state_machine.state_machine_id,
                                                               start_state_path=start_state_path)
    if wait_for_execution_finished:
        # Block until the execution thread terminates, then shut the engine down cleanly.
        self.join()
        self.stop()
    return state_machine
def set_execution_mode(self, execution_mode, notify=True):
    """Set the execution mode of the state machine status (observed setter).

    Required by the monitoring client so a local state machine can be updated the
    same way as the root state machine on the server.

    :param execution_mode: the new mode, must be a StateMachineExecutionStatus member
    :param notify: if True, wake up all threads waiting on the execution condition variable
    :raises TypeError: if ``execution_mode`` is not a StateMachineExecutionStatus
    """
    if not isinstance(execution_mode, StateMachineExecutionStatus):
        raise TypeError("status must be of type StateMachineExecutionStatus")
    self._status.execution_mode = execution_mode
    if not notify:
        return
    # Wake every waiter so they re-check the freshly set mode.
    condition = self._status.execution_condition_variable
    condition.acquire()
    condition.notify_all()
    condition.release()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.