idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
40,300 | def substitute_selected_state ( state , as_template = False , keep_name = False ) : assert isinstance ( state , State ) from rafcon . core . states . barrier_concurrency_state import DeciderState if isinstance ( state , DeciderState ) : raise ValueError ( "State of type DeciderState can not be substituted." ) smm_m = rafcon . gui . singleton . state_machine_manager_model if not smm_m . selected_state_machine_id : logger . error ( "Selected state machine can not be found, please select a state within a state machine first." ) return False selection = smm_m . state_machines [ smm_m . selected_state_machine_id ] . selection selected_state_m = selection . get_selected_state ( ) if len ( selection . states ) != 1 : logger . error ( "Please select exactly one state for the substitution" ) return False if is_selection_inside_of_library_state ( selected_elements = [ selected_state_m ] ) : logger . warning ( "Substitute is not performed because target state is inside of a library state." ) return gui_helper_state . substitute_state_as ( selected_state_m , state , as_template , keep_name ) return True | Substitute the selected state with the handed state |
40,301 | def modify_origin ( self , from_state , from_outcome ) : if not ( from_state is None and from_outcome is None ) : if not isinstance ( from_state , string_types ) : raise ValueError ( "Invalid transition origin port: from_state must be a string" ) if not isinstance ( from_outcome , int ) : raise ValueError ( "Invalid transition origin port: from_outcome must be of type int" ) old_from_state = self . from_state old_from_outcome = self . from_outcome self . _from_state = from_state self . _from_outcome = from_outcome valid , message = self . _check_validity ( ) if not valid : self . _from_state = old_from_state self . _from_outcome = old_from_outcome raise ValueError ( "The transition origin could not be changed: {0}" . format ( message ) ) | Set both from_state and from_outcome at the same time to modify transition origin |
40,302 | def modify_target ( self , to_state , to_outcome = None ) : if not ( to_state is None and ( to_outcome is not int and to_outcome is not None ) ) : if not isinstance ( to_state , string_types ) : raise ValueError ( "Invalid transition target port: to_state must be a string" ) if not isinstance ( to_outcome , int ) and to_outcome is not None : raise ValueError ( "Invalid transition target port: to_outcome must be of type int or None (if to_state " "is of type str)" ) old_to_state = self . to_state old_to_outcome = self . to_outcome self . _to_state = to_state self . _to_outcome = to_outcome valid , message = self . _check_validity ( ) if not valid : self . _to_state = old_to_state self . _to_outcome = old_to_outcome raise ValueError ( "The transition target could not be changed: {0}" . format ( message ) ) | Set both to_state and to_outcome at the same time to modify transition target |
40,303 | def global_variable_is_editable ( self , gv_name , intro_message = 'edit' ) : if self . model . global_variable_manager . is_locked ( gv_name ) : logger . error ( "{1} of global variable '{0}' is not possible, as it is locked" . format ( gv_name , intro_message ) ) return False return True | Check whether global variable is locked |
40,304 | def on_add ( self , widget , data = None ) : gv_name = "new_global_%s" % self . global_variable_counter self . global_variable_counter += 1 try : self . model . global_variable_manager . set_variable ( gv_name , None ) except ( RuntimeError , AttributeError , TypeError ) as e : logger . warning ( "Addition of new global variable '{0}' failed: {1}" . format ( gv_name , e ) ) self . select_entry ( gv_name ) return True | Create a global variable with default value and select its row |
40,305 | def on_lock ( self , widget , data = None ) : path_list = None if self . view is not None : model , path_list = self . tree_view . get_selection ( ) . get_selected_rows ( ) models = [ self . list_store [ path ] [ self . MODEL_STORAGE_ID ] for path in path_list ] if path_list else [ ] if models : if len ( models ) > 1 : self . _logger . warning ( "Please select only one element to be locked." ) try : self . model . global_variable_manager . lock_variable ( models [ 0 ] ) except AttributeError as e : self . _logger . warning ( "The respective core element of {1}.list_store couldn't be locked. -> {0}" "" . format ( e , self . __class__ . __name__ ) ) return True else : self . _logger . warning ( "Please select an element to be locked." ) | Locks respective selected core element |
40,306 | def remove_core_element ( self , model ) : gv_name = model if self . global_variable_is_editable ( gv_name , "Deletion" ) : try : self . model . global_variable_manager . delete_variable ( gv_name ) except AttributeError as e : logger . warning ( "The respective global variable '{1}' couldn't be removed. -> {0}" "" . format ( e , model ) ) | Remove respective core element of handed global variable name |
40,307 | def assign_notification_from_gvm ( self , model , prop_name , info ) : if info [ 'method_name' ] in [ 'set_locked_variable' ] or info [ 'result' ] is Exception : return if info [ 'method_name' ] in [ 'lock_variable' , 'unlock_variable' ] : key = info . kwargs . get ( 'key' , info . args [ 1 ] ) if len ( info . args ) > 1 else info . kwargs [ 'key' ] if key in self . list_store_iterators : gv_row_path = self . list_store . get_path ( self . list_store_iterators [ key ] ) self . list_store [ gv_row_path ] [ self . IS_LOCKED_AS_STRING_STORAGE_ID ] = str ( self . model . global_variable_manager . is_locked ( key ) ) elif info [ 'method_name' ] in [ 'set_variable' , 'delete_variable' ] : if info [ 'method_name' ] == 'set_variable' : key = info . kwargs . get ( 'key' , info . args [ 1 ] ) if len ( info . args ) > 1 else info . kwargs [ 'key' ] if key in self . list_store_iterators : gv_row_path = self . list_store . get_path ( self . list_store_iterators [ key ] ) self . list_store [ gv_row_path ] [ self . VALUE_AS_STRING_STORAGE_ID ] = str ( self . model . global_variable_manager . get_representation ( key ) ) self . list_store [ gv_row_path ] [ self . DATA_TYPE_AS_STRING_STORAGE_ID ] = self . model . global_variable_manager . get_data_type ( key ) . __name__ return self . update_global_variables_list_store ( ) else : logger . warning ( 'Notification that is not handled' ) | Handles gtkmvc3 notification from global variable manager |
40,308 | def update_global_variables_list_store ( self ) : self . list_store_iterators = { } self . list_store . clear ( ) keys = self . model . global_variable_manager . get_all_keys ( ) keys . sort ( ) for key in keys : iter = self . list_store . append ( [ key , self . model . global_variable_manager . get_data_type ( key ) . __name__ , str ( self . model . global_variable_manager . get_representation ( key ) ) , str ( self . model . global_variable_manager . is_locked ( key ) ) , ] ) self . list_store_iterators [ key ] = iter | Updates the global variable list store |
40,309 | def get_view_for_id ( self , view_class , element_id , parent_item = None ) : from rafcon . gui . mygaphas . items . state import StateView from rafcon . gui . mygaphas . items . connection import DataFlowView , TransitionView if parent_item is None : items = self . get_all_items ( ) else : items = self . get_children ( parent_item ) for item in items : if view_class is StateView and isinstance ( item , StateView ) and item . model . state . state_id == element_id : return item if view_class is TransitionView and isinstance ( item , TransitionView ) and item . model . transition . transition_id == element_id : return item if view_class is DataFlowView and isinstance ( item , DataFlowView ) and item . model . data_flow . data_flow_id == element_id : return item return None | Searches and returns the View for the given id and type |
40,310 | def wait_for_update ( self , trigger_update = False ) : if trigger_update : self . update_now ( ) from gi . repository import Gtk from gi . repository import GLib from threading import Event event = Event ( ) def priority_handled ( event ) : event . set ( ) priority = ( GLib . PRIORITY_HIGH_IDLE + GLib . PRIORITY_DEFAULT_IDLE ) / 2 GLib . idle_add ( priority_handled , event , priority = priority ) while not event . is_set ( ) : Gtk . main_iteration ( ) | Update canvas and handle all events in the gtk queue |
40,311 | def _get_value ( self ) : x , y = self . _point . x , self . _point . y self . _px , self . _py = self . _item_point . canvas . get_matrix_i2i ( self . _item_point , self . _item_target ) . transform_point ( x , y ) return self . _px , self . _py | Return two delegating variables . Each variable should contain a value attribute with the real value . |
40,312 | def convert_string_to_type ( string_value ) : if string_value in [ 'None' , type ( None ) . __name__ ] : return type ( None ) if isinstance ( string_value , type ) or isclass ( string_value ) : return string_value if sys . version_info >= ( 3 , ) : import builtins as builtins23 else : import __builtin__ as builtins23 if hasattr ( builtins23 , string_value ) : obj = getattr ( builtins23 , string_value ) if type ( obj ) is type : return obj try : obj = locate ( string_value ) except ErrorDuringImport as e : raise ValueError ( "Unknown type '{0}'" . format ( e ) ) if type ( obj ) is type : return locate ( string_value ) if isclass ( obj ) : return obj raise ValueError ( "Unknown type '{0}'" . format ( string_value ) ) | Converts a string into a type or class |
40,313 | def convert_string_value_to_type_value ( string_value , data_type ) : from ast import literal_eval try : if data_type in ( str , type ( None ) ) : converted_value = str ( string_value ) elif data_type == int : converted_value = int ( string_value ) elif data_type == float : converted_value = float ( string_value ) elif data_type == bool : converted_value = bool ( literal_eval ( string_value ) ) elif data_type in ( list , dict , tuple ) : converted_value = literal_eval ( string_value ) if type ( converted_value ) != data_type : raise ValueError ( "Invalid syntax: {0}" . format ( string_value ) ) elif data_type == object : try : converted_value = literal_eval ( string_value ) except ( ValueError , SyntaxError ) : converted_value = literal_eval ( '"' + string_value + '"' ) elif isinstance ( data_type , type ) : converted_value = data_type ( string_value ) elif isclass ( data_type ) : converted_value = data_type ( string_value ) else : raise ValueError ( "No conversion from string '{0}' to data type '{0}' defined" . format ( string_value , data_type . __name__ ) ) except ( ValueError , SyntaxError , TypeError ) as e : raise AttributeError ( "Can't convert '{0}' to type '{1}': {2}" . format ( string_value , data_type . __name__ , e ) ) return converted_value | Helper function to convert a given string to a given data type |
40,314 | def type_inherits_of_type ( inheriting_type , base_type ) : assert isinstance ( inheriting_type , type ) or isclass ( inheriting_type ) assert isinstance ( base_type , type ) or isclass ( base_type ) if inheriting_type == base_type : return True else : if len ( inheriting_type . __bases__ ) != 1 : return False return type_inherits_of_type ( inheriting_type . __bases__ [ 0 ] , base_type ) | Checks whether inheriting_type inherits from base_type |
40,315 | def clear_results_db ( session ) : from egoio . db_tables . model_draft import EgoGridPfHvResultBus as BusResult , EgoGridPfHvResultBusT as BusTResult , EgoGridPfHvResultStorage as StorageResult , EgoGridPfHvResultStorageT as StorageTResult , EgoGridPfHvResultGenerator as GeneratorResult , EgoGridPfHvResultGeneratorT as GeneratorTResult , EgoGridPfHvResultLine as LineResult , EgoGridPfHvResultLineT as LineTResult , EgoGridPfHvResultLoad as LoadResult , EgoGridPfHvResultLoadT as LoadTResult , EgoGridPfHvResultTransformer as TransformerResult , EgoGridPfHvResultTransformerT as TransformerTResult , EgoGridPfHvResultMeta as ResultMeta print ( 'Are you sure that you want to clear all results in the OEDB?' ) choice = '' while choice not in [ 'y' , 'n' ] : choice = input ( '(y/n): ' ) if choice == 'y' : print ( 'Are you sure?' ) choice2 = '' while choice2 not in [ 'y' , 'n' ] : choice2 = input ( '(y/n): ' ) if choice2 == 'y' : print ( 'Deleting all results...' ) session . query ( BusResult ) . delete ( ) session . query ( BusTResult ) . delete ( ) session . query ( StorageResult ) . delete ( ) session . query ( StorageTResult ) . delete ( ) session . query ( GeneratorResult ) . delete ( ) session . query ( GeneratorTResult ) . delete ( ) session . query ( LoadResult ) . delete ( ) session . query ( LoadTResult ) . delete ( ) session . query ( LineResult ) . delete ( ) session . query ( LineTResult ) . delete ( ) session . query ( TransformerResult ) . delete ( ) session . query ( TransformerTResult ) . delete ( ) session . query ( ResultMeta ) . delete ( ) session . commit ( ) else : print ( 'Deleting aborted!' ) else : print ( 'Deleting aborted!' ) | Used to clear the result tables in the OEDB . Caution! This deletes EVERY RESULT SET! |
40,316 | def run_sql_script ( conn , scriptname = 'results_md2grid.sql' ) : script_dir = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , 'sql_scripts' ) ) script_str = open ( os . path . join ( script_dir , scriptname ) ) . read ( ) conn . execution_options ( autocommit = True ) . execute ( script_str ) return | This function runs . sql scripts in the folder sql_scripts |
40,317 | def distance ( x0 , x1 , y0 , y1 ) : distance = ( x1 . values - x0 . values ) * ( x1 . values - x0 . values ) + ( y1 . values - y0 . values ) * ( y1 . values - y0 . values ) return distance | Function that calculates the square of the distance between two points . |
40,318 | def calc_nearest_point ( bus1 , network ) : bus1_index = network . buses . index [ network . buses . index == bus1 ] forbidden_buses = np . append ( bus1_index . values , network . lines . bus1 [ network . lines . bus0 == bus1 ] . values ) forbidden_buses = np . append ( forbidden_buses , network . lines . bus0 [ network . lines . bus1 == bus1 ] . values ) forbidden_buses = np . append ( forbidden_buses , network . links . bus0 [ network . links . bus1 == bus1 ] . values ) forbidden_buses = np . append ( forbidden_buses , network . links . bus1 [ network . links . bus0 == bus1 ] . values ) x0 = network . buses . x [ network . buses . index . isin ( bus1_index ) ] y0 = network . buses . y [ network . buses . index . isin ( bus1_index ) ] comparable_buses = network . buses [ ~ network . buses . index . isin ( forbidden_buses ) ] x1 = comparable_buses . x y1 = comparable_buses . y distance = ( x1 . values - x0 . values ) * ( x1 . values - x0 . values ) + ( y1 . values - y0 . values ) * ( y1 . values - y0 . values ) min_distance = distance . min ( ) bus0 = comparable_buses [ ( ( ( x1 . values - x0 . values ) * ( x1 . values - x0 . values ) + ( y1 . values - y0 . values ) * ( y1 . values - y0 . values ) ) == min_distance ) ] bus0 = bus0 . index [ bus0 . index == bus0 . index . max ( ) ] bus0 = '' . join ( bus0 . values ) return bus0 | Function that finds the geographical nearest point in a network from a given bus . |
40,319 | def map_ormclass ( self , name ) : try : self . _mapped [ name ] = getattr ( self . _pkg , self . _prefix + name ) except AttributeError : print ( 'Warning: Relation %s does not exist.' % name ) | Populate _mapped attribute with orm class |
40,320 | def configure_timeindex ( self ) : try : ormclass = self . _mapped [ 'TempResolution' ] if self . version : tr = self . session . query ( ormclass ) . filter ( ormclass . temp_id == self . temp_id ) . filter ( ormclass . version == self . version ) . one ( ) else : tr = self . session . query ( ormclass ) . filter ( ormclass . temp_id == self . temp_id ) . one ( ) except ( KeyError , NoResultFound ) : print ( 'temp_id %s does not exist.' % self . temp_id ) timeindex = pd . DatetimeIndex ( start = tr . start_time , periods = tr . timesteps , freq = tr . resolution ) self . timeindex = timeindex [ self . start_snapshot - 1 : self . end_snapshot ] | Construct a DateTimeIndex with the queried temporal resolution start - and end_snapshot . |
40,321 | def fetch_by_relname ( self , name ) : ormclass = self . _mapped [ name ] query = self . session . query ( ormclass ) if name != carr_ormclass : query = query . filter ( ormclass . scn_name == self . scn_name ) if self . version : query = query . filter ( ormclass . version == self . version ) if name == 'Transformer' : name = 'Trafo' df = pd . read_sql ( query . statement , self . session . bind , index_col = name . lower ( ) + '_id' ) if name == 'Link' : df [ 'bus0' ] = df . bus0 . astype ( int ) df [ 'bus1' ] = df . bus1 . astype ( int ) if 'source' in df : df . source = df . source . map ( self . id_to_source ( ) ) return df | Construct DataFrame with component data from filtered table data . |
40,322 | def series_fetch_by_relname ( self , name , column ) : ormclass = self . _mapped [ name ] id_column = re . findall ( r'[A-Z][^A-Z]*' , name ) [ 0 ] + '_' + 'id' id_column = id_column . lower ( ) query = self . session . query ( getattr ( ormclass , id_column ) , getattr ( ormclass , column ) [ self . start_snapshot : self . end_snapshot ] . label ( column ) ) . filter ( and_ ( ormclass . scn_name == self . scn_name , ormclass . temp_id == self . temp_id ) ) if self . version : query = query . filter ( ormclass . version == self . version ) df = pd . io . sql . read_sql ( query . statement , self . session . bind , columns = [ column ] , index_col = id_column ) df . index = df . index . astype ( str ) df = df [ column ] . apply ( pd . Series ) . transpose ( ) try : assert not df . empty df . index = self . timeindex except AssertionError : print ( "No data for %s in column %s." % ( name , column ) ) return df | Construct DataFrame with component timeseries data from filtered table data . |
40,323 | def build_network ( self , network = None , * args , ** kwargs ) : if network != None : network = network else : network = pypsa . Network ( ) network . set_snapshots ( self . timeindex ) timevarying_override = False if pypsa . __version__ == '0.11.0' : old_to_new_name = { 'Generator' : { 'p_min_pu_fixed' : 'p_min_pu' , 'p_max_pu_fixed' : 'p_max_pu' , 'source' : 'carrier' , 'dispatch' : 'former_dispatch' } , 'Bus' : { 'current_type' : 'carrier' } , 'Transformer' : { 'trafo_id' : 'transformer_id' } , 'Storage' : { 'p_min_pu_fixed' : 'p_min_pu' , 'p_max_pu_fixed' : 'p_max_pu' , 'soc_cyclic' : 'cyclic_state_of_charge' , 'soc_initial' : 'state_of_charge_initial' , 'source' : 'carrier' } } timevarying_override = True else : old_to_new_name = { 'Storage' : { 'soc_cyclic' : 'cyclic_state_of_charge' , 'soc_initial' : 'state_of_charge_initial' } } for comp , comp_t_dict in self . config . items ( ) : pypsa_comp_name = 'StorageUnit' if comp == 'Storage' else comp df = self . fetch_by_relname ( comp ) if comp in old_to_new_name : tmp = old_to_new_name [ comp ] df . rename ( columns = tmp , inplace = True ) network . import_components_from_dataframe ( df , pypsa_comp_name ) if comp_t_dict : for comp_t , columns in comp_t_dict . items ( ) : for col in columns : df_series = self . series_fetch_by_relname ( comp_t , col ) if timevarying_override and comp == 'Generator' and not df_series . empty : idx = df [ df . former_dispatch == 'flexible' ] . index idx = [ i for i in idx if i in df_series . columns ] df_series . drop ( idx , axis = 1 , inplace = True ) try : pypsa . io . import_series_from_dataframe ( network , df_series , pypsa_comp_name , col ) except ( ValueError , AttributeError ) : print ( "Series %s of component %s could not be " "imported" % ( col , pypsa_comp_name ) ) network . import_components_from_dataframe ( self . fetch_by_relname ( carr_ormclass ) , 'Carrier' ) self . network = network return network | Core method to construct PyPSA Network object . |
40,324 | def run ( self ) : logger . debug ( "Starting execution of {0}{1}" . format ( self , " (backwards)" if self . backward_execution else "" ) ) self . setup_run ( ) try : concurrency_history_item = self . setup_forward_or_backward_execution ( ) concurrency_queue = self . start_child_states ( concurrency_history_item ) finished_thread_id = concurrency_queue . get ( ) finisher_state = self . states [ finished_thread_id ] finisher_state . join ( ) if not self . backward_execution : for state_id , state in self . states . items ( ) : state . recursively_preempt_states ( ) for history_index , state in enumerate ( self . states . values ( ) ) : self . join_state ( state , history_index , concurrency_history_item ) self . add_state_execution_output_to_scoped_data ( state . output_data , state ) self . update_scoped_variables_with_output_dictionary ( state . output_data , state ) self . add_state_execution_output_to_scoped_data ( finisher_state . output_data , finisher_state ) self . update_scoped_variables_with_output_dictionary ( finisher_state . output_data , finisher_state ) if self . states [ finished_thread_id ] . backward_execution : return self . finalize_backward_execution ( ) else : self . backward_execution = False transition = self . get_transition_for_outcome ( self . states [ finished_thread_id ] , self . states [ finished_thread_id ] . final_outcome ) if transition is None : transition = self . handle_no_transition ( self . states [ finished_thread_id ] ) if transition is None : self . output_data [ "error" ] = RuntimeError ( "state aborted" ) else : if 'error' in self . states [ finished_thread_id ] . output_data : self . output_data [ "error" ] = self . states [ finished_thread_id ] . output_data [ 'error' ] self . final_outcome = self . outcomes [ transition . to_outcome ] return self . finalize_concurrency_state ( self . final_outcome ) except Exception as e : logger . error ( "{0} had an internal error: {1}\n{2}" . 
format ( self , str ( e ) , str ( traceback . format_exc ( ) ) ) ) self . output_data [ "error" ] = e self . state_execution_status = StateExecutionStatus . WAIT_FOR_NEXT_STATE return self . finalize ( Outcome ( - 1 , "aborted" ) ) | This defines the sequence of actions that are taken when the preemptive concurrency state is executed |
40,325 | def _check_transition_validity ( self , check_transition ) : valid , message = super ( PreemptiveConcurrencyState , self ) . _check_transition_validity ( check_transition ) if not valid : return False , message if check_transition . to_state != self . state_id : return False , "Only transitions to the parent state are allowed" return True , message | Transition of BarrierConcurrencyStates must least fulfill the condition of a ContainerState . Start transitions are forbidden in the ConcurrencyState |
40,326 | def recover_specific_version ( self , pointer_on_version_to_recover ) : logger . info ( "Going to history status #{0}" . format ( pointer_on_version_to_recover ) ) undo_redo_list = self . modifications . get_undo_redo_list_from_active_trail_history_item_to_version_id ( pointer_on_version_to_recover ) logger . debug ( "Multiple undo and redo to reach modification history element of version {0} " "-> undo-redo-list is: {1}" . format ( pointer_on_version_to_recover , undo_redo_list ) ) self . state_machine_model . storage_lock . acquire ( ) for elem in undo_redo_list : if elem [ 1 ] == 'undo' : self . _undo ( elem [ 0 ] ) else : self . _redo ( elem [ 0 ] ) self . modifications . reorganize_trail_history_for_version_id ( pointer_on_version_to_recover ) self . change_count += 1 self . state_machine_model . storage_lock . release ( ) | Recovers a specific version of the all_time_history element by doing several undos and redos . |
40,327 | def get_undo_redo_list_from_active_trail_history_item_to_version_id ( self , version_id ) : all_trail_action = [ a . version_id for a in self . single_trail_history ( ) if a is not None ] all_active_action = self . get_all_active_actions ( ) undo_redo_list = [ ] _undo_redo_list = [ ] intermediate_version_id = version_id if self . with_verbose : logger . verbose ( "Version_id : {0} in" . format ( intermediate_version_id ) ) logger . verbose ( "Active actions: {0} in: {1}" . format ( all_active_action , intermediate_version_id in all_active_action ) ) logger . verbose ( "Trail actions : {0} in: {1}" . format ( all_trail_action , intermediate_version_id in all_trail_action ) ) if intermediate_version_id not in all_trail_action : while intermediate_version_id not in all_trail_action : _undo_redo_list . insert ( 0 , ( intermediate_version_id , 'redo' ) ) intermediate_version_id = self . all_time_history [ intermediate_version_id ] . prev_id intermediate_goal_version_id = intermediate_version_id else : intermediate_goal_version_id = version_id intermediate_version_id = self . trail_history [ self . trail_pointer ] . version_id if self . with_verbose : logger . verbose ( "Version_id : {0} {1}" . format ( intermediate_goal_version_id , intermediate_version_id ) ) logger . verbose ( "Active actions: {0} in: {1}" . format ( all_active_action , intermediate_version_id in all_active_action ) ) logger . verbose ( "Trail actions : {0} in: {1}" . format ( all_trail_action , intermediate_version_id in all_trail_action ) ) if intermediate_goal_version_id in all_active_action : while not intermediate_version_id == intermediate_goal_version_id : undo_redo_list . append ( ( intermediate_version_id , 'undo' ) ) intermediate_version_id = self . all_time_history [ intermediate_version_id ] . prev_id elif intermediate_goal_version_id in all_trail_action : while not intermediate_version_id == intermediate_goal_version_id : intermediate_version_id = self . 
all_time_history [ intermediate_version_id ] . next_id undo_redo_list . append ( ( intermediate_version_id , 'redo' ) ) for elem in _undo_redo_list : undo_redo_list . append ( elem ) return undo_redo_list | Perform fast search from currently active branch to specific version_id and collect all recovery steps . |
40,328 | def set_pane_position ( self , config_id ) : default_pos = constants . DEFAULT_PANE_POS [ config_id ] position = global_runtime_config . get_config_value ( config_id , default_pos ) pane_id = constants . PANE_ID [ config_id ] self . view [ pane_id ] . set_position ( position ) | Adjusts the position of a GTK Pane to a value stored in the runtime config file . If there was no value stored the pane s position is set to a default value . |
40,329 | def model_changed ( self , model , prop_name , info ) : if not self . view : return execution_engine = rafcon . core . singleton . state_machine_execution_engine label_string = str ( execution_engine . status . execution_mode ) label_string = label_string . replace ( "STATE_MACHINE_EXECUTION_STATUS." , "" ) self . view [ 'execution_status_label' ] . set_text ( label_string ) current_execution_mode = execution_engine . status . execution_mode if current_execution_mode is StateMachineExecutionStatus . STARTED : self . view [ 'step_buttons' ] . hide ( ) self . _set_single_button_active ( 'button_start_shortcut' ) elif current_execution_mode is StateMachineExecutionStatus . PAUSED : self . view [ 'step_buttons' ] . hide ( ) self . _set_single_button_active ( 'button_pause_shortcut' ) elif execution_engine . finished_or_stopped ( ) : self . view [ 'step_buttons' ] . hide ( ) self . _set_single_button_active ( 'button_stop_shortcut' ) else : self . view [ 'step_buttons' ] . show ( ) self . _set_single_button_active ( 'button_step_mode_shortcut' ) | Highlight buttons according actual execution status . Furthermore it triggers the label redraw of the active state machine . |
40,330 | def focus_notebook_page_of_controller ( self , controller ) : if controller not in self . get_child_controllers ( ) : return if not self . modification_history_was_focused and isinstance ( controller , ModificationHistoryTreeController ) and self . view is not None : self . view . bring_tab_to_the_top ( 'history' ) self . modification_history_was_focused = True if self . view is not None and isinstance ( controller , ExecutionHistoryTreeController ) : self . view . bring_tab_to_the_top ( 'execution_history' ) self . modification_history_was_focused = False | Puts the focus on the given child controller |
40,331 | def on_notebook_tab_switch ( self , notebook , page , page_num , title_label , window , notebook_identifier ) : title = gui_helper_label . set_notebook_title ( notebook , page_num , title_label ) window . reset_title ( title , notebook_identifier ) self . on_switch_page_check_collapse_button ( notebook , page_num ) | Triggered whenever a left - bar notebook tab is changed . |
40,332 | def _on_key_press ( self , widget , event ) : self . currently_pressed_keys . add ( event . keyval ) if event . keyval in [ Gdk . KEY_Tab , Gdk . KEY_ISO_Left_Tab ] and event . state & Gdk . ModifierType . CONTROL_MASK : self . toggle_sidebars ( ) | Updates the currently pressed keys |
40,333 | def prepare_destruction ( self ) : plugins . run_hook ( "pre_destruction" ) logger . debug ( "Saving runtime config to {0}" . format ( global_runtime_config . config_file_path ) ) for key , widget_name in constants . PANE_ID . items ( ) : global_runtime_config . store_widget_properties ( self . view [ widget_name ] , key . replace ( '_POS' , '' ) ) for window_key in constants . UNDOCKABLE_WINDOW_KEYS : hidden = False if not global_runtime_config . get_config_value ( window_key + "_WINDOW_UNDOCKED" ) : hidden = getattr ( self , window_key . lower ( ) + '_hidden' ) global_runtime_config . set_config_value ( window_key + '_HIDDEN' , hidden ) global_runtime_config . save_configuration ( ) self . get_controller ( 'states_editor_ctrl' ) . prepare_destruction ( ) rafcon . core . singleton . state_machine_manager . delete_all_state_machines ( ) rafcon . core . singleton . library_manager . prepare_destruction ( ) self . destroy ( ) from rafcon . gui . clipboard import global_clipboard global_clipboard . destroy ( ) gui_singletons . main_window_controller = None | Saves current configuration of windows and panes to the runtime config file before RAFCON is closed . |
40,334 | def setup_forward_or_backward_execution ( self ) : if self . backward_execution : last_history_item = self . execution_history . pop_last_item ( ) assert isinstance ( last_history_item , ReturnItem ) self . scoped_data = last_history_item . scoped_data concurrency_history_item = self . execution_history . get_last_history_item ( ) assert isinstance ( concurrency_history_item , ConcurrencyItem ) else : self . execution_history . push_call_history_item ( self , CallType . CONTAINER , self , self . input_data ) concurrency_history_item = self . execution_history . push_concurrency_history_item ( self , len ( self . states ) ) return concurrency_history_item | Sets up the execution of the concurrency states dependent on if the state is executed forward of backward . |
40,335 | def start_child_states ( self , concurrency_history_item , do_not_start_state = None ) : self . state_execution_status = StateExecutionStatus . EXECUTE_CHILDREN concurrency_queue = queue . Queue ( maxsize = 0 ) for index , state in enumerate ( self . states . values ( ) ) : if state is not do_not_start_state : state . input_data = self . get_inputs_for_state ( state ) state . output_data = self . create_output_dictionary_for_state ( state ) state . concurrency_queue = concurrency_queue state . concurrency_queue_id = index state . generate_run_id ( ) if not self . backward_execution : concurrency_history_item . execution_histories [ index ] . push_call_history_item ( state , CallType . EXECUTE , self , state . input_data ) else : last_history_item = concurrency_history_item . execution_histories [ index ] . pop_last_item ( ) assert isinstance ( last_history_item , ReturnItem ) state . start ( concurrency_history_item . execution_histories [ index ] , self . backward_execution , False ) return concurrency_queue | Utility function to start all child states of the concurrency state . |
def join_state(self, state, history_index, concurrency_history_item):
    """Wait for a child state to finish and record/unwind its history entry.

    :param state: the child state to join
    :param history_index: index of the child's execution history
    :param concurrency_history_item: concurrency item holding the child histories
    """
    state.join()
    # a child that ran backwards switches the whole concurrency state to backward mode
    if state.backward_execution:
        self.backward_execution = True
    state.state_execution_status = StateExecutionStatus.INACTIVE
    if not self.backward_execution:
        state.concurrency_queue = None
        state.execution_history.push_return_history_item(state, CallType.EXECUTE, self, state.output_data)
    else:
        # backward: pop the call item that was recorded for this child
        last_history_item = concurrency_history_item.execution_histories[history_index].pop_last_item()
        assert isinstance(last_history_item, CallItem)
def finalize_backward_execution(self):
    """Finalize the backward execution of the concurrency state.

    Pops the concurrency item and the call item, restores the scoped data
    valid before the state was originally entered and finalizes without an
    outcome.

    :return: the result of finalize()
    """
    self.backward_execution = True
    last_history_item = self.execution_history.pop_last_item()
    assert isinstance(last_history_item, ConcurrencyItem)
    last_history_item = self.execution_history.pop_last_item()
    assert isinstance(last_history_item, CallItem)
    # restore the scoped data recorded at the original call
    self.scoped_data = last_history_item.scoped_data
    self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE
    return self.finalize()
def finalize_concurrency_state(self, outcome):
    """Finalize the forward execution of the concurrency state.

    :param outcome: the outcome the state would normally finish with
    :return: the result of finalize(); the outcome is replaced by "preempted"
        (id -2) if the state was preempted
    """
    final_outcome = outcome
    self.write_output_data()
    self.check_output_data_type()
    self.execution_history.push_return_history_item(self, CallType.CONTAINER, self, self.output_data)
    self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE
    singleton.state_machine_execution_engine._modify_run_to_states(self)
    if self.preempted:
        final_outcome = Outcome(-2, "preempted")
    return self.finalize(final_outcome)
40,339 | def _head_length ( self , port ) : if not port : return 0. parent_state_v = self . get_parent_state_v ( ) if parent_state_v is port . parent : return port . port_size [ 1 ] return max ( port . port_size [ 1 ] * 1.5 , self . _calc_line_width ( ) / 1.3 ) | Distance from the center of the port to the perpendicular waypoint |
def on_use_runtime_value_toggled(self, widget, path):
    """Toggle the use-runtime-value flag of the data port in the given row."""
    try:
        port_id = self.list_store[path][self.ID_STORAGE_ID]
        self.toggle_runtime_value_usage(port_id)
    except TypeError:
        logger.exception("Error while trying to change the use_runtime_value flag")
def _default_value_cell_data_func(self, tree_view_column, cell, model, iter, data=None):
    """Set the renderer properties of the default-value cell for every row.

    For library states the cell shows the editable runtime value (white text)
    while the use-runtime-value flag is set, otherwise the read-only default
    value (dark grey text). Non-library states are left untouched.

    :param tree_view_column: the Gtk.TreeViewColumn to be rendered
    :param cell: cell renderer of the column
    :param model: tree model of the tree view
    :param iter: iter of the respective row (shadows the builtin; kept for API compatibility)
    :param data: optional additional data, unused
    """
    if isinstance(self.model.state, LibraryState):
        use_runtime_value = model.get_value(iter, self.USE_RUNTIME_VALUE_STORAGE_ID)
        if use_runtime_value:
            cell.set_property("editable", True)
            cell.set_property('text', model.get_value(iter, self.RUNTIME_VALUE_STORAGE_ID))
            cell.set_property('foreground', "white")
        else:
            cell.set_property("editable", False)
            cell.set_property('text', model.get_value(iter, self.DEFAULT_VALUE_STORAGE_ID))
            cell.set_property('foreground', "dark grey")
    return
def _reload_data_port_list_store(self):
    """Rebuild the data-port list store from the current data port models.

    Rows are sorted by port name; for library states the runtime-value columns
    are filled in addition to the default columns.
    """
    tmp = self._get_new_list_store()
    for data_port_m in self.data_port_model_list:
        data_port_id = data_port_m.data_port.data_port_id
        data_type = data_port_m.data_port.data_type
        data_type_name = data_type.__name__
        data_type_module = data_type.__module__
        # qualify non-builtin types with their module (py2 and py3 builtin module names)
        if data_type_module not in ['__builtin__', 'builtins']:
            data_type_name = data_type_module + '.' + data_type_name
        if data_port_m.data_port.default_value is None:
            default_value = "None"
        else:
            default_value = data_port_m.data_port.default_value
        if not isinstance(self.model.state, LibraryState):
            tmp.append([data_port_m.data_port.name, data_type_name, str(default_value), data_port_id,
                        None, None, data_port_m])
        else:
            use_runtime_value, runtime_value = self.get_data_port_runtime_configuration(data_port_id)
            tmp.append([data_port_m.data_port.name, data_type_name, str(default_value), data_port_id,
                        bool(use_runtime_value), str(runtime_value), data_port_m, ])
    # sort the rows by port name via a sort model, then copy into the real store
    tms = Gtk.TreeModelSort(model=tmp)
    tms.set_sort_column_id(0, Gtk.SortType.ASCENDING)
    tms.set_sort_func(0, compare_variables)
    tms.sort_column_changed()
    tmp = tms
    self.list_store.clear()
    for elem in tmp:
        self.list_store.append(elem[:])
40,343 | def _apply_new_data_port_name ( self , path , new_name ) : try : data_port_id = self . list_store [ path ] [ self . ID_STORAGE_ID ] if self . state_data_port_dict [ data_port_id ] . name != new_name : self . state_data_port_dict [ data_port_id ] . name = new_name except ( TypeError , ValueError ) as e : logger . exception ( "Error while trying to change data port name" ) | Applies the new name of the data port defined by path |
40,344 | def _apply_new_data_port_type ( self , path , new_data_type_str ) : try : data_port_id = self . list_store [ path ] [ self . ID_STORAGE_ID ] if self . state_data_port_dict [ data_port_id ] . data_type . __name__ != new_data_type_str : self . state_data_port_dict [ data_port_id ] . change_data_type ( new_data_type_str ) except ValueError as e : logger . exception ( "Error while changing data type" ) | Applies the new data type of the data port defined by path |
def _apply_new_data_port_default_value(self, path, new_default_value_str):
    """Apply a newly entered default value to the data port of the given row.

    For library states the runtime value is set while the use-runtime-value
    flag is active; a library row without that flag is not editable (see the
    cell renderer), so it needs no branch here. For ordinary states the
    port's default value is updated.

    :param path: tree-view path identifying the edited row
    :param str new_default_value_str: the value as entered by the user
    """
    try:
        data_port_id = self.list_store[path][self.ID_STORAGE_ID]
        if isinstance(self.model.state, LibraryState):
            if self.list_store[path][self.USE_RUNTIME_VALUE_STORAGE_ID]:
                self.set_data_port_runtime_value(data_port_id, new_default_value_str)
        else:
            if str(self.state_data_port_dict[data_port_id].default_value) != new_default_value_str:
                self.state_data_port_dict[data_port_id].default_value = new_default_value_str
    except (TypeError, AttributeError) as e:
        logger.exception("Error while changing default value")
def _data_ports_changed(self, model):
    """Reload the list store and restore the previous row selection.

    :param model: model that triggered the change; ignored unless it is an
        AbstractStateModel
    """
    if not isinstance(model, AbstractStateModel):
        return
    # remember which ports are selected before the store is rebuilt
    path_list = None
    if self.view is not None:
        model, path_list = self.tree_view.get_selection().get_selected_rows()
    selected_data_port_ids = [self.list_store[path[0]][self.ID_STORAGE_ID] for path in path_list] if path_list else []
    self._reload_data_port_list_store()
    # re-select the previously selected ports (False: do not scroll/focus)
    if selected_data_port_ids:
        [self.select_entry(selected_data_port_id, False) for selected_data_port_id in selected_data_port_ids]
def runtime_values_changed(self, model, prop_name, info):
    """Refresh the data port view when library input runtime values changed."""
    method_name = info.method_name
    concerns_runtime_values = ("_input_runtime_value" in method_name
                               or method_name in ['use_runtime_value_input_data_ports',
                                                  'input_data_port_runtime_values'])
    if concerns_runtime_values and self.model is model:
        self._data_ports_changed(model)
def add_new_data_port(self):
    """Add a new output data port with default values and select its row."""
    try:
        created_ids = gui_helper_state_machine.add_data_port_to_selected_states('OUTPUT', int, [self.model])
        if created_ids:
            self.select_entry(created_ids[self.model.state])
    except ValueError:
        # an invalid port could not be added; silently ignore
        pass
def add_callback_for_action(self, action, callback):
    """Register a callback for an action, also indexed by its controller.

    :param action: the action name the callback reacts to
    :param callback: a callable; bound methods (or a functools.partial around
        one) are additionally stored per controller instance
    :return: True if the callback was registered, None otherwise
    """
    if not hasattr(callback, '__call__'):
        return
    self.__action_to_callbacks.setdefault(action, []).append(callback)
    # determine the owning controller of a bound method or a partial of one
    controller = None
    try:
        controller = callback.__self__
    except AttributeError:
        try:
            controller = callback.func.__self__
        except AttributeError:
            pass
    if controller:
        controller_actions = self.__controller_action_callbacks.setdefault(controller, {})
        controller_actions.setdefault(action, []).append(callback)
    return True
def remove_callback_for_action(self, action, callback):
    """Remove a previously registered callback for a specific action."""
    registered = self.__action_to_callbacks.get(action)
    if registered and callback in registered:
        registered.remove(callback)
def check_info_on_no_update_flags(self, info):
    """Stop view updates while multi-actions (e.g. do/undo/redo steps) are running.

    Sets/clears no_update_state_destruction and
    no_update_self_or_parent_state_destruction on remove_state notifications,
    and no_update while group/ungroup/change-state-type actions affect this
    state or an ancestor. Also starts observing the model that replaces this
    one on a state type change.

    :param info: before/after notification info dictionary
    """
    if 'before' in info and info['method_name'] == "remove_state":
        if info.instance is self.model.state:
            # this state itself is being removed
            self.no_update_state_destruction = True
        else:
            removed_state_id = info.args[1] if len(info.args) > 1 else info.kwargs['state_id']
            if removed_state_id == self.model.state.state_id or not self.model.state.is_root_state and removed_state_id == self.model.parent.state.state_id:
                # this state or its parent is removed somewhere up the call chain
                self.no_update_self_or_parent_state_destruction = True
                self.relieve_all_models()
    elif 'after' in info and info['method_name'] == "remove_state":
        if info.instance.state_id == self.model.state.state_id:
            self.no_update_state_destruction = False
    # NOTE(review): this early return fires in the "normal" before/after cases and only
    # lets the multi-action handling below run in the remaining ones -- confirm the
    # condition is not inverted with respect to the intended flag handling
    if not self.no_update_state_destruction and not self.no_update_self_or_parent_state_destruction and (not self.no_update and 'before' in info or 'after' in info and self.no_update):
        return
    overview = NotificationOverview(info, False, self.__class__.__name__)
    # a failed action (exception in the result) clears all flags
    if 'after' in info and isinstance(overview['result'][-1], Exception):
        self.no_update = False
        self.no_update_state_destruction = False
        return
    if overview['method_name'][-1] in ['group_states', 'ungroup_state', "change_state_type", "change_root_state_type"]:
        instance_is_self = self.model.state is overview['instance'][-1]
        instance_is_parent = self.model.parent and self.model.parent.state is overview['instance'][-1]
        instance_is_parent_parent = self.model.parent and self.model.parent.parent and self.model.parent.parent.state is overview['instance'][-1]
        # freeze updates for the duration of the multi-action if it affects us or an ancestor
        if instance_is_self or instance_is_parent or instance_is_parent_parent:
            self.no_update = True if 'before' in info else False
    if overview['prop_name'][-1] == 'state' and overview['method_name'][-1] in ["change_state_type"] and self.model.get_state_machine_m() is not None:
        # observe the model that results from the state type change
        changed_model = self.model.get_state_machine_m().get_state_model_by_path(overview['args'][-1][1].get_path())
        if changed_model not in self._model_observed:
            self.observe_model(changed_model)
def before_notification_state_machine_observation_control(self, model, prop_name, info):
    """Check for multi-actions and set the respective no-update flags.

    Execution status updates are ignored.
    """
    if is_execution_status_update_notification_from_state_machine_model(prop_name, info):
        return
    self.check_info_on_no_update_flags(info)
def store_value(self, name, value, parameters=None):
    """Store the value of a variable for a given parameterization.

    :param name: name of the variable
    :param value: value to be cached
    :param dict parameters: parameters the value depends on; ``None`` is
        treated as an empty parameter set
    :raises TypeError: if parameters is neither None nor a dict
    """
    # previously the default None always raised TypeError, making the default unusable
    if parameters is None:
        parameters = {}
    if not isinstance(parameters, dict):
        raise TypeError("parameters must be a dict")
    parameter_hash = self._parameter_hash(parameters)
    if name not in self._cache:
        self._cache[name] = {}
    # values are keyed per variable name by the hex digest of the parameter hash
    self._cache[name][parameter_hash.hexdigest()] = value
def get_value(self, name, parameters=None):
    """Return the cached value of a variable, if any.

    :param name: name of the variable
    :param dict parameters: parameters the value depends on; ``None`` is
        treated as an empty parameter set
    :return: the cached value or None if nothing is cached for this
        name/parameter combination
    :raises TypeError: if parameters is neither None nor a dict
    """
    # previously the default None always raised TypeError, making the default unusable
    if parameters is None:
        parameters = {}
    if not isinstance(parameters, dict):
        raise TypeError("parameters must be a dict")  # message typo fixed ("must a dict")
    if name not in self._cache:
        return None
    parameter_hash = self._parameter_hash(parameters)
    return self._cache[name].get(parameter_hash.hexdigest(), None)
40,355 | def _normalize_number_values ( self , parameters ) : for key , value in parameters . items ( ) : if isinstance ( value , ( int , float ) ) : parameters [ key ] = str ( Decimal ( value ) . normalize ( self . _context ) ) | Assures equal precision for all number values |
def get_view_selection(self):
    """Get the tree selection object and the models of all selected rows.

    :return: tuple (Gtk.TreeSelection, list of selected models), or
        (None, None) if the widget does not store models
    """
    # compare against None explicitly: the previous truthiness check ("if not
    # self.MODEL_STORAGE_ID") wrongly bailed out for the valid column index 0
    if self.MODEL_STORAGE_ID is None:
        return None, None
    if len(self.store) == 0:
        paths = []
    else:
        model, paths = self._tree_selection.get_selected_rows()
    selected_model_list = []
    for path in paths:
        selected_model_list.append(self.store[path][self.MODEL_STORAGE_ID])
    return self._tree_selection, selected_model_list
def state_machine_selection_changed(self, state_machine_m, signal_name, signal_msg):
    """Update the tree view when the state machine selection concerns our core class."""
    affected_classes = signal_msg.arg.affected_core_element_classes
    if self.CORE_ELEMENT_CLASS in affected_classes:
        self.update_selection_sm_prior()
def remove_action_callback(self, *event):
    """Remove the selected rows if the event addresses this widget.

    While an entry widget is active, only an explicit Delete key triggers
    the removal.

    :return: True if the event was handled, None otherwise
    """
    if not react_to_event(self.view, self.tree_view, event):
        return
    entry_blocks_removal = self.active_entry_widget and not is_event_of_key_string(event, 'Delete')
    if entry_blocks_removal:
        return
    self.on_remove(None)
    return True
def _apply_value_on_edited_and_focus_out(self, renderer, apply_method):
    """Set up a cell renderer so edited values are applied on 'edited' and on focus out.

    Connects a set of handlers to the renderer and its entry widget while
    editing is in progress and removes them again when editing finishes or is
    cancelled, so stale handlers never fire on a reused entry widget.

    :param Gtk.CellRenderer renderer: the renderer to manage
    :param apply_method: callable(path, new_value_str) that commits the value
    """
    assert isinstance(renderer, Gtk.CellRenderer)

    def remove_all_handler(renderer):
        # disconnect every handler registered in on_editing_started
        def remove_handler(widget, data_name):
            handler_id = getattr(widget, data_name)
            if widget.handler_is_connected(handler_id):
                widget.disconnect(handler_id)
        editable = getattr(renderer, "editable")
        remove_handler(editable, "focus_out_handler_id")
        remove_handler(editable, "cursor_move_handler_id")
        remove_handler(editable, "insert_at_cursor_handler_id")
        remove_handler(editable, "entry_widget_expose_event_handler_id")
        remove_handler(renderer, "editing_cancelled_handler_id")

    def on_focus_out(entry, event):
        # leaving the entry commits the value, unless the row is gone
        renderer.remove_all_handler(renderer)
        if renderer.ctrl.get_path() is None:
            return
        apply_method(renderer.ctrl.get_path(), entry.get_text())

    def on_cursor_move_in_entry_widget(entry, step, count, extend_selection):
        self.tree_view_keypress_callback(entry, None)

    def on_editing_started(renderer, editable, path):
        # scroll the edited cell into view and wire up all temporary handlers
        ctrl = renderer.ctrl
        [path, focus_column] = ctrl.tree_view.get_cursor()
        if path:
            ctrl.tree_view.scroll_to_cell(path, ctrl.widget_columns[ctrl.widget_columns.index(focus_column)], use_align=False)
        editing_cancelled_handler_id = renderer.connect('editing-canceled', on_editing_canceled)
        focus_out_handler_id = editable.connect('focus-out-event', on_focus_out)
        cursor_move_handler_id = editable.connect('move-cursor', on_cursor_move_in_entry_widget)
        insert_at_cursor_handler_id = editable.connect("insert-at-cursor", on_cursor_move_in_entry_widget)
        entry_widget_expose_event_handler_id = editable.connect("draw", self.on_entry_widget_draw_event)
        # remember widget and handler ids so remove_all_handler can find them later
        setattr(renderer, "editable", editable)
        setattr(renderer, "editing_cancelled_handler_id", editing_cancelled_handler_id)
        setattr(editable, "focus_out_handler_id", focus_out_handler_id)
        setattr(editable, "cursor_move_handler_id", cursor_move_handler_id)
        setattr(editable, "insert_at_cursor_handler_id", insert_at_cursor_handler_id)
        setattr(editable, "entry_widget_expose_event_handler_id", entry_widget_expose_event_handler_id)
        ctrl.active_entry_widget = editable

    def on_edited(renderer, path, new_value_str):
        # successful edit: clean up handlers and commit the new value
        renderer.remove_all_handler(renderer)
        apply_method(path, new_value_str)
        renderer.ctrl.active_entry_widget = None

    def on_editing_canceled(renderer):
        # cancelled edit: clean up handlers without committing
        remove_all_handler(renderer)
        renderer.ctrl.active_entry_widget = None

    renderer.remove_all_handler = remove_all_handler
    renderer.ctrl = self
    self.__attached_renderers.append(renderer)
    self.connect_signal(renderer, 'editing-started', on_editing_started)
    self.connect_signal(renderer, 'edited', on_edited)
def on_remove(self, widget, data=None):
    """Remove the core elements of all selected rows and select the next row.

    :param widget: originating widget, unused
    :param data: optional additional data, unused
    :return: True if elements were removed, None otherwise
    """
    path_list = None
    if self.view is not None:
        model, path_list = self.tree_view.get_selection().get_selected_rows()
    old_path = self.get_path()
    models = [self.list_store[path][self.MODEL_STORAGE_ID] for path in path_list] if path_list else []
    if models:
        try:
            self.remove_core_elements(models)
        except AttributeError as e:
            self._logger.warning("The respective core element of {1}.list_store couldn't be removed. -> {0}"
                                 "".format(e, self.__class__.__name__))
        # keep the cursor near the removed row so repeated deletes flow naturally
        if len(self.list_store) > 0:
            self.tree_view.set_cursor(min(old_path[0], len(self.list_store) - 1))
        return True
    else:
        self._logger.warning("Please select an element to be removed.")
def get_state_machine_selection(self):
    """Getter for the state machine selection and its elements of our core class.

    :return: tuple (selection or None, set of selected elements)
    """
    state_machine_m = self.model.get_state_machine_m()
    if not state_machine_m:
        return None, set()
    sm_selection = state_machine_m.selection
    if not sm_selection:
        return sm_selection, set()
    return sm_selection, sm_selection.get_selected_elements_of_core_class(self.CORE_ELEMENT_CLASS)
40,362 | def _handle_double_click ( self , event ) : if event . get_button ( ) [ 1 ] == 1 : path_info = self . tree_view . get_path_at_pos ( int ( event . x ) , int ( event . y ) ) if path_info : path = path_info [ 0 ] iter = self . list_store . get_iter ( path ) model = self . list_store . get_value ( iter , self . MODEL_STORAGE_ID ) selection = self . model . get_state_machine_m ( ) . selection selection . focus = model | Double click with left mouse button focuses the element |
def iter_tree_with_handed_function(self, function, *function_args):
    """Apply a function to every row of the tree store, depth first.

    :param function: callable invoked with (row_iter, *function_args)
    :param function_args: additional arguments passed through to the function
    """
    def visit_subtree(row_iter, function, function_args):
        if not isinstance(row_iter, Gtk.TreeIter):
            self._logger.warning("Iter has to be TreeIter -> handed argument is: {0}".format(row_iter))
            return
        function(row_iter, *function_args)
        # children are visited in reversed order -- presumably so row removals
        # performed by *function* do not shift the indices still to be visited
        for child_number in reversed(range(self.tree_store.iter_n_children(row_iter))):
            visit_subtree(self.tree_store.iter_nth_child(row_iter, child_number), function, function_args)

    root_iter = self.tree_store.get_iter_first()
    while root_iter:
        visit_subtree(root_iter, function, function_args)
        root_iter = self.tree_store.iter_next(root_iter)
def update_selection_sm_prior_condition(self, state_row_iter, selected_model_list, sm_selected_model_list):
    """Sync the tree selection of one row with the state machine selection.

    Rows selected in the view but not in the state machine selection are
    unselected; rows selected in the state machine but not in the view are
    expanded and selected.
    """
    row_path = self.tree_store.get_path(state_row_iter)
    row_model = self.tree_store[row_path][self.MODEL_STORAGE_ID]
    selected_in_view = row_model in selected_model_list
    selected_in_sm = row_model in sm_selected_model_list
    if selected_in_view and not selected_in_sm:
        self._tree_selection.unselect_iter(state_row_iter)
    elif selected_in_sm and not selected_in_view:
        self.tree_view.expand_to_path(row_path)
        self._tree_selection.select_iter(state_row_iter)
def update_selection_self_prior_condition(self, state_row_iter, sm_selected_model_set, selected_model_list):
    """Sync one row's model into or out of the state machine selection set."""
    row_path = self.tree_store.get_path(state_row_iter)
    row_model = self.tree_store[row_path][self.MODEL_STORAGE_ID]
    in_sm_selection = row_model in sm_selected_model_set
    in_view_selection = row_model in selected_model_list
    if in_sm_selection and not in_view_selection:
        sm_selected_model_set.remove(row_model)
    elif in_view_selection and not in_sm_selection:
        sm_selected_model_set.add(row_model)
def contains_geometric_info(var):
    """Check whether the passed variable is a tuple with two numbers (int or float)."""
    if not isinstance(var, tuple) or len(var) != 2:
        return False
    return all(isinstance(entry, (int, float)) for entry in var)
def generate_default_state_meta_data(parent_state_m, canvas=None, num_child_state=None, gaphas_editor=True):
    """Generate default meta data (position and size) for a new child state.

    Children are laid out in a grid inside the parent; when the grid is full,
    further children are stacked with a small diagonal overlap offset.

    :param parent_state_m: state model of the parent the child is created in
    :param canvas: optional gaphas canvas used only to warn on size mismatches
    :param num_child_state: index of the new child; defaults to the current
        number of children
    :param bool gaphas_editor: unused here, kept for API compatibility
    :return: tuple of (rel_pos, size) for the new child state
    :raises ValueError: if the parent size meta data is invalid
    """
    parent_size = parent_state_m.get_meta_data_editor()['size']
    if not contains_geometric_info(parent_size):
        raise ValueError("Invalid state size: {}".format(parent_size))
    num_child_state = len(parent_state_m.states) if num_child_state is None else num_child_state
    if canvas is not None:
        # sanity check: meta data should agree with the gaphas view
        parent_state_v = canvas.get_view_for_model(parent_state_m)
        if not (parent_state_v.width, parent_state_v.height) == parent_size:
            logger.warning("Size meta data of model {0} is different to gaphas {1}"
                           "".format((parent_state_v.width, parent_state_v.height), parent_size))
    parent_state_width, parent_state_height = parent_size
    # a child is a square with 20% of the smaller parent dimension as side length
    new_state_side_size = min(parent_state_width * 0.2, parent_state_height * 0.2)
    child_width = new_state_side_size
    child_height = new_state_side_size
    child_size = (child_width, child_height)
    child_spacing = max(child_size) * 1.2
    parent_margin = cal_margin(parent_size)
    # grid layout: row/col from the child index, wrapping into overlap layers
    # NOTE(review): max_cols (and max_rows) can become 0 for very small parents,
    # which would make divmod raise ZeroDivisionError -- confirm callers guarantee
    # a minimum parent size
    max_cols = (parent_state_width - 2 * parent_margin) // child_spacing
    (row, col) = divmod(num_child_state, max_cols)
    max_rows = (parent_state_height - 2 * parent_margin - 0.5 * child_spacing) // (1.5 * child_spacing)
    (overlapping, row) = divmod(row, max_rows)
    overlapping_step = 0.5 * parent_margin
    # how many overlap layers fit before children would leave the parent area
    max_overlaps_x = (parent_state_width - 2 * parent_margin - child_width -
                      (parent_margin + (max_cols - 1) * child_spacing + child_spacing - child_width)) // overlapping_step
    max_overlaps_y = (parent_state_height - 2 * parent_margin - child_height -
                      child_spacing * (1.5 * (max_rows - 1) + 1)) // overlapping_step
    max_overlaps_x = 0 if max_overlaps_x < 0 else max_overlaps_x
    max_overlaps_y = 0 if max_overlaps_y < 0 else max_overlaps_y
    max_overlaps = min(max_overlaps_x, max_overlaps_y) + 1
    overlapping = divmod(overlapping, max_overlaps)[1]
    child_rel_pos_x = parent_margin + col * child_spacing + child_spacing - child_width + overlapping * overlapping_step
    child_rel_pos_y = child_spacing * (1.5 * row + 1.) + overlapping * overlapping_step
    return (child_rel_pos_x, child_rel_pos_y), (new_state_side_size, new_state_side_size)
def add_boundary_clearance(left, right, top, bottom, frame, clearance=0.1):
    """Grow a bounding box by a relative clearance, clamping left/top at zero.

    The padding per axis is half the clearance times the box extent, where the
    extent is at least the respective frame size.

    :return: tuple (left, right, top, bottom) of the grown box
    """
    effective_width = max(right - left, frame['size'][0])
    horizontal_pad = 0.5 * clearance * effective_width
    left = max(left - horizontal_pad, 0)
    right += horizontal_pad
    effective_height = max(bottom - top, frame['size'][1])
    vertical_pad = 0.5 * clearance * effective_height
    top = max(top - vertical_pad, 0)
    bottom += vertical_pad
    return left, right, top, bottom
def cal_frame_according_boundaries(left, right, top, bottom, parent_size, gaphas_editor=True, group=True):
    """Generate margin, relative position and size from boundaries and parent size.

    :param group: if True the frame is padded by the margin and clipped to the
        parent size, otherwise the raw boundaries are returned
    :return: tuple (margin, rel_pos, size)
    """
    margin = cal_margin(parent_size)
    if not group:
        return margin, (left, top), (right - left, bottom - top)
    rel_pos = max(left - margin, 0), max(top - margin, 0)
    width = min(right - left + 2 * margin, parent_size[0] - rel_pos[0])
    height = min(bottom - top + 2 * margin, parent_size[1] - rel_pos[1])
    return margin, rel_pos, (width, height)
def offset_rel_pos_of_all_models_in_dict(models_dict, pos_offset, gaphas_editor=True):
    """Add a position offset to all models handed in the dict.

    :param dict models_dict: dict with 'states', 'scoped_variables',
        'transitions' and 'data_flows' model dicts
    :param pos_offset: (x, y) offset added to every relative position
    :param bool gaphas_editor: meta data is read/written in gaphas format
    """
    # import hoisted out of the per-waypoint inner loop; kept function-local,
    # presumably to avoid an import cycle at module load time
    from rafcon.gui.models.data_flow import DataFlowModel
    for child_state_m in models_dict['states'].values():
        old_rel_pos = child_state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['rel_pos']
        child_state_m.set_meta_data_editor('rel_pos', add_pos(old_rel_pos, pos_offset), from_gaphas=gaphas_editor)
    if not gaphas_editor:
        for scoped_variable_m in models_dict['scoped_variables'].values():
            old_rel_pos = scoped_variable_m.get_meta_data_editor(for_gaphas=gaphas_editor)['inner_rel_pos']
            scoped_variable_m.set_meta_data_editor('inner_rel_pos', add_pos(old_rel_pos, pos_offset), gaphas_editor)
    connection_models = list(models_dict['transitions'].values()) + list(models_dict['data_flows'].values())
    for connection_m in connection_models:
        old_waypoints = connection_m.get_meta_data_editor(for_gaphas=gaphas_editor)['waypoints']
        new_waypoints = []
        for waypoint in old_waypoints:
            # gaphas data flow waypoints get the y component of the offset negated
            if isinstance(connection_m, DataFlowModel) and gaphas_editor:
                new_waypoints.append(add_pos(waypoint, (pos_offset[0], -pos_offset[1])))
            else:
                new_waypoints.append(add_pos(waypoint, pos_offset))
        connection_m.set_meta_data_editor('waypoints', new_waypoints, from_gaphas=gaphas_editor)
def scale_library_ports_meta_data(state_m, gaphas_editor=True):
    """Scale the ports of a library model relative to its state_copy meta size.

    The function assumes that the port meta data of the library's state_copy
    was copied to the respective library elements and has not been adjusted
    before; the meta_data_was_scaled flag guards against scaling twice.
    """
    if state_m.meta_data_was_scaled:
        return
    state_m.income.set_meta_data_editor('rel_pos', state_m.state_copy.income.get_meta_data_editor()['rel_pos'])
    # factor between the library's outer size and its internal state_copy size
    factor = divide_two_vectors(state_m.get_meta_data_editor()['size'],
                                state_m.state_copy.get_meta_data_editor()['size'])
    if contains_geometric_info(factor):
        resize_state_port_meta(state_m, factor, True)
        state_m.meta_data_was_scaled = True
    else:
        # the size ratio was not a valid 2-tuple of numbers; leave ports untouched
        logger.info("Skip resize of library ports meta data {0}".format(state_m))
def resize_state_port_meta(state_m, factor, gaphas_editor=True):
    """Resize the relative positions of data and logical ports by a factor.

    :param state_m: state model whose port meta data is scaled
    :param factor: (x, y) scale factor applied to the port positions
    :param bool gaphas_editor: gaphas uses the 'rel_pos' meta data key, the
        other editor 'inner_rel_pos'
    """
    if not gaphas_editor and isinstance(state_m, ContainerStateModel):
        port_models = state_m.input_data_ports[:] + state_m.output_data_ports[:] + state_m.scoped_variables[:]
    else:
        # gaphas additionally scales the outcomes; scoped variables only for containers
        port_models = state_m.input_data_ports[:] + state_m.output_data_ports[:] + state_m.outcomes[:]
        port_models += state_m.scoped_variables[:] if isinstance(state_m, ContainerStateModel) else []
    _resize_port_models_list(port_models, 'rel_pos' if gaphas_editor else 'inner_rel_pos', factor, gaphas_editor)
    resize_income_of_state_m(state_m, factor, gaphas_editor)
def resize_state_meta(state_m, factor, gaphas_editor=True):
    """Resize state meta data recursively, including library state copies.

    Scales position and size of the state (and its name label for gaphas),
    its ports, its connections and all child states by the given factor.
    """
    old_rel_pos = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['rel_pos']
    state_m.set_meta_data_editor('rel_pos', mult_two_vectors(factor, old_rel_pos), from_gaphas=gaphas_editor)
    old_size = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['size']
    state_m.set_meta_data_editor('size', mult_two_vectors(factor, old_size), from_gaphas=gaphas_editor)
    if gaphas_editor:
        # the name label has its own position and size meta data in gaphas
        old_rel_pos = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['name']['rel_pos']
        state_m.set_meta_data_editor('name.rel_pos', mult_two_vectors(factor, old_rel_pos), from_gaphas=gaphas_editor)
        old_size = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['name']['size']
        state_m.set_meta_data_editor('name.size', mult_two_vectors(factor, old_size), from_gaphas=gaphas_editor)
    if isinstance(state_m, LibraryStateModel):
        if gaphas_editor and state_m.state_copy_initialized:
            if state_m.meta_data_was_scaled:
                resize_state_port_meta(state_m, factor, gaphas_editor)
            else:
                # ports still carry the state_copy positions; scale them once
                scale_library_ports_meta_data(state_m, gaphas_editor)
        if state_m.state_copy_initialized:
            resize_state_meta(state_m.state_copy, factor, gaphas_editor)
    else:
        resize_state_port_meta(state_m, factor, gaphas_editor)
    if isinstance(state_m, ContainerStateModel):
        _resize_connection_models_list(state_m.transitions[:] + state_m.data_flows[:], factor, gaphas_editor)
        for child_state_m in state_m.states.values():
            resize_state_meta(child_state_m, factor, gaphas_editor)
def offset_rel_pos_of_models_meta_data_according_parent_state(models_dict):
    """Offset the meta data of all state elements by the parent state position.

    :param dict models_dict: dict of models, including the parent under 'state'
    :return: True
    """
    parent_rel_pos = models_dict['state'].get_meta_data_editor()['rel_pos']
    offset_rel_pos_of_all_models_in_dict(models_dict, pos_offset=parent_rel_pos)
    return True
def get_closest_sibling_state(state_m, from_logical_port=None):
    """Calculate the closest sibling of a state, optionally from a logical port.

    :param state_m: state model the search starts from
    :param from_logical_port: optional "income" or "outcome"; the distance is
        then measured from that port's position using a margin-sized box
    :return: tuple (distance, sibling state model) or None if there is no
        parent or no sibling
    """
    if not state_m.parent:
        logger.warning("A state can not have a closest sibling state if it has not parent as {0}".format(state_m))
        return
    margin = cal_margin(state_m.parent.get_meta_data_editor()['size'])
    pos = state_m.get_meta_data_editor()['rel_pos']
    size = state_m.get_meta_data_editor()['size']
    if from_logical_port in ["outcome", "income"]:
        # measure from a port: use a margin-sized box located at the port
        size = (margin, margin)
    if from_logical_port == "outcome":
        # prefer an outcome that has no transition connected yet
        outcomes_m = [outcome_m for outcome_m in state_m.outcomes if outcome_m.outcome.outcome_id >= 0]
        free_outcomes_m = [oc_m for oc_m in outcomes_m
                           if not state_m.state.parent.get_transition_for_outcome(state_m.state, oc_m.outcome)]
        if free_outcomes_m:
            outcome_m = free_outcomes_m[0]
        else:
            outcome_m = outcomes_m[0]
        pos = add_pos(pos, outcome_m.get_meta_data_editor()['rel_pos'])
    elif from_logical_port == "income":
        pos = add_pos(pos, state_m.income.get_meta_data_editor()['rel_pos'])
    min_distance = None
    for sibling_state_m in state_m.parent.states.values():
        if sibling_state_m is state_m:
            continue
        sibling_pos = sibling_state_m.get_meta_data_editor()['rel_pos']
        sibling_size = sibling_state_m.get_meta_data_editor()['size']
        distance = geometry.cal_dist_between_2_coord_frame_aligned_boxes(pos, size, sibling_pos, sibling_size)
        # keep the sibling with the smallest distance seen so far
        if not min_distance or min_distance[0] > distance:
            min_distance = (distance, sibling_state_m)
    return min_distance
def get_action_arguments(self, target_state_m):
    """Collect the argument attributes for an action signal.

    :param target_state_m: state model the action targets
    :return: tuple of the non-empty copied model lists and the state model the
        action actually affects (the parent if ports are pasted)
    """
    non_empty_lists_dict = {}
    for key, elements in self.model_copies.items():
        if elements:
            non_empty_lists_dict[key] = elements
    port_attrs = ['input_data_ports', 'output_data_ports', 'scoped_variables', 'outcomes']
    port_is_pasted = any(key in non_empty_lists_dict for key in port_attrs)
    if port_is_pasted and target_state_m.parent:
        return non_empty_lists_dict, target_state_m.parent
    return non_empty_lists_dict, target_state_m
def cut(self, selection, smart_selection_adaption=False):
    """Cut all selected items into the clipboard and delete their core elements.

    Emits an action signal before and after the deletion so observers (e.g.
    the modification history) can record the removal as one action.

    :param Selection selection: the state machine selection to cut
    :param bool smart_selection_adaption: adapt the selection before copying
    """
    assert isinstance(selection, Selection)
    import rafcon.gui.helpers.state_machine as gui_helper_state_machine
    # elements inside a library state are read-only and must not be cut
    if gui_helper_state_machine.is_selection_inside_of_library_state(selected_elements=selection.get_all()):
        logger.warning("Cut is not performed because elements inside of a library state are selected.")
        return
    # NOTE(review): __create_core_and_model_object_copies returns None for an empty
    # selection, and get_action_arguments dereferences target_state_m.parent when
    # parent_m is None -- confirm both cases cannot occur here
    selection_dict_of_copied_models, parent_m = self.__create_core_and_model_object_copies(
        selection, smart_selection_adaption)
    non_empty_lists_dict, action_parent_m = self.get_action_arguments(parent_m if parent_m else None)
    action_parent_m.action_signal.emit(ActionSignalMsg(action='cut', origin='clipboard',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=[], after=False,
                                                       kwargs={'remove': non_empty_lists_dict}))
    for models in selection_dict_of_copied_models.values():
        gui_helper_state_machine.delete_core_elements_of_models(models, destroy=True, recursive=True, force=False)
    affected_models = [model for models in non_empty_lists_dict.values() for model in models]
    action_parent_m.action_signal.emit(ActionSignalMsg(action='cut', origin='clipboard',
                                                       action_parent_m=action_parent_m,
                                                       affected_models=affected_models, after=True))
def reset_clipboard(self):
    """Reset the clipboard so old elements do not pollute the next selection copied into it."""
    # forget the source state and all id mappings of the previous copy
    self.copy_parent_state_id = None
    self.reset_clipboard_mapping_dicts()
    # drop all models collected per state-element type
    for element_attr in ContainerState.state_element_attrs:
        self.model_copies[element_attr] = []
def do_selection_reduction_to_one_parent(selection):
    """Find and reduce the selection to elements sharing one parent state.

    The parent owning the most selected elements wins; every selected model with a
    different parent is removed from the selection. A selected root state yields no parent.

    :param selection: the Selection object to reduce in place
    :return: the common parent model or None
    """
    all_models_selected = selection.get_all()
    # count how many selected models each parent owns
    parent_m_count_dict = {}
    for model in all_models_selected:
        parent_m_count_dict[model.parent] = parent_m_count_dict.get(model.parent, 0) + 1
    # pick the parent with the highest count
    parent_m = None
    current_count_parent = 0
    for possible_parent_m, count in parent_m_count_dict.items():
        if current_count_parent < count:
            parent_m = possible_parent_m
            # bug fix: the best count was never tracked, so the last parent always won
            current_count_parent = count
    # a selected root state has no parent to reduce to
    if len(selection.states) == 1 and selection.get_selected_state().state.is_root_state:
        parent_m = None
        if len(all_models_selected) > 1:
            selection.set(selection.get_selected_state())
    # drop every model that does not share the chosen parent
    if parent_m is not None:
        for model in all_models_selected:
            if model.parent is not parent_m:
                selection.remove(model)
    return parent_m
def __create_core_and_model_object_copies(self, selection, smart_selection_adaption):
    """Copy all elements of a selection into the clipboard.

    Reduces the selection to one parent, optionally applies the smart selection
    adaption, deep-copies the selected models into the clipboard and logs the
    new clipboard content.

    :param selection: the Selection whose elements are copied
    :param bool smart_selection_adaption: whether to adapt the selection before copying
    :return: tuple (dict of selected models by element attribute, parent model), or None for an empty selection
    """
    all_models_selected = selection.get_all()
    if not all_models_selected:
        logger.warning("Nothing to copy because state machine selection is empty.")
        return
    parent_m = self.do_selection_reduction_to_one_parent(selection)
    self.copy_parent_state_id = parent_m.state.state_id if parent_m else None
    if smart_selection_adaption:
        self.do_smart_selection_adaption(selection, parent_m)
    # collect the selected models grouped per state-element attribute (states, transitions, ...)
    selected_models_dict = {}
    for state_element_attr in ContainerState.state_element_attrs:
        selected_models_dict[state_element_attr] = list(getattr(selection, state_element_attr))
    # release the previous clipboard content before overwriting it with fresh deep copies
    self.destroy_all_models_in_dict(self.model_copies)
    self.model_copies = deepcopy(selected_models_dict)
    # build a summary like "2 states, 1 transition" -- singular keys get their trailing 's' cut
    new_content_of_clipboard = ', '.join(["{0} {1}".format(len(elems), key if len(elems) > 1 else key[:-1])
                                          for key, elems in self.model_copies.items() if elems])
    logger.info("The new content is {0}".format(new_content_of_clipboard.replace('_', ' ')))
    return selected_models_dict, parent_m
def destroy_all_models_in_dict(target_dict):
    """Run prepare_destruction() on every model held in list/tuple values of the handed dict.

    :param target_dict: dict whose values are lists or tuples of models, or a falsy value
    :raises Exception: if a value is neither a list nor a tuple
    """
    if not target_dict:
        return
    for value in target_dict.values():
        if not isinstance(value, (list, tuple)):
            raise Exception("wrong data in clipboard")
        for contained_model in value:
            contained_model.prepare_destruction()
            # detach the model from its parent to break remaining references
            if contained_model._parent:
                contained_model._parent = None
def destroy(self):
    """Destroy the clipboard by relieving all model references it holds."""
    # release the copied models first, then drop every reference attribute
    self.destroy_all_models_in_dict(self.model_copies)
    for attr in ('model_copies', 'copy_parent_state_id', 'outcome_id_mapping_dict',
                 'port_id_mapping_dict', 'state_id_mapping_dict'):
        setattr(self, attr, None)
def extend_extents(extents, factor=1.1):
    """Extend a given bounding box symmetrically by the handed factor.

    :param extents: bounding box as (x1, y1, x2, y2)
    :param float factor: scale factor; 1.1 grows the box by 10 percent
    :return: the extended bounding box as a tuple (x1, y1, x2, y2)
    """
    x_min, y_min, x_max, y_max = extents[0], extents[1], extents[2], extents[3]
    # half of the additional width/height is distributed to each side
    half_extra_width = (factor - 1) * (x_max - x_min) / 2
    half_extra_height = (factor - 1) * (y_max - y_min) / 2
    return (x_min - half_extra_width, y_min - half_extra_height,
            x_max + half_extra_width, y_max + half_extra_height)
def calc_rel_pos_to_parent(canvas, item, handle):
    """Calculate the position of the given item's handle relative to the item's parent.

    :param canvas: the canvas the item lives on
    :param item: the item owning the handle
    :param handle: the handle whose relative position is calculated
    :return: the transformed (x, y) position
    """
    from gaphas.item import NW
    if isinstance(item, ConnectionView):
        # connections know their parent directly
        i2i_matrix = item.canvas.get_matrix_i2i(item, item.parent)
        return i2i_matrix.transform_point(*handle.pos)
    parent = canvas.get_parent(item)
    if not parent:
        # root item: fall back to the absolute canvas position of the NW handle
        i2c_matrix = item.canvas.get_matrix_i2c(item)
        return i2c_matrix.transform_point(*item.handles()[NW].pos)
    return item.canvas.get_matrix_i2i(item, parent).transform_point(*handle.pos)
def assert_exactly_one_true(bool_list):
    """Check that exactly one value of the provided list is truthy.

    :param list bool_list: list of boolean-like values
    :return: True if exactly one entry is truthy, False otherwise
    """
    assert isinstance(bool_list, list)
    # sum over a generator replaces the manual counter loop
    return sum(1 for item in bool_list if item) == 1
def get_state_id_for_port(port):
    """Return the state ID of the state containing the given port, or None for other parents."""
    from rafcon.gui.mygaphas.items.state import StateView
    containing_item = port.parent
    if isinstance(containing_item, StateView):
        return containing_item.model.state.state_id
def get_port_for_handle(handle, state):
    """Look up and return the PortView of the given state that owns the handle, or None.

    :param handle: the gaphas handle to search for
    :param state: the StateView whose ports are scanned
    """
    from rafcon.gui.mygaphas.items.state import StateView
    if not isinstance(state, StateView):
        return
    if state.income.handle == handle:
        return state.income
    # scan the remaining port collections in their priority order
    candidate_ports = (list(state.outcomes) + list(state.inputs) +
                       list(state.outputs) + list(state.scoped_variables))
    for port_v in candidate_ports:
        if port_v.handle == handle:
            return port_v
def create_new_connection(from_port, to_port):
    """Check the types of the two ports and try to create a matching connection.

    Logic ports yield a transition, data ports a data flow; incompatible
    combinations are logged as an error.
    """
    from rafcon.gui.mygaphas.items.ports import ScopedVariablePortView, LogicPortView, DataPortView
    data_port_types = (DataPortView, ScopedVariablePortView)
    if isinstance(from_port, LogicPortView) and isinstance(to_port, LogicPortView):
        return add_transition_to_state(from_port, to_port)
    if isinstance(from_port, data_port_types) and isinstance(to_port, data_port_types):
        return add_data_flow_to_state(from_port, to_port)
    if from_port and to_port:
        logger.error("Connection of non-compatible ports: {0} and {1}".format(type(from_port), type(to_port)))
        return False
def add_data_flow_to_state(from_port, to_port):
    """Interface method between Gaphas and RAFCON core for adding data flows.

    Determines the container state responsible for both ports and asks it to
    create the data flow.

    :param from_port: source data port view
    :param to_port: target data port view
    :return: True on success, False otherwise
    """
    from rafcon.gui.mygaphas.items.ports import InputPortView, OutputPortView, ScopedVariablePortView
    from rafcon.gui.models.container_state import ContainerStateModel
    from_state_v = from_port.parent
    to_state_v = to_port.parent
    from_state_m = from_state_v.model
    to_state_m = to_state_v.model
    from_state_id = from_state_m.state.state_id
    to_state_id = to_state_m.state.state_id
    from_port_id = from_port.port_id
    to_port_id = to_port.port_id
    data_port_types = (InputPortView, OutputPortView, ScopedVariablePortView)
    # bug fix: the second isinstance used to test from_port again instead of to_port
    if not isinstance(from_port, data_port_types) or not isinstance(to_port, data_port_types):
        logger.error("Data flows only exist between data ports (input, output, scope). "
                     "Given: {0} and {1}".format(type(from_port), type(to_port)))
        return False
    # find the container state model that owns both endpoints of the connection
    responsible_parent_m = None
    if isinstance(from_state_m, ContainerStateModel) and \
            check_if_dict_contains_object_reference_in_values(to_state_m.state, from_state_m.state.states):
        responsible_parent_m = from_state_m
    elif isinstance(to_state_m, ContainerStateModel) and \
            check_if_dict_contains_object_reference_in_values(from_state_m.state, to_state_m.state.states):
        responsible_parent_m = to_state_m
    elif isinstance(from_state_m, ContainerStateModel) and from_state_m.state is to_state_m.state:
        responsible_parent_m = from_state_m
    elif (not from_state_m.state.is_root_state) and (not to_state_m.state.is_root_state) and \
            from_state_m.state is not to_state_m.state and \
            from_state_m.parent.state.state_id and to_state_m.parent.state.state_id:
        responsible_parent_m = from_state_m.parent
    if not isinstance(responsible_parent_m, ContainerStateModel):
        logger.error("Data flows only exist in container states (e.g. hierarchy states)")
        return False
    try:
        responsible_parent_m.state.add_data_flow(from_state_id, from_port_id, to_state_id, to_port_id)
        return True
    except (ValueError, AttributeError, TypeError) as e:
        logger.error("Data flow couldn't be added: {0}".format(e))
        return False
def add_transition_to_state(from_port, to_port):
    """Interface method between Gaphas and RAFCON core for adding transitions.

    Derives the responsible container state from the income/outcome combination
    and asks it to create the transition.

    :param from_port: source logic port view (IncomeView or OutcomeView)
    :param to_port: target logic port view (IncomeView or OutcomeView)
    :return: True on success, False otherwise
    """
    from rafcon.gui.mygaphas.items.ports import IncomeView, OutcomeView
    from_state_v = from_port.parent
    to_state_v = to_port.parent
    from_state_m = from_state_v.model
    to_state_m = to_state_v.model
    from_state_id = from_state_m.state.state_id
    to_state_id = to_state_m.state.state_id
    responsible_parent_m = None
    if isinstance(from_port, IncomeView):
        # starting from an income: the source state itself is the responsible parent
        from_state_id = None
        from_outcome_id = None
        responsible_parent_m = from_state_m
        # NOTE(review): if to_port is neither Income nor Outcome here, to_outcome_id
        # stays unbound and the add_transition call below would raise NameError -- confirm
        if isinstance(to_port, IncomeView):
            to_outcome_id = None
        elif isinstance(to_port, OutcomeView):
            to_outcome_id = to_port.outcome_id
    elif isinstance(from_port, OutcomeView):
        from_outcome_id = from_port.outcome_id
        if isinstance(to_port, IncomeView):
            # outcome -> income of a sibling: the common parent is responsible
            responsible_parent_m = from_state_m.parent
            to_outcome_id = None
        elif isinstance(to_port, OutcomeView):
            # outcome -> outcome of the parent state
            responsible_parent_m = to_state_m
            to_outcome_id = to_port.outcome_id
        else:
            raise ValueError("Invalid port type")
    from rafcon.gui.models.container_state import ContainerStateModel
    if not responsible_parent_m:
        logger.error("Transitions only exist between incomes and outcomes. "
                     "Given: {0} and {1}".format(type(from_port), type(to_port)))
        return False
    elif not isinstance(responsible_parent_m, ContainerStateModel):
        logger.error("Transitions only exist in container states (e.g. hierarchy states)")
        return False
    try:
        t_id = responsible_parent_m.state.add_transition(from_state_id, from_outcome_id, to_state_id, to_outcome_id)
        # self-transitions get dedicated waypoint meta data so they stay visible
        if from_state_id == to_state_id:
            gui_helper_meta_data.insert_self_transition_meta_data(responsible_parent_m.states[from_state_id], t_id,
                                                                  combined_action=True)
        return True
    except (ValueError, AttributeError, TypeError) as e:
        logger.error("Transition couldn't be added: {0}".format(e))
        return False
def get_relative_positions_of_waypoints(transition_v):
    """Return the positions of all waypoint handles of a connection relative to its parent.

    :param transition_v: the connection view whose waypoints are collected
    :return: list of relative (x, y) positions
    """
    return [transition_v.canvas.get_matrix_i2i(transition_v, transition_v.parent).transform_point(*handle.pos)
            for handle in transition_v.handles()
            if handle not in transition_v.end_handles(include_waypoints=True)]
def update_meta_data_for_transition_waypoints(graphical_editor_view, transition_v, last_waypoint_list, publish=True):
    """Store the current relative waypoint positions in the transition meta data if they changed.

    :param graphical_editor_view: the editor view used to publish the change
    :param transition_v: the TransitionView whose waypoints are read
    :param last_waypoint_list: previously stored waypoint list to compare against
    :param bool publish: whether to emit the meta_data_changed signal
    """
    from rafcon.gui.mygaphas.items.connection import TransitionView
    assert isinstance(transition_v, TransitionView)
    current_waypoints = get_relative_positions_of_waypoints(transition_v)
    if current_waypoints == last_waypoint_list:
        return
    transition_m = transition_v.model
    transition_m.set_meta_data_editor('waypoints', current_waypoints)
    if publish:
        graphical_editor_view.emit('meta_data_changed', transition_m, "waypoints", False)
def update_meta_data_for_port(graphical_editor_view, item, handle):
    """Update the meta data of the state's ports if their positions changed.

    :param graphical_editor_view: the editor view used to publish changes
    :param item: the state view whose ports are checked
    :param handle: a specific port handle, or None to update all ports without publishing
    """
    from rafcon.gui.mygaphas.items.ports import IncomeView, OutcomeView, InputPortView, OutputPortView, \
        ScopedVariablePortView
    for port in item.get_all_ports():
        # with no handle given every port is processed; otherwise only the matching one
        if not handle or handle is port.handle:
            rel_pos = (port.handle.pos.x.value, port.handle.pos.y.value)
            if isinstance(port, (IncomeView, OutcomeView, InputPortView, OutputPortView, ScopedVariablePortView)):
                port_m = port.model
                cur_rel_pos = port_m.get_meta_data_editor()['rel_pos']
                # only write and publish when the position actually changed
                if rel_pos != cur_rel_pos:
                    port_m.set_meta_data_editor('rel_pos', rel_pos)
                    if handle:
                        graphical_editor_view.emit('meta_data_changed', port_m, "position", True)
            else:
                # unknown port type: skip without ending the single-handle search
                continue
            # a specific handle was requested and found -- stop scanning
            if handle:
                break
def update_meta_data_for_name_view(graphical_editor_view, name_v, publish=True):
    """Write size and relative position of a state's name view into the state meta data.

    :param graphical_editor_view: the editor view used to publish the change
    :param name_v: the name view to read size/position from
    :param bool publish: whether to emit the meta_data_changed signal
    """
    from gaphas.item import NW
    canvas = graphical_editor_view.editor.canvas
    relative_position = calc_rel_pos_to_parent(canvas, name_v, name_v.handles()[NW])
    parent_state_v = canvas.get_parent(name_v)
    parent_state_v.model.set_meta_data_editor('name.size', (name_v.width, name_v.height))
    parent_state_v.model.set_meta_data_editor('name.rel_pos', relative_position)
    if publish:
        graphical_editor_view.emit('meta_data_changed', parent_state_v.model, "name_size", False)
def update_meta_data_for_state_view(graphical_editor_view, state_v, affects_children=False, publish=True):
    """Update position/size meta data of a state view, optionally recursing into its children.

    :param graphical_editor_view: the editor view used to publish changes
    :param state_v: the state view to update
    :param bool affects_children: if True, name view, transitions and child states are updated as well
    :param bool publish: whether to emit the meta_data_changed signal
    """
    from gaphas.item import NW
    update_meta_data_for_port(graphical_editor_view, state_v, None)
    if affects_children:
        # children never publish themselves; one summarizing signal is emitted below
        update_meta_data_for_name_view(graphical_editor_view, state_v.name_view, publish=False)
        for child_transition_v in state_v.get_transitions():
            update_meta_data_for_transition_waypoints(graphical_editor_view, child_transition_v, None, publish=False)
        for child_state_v in state_v.child_state_views():
            update_meta_data_for_state_view(graphical_editor_view, child_state_v, True, publish=False)
    canvas = graphical_editor_view.editor.canvas
    relative_position = calc_rel_pos_to_parent(canvas, state_v, state_v.handles()[NW])
    state_v.model.set_meta_data_editor('size', (state_v.width, state_v.height))
    state_v.model.set_meta_data_editor('rel_pos', relative_position)
    if publish:
        graphical_editor_view.emit('meta_data_changed', state_v.model, "size", affects_children)
def remove(self):
    """Recursively remove all children, ports and constraints, then this StateView itself."""
    self.canvas.get_first_view().unselect_item(self)
    # remove children first so they detach while the parent still exists
    for child_v in list(self.canvas.get_children(self)):
        child_v.remove()
    # remove every port type; iterate over copies because removal mutates the lists
    self.remove_income()
    for outcome_v in list(self.outcomes):
        self.remove_outcome(outcome_v)
    for input_v in list(self.inputs):
        self.remove_input_port(input_v)
    for output_v in list(self.outputs):
        self.remove_output_port(output_v)
    for scoped_v in list(self.scoped_variables):
        self.remove_scoped_variable(scoped_v)
    self.remove_keep_rect_within_constraint_from_parent()
    # drop all solver constraints registered for this view
    for constraint in list(self._constraints):
        self.canvas.solver.remove_constraint(constraint)
        self._constraints.remove(constraint)
    self.canvas.remove(self)
def show_content(self, with_content=False):
    """Check whether this view is a library state with the show_content flag set.

    :param bool with_content: additionally require the state copy to be a container state
    :return: True if the library content should be shown
    """
    # only library states with the flag enabled can show content at all
    if not isinstance(self.model, LibraryStateModel) or not self.model.show_content():
        return False
    return not with_content or isinstance(self.model.state_copy, ContainerStateModel)
40,398 | def _calculate_port_pos_on_line ( self , port_num , side_length , port_width = None ) : if port_width is None : port_width = 2 * self . border_width border_size = self . border_width pos = 0.5 * border_size + port_num * port_width outermost_pos = max ( side_length / 2. , side_length - 0.5 * border_size - port_width ) pos = min ( pos , outermost_pos ) return pos | Calculate the position of a port on a line |
def load_objects_from_json(path, as_dict=False):
    """Load a dictionary from a json file.

    :param str path: path of the json file to read
    :param bool as_dict: if True, use the plain json decoder instead of JSONObjectDecoder
    :return: the loaded content
    """
    # a context manager guarantees the file is closed even if decoding raises
    with open(path, 'r') as f:
        if as_dict:
            result = json.load(f)
        else:
            result = json.load(f, cls=JSONObjectDecoder, substitute_modules=substitute_modules)
    return result
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.